added contextual awareness

Dani 2025-04-24 13:28:53 -04:00
parent 699a21ad84
commit 2cf713ca97
4 changed files with 46 additions and 3 deletions

View File

@@ -0,0 +1,34 @@
import json
import os
import time
from typing import List

CONTEXT_FILE = "data/memory/context.json"
MAX_MEMORY = 100


def load_context() -> List[dict]:
    if os.path.exists(CONTEXT_FILE):
        with open(CONTEXT_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    return []


def save_context(mem: List[dict]):
    with open(CONTEXT_FILE, "w", encoding="utf-8") as f:
        json.dump(mem[-MAX_MEMORY:], f, indent=2)


def add_to_context(text: str, source: str = "user"):
    mem = load_context()
    mem.append({
        "timestamp": time.time(),
        "source": source,
        "text": text
    })
    save_context(mem)


def get_recent_context(n: int = 5) -> List[str]:
    mem = load_context()
    return [entry["text"] for entry in mem[-n:]]
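A minimal usage sketch of the new module (assuming the data/memory/ directory already exists, since save_context does not create it; the example texts and source labels are placeholders):

from context.context import add_to_context, get_recent_context

# Record two exchanges, then read the most recent entries back.
add_to_context("what's the weather like?", source="user")
add_to_context("I can't check live weather yet.", source="ruby")

# Returns up to the 3 most recent texts, oldest first; only the last
# MAX_MEMORY (100) entries are ever persisted to context.json.
recent = get_recent_context(3)
print(recent)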

View File

@@ -1,6 +1,7 @@
 from flask import Flask, render_template
 from model.memory import load_dreams
 from model.tokenizer import Tokenizer
+from context.context import load_context
 import threading
@@ -12,9 +13,11 @@ tokenizer = Tokenizer()
 def index():
     dreams = load_dreams()
     top_dreams = dreams[:5]
+    memory_size = len(load_context())
     return render_template("index.html",
                            vocab_size=len(tokenizer.vocab),
-                           top_dreams=top_dreams)
+                           top_dreams=top_dreams,
+                           memory_size=memory_size)


 def run_dashboard():
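One hypothetical way to confirm the new counter reaches the page, assuming the Flask app object is importable (the dashboard module's path is not shown in this diff) and that index() is bound to the root route:

from dashboard import app  # hypothetical import path for the Flask app

with app.test_client() as client:
    html = client.get("/").get_data(as_text=True)
    # The template should now render the count of stored context entries.
    assert "Memory Entries:" in html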

View File

@@ -6,6 +6,7 @@
 <body>
     <h1>Ruby is running</h1>
     <p><strong>Vocabulary Size:</strong> {{ vocab_size }}</p>
+    <p><strong>Memory Entries:</strong> {{ memory_size }}</p>
     <h2>🏆 Highest Scoring Dreams</h2>
     <ul>

View File

@@ -3,6 +3,7 @@ import torch.nn as nn
 import random
 import time
 from model.brain import model, tokenizer, DEVICE, optimizer, loss_fn, daydream
+from context.context import get_recent_context, add_to_context

 _last_thought = time.time()
@@ -10,7 +11,10 @@ _last_thought = time.time()
 def train_on_message(text: str):
     global _last_thought
     model.train()
-    tokens = tokenizer.tokenize(text)
+    context_texts = get_recent_context(3)
+    augmented_text = " ".join(context_texts + [text])
+    tokens = tokenizer.tokenize(augmented_text)
     if len(tokens) < 2:
         return
@@ -24,7 +28,8 @@ def train_on_message(text: str):
     loss.backward()
     optimizer.step()

-    # Idle dreaming every 15 seconds
+    add_to_context(text)
+
     now = time.time()
     if now - _last_thought > 15:
         for _ in range(3):
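To make the effect of the training change concrete, a small standalone sketch of the augmentation step, using plain strings in place of the real context store:

# Stand-in for get_recent_context(3): the three most recent stored texts.
context_texts = ["hello ruby", "how are you", "tell me a dream"]
text = "what did you dream about?"

# Mirrors the new lines in train_on_message: recent context is prepended
# to the incoming message before tokenization, so the model trains on a
# short rolling window of conversation rather than a lone message.
augmented_text = " ".join(context_texts + [text])
print(augmented_text)
# -> hello ruby how are you tell me a dream what did you dream about?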