updated the path changes made

Dani 2025-05-02 18:49:54 -04:00
parent aff53bee7e
commit 97214980b3
13 changed files with 44 additions and 20 deletions
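
In short, every hard-coded path moves out of the old data/ tree: data/memory/* becomes memory/*, data/logs/* becomes content/REM/*, and data/books becomes content/books. A minimal startup sketch under that assumption follows; the ensure_runtime_dirs helper is illustrative only and is not part of this commit, it just lists the directories the relocated constants now expect to exist:

import os

# New locations introduced by this commit (old locations noted in comments).
RUNTIME_DIRS = [
    "memory",          # was data/memory
    "content/REM",     # was data/logs
    "content/books",   # was data/books
]

def ensure_runtime_dirs():
    """Create the relocated directories so writes to the new paths do not fail."""
    for d in RUNTIME_DIRS:
        os.makedirs(d, exist_ok=True)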

.gitignore (vendored, 5 changes)
View File

@@ -169,6 +169,5 @@ cython_debug/
 #.idea/
 .vscode/launch.json
-/data/books/*
-/data/memory/*
-/data/logs/*
+/content/*
+/memory/*

View File

@@ -5,7 +5,7 @@ from brain.brain_architecture import TinyTransformer
 from ego.tokenizer import Tokenizer
 DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-MODEL_SAVE_PATH = "data/memory/model.pt"
+MODEL_SAVE_PATH = "memory/model.pt"
 tokenizer = Tokenizer()
 VOCAB_SIZE = len(tokenizer.vocab) + 10 # with a small buffer

View File

@@ -5,8 +5,8 @@ from sklearn.cluster import KMeans
 import numpy as np
 from utils.unicleaner import clean_unicode
-BRAINMAP_PATH = "data/memory/brainmap.json" # actual connection data
-BRAINMAP_CACHE_PATH = "data/memory/brainmap_cache.json" # for dashboard rendering only
+BRAINMAP_PATH = "memory/brainmap.json" # actual connection data
+BRAINMAP_CACHE_PATH = "memory/brainmap_cache.json" # for dashboard rendering only
 brainmap = {}
 MAX_CONNECTIONS = 50 # Max neighbors to keep per word

View File

@@ -21,7 +21,7 @@ next_cycle_time = time.time() + 900 # Example: 15 minutes from now
 def load_loss_data():
-    path = "data/logs/loss.log"
+    path = "content/REM/loss.log"
     if not os.path.exists(path):
         return []
     with open(path, "r", encoding="utf-8") as f:
@@ -30,7 +30,7 @@ def load_loss_data():
 def load_vocab_growth():
-    path = "data/logs/vocab_growth.log"
+    path = "content/REM/vocab_growth.log"
     if not os.path.exists(path):
         return []
     with open(path, "r", encoding="utf-8") as f:
@@ -45,7 +45,7 @@ def load_vocab_growth():
 def get_vocab_size():
-    path = "data/memory/vocab.json"
+    path = "memory/vocab.json"
     if not os.path.exists(path):
         return 0
     with open(path, "r", encoding="utf-8") as f:
@@ -119,7 +119,7 @@ def growth():
 @app.route("/brainmap")
 def brainmap():
     try:
-        with open("data/memory/brainmap_cache.json", "r", encoding="utf-8") as f:
+        with open("memory/brainmap_cache.json", "r", encoding="utf-8") as f:
             cached = json.load(f)
             nodes = cached.get("nodes", [])
             links = cached.get("links", [])

View File

@@ -3,7 +3,7 @@ import os
 import time
 from typing import List
-CONTEXT_PATH = "data/memory/context.json"
+CONTEXT_PATH = "memory/context.json"
 MAX_MEMORY = 100

View File

@@ -1,7 +1,7 @@
 import json
 import os
-DREAM_LOG_PATH = "data/memory/dreams.json"
+DREAM_LOG_PATH = "memory/dreams.json"
 def load_dreams():

View File

@@ -2,7 +2,7 @@ import os
 import json
 import random
-JOURNAL_PATH = "data/memory/journal.json"
+JOURNAL_PATH = "memory/journal.json"
 def record_to_journal(entry: dict):

View File

@@ -3,7 +3,7 @@ import os
 import json
 from utils.unicleaner import clean_unicode
-VOCAB_PATH = "data/memory/vocab.json"
+VOCAB_PATH = "memory/vocab.json"
 def load_vocab():

View File

@@ -6,8 +6,8 @@ from brain.brainmap import add_to_brainmap, refresh_brainmap_cache
 from ego.journal import record_to_journal
 from ego.context import add_to_context, get_recent_context
-LOSS_FILE = "data/logs/loss.log"
-VOCAB_GROWTH_FILE = "data/logs/vocab_growth.log"
+LOSS_FILE = "content/REM/loss.log"
+VOCAB_GROWTH_FILE = "content/REM/vocab_growth.log"
 def log_vocab_growth():

id/mirror.py (new file, 25 additions)
View File

@@ -0,0 +1,25 @@
+import json
+from datetime import datetime
+from pathlib import Path
+
+MIRROR_LOG = Path("content/REM/mirror_log.jsonl")
+
+
+def reflect(event_type: str, context: dict):
+    """Logs introspective meta-data about Ruby's internal state/actions."""
+    entry = {
+        "timestamp": datetime.utcnow().isoformat(),
+        "event": event_type,
+        "context": context
+    }
+    with open(MIRROR_LOG, "a", encoding="utf-8") as f:
+        f.write(json.dumps(entry) + "\n")
+
+
+def load_reflections(limit: int = 100):
+    """Loads the last `limit` reflections."""
+    if not MIRROR_LOG.exists():
+        return []
+    with open(MIRROR_LOG, "r", encoding="utf-8") as f:
+        lines = f.readlines()[-limit:]
+    return [json.loads(line) for line in lines]
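
For reference, a possible call pattern for the new module. This assumes id/ is importable as a package; the event name and fields below are illustrative, not taken from this commit. Note that mirror.py appends to content/REM/mirror_log.jsonl without creating the directory, so it must already exist.

from id.mirror import reflect, load_reflections

# Record one introspective event as a JSON line.
reflect("path_migration", {"old": "data/memory", "new": "memory"})

# Read back the ten most recent reflections.
for entry in load_reflections(limit=10):
    print(entry["timestamp"], entry["event"])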

View File

@@ -7,8 +7,8 @@ from ego.trainer import train_on_message
 from utils.scheduler import set_next_action
 from reader.filter import is_valid_line
-BOOK_DIR = "data/books"
-PROGRESS_FILE = "data/memory/book_progress.json"
+BOOK_DIR = "content/books"
+PROGRESS_FILE = "memory/book_progress.json"
 READ_DELAY = 0.2 # seconds between paragraphs
 PARAGRAPH_MIN_LENGTH = 20
 END_PUNCTUATION = {".", "!", "?"}

View File

@@ -1,7 +1,7 @@
 import json
 import os
-BRAINMAP_PATH = "data/memory/brainmap.json"
+BRAINMAP_PATH = "memory/brainmap.json"
 def analyze_brainmap(path=BRAINMAP_PATH):

View File

@@ -7,7 +7,7 @@ from ego.dreams import DREAM_LOG_PATH
 from ego.context import CONTEXT_PATH
 from brain.brainmap import load_brainmap, save_brainmap
-CLEANUP_LOG = "data/logs/cleanup.log"
+CLEANUP_LOG = "content/REM/cleanup.log"
 def log(msg):