Updated the brainmap

This commit is contained in:
Dani 2025-04-30 22:32:18 -04:00
parent 6067337dc8
commit c082a9882b
5 changed files with 128 additions and 39 deletions

View File

@@ -3,19 +3,23 @@ import os
import time import time
from typing import List from typing import List
CONTEXT_FILE = "data/memory/context.json" CONTEXT_PATH = "data/memory/context.json"
MAX_MEMORY = 100 MAX_MEMORY = 100
def load_context() -> List[dict]: def load_context():
if os.path.exists(CONTEXT_FILE): if not os.path.exists(CONTEXT_PATH):
with open(CONTEXT_FILE, "r", encoding="utf-8") as f: return []
try:
with open(CONTEXT_PATH, "r", encoding="utf-8") as f:
return json.load(f) return json.load(f)
return [] except json.JSONDecodeError:
print("[Context] Corrupted context.json. Returning empty list.")
return []
def save_context(mem: List[dict]): def save_context(mem: List[dict]):
with open(CONTEXT_FILE, "w", encoding="utf-8") as f: with open(CONTEXT_PATH, "w", encoding="utf-8") as f:
json.dump(mem[-MAX_MEMORY:], f, indent=2) json.dump(mem[-MAX_MEMORY:], f, indent=2)

View File

@@ -9,13 +9,12 @@ from model.journal import read_journal_entries
from model.dreams import load_dreams from model.dreams import load_dreams
from model.tokenizer import Tokenizer from model.tokenizer import Tokenizer
from model.abstraction import cluster_vocab from model.abstraction import cluster_vocab
from model.dreams import load_dreams
from model.scheduler import get_time_until_next_action, get_next_action_label from model.scheduler import get_time_until_next_action, get_next_action_label
from context.context import load_context from context.context import load_context
from reader.reader import get_books, load_progress from reader.reader import get_books, load_progress
app = Flask(__name__) app = Flask(__name__, static_folder="static")
tokenizer = Tokenizer() tokenizer = Tokenizer()
next_cycle_time = time.time() + 900 # Example: 15 minutes from now next_cycle_time = time.time() + 900 # Example: 15 minutes from now
@@ -128,7 +127,7 @@ def brainmap():
print(f"[Dashboard] Failed to load brainmap cache: {e}") print(f"[Dashboard] Failed to load brainmap cache: {e}")
nodes, links = [], [] nodes, links = [], []
return render_template("brainmap.html", nodes=json.dumps(nodes), links=json.dumps(links)) return render_template("brainmap.html", nodes=nodes, links=links)
@app.route("/journal") @app.route("/journal")

View File

@@ -2,16 +2,14 @@
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta http-equiv="refresh" content="60">
<title>Ruby's Brain Map</title> <title>Ruby's Brain Map</title>
<script src="https://cdn.jsdelivr.net/npm/force-graph"></script> <script src="https://cdn.jsdelivr.net/npm/force-graph"></script>
<style> <style>
body { body {
margin: 0;
background-color: #121212; background-color: #121212;
color: #e0e0e0; color: #e0e0e0;
margin: 0; font-family: 'Segoe UI', Tahoma, sans-serif;
padding: 0;
overflow: hidden;
} }
#graph { #graph {
width: 100vw; width: 100vw;
@@ -21,21 +19,37 @@
position: absolute; position: absolute;
top: 10px; top: 10px;
left: 10px; left: 10px;
background-color: #1e1e1e; background: rgba(0, 0, 0, 0.7);
padding: 10px; padding: 8px 14px;
border-radius: 8px;
z-index: 1000; z-index: 1000;
} }
.nav a { .nav a {
color: #e0e0e0; color: #e0e0e0;
margin-right: 20px; margin-right: 15px;
text-decoration: none; text-decoration: none;
font-weight: bold;
}
#searchBox {
position: absolute;
top: 10px;
right: 10px;
z-index: 1000;
padding: 6px 10px;
font-size: 14px;
border-radius: 6px;
border: none;
outline: none;
background-color: #1e1e1e;
color: #fff;
width: 200px;
} }
</style> </style>
</head> </head>
<body> <body>
<div class="nav"> <div class="nav">
<a href="/">🏠 Home</a> <a href="/">📊 Dashboard</a>
<a href="/journal">📓 Journal</a> <a href="/journal">📓 Journal</a>
<a href="/concepts">🧠 Concepts</a> <a href="/concepts">🧠 Concepts</a>
<a href="/brainmap">🕸️ Brain Map</a> <a href="/brainmap">🕸️ Brain Map</a>
@@ -44,21 +58,55 @@
<a href="/health">❤️ Health</a> <a href="/health">❤️ Health</a>
</div> </div>
<input type="text" id="searchBox" placeholder="Search word...">
<div id="graph"></div> <div id="graph"></div>
<script> <script>
const graphData = { const graphData = {
nodes: {{ nodes | safe }}, nodes: {{ nodes | safe }},
links: {{ links | safe }} links: {{ links | safe }}
}; };
const Graph = ForceGraph()(document.getElementById('graph')) const degreeMap = {};
.graphData(graphData) graphData.links.forEach(link => {
.nodeAutoColorBy('id') degreeMap[link.source] = (degreeMap[link.source] || 0) + 1;
.nodeLabel('id') degreeMap[link.target] = (degreeMap[link.target] || 0) + 1;
.linkWidth(link => Math.log(link.value + 1)) });
.linkColor(() => 'rgba(255,255,255,0.2)')
.backgroundColor('#121212'); const Graph = ForceGraph()(document.getElementById("graph"))
.graphData(graphData)
.nodeId("id")
.nodeLabel(node => `${node.id} (${degreeMap[node.id] || 0} links)`)
.nodeAutoColorBy("group")
.nodeVal(node => Math.max(2, degreeMap[node.id] || 1))
.linkWidth(link => Math.log(link.value + 1))
.linkColor(() => "rgba(255,255,255,0.2)")
.backgroundColor("#121212");
// Search filter
document.getElementById("searchBox").addEventListener("input", e => {
const search = e.target.value.toLowerCase();
Graph.nodeVisibility(node => node.id.toLowerCase().includes(search));
});
// Click-to-highlight
Graph.onNodeClick(node => {
const neighbors = new Set();
graphData.links.forEach(link => {
if (link.source === node.id) neighbors.add(link.target);
else if (link.target === node.id) neighbors.add(link.source);
});
Graph.nodeColor(n =>
n.id === node.id ? "#ffff00" :
neighbors.has(n.id) ? "#ffa500" : "#333"
);
Graph.linkColor(l =>
l.source === node.id || l.target === node.id ? "#ffffff" : "rgba(255,255,255,0.05)"
);
});
</script> </script>
</body> </body>

View File

@@ -1,6 +1,9 @@
import re import re
import json import json
import os import os
import shutil
from sklearn.cluster import KMeans
import numpy as np
from utils.unicleaner import clean_unicode from utils.unicleaner import clean_unicode
BRAINMAP_PATH = "data/memory/brainmap.json" # actual connection data BRAINMAP_PATH = "data/memory/brainmap.json" # actual connection data
@@ -99,7 +102,12 @@ def get_brainmap():
return brainmap return brainmap
def refresh_brainmap_cache(min_weight=5, max_nodes=300): def refresh_brainmap_cache(min_weight=2, max_nodes=300):
"""
Generates a clustered brainmap view and writes to:
- data/memory/brainmap_cache.json (master copy)
- static/brainmap.json (served to frontend)
"""
map_data = get_brainmap() map_data = get_brainmap()
links = [] links = []
seen_words = set() seen_words = set()
@@ -108,7 +116,6 @@ def refresh_brainmap_cache(min_weight=5, max_nodes=300):
if not isinstance(connections, dict): if not isinstance(connections, dict):
print(f"[Brainmap] Skipping corrupted entry: {word} => {type(connections)}") print(f"[Brainmap] Skipping corrupted entry: {word} => {type(connections)}")
continue continue
for linked_word, weight in connections.items(): for linked_word, weight in connections.items():
if weight >= min_weight: if weight >= min_weight:
links.append({ links.append({
@@ -119,13 +126,44 @@
seen_words.add(word) seen_words.add(word)
seen_words.add(linked_word) seen_words.add(linked_word)
nodes = [{"id": word} for word in seen_words] node_set = {link["source"] for link in links} | {link["target"] for link in links}
nodes = sorted(node_set)
if len(nodes) > max_nodes: if len(nodes) > max_nodes:
nodes = nodes[:max_nodes] nodes = nodes[:max_nodes]
node_set = {n["id"] for n in nodes} node_set = set(nodes)
links = [l for l in links if l["source"] in node_set and l["target"] in node_set] links = [l for l in links if l["source"] in node_set and l["target"] in node_set]
index_lookup = {word: i for i, word in enumerate(nodes)}
word_vectors = []
for word in nodes:
vec = np.zeros(len(nodes), dtype=np.float32)
connections = map_data.get(word, {})
for other, strength in connections.items():
if other in index_lookup:
vec[index_lookup[other]] = strength
word_vectors.append(vec)
if len(word_vectors) < 2:
print("[Brainmap] Not enough nodes to cluster.")
return
kmeans = KMeans(n_clusters=min(8, len(nodes)), n_init="auto")
labels = kmeans.fit_predict(word_vectors)
clustered_nodes = [{"id": word, "group": int(label)} for word, label in zip(nodes, labels)]
output = {
"nodes": clustered_nodes,
"links": links
}
os.makedirs("data/memory", exist_ok=True) os.makedirs("data/memory", exist_ok=True)
with open(BRAINMAP_CACHE_PATH, "w", encoding="utf-8") as f: os.makedirs("static", exist_ok=True)
json.dump({"nodes": nodes, "links": links}, f, indent=2)
cache_path = "data/memory/brainmap_cache.json"
static_path = "static/brainmap.json"
with open(cache_path, "w", encoding="utf-8") as f:
json.dump(output, f, indent=2)
shutil.copyfile(cache_path, static_path)
# print(f"[Brainmap] Cache written to {cache_path} and copied to {static_path}")

View File

@@ -4,7 +4,7 @@ import os
import time import time
from model.tokenizer import VOCAB_PATH from model.tokenizer import VOCAB_PATH
from model.dreams import DREAM_LOG_PATH from model.dreams import DREAM_LOG_PATH
from context.context import CONTEXT_FILE from context.context import CONTEXT_PATH
from model.brainmap import load_brainmap, save_brainmap from model.brainmap import load_brainmap, save_brainmap
CLEANUP_LOG = "data/logs/cleanup.log" CLEANUP_LOG = "data/logs/cleanup.log"
@@ -59,15 +59,15 @@ def cleanup_dreams():
def cleanup_context(): def cleanup_context():
if not os.path.exists(CONTEXT_FILE): if not os.path.exists(CONTEXT_PATH):
return return
with open(CONTEXT_FILE, "r", encoding="utf-8") as f: with open(CONTEXT_PATH, "r", encoding="utf-8") as f:
context = json.load(f) context = json.load(f)
filtered = context[-100:] filtered = context[-100:]
with open(CONTEXT_FILE, "w", encoding="utf-8") as f: with open(CONTEXT_PATH, "w", encoding="utf-8") as f:
json.dump(filtered, f, indent=2) json.dump(filtered, f, indent=2)
if len(filtered) < len(context): if len(filtered) < len(context):