fixed the dashboard: pick the current book from saved progress, guard against missing book files, and render the brainmap from a precomputed cache
commit 6067337dc8
parent a8b3129806
@@ -60,19 +60,34 @@ def update_next_cycle(seconds):
 
 
 def get_status_summary():
-    progress = load_progress()
-    books = get_books()
-    current_book = books[0] if books else None
-    current_line = progress.get(current_book, 0)
+    progress_data = load_progress()
+    progress = progress_data.get("progress", {})
+    current_book = None
+    current_line = 0
 
+    # Find the book with the highest progress (e.g., currently being read)
+    if progress:
+        completed = set(progress_data.get("completed", []))
+        in_progress = {k: v for k, v in progress.items() if k not in completed}
+        if in_progress:
+            current_book = max(in_progress.items(), key=lambda x: x[1])[0]
+            current_line = in_progress[current_book]
+            current_line = progress[current_book]
 
     total_lines = 1
     if current_book:
-        with open(f"books/{current_book}", "r", encoding="utf-8") as f:
-            total_lines = len(f.readlines())
+        book_path = os.path.join("data", "books", current_book)
+        if os.path.exists(book_path):
+            with open(book_path, "r", encoding="utf-8") as f:
+                total_lines = len(f.readlines())
+        else:
+            current_book = None
+            current_line = 0
 
     return {
         "current_book": current_book,
         "current_line": current_line,
-        "percent_done": round((current_line / total_lines) * 100, 2),
+        "percent_done": round((current_line / total_lines) * 100, 2) if total_lines > 0 else 0,
         "memory_size": len(load_context()),
         "vocab_size": get_vocab_size(),
         "brainmap_size": len(get_brainmap()),
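
For reference, a standalone sketch of the book-selection logic the hunk above introduces, run against toy data (the file names and line counts are made up for illustration):

    # Toy data, for illustration only: two books, one already completed.
    progress_data = {
        "progress": {"alice.txt": 120, "dracula.txt": 4500},
        "completed": ["dracula.txt"],
    }
    progress = progress_data.get("progress", {})
    completed = set(progress_data.get("completed", []))
    in_progress = {k: v for k, v in progress.items() if k not in completed}
    if in_progress:
        # The in-progress book with the most lines read wins.
        current_book = max(in_progress.items(), key=lambda x: x[1])[0]
        print(current_book, in_progress[current_book])  # alice.txt 120
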
@@ -104,26 +119,14 @@ def growth():
 
 @app.route("/brainmap")
 def brainmap():
-    map_data = get_brainmap()
-
-    nodes = []
-    links = []
-    MIN_LINK_WEIGHT = 2  # only show links seen at least 2 times
-    seen_words = set()
-
-    for word, connections in map_data.items():
-        for linked_word, weight in connections.items():
-            if weight >= MIN_LINK_WEIGHT:
-                links.append({
-                    "source": word,
-                    "target": linked_word,
-                    "value": weight
-                })
-            seen_words.add(word)
-            seen_words.add(linked_word)
-
-    for word in seen_words:
-        nodes.append({"id": word})
+    try:
+        with open("data/memory/brainmap_cache.json", "r", encoding="utf-8") as f:
+            cached = json.load(f)
+        nodes = cached.get("nodes", [])
+        links = cached.get("links", [])
+    except Exception as e:
+        print(f"[Dashboard] Failed to load brainmap cache: {e}")
+        nodes, links = [], []
 
     return render_template("brainmap.html", nodes=json.dumps(nodes), links=json.dumps(links))
 
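
The route now renders from data/memory/brainmap_cache.json instead of walking the full brainmap on every request. A minimal sketch of the cache shape the route expects, with toy values (matching what refresh_brainmap_cache() below writes):

    import json
    import os

    cache = {
        "nodes": [{"id": "hello"}, {"id": "world"}],
        "links": [{"source": "hello", "target": "world", "value": 7}],
    }
    os.makedirs("data/memory", exist_ok=True)
    with open("data/memory/brainmap_cache.json", "w", encoding="utf-8") as f:
        json.dump(cache, f, indent=2)

Any failure to read the file just logs and renders an empty graph, so a missing or corrupt cache never breaks the page.
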
@@ -3,7 +3,8 @@ import json
 import os
 from utils.unicleaner import clean_unicode
 
-BRAINMAP_PATH = "data/memory/brainmap.json"
+BRAINMAP_PATH = "data/memory/brainmap.json"  # actual connection data
+BRAINMAP_CACHE_PATH = "data/memory/brainmap_cache.json"  # for dashboard rendering only
 brainmap = {}
 
 MAX_CONNECTIONS = 50  # Max neighbors to keep per word
@@ -96,3 +97,35 @@ def prune_brainmap(min_neighbors=2, min_strength=2):
 
 def get_brainmap():
     return brainmap
+
+
+def refresh_brainmap_cache(min_weight=5, max_nodes=300):
+    map_data = get_brainmap()
+    links = []
+    seen_words = set()
+
+    for word, connections in map_data.items():
+        if not isinstance(connections, dict):
+            print(f"[Brainmap] Skipping corrupted entry: {word} => {type(connections)}")
+            continue
+
+        for linked_word, weight in connections.items():
+            if weight >= min_weight:
+                links.append({
+                    "source": word,
+                    "target": linked_word,
+                    "value": weight
+                })
+            seen_words.add(word)
+            seen_words.add(linked_word)
+
+    nodes = [{"id": word} for word in seen_words]
+
+    if len(nodes) > max_nodes:
+        nodes = nodes[:max_nodes]
+        node_set = {n["id"] for n in nodes}
+        links = [l for l in links if l["source"] in node_set and l["target"] in node_set]
+
+    os.makedirs("data/memory", exist_ok=True)
+    with open(BRAINMAP_CACHE_PATH, "w", encoding="utf-8") as f:
+        json.dump({"nodes": nodes, "links": links}, f, indent=2)
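
A quick exercise of the new cache builder, assuming the module lives at model.brainmap (the trainer import in the next hunk confirms that path) and that brainmap maps each word to a {neighbor: weight} dict:

    import model.brainmap as bm

    bm.brainmap.update({
        "hello": {"world": 7, "there": 1},   # "there" falls below min_weight=5
        "broken": ["not", "a", "dict"],      # skipped by the isinstance guard
    })
    bm.refresh_brainmap_cache()
    # The cache now holds one link (hello -> world, value 7). Note that the
    # nodes list still includes "there", since seen_words is updated for every
    # neighbor regardless of weight.
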
@@ -2,7 +2,7 @@ import torch
 import time
 from model.dynamic_expand import expand_model_if_needed, _last_expansion_time
 from model.brain_state import model, tokenizer, DEVICE, loss_fn, optimizer, scheduler
-from model.brainmap import add_to_brainmap
+from model.brainmap import add_to_brainmap, refresh_brainmap_cache
 from model.journal import record_to_journal
 from context.context import add_to_context, get_recent_context
 
@@ -65,6 +65,7 @@ async def train_on_message(text: str, source: str = "user"):
 
     add_to_brainmap(augmented_text.split())
     add_to_context(text, source=source)
+    refresh_brainmap_cache()
 
     record_to_journal({
         "timestamp": time.time(),
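
One thing to note: refresh_brainmap_cache() now rewrites the JSON cache after every trained message. If that ever shows up as a hot spot, a time-based guard is one option; a hypothetical sketch, not part of this commit:

    import time

    from model.brainmap import refresh_brainmap_cache

    _last_refresh = 0.0

    def maybe_refresh_brainmap_cache(interval: float = 30.0):
        # Hypothetical throttle: skip the rewrite unless `interval` seconds
        # have passed since the last refresh.
        global _last_refresh
        if time.time() - _last_refresh >= interval:
            refresh_brainmap_cache()
            _last_refresh = time.time()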