# Ruby/dashboard/dashboard.py
# Last modified: 2025-04-27 11:11:39 -04:00 — 133 lines, 3.8 KiB, Python
import datetime
import json
import logging
import os
import time

from flask import Flask, render_template

from context.context import load_context
from model.abstraction import cluster_vocab
from model.brainmap import get_brainmap
from model.journal import read_journal_entries
from model.memory import load_dreams
from model.scheduler import get_time_until_next_action, get_next_action_label
from model.tokenizer import Tokenizer
app = Flask(__name__)
tokenizer = Tokenizer()
next_cycle_time = time.time() + 900 # Example: 15 minutes from now
def load_loss_data():
    """Return the most recent training-loss values from the loss log.

    Reads ``data/logs/loss.log`` (CSV lines of ``timestamp,loss``) and
    returns the loss column of the last 50 well-formed entries as floats.

    Returns:
        list[float]: up to 50 loss values, oldest first; ``[]`` when the
        log file does not exist.
    """
    path = "data/logs/loss.log"
    if not os.path.exists(path):
        return []
    losses = []
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            parts = line.strip().split(",")
            # A blank or truncated line has no loss column; skip it instead
            # of raising IndexError and killing the whole dashboard route.
            if len(parts) < 2:
                continue
            try:
                losses.append(float(parts[1]))
            except ValueError:
                continue  # non-numeric loss field; skip rather than crash
    return losses[-50:]
def load_vocab_growth():
    """Return the vocabulary-growth history as ``(time_str, size)`` tuples.

    Reads ``data/logs/vocab_growth.log`` (CSV lines of
    ``unix_timestamp,vocab_size``), converting each timestamp to a
    local-time ``HH:MM:SS`` string.

    Returns:
        list[tuple[str, int]]: one entry per well-formed log line, in file
        order; ``[]`` when the log file does not exist.
    """
    path = "data/logs/vocab_growth.log"
    if not os.path.exists(path):
        return []
    data = []
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            # A blank or garbled line raises ValueError (bad unpack, bad
            # float/int); skip it rather than abort the whole history.
            try:
                timestamp, vocab_size = line.strip().split(",")
                readable_time = datetime.datetime.fromtimestamp(
                    float(timestamp)
                ).strftime("%H:%M:%S")
                data.append((readable_time, int(vocab_size)))
            except ValueError:
                continue
    return data
def update_next_cycle(seconds):
    """Schedule the next cycle *seconds* seconds from the current time.

    Mutates the module-level ``next_cycle_time`` timestamp in place.
    """
    global next_cycle_time
    next_cycle_time = seconds + time.time()
@app.route("/")
def index():
    """Render the main dashboard: vocab size, top dreams, memory, loss, schedule."""
    top_dreams = load_dreams()[:5]
    memory_size = len(load_context())
    loss_data = load_loss_data()
    next_cycle = get_time_until_next_action()
    next_action_label = get_next_action_label()
    return render_template(
        "index.html",
        vocab_size=len(tokenizer.vocab),
        top_dreams=top_dreams,
        memory_size=memory_size,
        loss_data=loss_data,
        next_cycle=next_cycle,
        next_action_label=next_action_label,
    )
@app.route("/growth")
def growth():
    """Render growth statistics: vocab, brainmap and memory sizes plus history."""
    stats = {
        "vocab_size": len(tokenizer.vocab),
        "brainmap_size": len(get_brainmap()),
        "memory_size": len(load_context()),
        "vocab_growth": load_vocab_growth(),
    }
    return render_template("growth.html", **stats)
@app.route("/brainmap")
def brainmap():
    """Render the word-association graph as JSON nodes/links for the template.

    Links with weight below the threshold are dropped, and only words that
    participate in at least one kept link become nodes.
    """
    map_data = get_brainmap()
    threshold = 2  # only show links seen at least 2 times
    links = []
    connected = set()
    for source, neighbours in map_data.items():
        for target, weight in neighbours.items():
            if weight < threshold:
                continue
            links.append({"source": source, "target": target, "value": weight})
            connected.update((source, target))
    nodes = [{"id": word} for word in connected]
    return render_template(
        "brainmap.html", nodes=json.dumps(nodes), links=json.dumps(links)
    )
@app.route("/journal")
def journal():
    """Render the journal page with every recorded journal entry."""
    return render_template("journal.html", entries=read_journal_entries())
@app.route("/concepts")
def concepts():
    """Render vocabulary clusters grouped into ten concept buckets."""
    return render_template("concepts.html", clusters=cluster_vocab(n_clusters=10))
@app.route("/dreams")
def dreams():
    """Render the 20 most recent dreams, newest first."""
    history = load_dreams()
    recent_first = list(reversed(history[-20:]))
    return render_template("dreams.html", dreams=recent_first)
def run_dashboard():
    """Start the Flask dashboard on 0.0.0.0:5000, silencing werkzeug request logs."""
    # Drop werkzeug's per-request INFO lines; only errors reach the console.
    logging.getLogger("werkzeug").setLevel(logging.ERROR)
    app.run(host="0.0.0.0", port=5000, debug=False, use_reloader=False)