from flask import Flask, render_template
from model.brainmap import get_brainmap
from model.journal import read_journal_entries
from model.memory import load_dreams
from model.tokenizer import Tokenizer
from model.abstraction import cluster_vocab
from context.context import load_context
import json
import os
import time

app = Flask(__name__)
tokenizer = Tokenizer()
next_cycle_time = time.time() + 900  # Example: next cycle 15 minutes from now


def load_loss_data():
    """Return the last 50 loss values from the training log (one 'step,loss' pair per line)."""
    path = "data/logs/loss.log"
    if not os.path.exists(path):
        return []
    with open(path, "r", encoding="utf-8") as f:
        lines = [line.strip() for line in f if line.strip()]  # skip blank lines
    return [float(line.split(",")[1]) for line in lines[-50:]]


def update_next_cycle(seconds):
    """Reset the countdown shown on the dashboard to `seconds` from now."""
    global next_cycle_time
    next_cycle_time = time.time() + seconds


@app.route("/")
def index():
    """Main dashboard: vocab size, recent dreams, memory size, loss curve, and cycle countdown."""
    dreams = load_dreams()
    top_dreams = dreams[:5]
    memory_size = len(load_context())
    loss_data = load_loss_data()
    remaining = max(0, int(next_cycle_time - time.time()))
    return render_template(
        "index.html",
        vocab_size=len(tokenizer.vocab),
        top_dreams=top_dreams,
        memory_size=memory_size,
        loss_data=loss_data,
        next_cycle=remaining,
    )


@app.route("/brainmap")
def brainmap():
    """Render the brain map as a node/link graph, serialized to JSON for the template's JS."""
    map_data = get_brainmap()
    nodes = []
    links = []
    for word, connections in map_data.items():
        nodes.append({"id": word})
        for linked_word, weight in connections.items():
            links.append({
                "source": word,
                "target": linked_word,
                "value": weight,
            })
    return render_template(
        "brainmap.html",
        nodes=json.dumps(nodes),
        links=json.dumps(links),
    )


@app.route("/journal")
def journal():
    """Show all journal entries."""
    entries = read_journal_entries()
    return render_template("journal.html", entries=entries)


@app.route("/concepts")
def concepts():
    """Show the vocabulary grouped into concept clusters."""
    clusters = cluster_vocab(n_clusters=10)
    return render_template("concepts.html", clusters=clusters)


def run_dashboard():
    """Start the Flask dashboard; the reloader is disabled so no second process is spawned."""
    app.run(host="0.0.0.0", port=5000, debug=False, use_reloader=False)
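

# Minimal entry-point sketch (an addition, not part of the original module): the
# original only defines run_dashboard(), presumably so a separate launcher can
# start the dashboard in a background thread. This guard is an assumption that
# running the file directly for quick local testing is also desired.
if __name__ == "__main__":
    run_dashboard()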