Fixed an issue with the dream mechanic being broken
parent ed288d094b
commit c174c3159e
trainer.py (21 additions, 0 deletions)
@@ -1,6 +1,7 @@
 import torch
 import torch.nn.functional as F
 from datetime import datetime
+import os
 from model import MiniGPT
 
 
@@ -114,6 +115,26 @@ class RubyTrainer:
         new_tokens = input_ids.squeeze(0).tolist()[1:]
         return self.tokenizer.detokenize([t for t in new_tokens if t != self.tokenizer.vocab["<END>"]])
 
+    def dream(self, log_path="logs/messages.log", max_lines=50):
+        print("[DREAM] Ruby is dreaming...")
+
+        if not os.path.exists(log_path):
+            print("[DREAM] No memory to dream from.")
+            return
+
+        with open(log_path, "r", encoding="utf-8") as f:
+            lines = f.readlines()[-max_lines:]
+
+        learned = 0
+        for line in lines:
+            parts = line.strip().split("|")
+            if len(parts) >= 3:
+                text = parts[2].strip()
+                self.train_on_tokens_from_text(text)
+                learned += 1
+
+        print(f"[DREAM] Dream complete. Trained on {learned} memories.")
+
     def daydream(self, rounds=5, log_output="logs/dreams.log", say_thought=False):
         print("[DAYDREAM] Ruby is imagining new thoughts...")
         thoughts = []
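For context, a minimal usage sketch of the new dream() pass. The pipe-delimited log layout (roughly "timestamp | speaker | text", with the message text in the third field) and the bare RubyTrainer() construction are assumptions inferred from the parts[2] access, not something this commit defines:

# Hypothetical sketch, not part of the commit: assumes RubyTrainer() takes no
# constructor arguments and that logs/messages.log stores one exchange per line
# as "timestamp | speaker | text".
from trainer import RubyTrainer

trainer = RubyTrainer()
# Replays the last 50 log lines; only the third pipe-separated field (the
# message text) is handed to train_on_tokens_from_text().
trainer.dream(log_path="logs/messages.log", max_lines=50)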