Created the project and added files named after body parts.
parent 58442b86ee
commit a655d73813
95
body.py
Normal file
@@ -0,0 +1,95 @@
import os
import torch
import torch.nn.functional as F
import discord
from discord import Intents

from sensory_system.eyes import Eyes
from nervous_system.cortex import Cortex
from nervous_system.meta_learning import MetaLearner
from memory.hippocampus import Hippocampus
from motor_system.motor_cortex import MotorCortex
from headspace.dashboard import run_dashboard


class Organism:
    def __init__(self) -> None:
        # Device
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        # Sensory organ
        self.eyes = Eyes(books_path="content/books")

        # Memory & learning
        self.memory = Hippocampus()
        self.nervous_system = Cortex(...).to(self.device)
        self.meta = MetaLearner(self.nervous_system)
        self.motor = MotorCortex()

        # (Optional) Pre-load your 21+ books for future pre-training:
        self._load_corpus("content/books")

    def _load_corpus(self, folder_path: str) -> None:
        """Read all text files in content/books into memory for later use."""
        self.corpus = []
        for fn in os.listdir(folder_path):
            if fn.lower().endswith(".txt"):
                with open(os.path.join(folder_path, fn), encoding="utf-8") as f:
                    self.corpus.append(f.read())

    def learn_and_respond(self, message: str) -> str:
        # 1) Perception via eyes
        input_ids = self.eyes.preprocess(message)
        input_tensor = torch.tensor([input_ids], dtype=torch.long, device=self.device)

        # 2) Inference
        logits = self.nervous_system(input_tensor)
        response_ids = logits.argmax(dim=-1)[0].tolist()
        response = self.motor.decode(response_ids)

        # 3) Self-supervised loss (predict input back)
        loss = F.cross_entropy(
            logits.view(-1, logits.size(-1)),
            input_tensor.view(-1),
        )

        # 4) Online meta-learning update
        self.meta.meta_update(loss)

        # 5) Store interaction
        self.memory.store({
            "input_ids": input_tensor.cpu(),
            "output_ids": response_ids,
            "input_text": message,
            "output_text": response
        })

        return response


# ————— Discord setup (all in one “body” file) —————
intents = Intents.default()
intents.message_content = True
client = discord.Client(intents=intents)
organism = Organism()


@client.event
async def on_ready() -> None:
    print(f"Logged in as {client.user}")


@client.event
async def on_message(message: discord.Message) -> None:
    if message.author == client.user or not message.content:
        return
    reply = organism.learn_and_respond(message.content)
    await message.channel.send(reply)


if __name__ == "__main__":
    TOKEN = os.getenv("DISCORD_TOKEN")
    if not TOKEN:
        raise RuntimeError("DISCORD_TOKEN environment variable not set.")
    run_dashboard(organism, host="0.0.0.0", port=5000)
    client.run(TOKEN)
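Note: body.py also imports Hippocampus from memory/hippocampus.py, which is not included in this commit. Below is a minimal sketch consistent with how body.py calls store() and how headspace/dashboard.py reads the .memory attribute; the class and method names come from those call sites, and the deque capacity is an arbitrary illustrative choice.

# memory/hippocampus.py (sketch, not part of this commit)
from collections import deque
from typing import Any, Deque, Dict


class Hippocampus:
    """Append-only store of interaction records, newest last."""

    def __init__(self, capacity: int = 10_000) -> None:
        # dashboard.py calls len() on this attribute and slices it directly
        self.memory: Deque[Dict[str, Any]] = deque(maxlen=capacity)

    def store(self, record: Dict[str, Any]) -> None:
        # body.py passes a dict with input_ids / output_ids / input_text / output_text
        self.memory.append(record)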
40
headspace/dashboard.py
Normal file
@@ -0,0 +1,40 @@
# dashboard.py

import threading
from flask import Flask, render_template_string

app = Flask(__name__)
_organism = None  # will be set by run_dashboard()


@app.route("/")
def index():
    # Basic stats
    total = len(_organism.memory.memory)
    # Show up to 10 most recent text interactions
    recent = list(_organism.memory.memory)[-10:][::-1]
    items = ""
    for i in recent:
        inp = i.get("input_text", "<no input>")
        out = i.get("output_text", "<no output>")
        items += f"<li><b>In:</b> {inp}<br/><b>Out:</b> {out}</li>"

    html = f"""
    <h1>Ruby Dashboard</h1>
    <p><strong>Total stored interactions:</strong> {total}</p>
    <h2>Last {len(recent)} exchanges</h2>
    <ul>{items}</ul>
    """
    return render_template_string(html)


def run_dashboard(organism, host="0.0.0.0", port=5000):
    """Call this to launch the dashboard in a background thread."""
    global _organism
    _organism = organism
    # start Flask in its own thread so it doesn’t block Discord
    thread = threading.Thread(
        target=lambda: app.run(host=host, port=port, debug=False, use_reloader=False),
        daemon=True,
    )
    thread.start()
10
motor_system/motor_cortex.py
Normal file
@@ -0,0 +1,10 @@
from typing import List


class MotorCortex:
    """Converts model outputs (char codes) back into a string."""
    def __init__(self) -> None:
        pass

    def decode(self, output_ids: List[int]) -> str:
        return "".join(chr(i) for i in output_ids)
45
nervous_system/cortex.py
Normal file
@@ -0,0 +1,45 @@
import torch
import torch.nn as nn


class Cortex(nn.Module):
    """The ‘brain’: a char-level Transformer encoder for self-supervised learning."""
    def __init__(
        self,
        embed_dim: int = 256,
        num_heads: int = 4,
        num_layers: int = 4,
        ff_dim: int = 512,
        max_seq_len: int = 1024,
    ) -> None:
        super().__init__()
        self.vocab_size = 256  # ASCII
        self.embed_dim = embed_dim
        self.token_embedding = nn.Embedding(self.vocab_size, embed_dim)
        self.position_embedding = nn.Embedding(max_seq_len, embed_dim)

        encoder_layer = nn.TransformerEncoderLayer(
            d_model=embed_dim,
            nhead=num_heads,
            dim_feedforward=ff_dim,
        )
        self.transformer = nn.TransformerEncoder(
            encoder_layer, num_layers=num_layers
        )
        self.fc_out = nn.Linear(embed_dim, self.vocab_size)
        self.max_seq_len = max_seq_len

    def forward(self, input_ids: torch.Tensor) -> torch.Tensor:
        # input_ids: (batch, seq_len)
        batch_size, seq_len = input_ids.size()
        positions = (
            torch.arange(0, seq_len, device=input_ids.device)
            .unsqueeze(0)
            .expand(batch_size, -1)
        )
        x = self.token_embedding(input_ids) + self.position_embedding(positions)
        x = x.permute(1, 0, 2)  # (seq_len, batch, embed_dim)
        x = self.transformer(x)
        x = x.permute(1, 0, 2)  # back to (batch, seq_len, embed_dim)
        logits = self.fc_out(x)
        return logits
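Note: a quick way to sanity-check Eyes, Cortex, and MotorCortex together without Discord. This is a sketch, not part of the commit; it assumes the modules are importable from the repository root and uses Cortex() with its default hyperparameters in place of the elided Cortex(...) call in body.py.

# smoke_test.py (sketch, not part of this commit)
import torch

from sensory_system.eyes import Eyes
from nervous_system.cortex import Cortex
from motor_system.motor_cortex import MotorCortex

eyes = Eyes()          # no books_path, so no corpus is loaded
cortex = Cortex()      # default hyperparameters
motor = MotorCortex()

ids = eyes.preprocess("hello, ruby")            # list of char codes
x = torch.tensor([ids], dtype=torch.long)       # shape (1, seq_len)
logits = cortex(x)                              # shape (1, seq_len, 256)
print(logits.shape)
print(motor.decode(logits.argmax(dim=-1)[0].tolist()))  # noise until trained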
13
nervous_system/meta_learning.py
Normal file
@@ -0,0 +1,13 @@
import torch


class MetaLearner:
    """Handles online, first-order meta-updates to the cortex."""
    def __init__(self, model: torch.nn.Module, lr: float = 1e-4) -> None:
        self.model = model
        self.meta_optimizer = torch.optim.Adam(model.parameters(), lr=lr)

    def meta_update(self, loss: torch.Tensor) -> None:
        self.meta_optimizer.zero_grad()
        loss.backward(retain_graph=True)
        self.meta_optimizer.step()
24
sensory_system/eyes.py
Normal file
@@ -0,0 +1,24 @@
# sensory_system/eyes.py

import os
from typing import List, Optional


class Eyes:
    """The ‘eyes’ read both live input and, on startup, your book corpus."""
    def __init__(self, books_path: Optional[str] = None) -> None:
        self.corpus: List[str] = []
        if books_path:
            self.load_books(books_path)

    def load_books(self, folder_path: str) -> None:
        """Load all .txt files from folder_path into self.corpus."""
        for fn in os.listdir(folder_path):
            if fn.lower().endswith(".txt"):
                full = os.path.join(folder_path, fn)
                with open(full, encoding="utf-8") as f:
                    self.corpus.append(f.read())

    def preprocess(self, text: str) -> List[int]:
        """Turn an input string into a list of char codes (0–255)."""
        return [ord(c) % 256 for c in text]