Initial commit: NOVA - Neuro-Optimizing Versatile Agent

Complete transformer LLM built from scratch with:

Core Features:
- Full transformer architecture (RoPE, RMSNorm, SwiGLU, KV-cache)
- SentencePiece tokenizer (BPE/Unigram)
- Training pipeline (AMP, gradient checkpointing, DDP)
- Persona system with personality matrix (NO AI disclosure by default)
- Genetic evolution (NOVA-EVO) for hyperparameter optimization
- Legal-only data pipeline with license tracking
- Chat interface (CLI + REST API)
- Conversation memory (SQLite)

Model Sizes:
- 125M, 350M, 1.3B, 3B parameters
- Local-first, runs on CPU or GPU
- Python 3.10.6+, PyTorch 2.0+

Personas:
- girlfriend_gentle (high warmth, high empathy)
- girlfriend_playful (high humor, high playfulness)
- girlfriend_supportive (balanced, default)

Documentation:
- Complete README with quickstart
- Model card with ethical considerations
- Privacy documentation (local-first, zero telemetry)
- Data licenses and attribution
- Contributing guide

Infrastructure:
- GitHub Actions CI/CD
- Comprehensive test suite
- Quickstart script
- CLI tool

License: Apache 2.0

🤖 Generated with Claude Code
https://claude.com/claude-code

Co-Authored-By: Claude <noreply@anthropic.com>

commit a7f091aa45
Date: 2025-10-12 20:56:37 -04:00
50 changed files with 6437 additions and 0 deletions

nova_chat/memory.py (new file, 169 lines)

"""
Conversation memory system using SQLite
"""
import sqlite3
from typing import List, Dict, Optional
from pathlib import Path
import json
from datetime import datetime
class ConversationMemory:
"""
Simple conversation memory using SQLite
Stores conversation history for retrieval and continuity
"""
def __init__(self, db_path: Optional[str] = None):
"""
Args:
db_path: Path to SQLite database (default: memory.db in current dir)
"""
self.db_path = db_path or "memory.db"
self._init_db()
def _init_db(self):
"""Initialize database schema"""
Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
# Conversations table
cursor.execute('''
CREATE TABLE IF NOT EXISTS conversations (
conversation_id TEXT PRIMARY KEY,
created_at TEXT,
last_message_at TEXT,
metadata TEXT
)
''')
# Messages table
cursor.execute('''
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
conversation_id TEXT,
role TEXT,
content TEXT,
timestamp TEXT,
FOREIGN KEY (conversation_id) REFERENCES conversations(conversation_id)
)
''')
# Create indexes
cursor.execute('''
CREATE INDEX IF NOT EXISTS idx_messages_conversation
ON messages(conversation_id)
''')
conn.commit()
conn.close()
def add_message(
self,
conversation_id: str,
role: str,
content: str,
metadata: Optional[Dict] = None
):
"""Add a message to conversation history"""
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
timestamp = datetime.now().isoformat()
# Ensure conversation exists
cursor.execute('''
INSERT OR IGNORE INTO conversations (conversation_id, created_at, last_message_at, metadata)
VALUES (?, ?, ?, ?)
''', (conversation_id, timestamp, timestamp, json.dumps(metadata or {})))
# Update last message time
cursor.execute('''
UPDATE conversations
SET last_message_at = ?
WHERE conversation_id = ?
''', (timestamp, conversation_id))
# Add message
cursor.execute('''
INSERT INTO messages (conversation_id, role, content, timestamp)
VALUES (?, ?, ?, ?)
''', (conversation_id, role, content, timestamp))
conn.commit()
conn.close()
def load_conversation(self, conversation_id: str) -> List[Dict[str, str]]:
"""
Load conversation history
Returns:
List of message dicts with 'role' and 'content'
"""
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute('''
SELECT role, content
FROM messages
WHERE conversation_id = ?
ORDER BY id ASC
''', (conversation_id,))
messages = [
{'role': row[0], 'content': row[1]}
for row in cursor.fetchall()
]
conn.close()
return messages
def get_recent_conversations(self, limit: int = 10) -> List[Dict]:
"""Get list of recent conversations"""
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute('''
SELECT conversation_id, created_at, last_message_at
FROM conversations
ORDER BY last_message_at DESC
LIMIT ?
''', (limit,))
conversations = [
{
'conversation_id': row[0],
'created_at': row[1],
'last_message_at': row[2]
}
for row in cursor.fetchall()
]
conn.close()
return conversations
def delete_conversation(self, conversation_id: str):
"""Delete a conversation and all its messages"""
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute('DELETE FROM messages WHERE conversation_id = ?', (conversation_id,))
cursor.execute('DELETE FROM conversations WHERE conversation_id = ?', (conversation_id,))
conn.commit()
conn.close()
def clear_all(self):
"""Clear all conversations (use with caution!)"""
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute('DELETE FROM messages')
cursor.execute('DELETE FROM conversations')
conn.commit()
conn.close()
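
Usage note: the following is a minimal sketch of how a chat frontend might drive ConversationMemory. It assumes the package is importable as nova_chat (adjust the import if the layout differs); the database path, conversation id, and messages are illustrative, not part of the repository.

# Illustrative example (not in the commit): persist and reload a short exchange.
from nova_chat.memory import ConversationMemory

# The parent directory ("data/") is created automatically by _init_db.
memory = ConversationMemory(db_path="data/memory.db")

# Record one user/assistant turn under a caller-chosen conversation id.
memory.add_message("demo-1", role="user", content="Hi, how was your day?")
memory.add_message("demo-1", role="assistant", content="Pretty good! How was yours?")

# Reload the history in the role/content dict format used for chat prompts.
for msg in memory.load_conversation("demo-1"):
    print(f"{msg['role']}: {msg['content']}")

# List recent conversations, then remove the demo one.
print(memory.get_recent_conversations(limit=5))
memory.delete_conversation("demo-1")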