Files
NOVA/nova_chat/api.py
Dani a7f091aa45 Initial commit: NOVA - Neuro-Optimizing Versatile Agent
Complete transformer LLM built from scratch with:

Core Features:
- Full transformer architecture (RoPE, RMSNorm, SwiGLU, KV-cache)
- SentencePiece tokenizer (BPE/Unigram)
- Training pipeline (AMP, gradient checkpointing, DDP)
- Persona system with personality matrix (NO AI disclosure by default)
- Genetic evolution (NOVA-EVO) for hyperparameter optimization
- Legal-only data pipeline with license tracking
- Chat interface (CLI + REST API)
- Conversation memory (SQLite)

Model Sizes:
- 125M, 350M, 1.3B, 3B parameters
- Local-first, runs on CPU or GPU
- Python 3.10.6+, PyTorch 2.0+

Personas:
- girlfriend_gentle (high warmth, high empathy)
- girlfriend_playful (high humor, high playfulness)
- girlfriend_supportive (balanced, default)

Documentation:
- Complete README with quickstart
- Model card with ethical considerations
- Privacy documentation (local-first, zero telemetry)
- Data licenses and attribution
- Contributing guide

Infrastructure:
- GitHub Actions CI/CD
- Comprehensive test suite
- Quickstart script
- CLI tool

License: Apache 2.0

🤖 Generated with Claude Code
https://claude.com/claude-code

Co-Authored-By: Claude <noreply@anthropic.com>
2025-10-12 20:56:37 -04:00

135 lines
3.3 KiB
Python

"""
REST API for NOVA chat
"""
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import Optional, List
import uvicorn
from .agent import ChatAgent
from .persona import Persona, PersonaLoader
app = FastAPI(
title="NOVA Chat API",
description="REST API for NOVA - Neuro-Optimizing Versatile Agent",
version="0.1.0"
)
# Request/Response models
class ChatRequest(BaseModel):
message: str
conversation_id: Optional[str] = None
persona: Optional[str] = None # Persona name or path
class ChatResponse(BaseModel):
response: str
conversation_id: str
class PersonaInfo(BaseModel):
name: str
pronouns: str
description: str
always_disclose: bool
# Global state (in production, use proper state management)
agents = {}
default_persona = PersonaLoader.create_girlfriend_supportive()
@app.get("/")
async def root():
"""API info"""
return {
"name": "NOVA Chat API",
"version": "0.1.0",
"description": "Local-first transformer LLM with persona support"
}
@app.post("/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
"""
Send a message and get response
Args:
request: Chat request with message and optional conversation ID
Returns:
Chat response with NOVA's reply
"""
# Get or create agent for conversation
conv_id = request.conversation_id or "default"
if conv_id not in agents:
# TODO: Load actual model and tokenizer
# For now, this is a placeholder
raise HTTPException(
status_code=501,
detail="Chat requires trained model. Please train a model first."
)
agent = agents[conv_id]
# Get response
response = agent.chat(request.message)
return ChatResponse(
response=response,
conversation_id=conv_id
)
@app.get("/personas", response_model=List[str])
async def list_personas():
"""List available personas"""
return [
"girlfriend_gentle",
"girlfriend_playful",
"girlfriend_supportive",
]
@app.get("/personas/{persona_name}", response_model=PersonaInfo)
async def get_persona(persona_name: str):
"""Get persona details"""
# Load persona
if persona_name == "girlfriend_gentle":
persona = PersonaLoader.create_girlfriend_gentle()
elif persona_name == "girlfriend_playful":
persona = PersonaLoader.create_girlfriend_playful()
elif persona_name == "girlfriend_supportive":
persona = PersonaLoader.create_girlfriend_supportive()
else:
raise HTTPException(status_code=404, detail="Persona not found")
return PersonaInfo(
name=persona.name,
pronouns=persona.pronouns,
description=persona.description,
always_disclose=persona.always_disclose
)
@app.delete("/conversations/{conversation_id}")
async def delete_conversation(conversation_id: str):
"""Delete a conversation"""
if conversation_id in agents:
del agents[conversation_id]
return {"status": "deleted"}
raise HTTPException(status_code=404, detail="Conversation not found")
def serve(host: str = "0.0.0.0", port: int = 8000):
"""Start the API server"""
uvicorn.run(app, host=host, port=port)
if __name__ == "__main__":
serve()