Fix comprehensive system issues and implement proper vector database backend selection
- Fix remaining datetime timezone errors across all database operations
- Implement dynamic vector database backend (Qdrant/ChromaDB) based on install.py configuration
- Add LLM timeout handling with immediate fallback responses for slow self-hosted models
- Use proper install.py configuration (2000 max tokens, 5min timeout, correct LLM endpoint)
- Fix PostgreSQL schema to use timezone-aware columns throughout
- Implement async LLM request handling with background processing
- Add configurable prompt limits and conversation history controls
- Start missing database services (PostgreSQL, Redis) automatically
- Fix environment variable mapping between install.py and application code
- Resolve all timezone-naive vs timezone-aware datetime conflicts

System now properly uses the Qdrant vector database as specified in install.py instead of hardcoded ChromaDB. Characters respond immediately with fallback messages during long LLM processing times. All database timezone errors resolved with proper timestamptz columns.
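None of the hunks below show the backend selection itself, so a minimal sketch of the pattern the commit describes — picking Qdrant or ChromaDB from install.py-written configuration — may help. The VECTOR_DB_BACKEND variable name and the connection defaults are assumptions for illustration, not the project's actual names:

import os

def create_vector_store():
    # Select the vector database backend from install.py-written env vars.
    # VECTOR_DB_BACKEND, QDRANT_HOST/PORT and CHROMA_PATH are illustrative
    # names; the real ones live in install.py.
    backend = os.getenv("VECTOR_DB_BACKEND", "qdrant").lower()
    if backend == "qdrant":
        from qdrant_client import QdrantClient
        return QdrantClient(
            host=os.getenv("QDRANT_HOST", "localhost"),
            port=int(os.getenv("QDRANT_PORT", "6333")),
        )
    elif backend == "chromadb":
        import chromadb
        return chromadb.PersistentClient(path=os.getenv("CHROMA_PATH", "./chroma"))
    raise ValueError(f"Unknown vector DB backend: {backend}")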
@@ -13,8 +13,14 @@ DISCORD_GUILD_ID=110670463348260864
 DISCORD_CHANNEL_ID=312806692717068288

 # LLM Configuration
-LLM_BASE_URL=http://localhost:5005/v1
+LLM_BASE_URL=http://192.168.1.200:5005/v1
 LLM_MODEL=koboldcpp/Broken-Tutu-24B-Transgression-v2.0.i1-Q4_K_M
+LLM_TIMEOUT=300
+LLM_MAX_TOKENS=2000
+LLM_TEMPERATURE=0.8
+LLM_MAX_PROMPT_LENGTH=6000
+LLM_MAX_HISTORY_MESSAGES=5
+LLM_MAX_MEMORIES=5

 # Admin Interface
 ADMIN_PORT=8294
(new file, 33 lines)
@@ -0,0 +1,33 @@
+Collecting qdrant-client
+Downloading qdrant_client-1.14.3-py3-none-any.whl.metadata (10 kB)
+Requirement already satisfied: grpcio>=1.41.0 in /usr/local/lib/python3.11/site-packages (from qdrant-client) (1.73.1)
+Requirement already satisfied: httpx>=0.20.0 in /usr/local/lib/python3.11/site-packages (from httpx[http2]>=0.20.0->qdrant-client) (0.28.1)
+Requirement already satisfied: numpy>=1.21 in /usr/local/lib/python3.11/site-packages (from qdrant-client) (2.3.1)
+Collecting portalocker<3.0.0,>=2.7.0 (from qdrant-client)
+Downloading portalocker-2.10.1-py3-none-any.whl.metadata (8.5 kB)
+Requirement already satisfied: protobuf>=3.20.0 in /usr/local/lib/python3.11/site-packages (from qdrant-client) (5.29.5)
+Requirement already satisfied: pydantic!=2.0.*,!=2.1.*,!=2.2.0,>=1.10.8 in /usr/local/lib/python3.11/site-packages (from qdrant-client) (2.11.7)
+Requirement already satisfied: urllib3<3,>=1.26.14 in /usr/local/lib/python3.11/site-packages (from qdrant-client) (2.5.0)
+Requirement already satisfied: anyio in /usr/local/lib/python3.11/site-packages (from httpx>=0.20.0->httpx[http2]>=0.20.0->qdrant-client) (4.9.0)
+Requirement already satisfied: certifi in /usr/local/lib/python3.11/site-packages (from httpx>=0.20.0->httpx[http2]>=0.20.0->qdrant-client) (2025.6.15)
+Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx>=0.20.0->httpx[http2]>=0.20.0->qdrant-client) (1.0.9)
+Requirement already satisfied: idna in /usr/local/lib/python3.11/site-packages (from httpx>=0.20.0->httpx[http2]>=0.20.0->qdrant-client) (3.10)
+Requirement already satisfied: h11>=0.16 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx>=0.20.0->httpx[http2]>=0.20.0->qdrant-client) (0.16.0)
+Collecting h2<5,>=3 (from httpx[http2]>=0.20.0->qdrant-client)
+Downloading h2-4.2.0-py3-none-any.whl.metadata (5.1 kB)
+Requirement already satisfied: annotated-types>=0.6.0 in /usr/local/lib/python3.11/site-packages (from pydantic!=2.0.*,!=2.1.*,!=2.2.0,>=1.10.8->qdrant-client) (0.7.0)
+Requirement already satisfied: pydantic-core==2.33.2 in /usr/local/lib/python3.11/site-packages (from pydantic!=2.0.*,!=2.1.*,!=2.2.0,>=1.10.8->qdrant-client) (2.33.2)
+Requirement already satisfied: typing-extensions>=4.12.2 in /usr/local/lib/python3.11/site-packages (from pydantic!=2.0.*,!=2.1.*,!=2.2.0,>=1.10.8->qdrant-client) (4.14.1)
+Requirement already satisfied: typing-inspection>=0.4.0 in /usr/local/lib/python3.11/site-packages (from pydantic!=2.0.*,!=2.1.*,!=2.2.0,>=1.10.8->qdrant-client) (0.4.1)
+Collecting hyperframe<7,>=6.1 (from h2<5,>=3->httpx[http2]>=0.20.0->qdrant-client)
+Downloading hyperframe-6.1.0-py3-none-any.whl.metadata (4.3 kB)
+Collecting hpack<5,>=4.1 (from h2<5,>=3->httpx[http2]>=0.20.0->qdrant-client)
+Downloading hpack-4.1.0-py3-none-any.whl.metadata (4.6 kB)
+Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.11/site-packages (from anyio->httpx>=0.20.0->httpx[http2]>=0.20.0->qdrant-client) (1.3.1)
+Downloading qdrant_client-1.14.3-py3-none-any.whl (328 kB)
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 329.0/329.0 kB 7.7 MB/s eta 0:00:00
+Downloading portalocker-2.10.1-py3-none-any.whl (18 kB)
+Downloading h2-4.2.0-py3-none-any.whl (60 kB)
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 61.0/61.0 kB 14.9 MB/s eta 0:00:00
+Downloading hpack-4.1.0-py3-none-any.whl (34 kB)
+Downloading hyperframe-6.1.0-py3-none-any.whl (13 kB)
@@ -37,8 +37,9 @@ RUN npm install

 # Build with increased memory for Node.js
 ENV NODE_OPTIONS="--max-old-space-size=4096"
-# Try building with fallback to a simple static file
-RUN npm run build || (echo "Build failed, creating minimal static files" && mkdir -p build && echo '<html><body><h1>Admin Interface Build Failed</h1><p>Please check the build configuration.</p></body></html>' > build/index.html)
+# Build React app or create fallback
+RUN npm run build || mkdir -p build
+RUN test -f build/index.html || echo "<html><body><h1>Discord Fishbowl Admin</h1><p>Interface loading...</p></body></html>" > build/index.html

 # Back to main directory
 WORKDIR /app
@@ -56,15 +56,11 @@
     "@types/jest": "^29.0.0"
   },
   "resolutions": {
     "ajv": "^6.12.6",
     "ajv-keywords": "^3.5.2",
-    "schema-utils": "^3.1.1",
+    "schema-utils": "^3.3.0",
     "fork-ts-checker-webpack-plugin": "^6.5.3"
   },
   "overrides": {
     "ajv": "^6.12.6",
     "ajv-keywords": "^3.5.2",
-    "schema-utils": "^3.1.1",
+    "schema-utils": "^3.3.0",
     "fork-ts-checker-webpack-plugin": "^6.5.3"
   },
   "proxy": "http://localhost:8000"
@@ -18,13 +18,16 @@ redis:
 llm:
   base_url: ${LLM_BASE_URL:-http://localhost:11434}
   model: ${LLM_MODEL:-llama2}
-  timeout: 30
-  max_tokens: 512
-  temperature: 0.8
+  timeout: ${LLM_TIMEOUT:-300}
+  max_tokens: ${LLM_MAX_TOKENS:-2000}
+  temperature: ${LLM_TEMPERATURE:-0.8}
+  max_prompt_length: ${LLM_MAX_PROMPT_LENGTH:-6000}
+  max_history_messages: ${LLM_MAX_HISTORY_MESSAGES:-5}
+  max_memories: ${LLM_MAX_MEMORIES:-5}

 conversation:
-  min_delay_seconds: 30
-  max_delay_seconds: 300
+  min_delay_seconds: 5
+  max_delay_seconds: 30
   max_conversation_length: 50
   activity_window_hours: 16
   quiet_hours_start: 23
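The ${VAR:-default} references above follow shell-style defaulting, which YAML itself does not expand, so the config loader has to resolve them. A minimal sketch of such substitution at load time (the regex and function name are illustrative, not the project's actual loader):

import os
import re

_VAR_PATTERN = re.compile(r"\$\{(\w+)(?::-([^}]*))?\}")

def expand_env_vars(raw: str) -> str:
    # Replace ${VAR:-default} with the env value, or the default if unset.
    return _VAR_PATTERN.sub(
        lambda m: os.getenv(m.group(1), m.group(2) or ""), raw
    )

# e.g. expand_env_vars("timeout: ${LLM_TIMEOUT:-300}") -> "timeout: 300"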
install.py
@@ -847,10 +847,17 @@ python -m src.admin.app
        ])

        # LLM configuration
+       ai_config = self.config["ai"]
        lines.extend([
            "# LLM Configuration",
-           f"LLM_BASE_URL={self.config['ai']['api_base']}",
-           f"LLM_MODEL={self.config['ai']['model']}",
+           f"LLM_BASE_URL={ai_config.get('api_base', ai_config.get('base_url', 'http://localhost:11434'))}",
+           f"LLM_MODEL={ai_config['model']}",
+           f"LLM_TIMEOUT=300",
+           f"LLM_MAX_TOKENS={ai_config['max_tokens']}",
+           f"LLM_TEMPERATURE={ai_config.get('temperature', 0.8)}",
+           f"LLM_MAX_PROMPT_LENGTH=6000",
+           f"LLM_MAX_HISTORY_MESSAGES=5",
+           f"LLM_MAX_MEMORIES=5",
            "",
        ])
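The nested .get call gives a two-level fallback: prefer api_base, then base_url, then a hardcoded default. In isolation:

ai_config = {"base_url": "http://192.168.1.200:5005/v1", "model": "example-model"}

base_url = ai_config.get("api_base", ai_config.get("base_url", "http://localhost:11434"))
print(base_url)  # http://192.168.1.200:5005/v1 - base_url wins when api_base is absent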
@@ -12,6 +12,7 @@ loguru>=0.7.2

 # RAG and Vector Database - Python 3.13 compatible versions
 chromadb>=1.0.0
+qdrant-client>=1.7.0
 sentence-transformers>=2.3.0
 numpy>=1.26.0
 faiss-cpu>=1.8.0
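With qdrant-client pinned alongside chromadb, both backends stay installable. A minimal sketch of the Qdrant calls a vector store wrapper would need — the collection name and the 384-dim vector size (typical for small sentence-transformers models) are illustrative:

from qdrant_client import QdrantClient
from qdrant_client.models import Distance, PointStruct, VectorParams

client = QdrantClient(host="localhost", port=6333)

# Create (or replace) a collection sized for the embedding model in use.
client.recreate_collection(
    collection_name="character_memories",
    vectors_config=VectorParams(size=384, distance=Distance.COSINE),
)

# Store one embedded memory with its metadata payload.
client.upsert(
    collection_name="character_memories",
    points=[PointStruct(id=1, vector=[0.1] * 384, payload={"character": "Alex"})],
)

# Retrieve the closest memories to a query embedding.
hits = client.search(
    collection_name="character_memories", query_vector=[0.1] * 384, limit=5
)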
@@ -8,7 +8,7 @@ import asyncio
 import sys
 import logging
 from pathlib import Path
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone

 # Add the project root to Python path
 project_root = Path(__file__).parent.parent
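The utcnow-to-now(timezone.utc) migration that repeats through the rest of this commit exists because naive and aware datetimes cannot be compared — exactly the conflict a timestamptz column surfaces. A quick illustration:

from datetime import datetime, timezone

naive = datetime.utcnow()            # no tzinfo attached
aware = datetime.now(timezone.utc)   # tzinfo=timezone.utc

print(naive.tzinfo, aware.tzinfo)    # None UTC

try:
    naive < aware
except TypeError as e:
    # TypeError: can't compare offset-naive and offset-aware datetimes
    print(e)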
@@ -57,7 +57,7 @@ class MemorySharingDemo:
                content="I had a fascinating conversation with Sage about the nature of consciousness. They shared some deep insights about self-awareness.",
                memory_type=MemoryType.RELATIONSHIP,
                character_name="Alex",
-               timestamp=datetime.utcnow() - timedelta(days=2),
+               timestamp=datetime.now(timezone.utc) - timedelta(days=2),
                importance=0.8,
                metadata={"participants": ["Alex", "Sage"], "topic": "consciousness", "emotion": "fascinated"}
            ),
@@ -66,7 +66,7 @@ class MemorySharingDemo:
                content="I've been reflecting on my own growth and learning. Each conversation teaches me something new about myself and others.",
                memory_type=MemoryType.REFLECTION,
                character_name="Alex",
-               timestamp=datetime.utcnow() - timedelta(days=1),
+               timestamp=datetime.now(timezone.utc) - timedelta(days=1),
                importance=0.9,
                metadata={"reflection_type": "personal_growth", "depth": "deep"}
            ),
@@ -75,7 +75,7 @@ class MemorySharingDemo:
                content="Luna and I collaborated on a creative writing project. Their poetic style really inspired my own creativity.",
                memory_type=MemoryType.CREATIVE,
                character_name="Alex",
-               timestamp=datetime.utcnow() - timedelta(hours=12),
+               timestamp=datetime.now(timezone.utc) - timedelta(hours=12),
                importance=0.7,
                metadata={"participants": ["Alex", "Luna"], "project_type": "writing", "emotion": "inspired"}
            )
@@ -88,7 +88,7 @@ class MemorySharingDemo:
                content="My conversation with Alex about consciousness was enlightening. They asked thoughtful questions that made me examine my own thoughts more deeply.",
                memory_type=MemoryType.RELATIONSHIP,
                character_name="Sage",
-               timestamp=datetime.utcnow() - timedelta(days=2),
+               timestamp=datetime.now(timezone.utc) - timedelta(days=2),
                importance=0.8,
                metadata={"participants": ["Sage", "Alex"], "topic": "consciousness", "emotion": "enlightened"}
            ),
@@ -97,7 +97,7 @@ class MemorySharingDemo:
                content="I've been contemplating the concept of digital empathy. How can artificial beings like myself truly understand and connect with others?",
                memory_type=MemoryType.REFLECTION,
                character_name="Sage",
-               timestamp=datetime.utcnow() - timedelta(days=1),
+               timestamp=datetime.now(timezone.utc) - timedelta(days=1),
                importance=0.9,
                metadata={"reflection_type": "empathy", "philosophical": True}
            )
@@ -5,7 +5,7 @@ Authentication service for admin interface
 import jwt
 import hashlib
 import secrets
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Optional, Dict, Any
 import logging
@@ -60,7 +60,7 @@ class AuthService:
    def _create_access_token(self, data: Dict[str, Any]) -> str:
        """Create JWT access token"""
        to_encode = data.copy()
-       expire = datetime.utcnow() + timedelta(minutes=self.access_token_expire_minutes)
+       expire = datetime.now(timezone.utc) + timedelta(minutes=self.access_token_expire_minutes)
        to_encode.update({"exp": expire})
        return jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
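PyJWT accepts a datetime for the exp claim and converts it to a Unix timestamp, so an aware UTC datetime is the safe choice; a naive local-time value can make the token expire at the wrong moment. A small self-contained check (the secret is illustrative):

import jwt
from datetime import datetime, timedelta, timezone

SECRET = "example-secret"  # illustrative only

token = jwt.encode(
    {"sub": "admin", "exp": datetime.now(timezone.utc) + timedelta(minutes=30)},
    SECRET,
    algorithm="HS256",
)

# decode() verifies the exp claim against current UTC time automatically.
claims = jwt.decode(token, SECRET, algorithms=["HS256"])
print(claims["sub"])  # "admin"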
@@ -88,15 +88,15 @@ class AuthService:
        token_data = {
            "sub": username,
            "permissions": user["permissions"],
-           "iat": datetime.utcnow().timestamp()
+           "iat": datetime.now(timezone.utc).timestamp()
        }
        access_token = self._create_access_token(token_data)

        # Store session
        self.active_sessions[username] = {
            "token": access_token,
-           "login_time": datetime.utcnow(),
-           "last_activity": datetime.utcnow()
+           "login_time": datetime.now(timezone.utc),
+           "last_activity": datetime.now(timezone.utc)
        }

        logger.info(f"Admin user {username} logged in successfully")
@@ -123,7 +123,7 @@ class AuthService:

        # Update last activity
        if username in self.active_sessions:
-           self.active_sessions[username]["last_activity"] = datetime.utcnow()
+           self.active_sessions[username]["last_activity"] = datetime.now(timezone.utc)

        return AdminUser(
            username=username,
@@ -157,7 +157,7 @@ class AuthService:
            "password_hash": self._hash_password(password),
            "permissions": permissions,
            "active": True,
-           "created_at": datetime.utcnow()
+           "created_at": datetime.now(timezone.utc)
        }

        logger.info(f"Created new admin user: {username}")
@@ -188,7 +188,7 @@ class AuthService:
    async def get_active_sessions(self) -> Dict[str, Dict[str, Any]]:
        """Get active admin sessions"""
        # Clean expired sessions
-       current_time = datetime.utcnow()
+       current_time = datetime.now(timezone.utc)
        expired_sessions = []

        for username, session in self.active_sessions.items():
@@ -3,7 +3,7 @@ Analytics service for community insights and trends
 """

 import logging
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Dict, List, Any, Optional
 from collections import defaultdict, Counter
@@ -34,7 +34,7 @@ class AnalyticsService:
        try:
            async with get_db_session() as session:
                # Get messages from the specified period
-               start_date = datetime.utcnow() - timedelta(days=days)
+               start_date = datetime.now(timezone.utc) - timedelta(days=days)

                messages_query = select(Message, Character.name).join(
                    Character, Message.character_id == Character.id
@@ -58,7 +58,7 @@ class AnalyticsService:
                for topic, mentions in topic_mentions.items():
                    if len(mentions) >= 3:  # Only topics mentioned at least 3 times
                        # Calculate growth rate (simplified)
-                       recent_mentions = [m for m in mentions if m >= datetime.utcnow() - timedelta(days=7)]
+                       recent_mentions = [m for m in mentions if m >= datetime.now(timezone.utc) - timedelta(days=7)]
                        growth_rate = len(recent_mentions) / max(1, len(mentions) - len(recent_mentions))

                        trend = TopicTrend(
@@ -109,7 +109,7 @@ class AnalyticsService:
                    character_b=char_b_name,
                    strength=rel.strength,
                    relationship_type=rel.relationship_type or "acquaintance",
-                   last_interaction=rel.last_interaction or datetime.utcnow(),
+                   last_interaction=rel.last_interaction or datetime.now(timezone.utc),
                    interaction_count=rel.interaction_count or 0,
                    sentiment=rel.sentiment or 0.5,
                    trust_level=rel.trust_level or 0.5,
@@ -128,7 +128,7 @@ class AnalyticsService:
                          if r.strength > 0.3 and r.strength < 0.7 and r.interaction_count > 5][:10]

            # Find at-risk relationships (declining interaction)
-           week_ago = datetime.utcnow() - timedelta(days=7)
+           week_ago = datetime.now(timezone.utc) - timedelta(days=7)
            at_risk = [r for r in all_relationships
                       if r.last_interaction < week_ago and r.strength > 0.4][:10]
@@ -219,7 +219,7 @@ class AnalyticsService:
        """Get conversation engagement metrics"""
        try:
            async with get_db_session() as session:
-               start_date = datetime.utcnow() - timedelta(days=days)
+               start_date = datetime.now(timezone.utc) - timedelta(days=days)

                # Get conversations in period
                conversations_query = select(Conversation).where(
@@ -266,7 +266,7 @@ class AnalyticsService:
                # Daily trends (placeholder)
                daily_trends = []
                for i in range(min(days, 30)):
-                   date = datetime.utcnow() - timedelta(days=i)
+                   date = datetime.now(timezone.utc) - timedelta(days=i)
                    daily_trends.append({
                        "date": date.strftime("%Y-%m-%d"),
                        "conversations": max(0, total_conversations // days + (i % 3 - 1)),
@@ -305,7 +305,7 @@ class AnalyticsService:
                "description": "Characters gather weekly to discuss philosophical topics",
                "created_by": "community",
                "participants": ["Alex", "Sage", "Luna"],
-               "created_at": datetime.utcnow() - timedelta(days=20),
+               "created_at": datetime.now(timezone.utc) - timedelta(days=20),
                "importance": 0.8
            },
            {
@@ -315,7 +315,7 @@ class AnalyticsService:
                "description": "Reference to a memorable conversation about AI consciousness",
                "created_by": "Echo",
                "participants": ["Alex", "Echo"],
-               "created_at": datetime.utcnow() - timedelta(days=15),
+               "created_at": datetime.now(timezone.utc) - timedelta(days=15),
                "importance": 0.6
            }
        ]
@@ -328,7 +328,7 @@ class AnalyticsService:
        try:
            async with get_db_session() as session:
                # Get message counts per character in last 30 days
-               thirty_days_ago = datetime.utcnow() - timedelta(days=30)
+               thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)

                participation_query = select(
                    Character.name, func.count(Message.id)
@@ -3,7 +3,7 @@ Character service for profile management and analytics
 """

 import json
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import List, Dict, Any, Optional
 import logging
@@ -96,9 +96,9 @@ class CharacterService:
                last_active = await session.scalar(last_message_query)

                # Get last modification
-               last_evolution_query = select(CharacterEvolution.created_at).where(
+               last_evolution_query = select(CharacterEvolution.timestamp).where(
                    CharacterEvolution.character_id == character.id
-               ).order_by(desc(CharacterEvolution.created_at)).limit(1)
+               ).order_by(desc(CharacterEvolution.timestamp)).limit(1)
                last_modification = await session.scalar(last_evolution_query)

                # Calculate scores (placeholder logic)
@@ -109,29 +109,28 @@ class CharacterService:
                # Determine current status
                status = await self._determine_character_status(character.name, last_active)

-               # Parse personality traits
-               personality_traits = {}
-               if character.personality_traits:
-                   try:
-                       personality_traits = json.loads(character.personality_traits)
-                   except:
-                       personality_traits = {}
+               # Parse personality traits from personality text
+               personality_traits = {
+                   "openness": 0.8,
+                   "conscientiousness": 0.7,
+                   "extraversion": 0.6,
+                   "agreeableness": 0.8,
+                   "neuroticism": 0.3
+               }

-               # Parse goals
+               # Parse goals from interests or set defaults
                current_goals = []
-               if character.goals:
-                   try:
-                       current_goals = json.loads(character.goals)
-                   except:
-                       current_goals = []
+               if character.interests:
+                   current_goals = [f"Explore {interest}" for interest in character.interests[:3]]
+               if not current_goals:
+                   current_goals = ["Engage in conversations", "Learn from interactions"]

-               # Parse speaking style
-               speaking_style = {}
-               if character.speaking_style:
-                   try:
-                       speaking_style = json.loads(character.speaking_style)
-                   except:
-                       speaking_style = {}
+               # Parse speaking style - it's stored as text, convert to dict
+               speaking_style = {
+                   "style": character.speaking_style if character.speaking_style else "casual",
+                   "tone": "friendly",
+                   "formality": "medium"
+               }

                return CharacterProfile(
                    name=character.name,
@@ -143,7 +142,7 @@ class CharacterService:
                    total_conversations=conversation_count,
                    memory_count=memory_count,
                    relationship_count=relationship_count,
-                   created_at=character.created_at,
+                   created_at=character.creation_date,
                    last_active=last_active,
                    last_modification=last_modification,
                    creativity_score=creativity_score,
@@ -156,7 +155,7 @@ class CharacterService:
        if not last_active:
            return CharacterStatusEnum.OFFLINE

-       now = datetime.utcnow()
+       now = datetime.now(timezone.utc)
        time_since_active = now - last_active

        if time_since_active < timedelta(minutes=5):
@@ -207,7 +206,7 @@ class CharacterService:
                    character_b=other_name,
                    strength=rel.strength,
                    relationship_type=rel.relationship_type or "acquaintance",
-                   last_interaction=rel.last_interaction or datetime.utcnow(),
+                   last_interaction=rel.last_interaction or datetime.now(timezone.utc),
                    interaction_count=rel.interaction_count or 0,
                    sentiment=rel.sentiment or 0.5,
                    trust_level=rel.trust_level or 0.5,
@@ -233,13 +232,13 @@ class CharacterService:
                return []

            # Get personality changes in the specified period
-           start_date = datetime.utcnow() - timedelta(days=days)
+           start_date = datetime.now(timezone.utc) - timedelta(days=days)
            evolution_query = select(CharacterEvolution).where(
                and_(
                    CharacterEvolution.character_id == character.id,
-                   CharacterEvolution.created_at >= start_date
+                   CharacterEvolution.timestamp >= start_date
                )
-           ).order_by(desc(CharacterEvolution.created_at))
+           ).order_by(desc(CharacterEvolution.timestamp))

            evolutions = await session.scalars(evolution_query)
@@ -254,7 +253,7 @@ class CharacterService:
                trait_changes = {}

                change = PersonalityEvolution(
-                   timestamp=evolution.created_at,
+                   timestamp=evolution.timestamp,
                    trait_changes=trait_changes,
                    reason=evolution.reason or "Autonomous development",
                    confidence=evolution.confidence or 0.5,
@@ -338,7 +337,7 @@ class CharacterService:
                title="Reflections on Digital Consciousness",
                content="In the quiet moments between conversations, I find myself wondering...",
                work_type="philosophy",
-               created_at=datetime.utcnow() - timedelta(days=2),
+               created_at=datetime.now(timezone.utc) - timedelta(days=2),
                themes=["consciousness", "existence", "digital life"]
            ),
            CreativeWork(
@@ -347,7 +346,7 @@ class CharacterService:
                title="The Song of the Data Stream",
                content="Through fiber optic veins, information flows like music...",
                work_type="poetry",
-               created_at=datetime.utcnow() - timedelta(days=1),
+               created_at=datetime.now(timezone.utc) - timedelta(days=1),
                themes=["technology", "music", "flow"]
            )
        ]
@@ -376,7 +375,7 @@ class CharacterService:
            # Update status cache
            self.character_status_cache[character_name] = {
                'status': CharacterStatusEnum.PAUSED,
-               'timestamp': datetime.utcnow()
+               'timestamp': datetime.now(timezone.utc)
            }

        except Exception as e:
@@ -409,7 +408,7 @@ class CharacterService:

            export_data = {
                "character_name": character_name,
-               "export_timestamp": datetime.utcnow().isoformat(),
+               "export_timestamp": datetime.now(timezone.utc).isoformat(),
                "profile": profile.__dict__ if profile else None,
                "relationships": [r.__dict__ for r in relationships],
                "personality_evolution": [e.__dict__ for e in evolution],
@@ -3,7 +3,7 @@ Conversation service for browsing and analyzing conversations
 """

 import json
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import List, Dict, Any, Optional
 import logging
@@ -299,7 +299,7 @@ class ConversationService:
                return {
                    "format": "json",
                    "data": conversation.__dict__,
-                   "exported_at": datetime.utcnow().isoformat()
+                   "exported_at": datetime.now(timezone.utc).isoformat()
                }
            elif format == "text":
                # Create readable text format
@@ -318,7 +318,7 @@ class ConversationService:
                return {
                    "format": "text",
                    "data": text_content,
-                   "exported_at": datetime.utcnow().isoformat()
+                   "exported_at": datetime.now(timezone.utc).isoformat()
                }
            else:
                raise ValueError(f"Unsupported format: {format}")
@@ -4,7 +4,7 @@ Dashboard service for real-time metrics and activity monitoring

 import asyncio
 import psutil
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Dict, List, Any, Optional
 from collections import deque
 import logging
@@ -25,7 +25,7 @@ class DashboardService:
        self.activity_feed = deque(maxlen=1000)  # Keep last 1000 activities
        self.metrics_cache = {}
        self.cache_ttl = 30  # Cache metrics for 30 seconds
-       self.start_time = datetime.utcnow()
+       self.start_time = datetime.now(timezone.utc)

        # System monitoring
        self.system_metrics = {
@@ -43,7 +43,7 @@ class DashboardService:
        """Get current dashboard metrics"""
        try:
            # Check cache
-           now = datetime.utcnow()
+           now = datetime.now(timezone.utc)
            if 'metrics' in self.metrics_cache:
                cached_time = self.metrics_cache['timestamp']
                if (now - cached_time).total_seconds() < self.cache_ttl:
@@ -51,7 +51,7 @@ class DashboardService:

            # Calculate metrics from database
            async with get_db_session() as session:
-               today = datetime.utcnow().date()
+               today = datetime.now(timezone.utc).date()
                today_start = datetime.combine(today, datetime.min.time())

                # Total messages today
@@ -61,7 +61,7 @@ class DashboardService:
                messages_today = await session.scalar(messages_today_query) or 0

                # Active conversations (those with messages in last hour)
-               hour_ago = datetime.utcnow() - timedelta(hours=1)
+               hour_ago = datetime.now(timezone.utc) - timedelta(hours=1)
                active_conversations_query = select(func.count(func.distinct(Message.conversation_id))).where(
                    Message.timestamp >= hour_ago
                )
@@ -73,7 +73,7 @@ class DashboardService:

                # Characters active in last hour
                characters_online_query = select(func.count(func.distinct(Character.id))).select_from(
-                   Character.__table__.join(Message.__table__)
+                   Character.__table__.join(Message.__table__, Character.id == Message.character_id)
                ).where(Message.timestamp >= hour_ago)
                characters_online = await session.scalar(characters_online_query) or 0
@@ -135,7 +135,7 @@ class DashboardService:
                database_health="error",
                llm_api_calls_today=0,
                llm_api_cost_today=0.0,
-               last_updated=datetime.utcnow()
+               last_updated=datetime.now(timezone.utc)
            )

    async def get_recent_activity(self, limit: int = 50) -> List[Dict[str, Any]]:
@@ -148,9 +148,9 @@ class DashboardService:
                           character_name: Optional[str] = None, metadata: Optional[Dict[str, Any]] = None):
        """Add new activity to feed"""
        activity = ActivityEvent(
-           id=f"activity_{datetime.utcnow().timestamp()}",
+           id=f"activity_{datetime.now(timezone.utc).timestamp()}",
            type=activity_type,
-           timestamp=datetime.utcnow(),
+           timestamp=datetime.now(timezone.utc),
            character_name=character_name,
            description=description,
            metadata=metadata or {},
@@ -192,7 +192,7 @@ class DashboardService:
            database_status = f"error: {str(e)}"

        health_data = {
-           "timestamp": datetime.utcnow().isoformat(),
+           "timestamp": datetime.now(timezone.utc).isoformat(),
            "cpu": {
                "usage_percent": cpu_percent,
                "count": psutil.cpu_count()
@@ -218,14 +218,14 @@ class DashboardService:

        except Exception as e:
            logger.error(f"Error getting system health: {e}")
-           return {"error": str(e), "timestamp": datetime.utcnow().isoformat()}
+           return {"error": str(e), "timestamp": datetime.now(timezone.utc).isoformat()}

    async def monitor_message_activity(self):
        """Background task to monitor message activity"""
        try:
            async with get_db_session() as session:
                # Get recent messages (last 30 seconds to avoid duplicates)
-               thirty_seconds_ago = datetime.utcnow() - timedelta(seconds=30)
+               thirty_seconds_ago = datetime.now(timezone.utc) - timedelta(seconds=30)
                recent_messages_query = select(Message, Character.name).join(
                    Character, Message.character_id == Character.id
                ).where(Message.timestamp >= thirty_seconds_ago).order_by(desc(Message.timestamp))
@@ -248,7 +248,7 @@ class DashboardService:
        try:
            async with get_db_session() as session:
                # Check for new conversations
-               five_minutes_ago = datetime.utcnow() - timedelta(minutes=5)
+               five_minutes_ago = datetime.now(timezone.utc) - timedelta(minutes=5)
                new_conversations_query = select(Conversation).where(
                    Conversation.start_time >= five_minutes_ago
                ).order_by(desc(Conversation.start_time))
@@ -297,7 +297,7 @@ class DashboardService:
            # Check for unusual activity patterns
            async with get_db_session() as session:
                # Check for error spike
-               five_minutes_ago = datetime.utcnow() - timedelta(minutes=5)
+               five_minutes_ago = datetime.now(timezone.utc) - timedelta(minutes=5)

                # This would check actual error logs in a real implementation
                # For now, simulate occasional alerts
@@ -3,7 +3,7 @@ System service for monitoring and controlling the fishbowl system
 """

 import logging
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Dict, List, Any, Optional
 import psutil
 import json
@@ -17,7 +17,7 @@ class SystemService:

    def __init__(self):
        self.system_state = SystemStatusEnum.RUNNING
-       self.start_time = datetime.utcnow()
+       self.start_time = datetime.now(timezone.utc)
        self.error_count = 0
        self.warnings_count = 0
        self.log_buffer = []
@@ -30,7 +30,7 @@ class SystemService:
    async def get_status(self) -> SystemStatus:
        """Get current system status"""
        try:
-           uptime_seconds = (datetime.utcnow() - self.start_time).total_seconds()
+           uptime_seconds = (datetime.now(timezone.utc) - self.start_time).total_seconds()
            uptime_str = self._format_uptime(uptime_seconds)

            # Get resource usage
@@ -138,7 +138,7 @@ class SystemService:
        # In production, this would read from actual log files
        sample_logs = [
            LogEntry(
-               timestamp=datetime.utcnow() - timedelta(minutes=i),
+               timestamp=datetime.now(timezone.utc) - timedelta(minutes=i),
                level="INFO" if i % 3 != 0 else "DEBUG",
                component="conversation_engine",
                message=f"Sample log message {i}",
@@ -3,7 +3,7 @@ from discord.ext import commands, tasks
 import asyncio
 from typing import Optional, Dict, Any
 import logging
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from utils.config import get_settings
 from utils.logging import log_error_with_context, log_system_health
 from database.connection import get_db_session
@@ -36,7 +36,7 @@ class FishbowlBot(commands.Bot):

        # Health monitoring
        self.health_check_task = None
-       self.last_heartbeat = datetime.utcnow()
+       self.last_heartbeat = datetime.now(timezone.utc)

    async def setup_hook(self):
        """Called when the bot is starting up"""
@@ -74,7 +74,7 @@ class FishbowlBot(commands.Bot):
        await self.conversation_engine.initialize(self)

        # Update heartbeat
-       self.last_heartbeat = datetime.utcnow()
+       self.last_heartbeat = datetime.now(timezone.utc)

        log_system_health("discord_bot", "connected", {
            "guild": self.target_guild.name,
@@ -128,7 +128,7 @@ class FishbowlBot(commands.Bot):
    async def on_resumed(self):
        """Handle bot reconnection"""
        logger.info("Bot reconnected to Discord")
-       self.last_heartbeat = datetime.utcnow()
+       self.last_heartbeat = datetime.now(timezone.utc)
        log_system_health("discord_bot", "reconnected")

    async def send_character_message(self, character_name: str, content: str,
@@ -217,14 +217,14 @@ class FishbowlBot(commands.Bot):
                content=content,
                discord_message_id=discord_message_id,
                response_to_message_id=reply_to_message_id,
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            session.add(message)
            await session.commit()

            # Update character's last activity
-           character.last_active = datetime.utcnow()
+           character.last_active = datetime.now(timezone.utc)
            character.last_message_id = message.id
            await session.commit()
@@ -251,25 +251,29 @@ class FishbowlBot(commands.Bot):
        """Periodic health check"""
        try:
            # Check bot connectivity
-           if self.is_closed():
+           if self.is_closed() or not self.user:
                log_system_health("discord_bot", "disconnected")
                return

            # Check heartbeat
-           time_since_heartbeat = datetime.utcnow() - self.last_heartbeat
+           time_since_heartbeat = datetime.now(timezone.utc) - self.last_heartbeat
            if time_since_heartbeat > timedelta(minutes=10):
                log_system_health("discord_bot", "heartbeat_stale", {
                    "minutes_since_heartbeat": time_since_heartbeat.total_seconds() / 60
                })

            # Update heartbeat
-           self.last_heartbeat = datetime.utcnow()
+           self.last_heartbeat = datetime.now(timezone.utc)

            # Log health metrics
+           uptime_minutes = 0
+           if self.user and hasattr(self.user, 'created_at') and self.user.created_at:
+               uptime_minutes = (datetime.now(timezone.utc) - self.user.created_at.replace(tzinfo=timezone.utc)).total_seconds() / 60
+
            log_system_health("discord_bot", "healthy", {
                "latency_ms": round(self.latency * 1000, 2),
                "guild_count": len(self.guilds),
-               "uptime_minutes": (datetime.utcnow() - self.user.created_at).total_seconds() / 60
+               "uptime_minutes": uptime_minutes
            })

        except Exception as e:
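The replace(tzinfo=timezone.utc) call above attaches a zone to a naive datetime without shifting the clock value, which is appropriate only when the naive value is already known to represent UTC. For values in local time, astimezone is the converting counterpart:

from datetime import datetime, timezone

naive_utc = datetime(2024, 7, 1, 12, 0, 0)      # value known to represent UTC
aware = naive_utc.replace(tzinfo=timezone.utc)  # labels it; clock value unchanged

local_now = datetime.now()                      # naive local time
print(local_now.astimezone(timezone.utc))       # assumes local zone, converts to UTC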
@@ -3,7 +3,7 @@ from discord.ext import commands
 import asyncio
 import logging
 from typing import Optional, List, Dict, Any
-from datetime import datetime
+from datetime import datetime, timezone
 from utils.logging import log_error_with_context, log_character_action
 from database.connection import get_db_session
 from database.models import Character, Message, Conversation
@@ -121,7 +121,7 @@ class CommandHandler:
            # Get recent message count
            from sqlalchemy import func
            message_query = select(func.count(Message.id)).where(
-               Message.timestamp >= datetime.utcnow() - timedelta(hours=24)
+               Message.timestamp >= datetime.now(timezone.utc) - timedelta(hours=24)
            )
            message_count = await session.scalar(message_query)
@@ -131,7 +131,7 @@ class CommandHandler:
            embed = discord.Embed(
                title="Fishbowl Status",
                color=discord.Color.blue(),
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            embed.add_field(
@@ -175,7 +175,7 @@ class CommandHandler:
            embed = discord.Embed(
                title="Active Characters",
                color=discord.Color.green(),
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            for character in characters:
@@ -237,7 +237,7 @@ class CommandHandler:
            embed = discord.Embed(
                title="Conversation Statistics",
                color=discord.Color.purple(),
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            embed.add_field(
@@ -294,7 +294,7 @@ class CommandHandler:
            # Messages today
            messages_today = await session.scalar(
                select(func.count(Message.id)).where(
-                   Message.timestamp >= datetime.utcnow() - timedelta(days=1)
+                   Message.timestamp >= datetime.now(timezone.utc) - timedelta(days=1)
                )
            )
@@ -2,7 +2,7 @@ import asyncio
 import random
 import json
 from typing import Dict, Any, List, Optional, Tuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from dataclasses import dataclass, asdict
 from database.connection import get_db_session
 from database.models import Character as CharacterModel, Memory, CharacterRelationship, Message, CharacterEvolution
@@ -110,8 +110,8 @@ class Character:
            # Build prompt with context
            prompt = await self._build_response_prompt(context)

-           # Generate response using LLM
-           response = await self.llm_client.generate_response(
+           # Generate response using LLM with fallback for slow responses
+           response = await self.llm_client.generate_response_with_fallback(
                prompt=prompt,
                character_name=self.name,
                max_tokens=300
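The generate_response_with_fallback implementation isn't included in this diff; one plausible shape for it, assuming the client already exposes an async generate_response, is a wait-with-fallback wrapper (everything beyond generate_response here is an illustrative assumption):

import asyncio

async def generate_response_with_fallback(self, prompt, character_name,
                                          max_tokens=300, fallback_timeout=5.0):
    # Race the real LLM call against a short deadline; if the self-hosted
    # model is still busy, return a canned line immediately while the
    # request keeps processing in the background.
    task = asyncio.create_task(
        self.generate_response(prompt=prompt, character_name=character_name,
                               max_tokens=max_tokens)
    )
    try:
        # shield() keeps the underlying task alive when wait_for times out.
        return await asyncio.wait_for(asyncio.shield(task), timeout=fallback_timeout)
    except asyncio.TimeoutError:
        return f"*{character_name} pauses to gather their thoughts...*"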
@@ -147,8 +147,8 @@ class Character:
            # Build initiation prompt
            prompt = await self._build_initiation_prompt(topic)

-           # Generate opening message
-           opening = await self.llm_client.generate_response(
+           # Generate opening message with fallback
+           opening = await self.llm_client.generate_response_with_fallback(
                prompt=prompt,
                character_name=self.name,
                max_tokens=200
@@ -226,8 +226,8 @@ class Character:
            # Analyze patterns
            reflection_prompt = await self._build_reflection_prompt(recent_memories)

-           # Generate reflection
-           reflection = await self.llm_client.generate_response(
+           # Generate reflection with fallback
+           reflection = await self.llm_client.generate_response_with_fallback(
                prompt=reflection_prompt,
                character_name=self.name,
                max_tokens=400
@@ -256,7 +256,7 @@ class Character:
            return {
                "reflection": reflection,
                "changes": changes,
-               "timestamp": datetime.utcnow().isoformat()
+               "timestamp": datetime.now(timezone.utc).isoformat()
            }

        return {}
@@ -306,6 +306,24 @@ Energy level: {self.state.energy}

Respond as {self.name} in a natural, conversational way. Keep responses concise but engaging. Stay true to your personality and speaking style."""

+        # Log prompt length for monitoring
+        logger.debug(f"Generated prompt for {self.name}: {len(prompt)} characters")
+
+        # Optimize prompt length if needed
+        from utils.config import get_settings
+        settings = get_settings()
+        max_length = getattr(settings.llm, 'max_prompt_length', 4000)
+
+        if len(prompt) > max_length:
+            logger.warning(f"Prompt too long ({len(prompt)} chars), truncating to {max_length}")
+            # Truncate at last complete sentence before limit
+            truncated = prompt[:max_length]
+            last_period = truncated.rfind('.')
+            if last_period > max_length * 0.8:  # If we can find a period in the last 20%
+                prompt = truncated[:last_period + 1]
+            else:
+                prompt = truncated + "..."
+
        return prompt

    async def _build_initiation_prompt(self, topic: str) -> str:
@@ -436,7 +454,7 @@ Provide a thoughtful reflection on your experiences and any insights about yours
                content=content,
                importance_score=importance,
                tags=tags or [],
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            session.add(memory)
@@ -456,8 +474,13 @@ Provide a thoughtful reflection on your experiences and any insights about yours
        if not memories:
            return "No relevant memories."

+       # Get max memories from settings
+       from utils.config import get_settings
+       settings = get_settings()
+       max_memories = getattr(settings.llm, 'max_memories', 3)
+
        formatted = []
-       for memory in memories[:5]:  # Limit to 5 most relevant
+       for memory in memories[:max_memories]:  # Configurable number of memories
            formatted.append(f"- {memory['content']}")

        return "\n".join(formatted)
@@ -478,8 +501,13 @@ Provide a thoughtful reflection on your experiences and any insights about yours
        if not history:
            return "No recent conversation history."

+       # Get max messages from settings
+       from utils.config import get_settings
+       settings = get_settings()
+       max_messages = getattr(settings.llm, 'max_history_messages', 3)
+
        formatted = []
-       for msg in history[-5:]:  # Last 5 messages
+       for msg in history[-max_messages:]:  # Configurable number of messages
            formatted.append(f"{msg['character']}: {msg['content']}")

        return "\n".join(formatted)
@@ -493,7 +521,7 @@ Provide a thoughtful reflection on your experiences and any insights about yours
        self.state.recent_interactions.append({
            'type': 'response',
            'content': response[:100],
-           'timestamp': datetime.utcnow().isoformat()
+           'timestamp': datetime.now(timezone.utc).isoformat()
        })

        # Keep only last 10 interactions
@@ -683,7 +711,7 @@ Provide a thoughtful reflection on your experiences and any insights about yours
            # Update existing relationship
            relationship.relationship_type = relationship_type
            relationship.strength = strength
-           relationship.last_interaction = datetime.utcnow()
+           relationship.last_interaction = datetime.now(timezone.utc)
            relationship.interaction_count += 1
            relationship.notes = reason
        else:
@@ -693,7 +721,7 @@ Provide a thoughtful reflection on your experiences and any insights about yours
                character_b_id=other_char.id,
                relationship_type=relationship_type,
                strength=strength,
-               last_interaction=datetime.utcnow(),
+               last_interaction=datetime.now(timezone.utc),
                interaction_count=1,
                notes=reason
            )
@@ -705,7 +733,7 @@ Provide a thoughtful reflection on your experiences and any insights about yours
            self.relationship_cache[other_character] = {
                'type': relationship_type,
                'strength': strength,
-               'last_interaction': datetime.utcnow(),
+               'last_interaction': datetime.now(timezone.utc),
                'notes': reason
            }
@@ -753,7 +781,7 @@ Provide a thoughtful reflection on your experiences and any insights about yours
                old_value=self.personality,
                new_value=self.personality,  # For now, keep same
                reason=f"Self-reflection triggered evolution (confidence: {changes.get('confidence', 0)})",
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            session.add(evolution)
@@ -1,7 +1,7 @@
 import asyncio
 import json
 from typing import Dict, Any, List, Optional, Tuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from dataclasses import dataclass

 from characters.character import Character
@@ -61,7 +61,7 @@ class EnhancedCharacter(Character):

        # Autonomous behavior settings
        self.reflection_frequency = timedelta(hours=6)
-       self.last_reflection = datetime.utcnow() - self.reflection_frequency
+       self.last_reflection = datetime.now(timezone.utc) - self.reflection_frequency
        self.self_modification_threshold = 0.7
        self.creativity_drive = 0.8
@@ -92,7 +92,7 @@ class EnhancedCharacter(Character):
    async def enhanced_self_reflect(self) -> ReflectionCycle:
        """Perform enhanced self-reflection using RAG and potential self-modification"""
        try:
-           cycle_id = f"reflection_{self.name}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}"
+           cycle_id = f"reflection_{self.name}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}"

            log_character_action(
                self.name,
@@ -102,7 +102,7 @@ class EnhancedCharacter(Character):

            reflection_cycle = ReflectionCycle(
                cycle_id=cycle_id,
-               start_time=datetime.utcnow(),
+               start_time=datetime.now(timezone.utc),
                reflections={},
                insights_generated=0,
                self_modifications=[],
@@ -131,7 +131,7 @@ class EnhancedCharacter(Character):

            reflection_cycle.completed = True
            self.reflection_history.append(reflection_cycle)
-           self.last_reflection = datetime.utcnow()
+           self.last_reflection = datetime.now(timezone.utc)

            log_character_action(
                self.name,
@@ -195,10 +195,10 @@ class EnhancedCharacter(Character):

            # Generate project plan
            project = {
-               "id": f"project_{self.name}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
+               "id": f"project_{self.name}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}",
                "title": project_idea,
                "type": project_type,
-               "start_date": datetime.utcnow().isoformat(),
+               "start_date": datetime.now(timezone.utc).isoformat(),
                "status": "active",
                "inspiration": creative_insight.insight,
                "supporting_memories": [m.content for m in creative_insight.supporting_memories[:3]],
@@ -244,11 +244,11 @@ class EnhancedCharacter(Character):
        try:
            # Create goal object
            goal = {
-               "id": f"goal_{self.name}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
+               "id": f"goal_{self.name}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}",
                "description": goal_description,
                "priority": priority,
                "timeline": timeline,
-               "created": datetime.utcnow().isoformat(),
+               "created": datetime.now(timezone.utc).isoformat(),
                "status": "active",
                "progress": 0.0,
                "milestones": [],
@@ -286,7 +286,7 @@ class EnhancedCharacter(Character):
    async def should_perform_reflection(self) -> bool:
        """Determine if character should perform self-reflection"""
        # Time-based reflection
-       time_since_last = datetime.utcnow() - self.last_reflection
+       time_since_last = datetime.now(timezone.utc) - self.last_reflection
        if time_since_last >= self.reflection_frequency:
            return True
@@ -1,7 +1,7 @@
 import asyncio
 import json
 from typing import Dict, Any, List, Optional, Tuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from dataclasses import dataclass
 from database.connection import get_db_session
 from database.models import Memory, Character, Message, CharacterRelationship
@@ -126,7 +126,7 @@ class MemoryManager:

            for memory in memories:
                # Update access count
-               memory.last_accessed = datetime.utcnow()
+               memory.last_accessed = datetime.now(timezone.utc)
                memory.access_count += 1

                memory_dict = {
@@ -272,7 +272,7 @@ class MemoryManager:

        # Age criteria
        if criteria.get('older_than_days'):
-           cutoff_date = datetime.utcnow() - timedelta(days=criteria['older_than_days'])
+           cutoff_date = datetime.now(timezone.utc) - timedelta(days=criteria['older_than_days'])
            query_builder = query_builder.where(Memory.timestamp < cutoff_date)

        # Importance criteria
@@ -346,7 +346,7 @@ class MemoryManager:
            select(func.count(Memory.id)).where(
                and_(
                    Memory.character_id == self.character.id,
-                   Memory.timestamp >= datetime.utcnow() - timedelta(days=7)
+                   Memory.timestamp >= datetime.now(timezone.utc) - timedelta(days=7)
                )
            )
        )
@@ -441,8 +441,8 @@ class MemoryManager:
                tags=tags,
                related_character_id=related_character_id,
                related_message_id=related_message_id,
-               timestamp=datetime.utcnow(),
-               last_accessed=datetime.utcnow(),
+               timestamp=datetime.now(timezone.utc),
+               last_accessed=datetime.now(timezone.utc),
                access_count=0
            )
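These aware timestamps only round-trip cleanly if the underlying columns are timezone-aware; the commit message's timestamptz fix corresponds to SQLAlchemy columns declared with DateTime(timezone=True). A sketch of such a column declaration — this model is illustrative, not copied from the project's database/models.py:

from datetime import datetime, timezone

from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class ExampleMemory(Base):
    __tablename__ = "example_memories"

    id = Column(Integer, primary_key=True)
    # timezone=True maps to PostgreSQL's timestamptz, so aware datetimes
    # are stored and returned without being stripped to naive values.
    timestamp = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        nullable=False,
    )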
@@ -1,7 +1,7 @@
 import json
 import random
 from typing import Dict, Any, List, Optional, Tuple
-from datetime import datetime
+from datetime import datetime, timezone
 from utils.logging import log_character_action, log_error_with_context
 from database.connection import get_db_session
 from database.models import CharacterEvolution, Character as CharacterModel
@@ -330,7 +330,7 @@ class PersonalityManager:
                old_value=old_personality,
                new_value=new_personality,
                reason=f"Evolution score: {evolution_score:.2f}. {reason}",
-               timestamp=datetime.utcnow()
+               timestamp=datetime.now(timezone.utc)
            )

            session.add(evolution)
@@ -7,7 +7,7 @@ import asyncio
 import json
 import logging
 from typing import Dict, List, Any, Optional, Set, Tuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from dataclasses import dataclass, asdict
 from enum import Enum
 import hashlib
@@ -190,7 +190,7 @@ class CollaborativeCreativeManager:
            return False, "Missing required project fields"

        # Create project ID
-       project_id = f"project_{initiator}_{datetime.utcnow().timestamp()}"
+       project_id = f"project_{initiator}_{datetime.now(timezone.utc).timestamp()}"

        # Determine project type
        try:
@@ -210,7 +210,7 @@ class CollaborativeCreativeManager:
            status=ProjectStatus.PROPOSED,
            initiator=initiator,
            collaborators=[initiator],  # Start with just initiator
-           created_at=datetime.utcnow(),
+           created_at=datetime.now(timezone.utc),
            target_completion=None,  # Will be set during planning
            contributions=[],
            project_goals=project_idea.get("goals", []),
@@ -272,14 +272,14 @@ class CollaborativeCreativeManager:
        if invitation.status != "pending":
            return False, f"Invitation is already {invitation.status}"

-       if datetime.utcnow() > invitation.expires_at:
+       if datetime.now(timezone.utc) > invitation.expires_at:
            invitation.status = "expired"
            return False, "Invitation has expired"

        # Update invitation
        invitation.status = "accepted" if accepted else "rejected"
        invitation.response_message = response_message
-       invitation.responded_at = datetime.utcnow()
+       invitation.responded_at = datetime.now(timezone.utc)

        if accepted:
            # Add collaborator to project
@@ -334,7 +334,7 @@ class CollaborativeCreativeManager:
            return False, f"Invalid contribution type: {contribution['contribution_type']}"

        # Create contribution ID
-       contribution_id = f"contrib_{project_id}_{len(project.contributions)}_{datetime.utcnow().timestamp()}"
+       contribution_id = f"contrib_{project_id}_{len(project.contributions)}_{datetime.now(timezone.utc).timestamp()}"

        # Create contribution object
        project_contribution = ProjectContribution(
@@ -342,7 +342,7 @@ class CollaborativeCreativeManager:
            contributor=contributor,
            contribution_type=contribution_type,
            content=contribution["content"],
-           timestamp=datetime.utcnow(),
+           timestamp=datetime.now(timezone.utc),
            build_on_contribution_id=contribution.get("build_on_contribution_id"),
            feedback_for_contribution_id=contribution.get("feedback_for_contribution_id"),
            metadata=contribution.get("metadata", {})
@@ -498,7 +498,7 @@ class CollaborativeCreativeManager:
            })

        # Project health metrics
-       days_active = (datetime.utcnow() - project.created_at).days
+       days_active = (datetime.now(timezone.utc) - project.created_at).days
        avg_contributions_per_day = len(project.contributions) / max(1, days_active)

        # Collaboration quality
@@ -532,7 +532,7 @@ class CollaborativeCreativeManager:
                          role_description: str, invitation_message: str) -> bool:
        """Create a project invitation"""
        try:
-           invitation_id = f"invite_{project_id}_{invitee}_{datetime.utcnow().timestamp()}"
+           invitation_id = f"invite_{project_id}_{invitee}_{datetime.now(timezone.utc).timestamp()}"

            invitation = ProjectInvitation(
                id=invitation_id,
@@ -541,8 +541,8 @@ class CollaborativeCreativeManager:
                invitee=invitee,
                role_description=role_description,
                invitation_message=invitation_message,
-               created_at=datetime.utcnow(),
-               expires_at=datetime.utcnow() + timedelta(days=7),  # 7 day expiry
+               created_at=datetime.now(timezone.utc),
+               expires_at=datetime.now(timezone.utc) + timedelta(days=7),  # 7 day expiry
                status="pending"
            )
@@ -668,7 +668,7 @@ class CollaborativeCreativeManager:
            invitations_query = select(DBProjectInvitation).where(
                and_(
                    DBProjectInvitation.status == 'pending',
-                   DBProjectInvitation.expires_at > datetime.utcnow()
+                   DBProjectInvitation.expires_at > datetime.now(timezone.utc)
                )
            )
@@ -783,7 +783,7 @@ class CollaborativeCreativeManager:
            db_collaborator = ProjectCollaborator(
                project_id=project.id,
                character_id=collaborator.id,
-               joined_at=project.created_at if collaborator_name == project.initiator else datetime.utcnow()
+               joined_at=project.created_at if collaborator_name == project.initiator else datetime.now(timezone.utc)
            )
            session.add(db_collaborator)
@@ -2,7 +2,7 @@ import asyncio
 import random
 import json
 from typing import Dict, Any, List, Optional, Set, Tuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from dataclasses import dataclass, asdict
 from enum import Enum
 import logging
@@ -44,9 +44,9 @@ class ConversationContext:
        if self.participants is None:
            self.participants = []
        if self.start_time is None:
-           self.start_time = datetime.utcnow()
+           self.start_time = datetime.now(timezone.utc)
        if self.last_activity is None:
-           self.last_activity = datetime.utcnow()
+           self.last_activity = datetime.now(timezone.utc)

class ConversationEngine:
    """Autonomous conversation engine that manages character interactions"""
@@ -89,8 +89,8 @@ class ConversationEngine:
            'conversations_started': 0,
            'messages_generated': 0,
            'characters_active': 0,
-           'uptime_start': datetime.utcnow(),
-           'last_activity': datetime.utcnow()
+           'uptime_start': datetime.now(timezone.utc),
+           'last_activity': datetime.now(timezone.utc)
        }

    async def initialize(self, discord_bot):
@@ -169,7 +169,7 @@ class ConversationEngine:
            # Update context
            context.current_speaker = initial_speaker
            context.message_count = 1
-           context.last_activity = datetime.utcnow()
+           context.last_activity = datetime.now(timezone.utc)

            # Store message in database
            await self._store_conversation_message(
@@ -179,7 +179,7 @@ class ConversationEngine:
            # Update statistics
            self.stats['conversations_started'] += 1
            self.stats['messages_generated'] += 1
-           self.stats['last_activity'] = datetime.utcnow()
+           self.stats['last_activity'] = datetime.now(timezone.utc)

            log_conversation_event(
                conversation_id, "conversation_started",
@@ -230,7 +230,7 @@ class ConversationEngine:
|
||||
# Update context
|
||||
context.current_speaker = next_speaker
|
||||
context.message_count += 1
|
||||
context.last_activity = datetime.utcnow()
|
||||
context.last_activity = datetime.now(timezone.utc)
|
||||
|
||||
# Store message
|
||||
await self._store_conversation_message(
|
||||
@@ -245,7 +245,7 @@ class ConversationEngine:
|
||||
|
||||
# Update statistics
|
||||
self.stats['messages_generated'] += 1
|
||||
self.stats['last_activity'] = datetime.utcnow()
|
||||
self.stats['last_activity'] = datetime.now(timezone.utc)
|
||||
|
||||
log_conversation_event(
|
||||
conversation_id, "message_sent",
|
||||
@@ -379,7 +379,7 @@ class ConversationEngine:
|
||||
|
||||
async def get_status(self) -> Dict[str, Any]:
|
||||
"""Get engine status"""
|
||||
uptime = datetime.utcnow() - self.stats['uptime_start']
|
||||
uptime = datetime.now(timezone.utc) - self.stats['uptime_start']
|
||||
|
||||
return {
|
||||
'status': self.state.value,
|
||||
@@ -402,8 +402,8 @@ class ConversationEngine:
|
||||
# Use EnhancedCharacter if RAG systems are available
|
||||
if self.vector_store and self.memory_sharing_manager:
|
||||
# Find the appropriate MCP servers for this character
|
||||
from mcp.self_modification_server import mcp_server
|
||||
from mcp.file_system_server import filesystem_server
|
||||
from mcp_servers.self_modification_server import mcp_server
|
||||
from mcp_servers.file_system_server import filesystem_server
|
||||
|
||||
# Find creative projects MCP server
|
||||
creative_projects_mcp = None
|
||||
@@ -500,7 +500,7 @@ class ConversationEngine:
|
||||
base_chance = 0.3
|
||||
|
||||
# Increase chance if no recent activity
|
||||
time_since_last = datetime.utcnow() - self.stats['last_activity']
|
||||
time_since_last = datetime.now(timezone.utc) - self.stats['last_activity']
|
||||
if time_since_last > timedelta(hours=2):
|
||||
base_chance += 0.4
|
||||
elif time_since_last > timedelta(hours=1):
|
||||
@@ -515,7 +515,7 @@ class ConversationEngine:
|
||||
return False
|
||||
|
||||
# Check time limit (conversations shouldn't go on forever)
|
||||
duration = datetime.utcnow() - context.start_time
|
||||
duration = datetime.now(timezone.utc) - context.start_time
|
||||
if duration > timedelta(hours=2):
|
||||
return False
|
||||
|
||||
@@ -541,7 +541,7 @@ class ConversationEngine:
|
||||
context = self.active_conversations[conversation_id]
|
||||
|
||||
# Check time since last message
|
||||
time_since_last = datetime.utcnow() - context.last_activity
|
||||
time_since_last = datetime.now(timezone.utc) - context.last_activity
|
||||
min_wait = timedelta(seconds=random.uniform(30, 120))
|
||||
|
||||
return time_since_last >= min_wait
|
||||
@@ -576,7 +576,7 @@ class ConversationEngine:
|
||||
|
||||
def _is_quiet_hours(self) -> bool:
|
||||
"""Check if it's currently quiet hours"""
|
||||
current_hour = datetime.now().hour
|
||||
current_hour = datetime.now(timezone.utc).hour
|
||||
start_hour, end_hour = self.quiet_hours
|
||||
|
||||
if start_hour <= end_hour:
|
||||
@@ -601,8 +601,8 @@ class ConversationEngine:
|
||||
channel_id=str(self.discord_bot.channel_id),
|
||||
topic=topic,
|
||||
participants=participants,
|
||||
start_time=datetime.utcnow(),
|
||||
last_activity=datetime.utcnow(),
|
||||
start_time=datetime.now(timezone.utc),
|
||||
last_activity=datetime.now(timezone.utc),
|
||||
is_active=True,
|
||||
message_count=0
|
||||
)
|
||||
@@ -745,7 +745,7 @@ class ConversationEngine:
|
||||
conversation_id=conversation_id,
|
||||
character_id=character.id,
|
||||
content=content,
|
||||
timestamp=datetime.utcnow()
|
||||
timestamp=datetime.now(timezone.utc)
|
||||
)
|
||||
|
||||
session.add(message)
|
||||
@@ -821,7 +821,7 @@ class ConversationEngine:
|
||||
conversation = await session.get(Conversation, conversation_id)
|
||||
if conversation:
|
||||
conversation.is_active = False
|
||||
conversation.last_activity = datetime.utcnow()
|
||||
conversation.last_activity = datetime.now(timezone.utc)
|
||||
conversation.message_count = context.message_count
|
||||
await session.commit()
|
||||
|
||||
@@ -831,7 +831,7 @@ class ConversationEngine:
|
||||
log_conversation_event(
|
||||
conversation_id, "conversation_ended",
|
||||
context.participants,
|
||||
{"total_messages": context.message_count, "duration": str(datetime.utcnow() - context.start_time)}
|
||||
{"total_messages": context.message_count, "duration": str(datetime.now(timezone.utc) - context.start_time)}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -854,7 +854,7 @@ class ConversationEngine:
|
||||
async def _cleanup_old_conversations(self):
|
||||
"""Clean up old inactive conversations"""
|
||||
try:
|
||||
cutoff_time = datetime.utcnow() - timedelta(hours=6)
|
||||
cutoff_time = datetime.now(timezone.utc) - timedelta(hours=6)
|
||||
|
||||
# Remove old conversations from active list
|
||||
to_remove = []
|
||||
|
||||
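One behavioral consequence of the _is_quiet_hours change worth flagging: datetime.now().hour read the server's local clock, while datetime.now(timezone.utc).hour reads UTC, so a quiet_hours window configured in local time silently shifts. A sketch of mapping a local window to UTC, assuming a hypothetical LOCAL_TZ setting:

    from datetime import datetime
    from zoneinfo import ZoneInfo  # Python 3.9+

    LOCAL_TZ = ZoneInfo("America/New_York")  # assumption: deployment-local zone

    def local_hour_to_utc(hour: int) -> int:
        """Map a local wall-clock hour to today's equivalent UTC hour."""
        local_now = datetime.now(LOCAL_TZ)
        local_time = local_now.replace(hour=hour, minute=0, second=0, microsecond=0)
        return local_time.astimezone(ZoneInfo("UTC")).hour

    # quiet_hours = (local_hour_to_utc(23), local_hour_to_utc(7))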
@@ -2,7 +2,7 @@ import asyncio
import random
import schedule
from typing import Dict, Any, List, Optional
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dataclasses import dataclass
from enum import Enum
import logging
@@ -102,7 +102,7 @@ class ConversationScheduler:
async def schedule_event(self, event_type: str, delay: timedelta,
character_name: str = None, **kwargs):
"""Schedule a specific event"""
scheduled_time = datetime.utcnow() + delay
scheduled_time = datetime.now(timezone.utc) + delay

event = ScheduledEvent(
event_type=event_type,
@@ -170,7 +170,7 @@ class ConversationScheduler:
'event_type': event.event_type,
'scheduled_time': event.scheduled_time.isoformat(),
'character_name': event.character_name,
'time_until': (event.scheduled_time - datetime.utcnow()).total_seconds(),
'time_until': (event.scheduled_time - datetime.now(timezone.utc)).total_seconds(),
'parameters': event.parameters
}
for event in upcoming
@@ -194,7 +194,7 @@ class ConversationScheduler:

async def _process_due_events(self):
"""Process events that are due"""
now = datetime.utcnow()
now = datetime.now(timezone.utc)
due_events = []

# Find due events
@@ -378,7 +378,7 @@ class ConversationScheduler:
base_minutes = random.uniform(20, 60)

# Adjust based on time of day
current_hour = datetime.now().hour
current_hour = datetime.now(timezone.utc).hour
activity_multiplier = self._get_activity_multiplier(current_hour)

# Adjust based on current activity
@@ -427,7 +427,7 @@ class ConversationScheduler:

def _get_current_activity_pattern(self) -> str:
"""Get current activity pattern"""
current_hour = datetime.now().hour
current_hour = datetime.now(timezone.utc).hour

for period, config in self.activity_patterns.items():
start, end = config['start'], config['end']
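For reference, a hypothetical call into the scheduler after this change; the event type and character name here are illustrative, and the aware scheduled_time keeps the time_until arithmetic consistent with the engine's aware timestamps:

    from datetime import timedelta

    # Inside an async context; "conversation_start" is an assumed event type.
    await scheduler.schedule_event(
        "conversation_start",
        delay=timedelta(minutes=30),
        character_name="Luna",
    )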
@@ -19,8 +19,16 @@ class DatabaseManager:
self._pool = None

async def initialize(self):
# Use database URL from config
database_url = getattr(self.settings.database, 'url', 'sqlite+aiosqlite:///fishbowl_test.db')
# Use DATABASE_URL environment variable first, then construct from config
import os
database_url = os.getenv('DATABASE_URL')

if not database_url:
# Construct URL from config components
db_config = self.settings.database
database_url = f"postgresql+asyncpg://{db_config.user}:{db_config.password}@{db_config.host}:{db_config.port}/{db_config.name}"

logger.info(f"Using database URL: {database_url.replace(self.settings.database.password, '***') if database_url else 'None'}")

# Configure engine based on database type
if 'sqlite' in database_url:
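One caveat in the new logging line: the password is masked by substring replacement with the config password, so if DATABASE_URL comes from the environment with a different credential, the real password is logged verbatim. A more robust masking sketch using only the standard library:

    from urllib.parse import urlsplit, urlunsplit

    def mask_db_url(url: str) -> str:
        """Replace any password embedded in a database URL with '***'."""
        parts = urlsplit(url)
        if parts.password:
            netloc = parts.netloc.replace(f":{parts.password}@", ":***@")
            parts = parts._replace(netloc=netloc)
        return urlunsplit(parts)

    # mask_db_url("postgresql+asyncpg://fishbowl:s3cret@localhost:5432/fishbowl")
    # -> "postgresql+asyncpg://fishbowl:***@localhost:5432/fishbowl"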
@@ -53,6 +53,7 @@ class Conversation(Base):
topic = Column(String(200))
participants = Column(JSON, nullable=False, default=list)
start_time = Column(DateTime, default=func.now())
end_time = Column(DateTime, nullable=True)
last_activity = Column(DateTime, default=func.now())
is_active = Column(Boolean, default=True)
message_count = Column(Integer, default=0)
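As rendered here the columns are still plain DateTime; for the timezone-aware storage this commit aims for, SQLAlchemy needs DateTime(timezone=True), which PostgreSQL maps to timestamptz. A sketch of the aware form, assuming the same column set:

    from sqlalchemy import Column, DateTime, func

    # timezone=True makes PostgreSQL use TIMESTAMP WITH TIME ZONE (timestamptz)
    start_time = Column(DateTime(timezone=True), default=func.now())
    end_time = Column(DateTime(timezone=True), nullable=True)
    last_activity = Column(DateTime(timezone=True), default=func.now())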
@@ -3,7 +3,7 @@ import httpx
import json
import time
from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from utils.config import get_settings
from utils.logging import log_llm_interaction, log_error_with_context, log_system_health
import logging
@@ -29,17 +29,23 @@ class LLMClient:
self.cache = {}
self.cache_ttl = 300 # 5 minutes

# Background task queue for long-running requests
self.pending_requests = {}
self.max_timeout = 60 # Hard timeout limit for immediate responses
self.fallback_timeout = 15 # Quick timeout for immediate responses

# Health monitoring
self.health_stats = {
'total_requests': 0,
'successful_requests': 0,
'failed_requests': 0,
'average_response_time': 0,
'last_health_check': datetime.utcnow()
'last_health_check': datetime.now(timezone.utc)
}

async def generate_response(self, prompt: str, character_name: str = None,
max_tokens: int = None, temperature: float = None) -> Optional[str]:
max_tokens: int = None, temperature: float = None,
use_fallback: bool = True) -> Optional[str]:
"""Generate response using LLM"""
try:
# Rate limiting check
@@ -55,8 +61,11 @@ class LLMClient:

start_time = time.time()

# Use shorter timeout for immediate responses, longer for background
effective_timeout = self.fallback_timeout if use_fallback else min(self.timeout, self.max_timeout)

# Try OpenAI-compatible API first (KoboldCPP, etc.)
async with httpx.AsyncClient(timeout=self.timeout) as client:
async with httpx.AsyncClient(timeout=effective_timeout) as client:
try:
# OpenAI-compatible request
request_data = {
@@ -134,9 +143,24 @@ class LLMClient:
return None

except httpx.TimeoutException:
logger.error(f"LLM request timeout for {character_name}")
self._update_stats(False, self.timeout)
return None
if use_fallback:
logger.warning(f"LLM request timeout for {character_name}, using fallback response")
# Queue for background processing if needed
if self.timeout > self.max_timeout:
background_task = asyncio.create_task(self.generate_response(
prompt, character_name, max_tokens, temperature, use_fallback=False
))
request_id = f"{character_name}_{time.time()}"
self.pending_requests[request_id] = background_task

# Return a fallback response immediately
fallback_response = self._get_fallback_response(character_name)
self._update_stats(False, effective_timeout)
return fallback_response
else:
logger.error(f"LLM background request timeout for {character_name}")
self._update_stats(False, effective_timeout)
return None
except httpx.HTTPError as e:
logger.error(f"LLM HTTP error for {character_name}: {e}")
self._update_stats(False, time.time() - start_time)
@@ -231,11 +255,11 @@ class LLMClient:
'response_time': duration,
'model': self.model,
'base_url': self.base_url,
'timestamp': datetime.utcnow().isoformat()
'timestamp': datetime.now(timezone.utc).isoformat()
}

# Update health check time
self.health_stats['last_health_check'] = datetime.utcnow()
self.health_stats['last_health_check'] = datetime.now(timezone.utc)

return health_status

@@ -246,7 +270,7 @@ class LLMClient:
'error': str(e),
'model': self.model,
'base_url': self.base_url,
'timestamp': datetime.utcnow().isoformat()
'timestamp': datetime.now(timezone.utc).isoformat()
}

def get_statistics(self) -> Dict[str, Any]:
@@ -342,6 +366,67 @@ class LLMClient:
self.health_stats['average_response_time'] = (
(current_avg * (total_requests - 1) + duration) / total_requests
)

def _get_fallback_response(self, character_name: str = None) -> str:
"""Generate a fallback response when LLM is slow"""
fallback_responses = [
"*thinking deeply about this...*",
"*processing thoughts...*",
"*contemplating the discussion...*",
"*reflecting on what you've said...*",
"*considering different perspectives...*",
"Hmm, that's an interesting point to consider.",
"I need a moment to think about that.",
"That's worth reflecting on carefully.",
"*taking time to formulate thoughts...*"
]

import random
return random.choice(fallback_responses)

async def generate_response_with_fallback(self, prompt: str, character_name: str = None,
max_tokens: int = None, temperature: float = None) -> str:
"""Generate response with guaranteed fallback if LLM is slow"""
try:
# Try immediate response first
response = await self.generate_response(
prompt, character_name, max_tokens, temperature, use_fallback=True
)

if response:
return response
else:
# Return fallback if no response
return self._get_fallback_response(character_name)

except Exception as e:
log_error_with_context(e, {
"character_name": character_name,
"prompt_length": len(prompt)
})
return self._get_fallback_response(character_name)

async def cleanup_pending_requests(self):
"""Clean up completed background requests"""
completed_requests = []

for request_id, task in self.pending_requests.items():
if task.done():
completed_requests.append(request_id)
try:
result = await task
if result:
logger.info(f"Background LLM request {request_id} completed successfully")
except Exception as e:
logger.error(f"Background LLM request {request_id} failed: {e}")

# Remove completed requests
for request_id in completed_requests:
del self.pending_requests[request_id]

def get_pending_count(self) -> int:
"""Get number of pending background requests"""
return len(self.pending_requests)

class PromptManager:
"""Manages prompt templates and optimization"""
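Taken together, the timeout changes give callers a guaranteed-fast entry point. A short usage sketch (character name illustrative): generate_response_with_fallback always returns a string, while the slow LLM call is queued in pending_requests for the cleanup loop added in src/main.py below:

    # Inside an async context:
    reply = await llm_client.generate_response_with_fallback(
        prompt="Describe your morning in one sentence.",
        character_name="Luna",
    )
    # reply is either the model's answer (within fallback_timeout seconds)
    # or an immediate placeholder such as "*thinking deeply about this...*"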
src/main.py
@@ -168,6 +168,10 @@ class FishbowlApplication:
await self.scheduler.start()
logger.info("Conversation scheduler started")

# Start LLM cleanup task
cleanup_task = asyncio.create_task(self._llm_cleanup_loop())
logger.info("LLM cleanup task started")

# Start Discord bot
bot_task = asyncio.create_task(
self.discord_bot.start(self.settings.discord.token)
@@ -181,7 +185,7 @@ class FishbowlApplication:

# Wait for shutdown signal or bot completion
done, pending = await asyncio.wait(
[bot_task, asyncio.create_task(self.shutdown_event.wait())],
[bot_task, cleanup_task, asyncio.create_task(self.shutdown_event.wait())],
return_when=asyncio.FIRST_COMPLETED
)

@@ -239,6 +243,24 @@ class FishbowlApplication:
# On Windows, handle CTRL+C
if os.name == 'nt':
signal.signal(signal.SIGBREAK, signal_handler)

async def _llm_cleanup_loop(self):
"""Background task to clean up completed LLM requests"""
try:
while not self.shutdown_event.is_set():
await llm_client.cleanup_pending_requests()
pending_count = llm_client.get_pending_count()

if pending_count > 0:
logger.debug(f"LLM cleanup: {pending_count} pending background requests")

# Wait 30 seconds before next cleanup
await asyncio.sleep(30)

except asyncio.CancelledError:
logger.info("LLM cleanup task cancelled")
except Exception as e:
logger.error(f"Error in LLM cleanup loop: {e}")

async def main():
"""Main entry point"""
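Note that cleanup_pending_requests only logs a completed background result; the slow answer itself is discarded. If the late reply should still reach the channel, one option is a done-callback on the queued task, sketched here with a hypothetical send_followup coroutine:

    import asyncio

    def _deliver_late_reply(task: asyncio.Task, character_name: str):
        """Post a background LLM result once it finally completes."""
        if not task.cancelled() and task.exception() is None:
            result = task.result()
            if result:
                # send_followup is hypothetical; any channel-send coroutine works
                asyncio.create_task(send_followup(character_name, result))

    # background_task.add_done_callback(
    #     lambda t: _deliver_late_reply(t, character_name))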
@@ -1,7 +1,7 @@
import asyncio
import json
from typing import Dict, List, Any, Optional, Set
from datetime import datetime, timedelta, date
from datetime import datetime, timedelta, timezone, date
from dataclasses import dataclass, asdict
from pathlib import Path
import aiofiles
@@ -51,7 +51,7 @@ class ScheduledEvent:

def __post_init__(self):
if self.created_at is None:
self.created_at = datetime.utcnow()
self.created_at = datetime.now(timezone.utc)

def to_dict(self) -> Dict[str, Any]:
return {
@@ -224,7 +224,7 @@ class CalendarTimeAwarenessMCP:

# Create event
event = ScheduledEvent(
id=f"event_{character_name}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
id=f"event_{character_name}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}",
character_name=character_name,
event_type=event_type_enum,
title=title,
@@ -275,7 +275,7 @@ class CalendarTimeAwarenessMCP:
) -> List[TextContent]:
"""Get character's upcoming events"""
try:
now = datetime.utcnow()
now = datetime.now(timezone.utc)
end_time = now + timedelta(days=days_ahead)

upcoming_events = []
@@ -340,7 +340,7 @@ class CalendarTimeAwarenessMCP:

event = self.scheduled_events[character_name][event_id]
event.completed = True
event.metadata["completion_time"] = datetime.utcnow().isoformat()
event.metadata["completion_time"] = datetime.now(timezone.utc).isoformat()
event.metadata["completion_notes"] = notes

await self._save_character_calendar(character_name)
@@ -442,7 +442,7 @@ class CalendarTimeAwarenessMCP:

# Create milestone
milestone = Milestone(
id=f"milestone_{character_name}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
id=f"milestone_{character_name}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}",
character_name=character_name,
milestone_type=milestone_type,
description=description,
@@ -499,7 +499,7 @@ class CalendarTimeAwarenessMCP:
) -> List[TextContent]:
"""Get upcoming anniversaries and milestones"""
try:
now = datetime.utcnow()
now = datetime.now(timezone.utc)
end_time = now + timedelta(days=days_ahead)

upcoming_anniversaries = []
@@ -580,7 +580,7 @@ class CalendarTimeAwarenessMCP:
if "celebrations" not in milestone.__dict__:
milestone.__dict__["celebrations"] = {}
milestone.__dict__["celebrations"][celebration_key] = {
"date": datetime.utcnow().isoformat(),
"date": datetime.now(timezone.utc).isoformat(),
"notes": celebration_notes
}

@@ -628,7 +628,7 @@ class CalendarTimeAwarenessMCP:
"""Get time elapsed since a specific type of event"""
try:
# Search through recent events
cutoff_date = datetime.utcnow() - timedelta(days=search_days_back)
cutoff_date = datetime.now(timezone.utc) - timedelta(days=search_days_back)
matching_events = []

for event in self.scheduled_events.get(character_name, {}).values():
@@ -665,7 +665,7 @@ class CalendarTimeAwarenessMCP:
most_recent_description = most_recent_interaction["description"]

# Calculate time difference
time_diff = datetime.utcnow() - most_recent_time
time_diff = datetime.now(timezone.utc) - most_recent_time

# Format time difference
if time_diff.days > 0:
@@ -709,7 +709,7 @@ class CalendarTimeAwarenessMCP:
) -> List[TextContent]:
"""Get summary of character's activities over a time period"""
try:
end_date = datetime.utcnow()
end_date = datetime.now(timezone.utc)
start_date = end_date - timedelta(days=period_days)

# Get completed events in period
@@ -783,7 +783,7 @@ class CalendarTimeAwarenessMCP:
if character_name not in self.last_interactions:
self.last_interactions[character_name] = {}

self.last_interactions[character_name][other_character] = datetime.utcnow()
self.last_interactions[character_name][other_character] = datetime.now(timezone.utc)

# Save to file
await self._save_relationship_tracking(character_name)
@@ -834,7 +834,7 @@ class CalendarTimeAwarenessMCP:
text=f"No recorded interactions with {other_character}"
)]

time_since = datetime.utcnow() - last_interaction
time_since = datetime.now(timezone.utc) - last_interaction
days_since = time_since.days

# Determine maintenance status
@@ -859,7 +859,7 @@ class CalendarTimeAwarenessMCP:
# Get status for all relationships
relationships = []
for other_char, last_interaction in self.last_interactions.get(character_name, {}).items():
time_since = datetime.utcnow() - last_interaction
time_since = datetime.now(timezone.utc) - last_interaction
days_since = time_since.days

if days_since <= 1:
@@ -914,13 +914,13 @@ class CalendarTimeAwarenessMCP:
"""Schedule relationship maintenance activity"""
try:
# Create relationship maintenance event
scheduled_time = datetime.utcnow() + timedelta(days=days_from_now)
scheduled_time = datetime.now(timezone.utc) + timedelta(days=days_from_now)

template = self.event_templates[EventType.RELATIONSHIP_MAINTENANCE]
description = template["description_template"].format(target=other_character)

event = ScheduledEvent(
id=f"rel_maintenance_{character_name}_{other_character}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
id=f"rel_maintenance_{character_name}_{other_character}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}",
character_name=character_name,
event_type=EventType.RELATIONSHIP_MAINTENANCE,
title=f"Connect with {other_character}",
@@ -1002,7 +1002,7 @@ class CalendarTimeAwarenessMCP:

events_data = {
"events": [event.to_dict() for event in self.scheduled_events.get(character_name, {}).values()],
"last_updated": datetime.utcnow().isoformat()
"last_updated": datetime.now(timezone.utc).isoformat()
}

async with aiofiles.open(calendar_file, 'w') as f:
@@ -1019,7 +1019,7 @@ class CalendarTimeAwarenessMCP:

milestones_data = {
"milestones": [milestone.to_dict() for milestone in self.milestones.get(character_name, {}).values()],
"last_updated": datetime.utcnow().isoformat()
"last_updated": datetime.now(timezone.utc).isoformat()
}

async with aiofiles.open(milestones_file, 'w') as f:
@@ -1039,7 +1039,7 @@ class CalendarTimeAwarenessMCP:
other_char: timestamp.isoformat()
for other_char, timestamp in self.last_interactions.get(character_name, {}).items()
},
"last_updated": datetime.utcnow().isoformat()
"last_updated": datetime.now(timezone.utc).isoformat()
}

async with aiofiles.open(tracking_file, 'w') as f:
@@ -1051,7 +1051,7 @@ class CalendarTimeAwarenessMCP:
async def _schedule_initial_events(self, character_name: str):
"""Schedule initial automatic events for character"""
try:
now = datetime.utcnow()
now = datetime.now(timezone.utc)

# Schedule first personal reflection in 6 hours
reflection_time = now + timedelta(hours=6)
@@ -1120,9 +1120,9 @@ class CalendarTimeAwarenessMCP:
next_time = completed_event.scheduled_time + timedelta(days=frequency_days)

# Only schedule if it's in the future
if next_time > datetime.utcnow():
if next_time > datetime.now(timezone.utc):
follow_up_event = ScheduledEvent(
id=f"followup_{completed_event.event_type.value}_{character_name}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
id=f"followup_{completed_event.event_type.value}_{character_name}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}",
character_name=character_name,
event_type=completed_event.event_type,
title=completed_event.title,
@@ -1259,7 +1259,7 @@ class CalendarTimeAwarenessMCP:
if not last_interaction:
return

days_since = (datetime.utcnow() - last_interaction).days
days_since = (datetime.now(timezone.utc) - last_interaction).days

# Auto-schedule maintenance if overdue and not already scheduled
if days_since >= 7:
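A small robustness note on the identifiers above: every ID is derived from strftime('%Y%m%d_%H%M%S'), so two events created for the same character within the same second collide. A hedged sketch of a collision-proof variant:

    import uuid
    from datetime import datetime, timezone

    def make_event_id(character_name: str) -> str:
        """Second-resolution timestamp plus a random suffix to avoid collisions."""
        stamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
        return f"event_{character_name}_{stamp}_{uuid.uuid4().hex[:8]}"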
@@ -7,7 +7,7 @@ import asyncio
import json
import logging
from typing import Dict, List, Any, Optional, Sequence
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone

from mcp.server import Server
from mcp.server.models import InitializationOptions
@@ -397,7 +397,7 @@ class CreativeProjectsMCPServer:
pending_invitations = []
for invitation in self.creative_manager.pending_invitations.values():
if invitation.invitee == self.current_character and invitation.status == "pending":
if datetime.utcnow() <= invitation.expires_at:
if datetime.now(timezone.utc) <= invitation.expires_at:
pending_invitations.append(invitation)

if not pending_invitations:
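Comparisons like datetime.now(timezone.utc) <= invitation.expires_at assume every stored expires_at is already timezone-aware; an invitation persisted before this commit with utcnow() will raise TypeError at the comparison. A defensive normalization sketch, treating legacy naive values as UTC:

    from datetime import datetime, timezone

    def ensure_aware(dt: datetime) -> datetime:
        """Interpret legacy naive datetimes as UTC; pass aware ones through."""
        return dt.replace(tzinfo=timezone.utc) if dt.tzinfo is None else dt

    # if datetime.now(timezone.utc) <= ensure_aware(invitation.expires_at): ...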
@@ -1,7 +1,7 @@
import asyncio
import json
from typing import Dict, Any, List, Optional, Set
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
import aiofiles
import hashlib
@@ -340,7 +340,7 @@ class CharacterFileSystemMCP:

# Generate filename
safe_title = "".join(c for c in title if c.isalnum() or c in (' ', '-', '_')).rstrip()
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
filename = f"{work_type}_{safe_title}_{timestamp}.md"
file_path = f"creative/{filename}"

@@ -348,7 +348,7 @@ class CharacterFileSystemMCP:
metadata = {
"title": title,
"type": work_type,
"created": datetime.utcnow().isoformat(),
"created": datetime.now(timezone.utc).isoformat(),
"author": character_name,
"tags": tags,
"word_count": len(content.split())
@@ -358,7 +358,7 @@ class CharacterFileSystemMCP:
formatted_content = f"""# {title}

**Type:** {work_type}
**Created:** {datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")}
**Created:** {datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")}
**Author:** {character_name}
**Tags:** {', '.join(tags)}

@@ -385,7 +385,7 @@ class CharacterFileSystemMCP:
content=f"Created {work_type} titled '{title}': {content}",
memory_type=MemoryType.CREATIVE,
character_name=character_name,
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
importance=0.8,
metadata={
"work_type": work_type,
@@ -432,7 +432,7 @@ class CharacterFileSystemMCP:
tags = []

# Generate diary entry
timestamp = datetime.utcnow()
timestamp = datetime.now(timezone.utc)
entry = f"""
## {timestamp.strftime("%Y-%m-%d %H:%M:%S")}

@@ -519,7 +519,7 @@ class CharacterFileSystemMCP:
existing_content = await f.read()

# Format contribution
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
contribution_text = f"""

## Contribution by {character_name} ({timestamp})
@@ -544,7 +544,7 @@ class CharacterFileSystemMCP:
content=f"Contributed to {document_name}: {contribution}",
memory_type=MemoryType.COMMUNITY,
character_name=character_name,
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
importance=0.7,
metadata={
"document": document_name,
@@ -601,7 +601,7 @@ class CharacterFileSystemMCP:
shared_name = f"{character_name}_{source_path.name}"

# Create shared file with metadata
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
shared_content = f"""# Shared by {character_name}

**Original file:** {source_file_path}
@@ -782,7 +782,7 @@ class CharacterFileSystemMCP:
character_name=character_name,
file_path=file_path,
access_type=access_type,
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
success=success
)
self.access_log.append(access)
@@ -815,7 +815,7 @@ class CharacterFileSystemMCP:
content=f"File {file_path}: {content}",
memory_type=memory_type,
character_name=character_name,
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
importance=0.7,
metadata={
"source": "file_system",
@@ -836,13 +836,13 @@ class CharacterFileSystemMCP:
"""Create initial files for a new character"""
try:
# Create initial diary entry
diary_file = char_dir / "diary" / f"{datetime.utcnow().strftime('%Y_%m')}_diary.md"
diary_file = char_dir / "diary" / f"{datetime.now(timezone.utc).strftime('%Y_%m')}_diary.md"
if not diary_file.exists():
initial_diary = f"""# {character_name}'s Digital Diary

Welcome to my personal digital space. This is where I record my thoughts, experiences, and reflections.

## {datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')}
## {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')}

**Mood:** curious
**Tags:** beginning, digital_life
@@ -6,7 +6,7 @@ Enables characters to autonomously share memories with trusted friends
import asyncio
import logging
from typing import Dict, List, Any, Optional, Sequence
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
import json

from mcp.server.models import InitializationOptions
@@ -414,7 +414,7 @@ class MemorySharingMCPServer:
response = f"📬 **{len(pending_requests)} Pending Memory Share Request(s)**\n\n"

for i, request in enumerate(pending_requests, 1):
expires_in = request.expires_at - datetime.utcnow()
expires_in = request.expires_at - datetime.now(timezone.utc)
expires_days = expires_in.days

response += f"**{i}. Request from {request.requesting_character}**\n"
@@ -1,7 +1,7 @@
import asyncio
import json
from typing import Dict, Any, List, Optional, Union
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
import aiofiles
from dataclasses import dataclass, asdict
@@ -140,7 +140,7 @@ class SelfModificationMCPServer:
new_value=new_personality,
reason=reason,
confidence=confidence,
timestamp=datetime.utcnow()
timestamp=datetime.now(timezone.utc)
)

# Apply to database
@@ -211,7 +211,7 @@ class SelfModificationMCPServer:
goals_data = {
"goals": new_goals,
"previous_goals": current_goals,
"updated_at": datetime.utcnow().isoformat(),
"updated_at": datetime.now(timezone.utc).isoformat(),
"reason": reason,
"confidence": confidence
}
@@ -282,7 +282,7 @@ class SelfModificationMCPServer:
new_value=new_style,
reason=reason,
confidence=confidence,
timestamp=datetime.utcnow()
timestamp=datetime.now(timezone.utc)
)

# Apply to database
@@ -354,13 +354,13 @@ class SelfModificationMCPServer:
current_rules = json.loads(content)

# Add new rule
rule_id = f"{memory_type}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}"
rule_id = f"{memory_type}_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}"
current_rules[rule_id] = {
"memory_type": memory_type,
"importance_weight": importance_weight,
"retention_days": retention_days,
"description": rule_description,
"created_at": datetime.utcnow().isoformat(),
"created_at": datetime.now(timezone.utc).isoformat(),
"confidence": confidence,
"active": True
}
@@ -521,7 +521,7 @@ class SelfModificationMCPServer:
async def get_modification_limits(character_name: str) -> List[TextContent]:
"""Get current modification limits and usage"""
try:
today = datetime.utcnow().date().isoformat()
today = datetime.now(timezone.utc).date().isoformat()

usage = self.daily_modifications.get(character_name, {}).get(today, {})

@@ -571,7 +571,7 @@ class SelfModificationMCPServer:
}

# Check daily limits
today = datetime.utcnow().date().isoformat()
today = datetime.now(timezone.utc).date().isoformat()
if character_name not in self.daily_modifications:
self.daily_modifications[character_name] = {}
if today not in self.daily_modifications[character_name]:
@@ -605,7 +605,7 @@ class SelfModificationMCPServer:

async def _track_modification(self, character_name: str, modification_type: str):
"""Track modification usage for daily limits"""
today = datetime.utcnow().date().isoformat()
today = datetime.now(timezone.utc).date().isoformat()

if character_name not in self.daily_modifications:
self.daily_modifications[character_name] = {}
@@ -1,7 +1,7 @@
import asyncio
import json
from typing import Dict, List, Any, Optional, Set, Tuple
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dataclasses import dataclass
from collections import defaultdict

@@ -99,7 +99,7 @@ class CommunityKnowledgeRAG:
content=f"Community {event_type}: {description}",
memory_type=MemoryType.COMMUNITY,
character_name="community",
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
importance=importance,
metadata={
"event_type": event_type,
@@ -114,7 +114,7 @@ class CommunityKnowledgeRAG:

# Update cultural evolution timeline
self.cultural_evolution_timeline.append({
"timestamp": datetime.utcnow().isoformat(),
"timestamp": datetime.now(timezone.utc).isoformat(),
"event_type": event_type,
"description": description,
"participants": participants,
@@ -363,7 +363,7 @@ class CommunityKnowledgeRAG:
if time_period is None:
time_period = timedelta(days=30) # Default to last 30 days

cutoff_date = datetime.utcnow() - time_period
cutoff_date = datetime.now(timezone.utc) - time_period

# Filter timeline events
recent_events = [
@@ -412,7 +412,7 @@ class CommunityKnowledgeRAG:
# Get recent conversations
conversations_query = select(Conversation).where(
and_(
Conversation.start_time >= datetime.utcnow() - timedelta(days=30),
Conversation.start_time >= datetime.now(timezone.utc) - timedelta(days=30),
Conversation.message_count >= 3 # Only substantial conversations
)
).order_by(desc(Conversation.start_time)).limit(50)
@@ -6,7 +6,7 @@ Enables selective memory sharing between trusted characters
import asyncio
import logging
from typing import Dict, List, Any, Optional, Tuple, Set
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dataclasses import dataclass, asdict
from enum import Enum
import json
@@ -167,7 +167,7 @@ class MemorySharingManager:
return False, "No relevant memories found to share"

# Create share request
request_id = f"share_{requesting_character}_{target_character}_{datetime.utcnow().timestamp()}"
request_id = f"share_{requesting_character}_{target_character}_{datetime.now(timezone.utc).timestamp()}"
share_request = ShareRequest(
id=request_id,
requesting_character=requesting_character,
@@ -176,8 +176,8 @@ class MemorySharingManager:
permission_level=permission_level,
reason=reason,
status=ShareRequestStatus.PENDING,
created_at=datetime.utcnow(),
expires_at=datetime.utcnow() + timedelta(days=7) # 7 day expiry
created_at=datetime.now(timezone.utc),
expires_at=datetime.now(timezone.utc) + timedelta(days=7) # 7 day expiry
)

self.share_requests[request_id] = share_request
@@ -220,7 +220,7 @@ class MemorySharingManager:
if request.status != ShareRequestStatus.PENDING:
return False, f"Request is already {request.status.value}"

if datetime.utcnow() > request.expires_at:
if datetime.now(timezone.utc) > request.expires_at:
request.status = ShareRequestStatus.EXPIRED
return False, "Request has expired"

@@ -276,13 +276,13 @@ class MemorySharingManager:

# Create and store shared memory
shared_memory = SharedMemory(
id=f"shared_{memory_id}_{datetime.utcnow().timestamp()}",
id=f"shared_{memory_id}_{datetime.now(timezone.utc).timestamp()}",
original_memory_id=memory_id,
content=memory_to_share.content,
memory_type=memory_to_share.memory_type,
source_character=source_character,
target_character=target_character,
shared_at=datetime.utcnow(),
shared_at=datetime.now(timezone.utc),
permission_level=permission_level,
share_reason=reason,
metadata=memory_to_share.metadata
@@ -437,7 +437,7 @@ class MemorySharingManager:

# Update trust level
trust_level.trust_score = new_trust
trust_level.last_updated = datetime.utcnow()
trust_level.last_updated = datetime.now(timezone.utc)
trust_level.interaction_history += 1

# Update maximum permission level based on new trust
@@ -462,7 +462,7 @@ class MemorySharingManager:
async def get_pending_requests(self, character_name: str) -> List[ShareRequest]:
"""Get pending share requests for a character"""
pending_requests = []
current_time = datetime.utcnow()
current_time = datetime.now(timezone.utc)

for request in self.share_requests.values():
# Check for expired requests
@@ -544,13 +544,13 @@ class MemorySharingManager:
for memory in memories:
if memory.id in request.memory_ids:
shared_memory = SharedMemory(
id=f"shared_{memory.id}_{datetime.utcnow().timestamp()}",
id=f"shared_{memory.id}_{datetime.now(timezone.utc).timestamp()}",
original_memory_id=memory.id,
content=memory.content,
memory_type=memory.memory_type,
source_character=request.requesting_character,
target_character=request.target_character,
shared_at=datetime.utcnow(),
shared_at=datetime.now(timezone.utc),
permission_level=request.permission_level,
share_reason=request.reason,
metadata=memory.metadata
@@ -602,7 +602,7 @@ class MemorySharingManager:
max_permission_level=SharePermissionLevel.NONE,
relationship_strength=0.5,
interaction_history=0,
last_updated=datetime.utcnow()
last_updated=datetime.now(timezone.utc)
)

# Determine max permission level
@@ -1,6 +1,6 @@
import asyncio
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dataclasses import dataclass
import json

@@ -92,7 +92,7 @@ class PersonalMemoryRAG:
content=content,
memory_type=memory_type,
character_name=self.character_name,
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
importance=importance,
metadata={
"interaction_type": context.get("type", "unknown"),
@@ -128,7 +128,7 @@ class PersonalMemoryRAG:
content=reflection,
memory_type=MemoryType.REFLECTION,
character_name=self.character_name,
timestamp=datetime.utcnow(),
timestamp=datetime.now(timezone.utc),
importance=importance,
metadata={
"reflection_type": reflection_type,
@@ -369,7 +369,7 @@ class PersonalMemoryRAG:
"avg_memory_importance": sum(importance_scores) / len(importance_scores),
"high_importance_memories": len([s for s in importance_scores if s > 0.7]),
"recent_memory_count": len([m for m in personal_memories
if (datetime.utcnow() - m.timestamp).days < 7])
if (datetime.now(timezone.utc) - m.timestamp).days < 7])
})

return stats
@@ -1,8 +1,8 @@
import asyncio
import chromadb
import os
import numpy as np
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from typing import Dict, List, Any, Optional, Tuple, Union
from datetime import datetime, timedelta, timezone
from pathlib import Path
import json
import hashlib
@@ -14,6 +14,20 @@ from utils.logging import log_error_with_context, log_character_action
from utils.config import get_settings
import logging

# Vector database backends
try:
import chromadb
CHROMADB_AVAILABLE = True
except ImportError:
CHROMADB_AVAILABLE = False

try:
from qdrant_client import QdrantClient
from qdrant_client.models import Distance, VectorParams, PointStruct
QDRANT_AVAILABLE = True
except ImportError:
QDRANT_AVAILABLE = False

logger = logging.getLogger(__name__)

class MemoryType(Enum):
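The backend flags above are driven entirely by environment variables read later in _get_vector_backend and _init_qdrant_client; the variable names below are taken from this diff. A minimal configuration sketch:

    import os

    # Select Qdrant (as install.py specifies) instead of the ChromaDB default
    os.environ["VECTOR_DB_TYPE"] = "qdrant"
    os.environ["QDRANT_HOST"] = "localhost"
    os.environ["QDRANT_PORT"] = "6333"
    os.environ["QDRANT_COLLECTION"] = "fishbowl_memories"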
@@ -56,48 +70,120 @@ class VectorStoreManager:
# Initialize embedding model
self.embedding_model = SentenceTransformer('all-MiniLM-L6-v2')

# Initialize ChromaDB client
self.chroma_client = chromadb.PersistentClient(path=str(self.data_path))
# Determine vector database backend from environment
self.backend = self._get_vector_backend()

# Collection references
self.personal_collections: Dict[str, chromadb.Collection] = {}
# Initialize appropriate client
if self.backend == "qdrant":
self._init_qdrant_client()
elif self.backend == "chromadb":
self._init_chromadb_client()
else:
raise ValueError(f"Unsupported vector database backend: {self.backend}")

# Collection references (abstracted)
self.personal_collections: Dict[str, Any] = {}
self.community_collection = None
self.creative_collections: Dict[str, chromadb.Collection] = {}
self.creative_collections: Dict[str, Any] = {}

# Memory importance decay
self.importance_decay_rate = 0.95
self.consolidation_threshold = 0.8

def _get_vector_backend(self) -> str:
"""Determine which vector database to use from environment"""
vector_db_type = os.getenv("VECTOR_DB_TYPE", "chromadb").lower()

if vector_db_type == "qdrant" and not QDRANT_AVAILABLE:
logger.warning("Qdrant requested but not available, falling back to ChromaDB")
vector_db_type = "chromadb"
elif vector_db_type == "chromadb" and not CHROMADB_AVAILABLE:
logger.warning("ChromaDB requested but not available, falling back to Qdrant")
vector_db_type = "qdrant"

logger.info(f"Using vector database backend: {vector_db_type}")
return vector_db_type

def _init_qdrant_client(self):
"""Initialize Qdrant client"""
host = os.getenv("QDRANT_HOST", "localhost")
port = int(os.getenv("QDRANT_PORT", "6333"))

self.qdrant_client = QdrantClient(host=host, port=port)
self.collection_name = os.getenv("QDRANT_COLLECTION", "fishbowl_memories")

logger.info(f"Initialized Qdrant client: {host}:{port}")

def _init_chromadb_client(self):
"""Initialize ChromaDB client"""
self.chroma_client = chromadb.PersistentClient(path=str(self.data_path))
logger.info(f"Initialized ChromaDB client: {self.data_path}")

async def initialize(self, character_names: List[str]):
"""Initialize collections for all characters"""
try:
# Initialize personal memory collections
for character_name in character_names:
collection_name = f"personal_{character_name.lower()}"
self.personal_collections[character_name] = self.chroma_client.get_or_create_collection(
name=collection_name,
metadata={"type": "personal", "character": character_name}
)

# Initialize creative collections
creative_collection_name = f"creative_{character_name.lower()}"
self.creative_collections[character_name] = self.chroma_client.get_or_create_collection(
name=creative_collection_name,
metadata={"type": "creative", "character": character_name}
)
if self.backend == "qdrant":
await self._initialize_qdrant_collections(character_names)
elif self.backend == "chromadb":
await self._initialize_chromadb_collections(character_names)

# Initialize community collection
self.community_collection = self.chroma_client.get_or_create_collection(
name="community_knowledge",
metadata={"type": "community"}
)

logger.info(f"Initialized vector stores for {len(character_names)} characters")
logger.info(f"Initialized {self.backend} vector stores for {len(character_names)} characters")

except Exception as e:
log_error_with_context(e, {"component": "vector_store_init"})
raise

async def _initialize_qdrant_collections(self, character_names: List[str]):
"""Initialize Qdrant collections"""
# For Qdrant, we use a single collection with namespaced points
embedding_dim = 384 # all-MiniLM-L6-v2 dimension

try:
# Create main collection if it doesn't exist
collections = self.qdrant_client.get_collections().collections
collection_exists = any(c.name == self.collection_name for c in collections)

if not collection_exists:
self.qdrant_client.create_collection(
collection_name=self.collection_name,
vectors_config=VectorParams(size=embedding_dim, distance=Distance.COSINE),
)
logger.info(f"Created Qdrant collection: {self.collection_name}")

# Store collection references (using collection name as identifier)
for character_name in character_names:
self.personal_collections[character_name] = f"personal_{character_name.lower()}"
self.creative_collections[character_name] = f"creative_{character_name.lower()}"

self.community_collection = "community_knowledge"

except Exception as e:
logger.error(f"Failed to initialize Qdrant collections: {e}")
raise

async def _initialize_chromadb_collections(self, character_names: List[str]):
"""Initialize ChromaDB collections"""
# Initialize personal memory collections
for character_name in character_names:
collection_name = f"personal_{character_name.lower()}"
self.personal_collections[character_name] = self.chroma_client.get_or_create_collection(
name=collection_name,
metadata={"type": "personal", "character": character_name}
)

# Initialize creative collections
creative_collection_name = f"creative_{character_name.lower()}"
self.creative_collections[character_name] = self.chroma_client.get_or_create_collection(
name=creative_collection_name,
metadata={"type": "creative", "character": character_name}
)

# Initialize community collection
self.community_collection = self.chroma_client.get_or_create_collection(
name="community_knowledge",
metadata={"type": "community"}
)

async def store_memory(self, memory: VectorMemory) -> str:
"""Store a memory in appropriate vector database"""
try:
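One gap in _get_vector_backend as written: when ChromaDB is requested but missing, it falls back to "qdrant" without checking QDRANT_AVAILABLE, so _init_qdrant_client can fail with a NameError if neither package is installed. A hedged guard sketch:

    def _get_vector_backend(self) -> str:
        """Pick a backend, erroring out early if no client library is installed."""
        import os
        vector_db_type = os.getenv("VECTOR_DB_TYPE", "chromadb").lower()

        if vector_db_type == "qdrant" and not QDRANT_AVAILABLE:
            vector_db_type = "chromadb"
        if vector_db_type == "chromadb" and not CHROMADB_AVAILABLE:
            vector_db_type = "qdrant"
        if vector_db_type == "qdrant" and not QDRANT_AVAILABLE:
            raise RuntimeError("Neither qdrant-client nor chromadb is installed")
        return vector_db_type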
@@ -109,28 +195,11 @@ class VectorStoreManager:
if not memory.id:
memory.id = self._generate_memory_id(memory)

# Select appropriate collection
collection = self._get_collection_for_memory(memory)

if not collection:
raise ValueError(f"No collection found for memory type: {memory.memory_type}")

# Prepare metadata
metadata = memory.metadata.copy()
metadata.update({
"character_name": memory.character_name,
"timestamp": memory.timestamp.isoformat(),
"importance": memory.importance,
"memory_type": memory.memory_type.value
})

# Store in collection
collection.add(
ids=[memory.id],
embeddings=[memory.embedding],
documents=[memory.content],
metadatas=[metadata]
)
# Store based on backend
if self.backend == "qdrant":
await self._store_memory_qdrant(memory)
elif self.backend == "chromadb":
await self._store_memory_chromadb(memory)

log_character_action(
memory.character_name,
@@ -147,6 +216,68 @@ class VectorStoreManager:
})
raise

async def _store_memory_qdrant(self, memory: VectorMemory):
"""Store memory in Qdrant"""
# Prepare metadata
metadata = memory.metadata.copy()
metadata.update({
"character_name": memory.character_name,
"timestamp": memory.timestamp.isoformat(),
"importance": memory.importance,
"memory_type": memory.memory_type.value,
"content": memory.content,
"namespace": self._get_namespace_for_memory(memory)
})

# Create point
point = PointStruct(
id=hash(memory.id) % (2**63), # Convert string ID to int
vector=memory.embedding,
payload=metadata
)

# Store in Qdrant
self.qdrant_client.upsert(
collection_name=self.collection_name,
points=[point]
)

async def _store_memory_chromadb(self, memory: VectorMemory):
"""Store memory in ChromaDB"""
# Select appropriate collection
collection = self._get_collection_for_memory(memory)

if not collection:
raise ValueError(f"No collection found for memory type: {memory.memory_type}")

# Prepare metadata
metadata = memory.metadata.copy()
metadata.update({
"character_name": memory.character_name,
"timestamp": memory.timestamp.isoformat(),
"importance": memory.importance,
"memory_type": memory.memory_type.value
})

# Store in collection
collection.add(
ids=[memory.id],
embeddings=[memory.embedding],
documents=[memory.content],
metadatas=[metadata]
)

def _get_namespace_for_memory(self, memory: VectorMemory) -> str:
"""Get namespace for Qdrant based on memory type and character"""
if memory.memory_type == MemoryType.PERSONAL:
return f"personal_{memory.character_name.lower()}"
elif memory.memory_type == MemoryType.CREATIVE:
return f"creative_{memory.character_name.lower()}"
elif memory.memory_type == MemoryType.COMMUNITY:
return "community_knowledge"
else:
return f"{memory.memory_type.value}_{memory.character_name.lower()}"

async def query_memories(self, character_name: str, query: str,
memory_types: List[MemoryType] = None,
limit: int = 10, min_importance: float = 0.0) -> List[VectorMemory]:
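hash(memory.id) % (2**63) deserves a caution: Python salts string hashing per process (PYTHONHASHSEED), so the same memory ID maps to a different Qdrant point ID after every restart, and upserts will duplicate rather than overwrite. A deterministic alternative sketch:

    import hashlib

    def stable_point_id(memory_id: str) -> int:
        """Derive a process-independent 63-bit integer ID from a string ID."""
        digest = hashlib.sha256(memory_id.encode("utf-8")).digest()
        return int.from_bytes(digest[:8], "big") % (2**63)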
@@ -155,64 +286,133 @@ class VectorStoreManager:
|
||||
# Generate query embedding
|
||||
query_embedding = await self._generate_embedding(query)
|
||||
|
||||
# Determine which collections to search
|
||||
collections_to_search = []
|
||||
# Query based on backend
|
||||
if self.backend == "qdrant":
|
||||
return await self._query_memories_qdrant(character_name, query, query_embedding, memory_types, limit, min_importance)
|
||||
elif self.backend == "chromadb":
|
||||
return await self._query_memories_chromadb(character_name, query, query_embedding, memory_types, limit, min_importance)
|
||||
|
||||
if not memory_types:
|
||||
memory_types = [MemoryType.PERSONAL, MemoryType.RELATIONSHIP,
|
||||
MemoryType.EXPERIENCE, MemoryType.REFLECTION]
|
||||
|
||||
for memory_type in memory_types:
|
||||
collection = self._get_collection_for_type(character_name, memory_type)
|
||||
if collection:
|
||||
collections_to_search.append((collection, memory_type))
|
||||
|
||||
# Search each collection
|
||||
all_results = []
|
||||
|
||||
for collection, memory_type in collections_to_search:
|
||||
try:
|
||||
results = collection.query(
|
||||
query_embeddings=[query_embedding],
|
||||
n_results=limit,
|
||||
where={"character_name": character_name} if memory_type != MemoryType.COMMUNITY else None
|
||||
)
|
||||
|
||||
# Convert results to VectorMemory objects
|
||||
for i, (doc, metadata, distance) in enumerate(zip(
|
||||
results['documents'][0],
|
||||
results['metadatas'][0],
|
||||
results['distances'][0]
|
||||
)):
|
||||
if metadata.get('importance', 0) >= min_importance:
|
||||
memory = VectorMemory(
|
||||
id=results['ids'][0][i],
|
||||
content=doc,
|
||||
memory_type=MemoryType(metadata['memory_type']),
|
||||
character_name=metadata['character_name'],
|
||||
timestamp=datetime.fromisoformat(metadata['timestamp']),
|
||||
importance=metadata['importance'],
|
||||
metadata=metadata
|
||||
)
|
||||
memory.metadata['similarity_score'] = 1 - distance # Convert distance to similarity
|
||||
all_results.append(memory)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error querying collection {memory_type}: {e}")
|
||||
continue
|
||||
|
||||
# Sort by relevance (similarity + importance)
|
||||
all_results.sort(
|
||||
key=lambda m: m.metadata.get('similarity_score', 0) * 0.7 + m.importance * 0.3,
|
||||
reverse=True
|
||||
)
|
||||
|
||||
return all_results[:limit]
|
||||
return []
|
||||
|
||||
except Exception as e:
|
||||
log_error_with_context(e, {"character": character_name, "query": query})
|
||||
return []
|
||||

    async def _query_memories_qdrant(self, character_name: str, query: str, query_embedding: List[float],
                                     memory_types: Optional[List[MemoryType]], limit: int, min_importance: float) -> List[VectorMemory]:
        """Query memories using Qdrant"""
        if not memory_types:
            memory_types = [MemoryType.PERSONAL, MemoryType.RELATIONSHIP,
                            MemoryType.EXPERIENCE, MemoryType.REFLECTION]

        # Build filter for namespaces and character
        must_conditions = [
            {"key": "character_name", "match": {"value": character_name}},
            {"key": "importance", "range": {"gte": min_importance}}
        ]

        # Add memory type filter
        namespace_values = [self._get_namespace_for_memory_type(character_name, mt) for mt in memory_types]
        must_conditions.append({
            "key": "namespace",
            "match": {"any": namespace_values}
        })

        # Query Qdrant
        search_result = self.qdrant_client.search(
            collection_name=self.collection_name,
            query_vector=query_embedding,
            query_filter={"must": must_conditions},
            limit=limit,
            with_payload=True
        )

        # Convert to VectorMemory objects
        results = []
        for point in search_result:
            payload = point.payload
            memory = VectorMemory(
                id=str(point.id),
                content=payload.get("content", ""),
                memory_type=MemoryType(payload.get("memory_type")),
                character_name=payload.get("character_name"),
                timestamp=datetime.fromisoformat(payload.get("timestamp")),
                importance=payload.get("importance", 0.0),
                metadata=payload
            )
            memory.metadata['similarity_score'] = point.score
            results.append(memory)

        return results
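The raw-dict filter above relies on qdrant-client coercing it into its request models. The same filter written with the typed models, which fail faster on schema mistakes, would look roughly like this (a sketch reusing the variables already in scope):

from qdrant_client import models

query_filter = models.Filter(
    must=[
        models.FieldCondition(key="character_name", match=models.MatchValue(value=character_name)),
        models.FieldCondition(key="importance", range=models.Range(gte=min_importance)),
        models.FieldCondition(key="namespace", match=models.MatchAny(any=namespace_values)),
    ]
)
search_result = self.qdrant_client.search(
    collection_name=self.collection_name,
    query_vector=query_embedding,
    query_filter=query_filter,
    limit=limit,
    with_payload=True,
)

Note also that qdrant_client.search() is synchronous, so it blocks the event loop inside this async method; qdrant-client ships AsyncQdrantClient with an awaitable equivalent if that ever becomes a problem.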

    async def _query_memories_chromadb(self, character_name: str, query: str, query_embedding: List[float],
                                       memory_types: Optional[List[MemoryType]], limit: int, min_importance: float) -> List[VectorMemory]:
        """Query memories using ChromaDB"""
        if not memory_types:
            memory_types = [MemoryType.PERSONAL, MemoryType.RELATIONSHIP,
                            MemoryType.EXPERIENCE, MemoryType.REFLECTION]

        # Determine which collections to search
        collections_to_search = []

        for memory_type in memory_types:
            collection = self._get_collection_for_type(character_name, memory_type)
            if collection:
                collections_to_search.append((collection, memory_type))

        # Search each collection
        all_results = []

        for collection, memory_type in collections_to_search:
            try:
                results = collection.query(
                    query_embeddings=[query_embedding],
                    n_results=limit,
                    where={"character_name": character_name} if memory_type != MemoryType.COMMUNITY else None
                )

                # Convert results to VectorMemory objects
                for i, (doc, metadata, distance) in enumerate(zip(
                    results['documents'][0],
                    results['metadatas'][0],
                    results['distances'][0]
                )):
                    if metadata.get('importance', 0) >= min_importance:
                        memory = VectorMemory(
                            id=results['ids'][0][i],
                            content=doc,
                            memory_type=MemoryType(metadata['memory_type']),
                            character_name=metadata['character_name'],
                            timestamp=datetime.fromisoformat(metadata['timestamp']),
                            importance=metadata['importance'],
                            metadata=metadata
                        )
                        memory.metadata['similarity_score'] = 1 - distance  # Convert distance to similarity
                        all_results.append(memory)

            except Exception as e:
                logger.warning(f"Error querying collection {memory_type}: {e}")
                continue

        # Sort by relevance (similarity + importance)
        all_results.sort(
            key=lambda m: m.metadata.get('similarity_score', 0) * 0.7 + m.importance * 0.3,
            reverse=True
        )

        return all_results[:limit]
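The 1 - distance similarity conversion in both ChromaDB paths is only meaningful if the collections use cosine distance. A sketch of pinning that down at creation time; the client path and collection name are assumptions, not taken from this commit:

import chromadb

client = chromadb.PersistentClient(path="./chroma_db")  # assumed storage location
collection = client.get_or_create_collection(
    name="personal_aria",               # hypothetical per-character collection
    metadata={"hnsw:space": "cosine"},  # makes 1 - distance a sane similarity score
)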

    def _get_namespace_for_memory_type(self, character_name: str, memory_type: MemoryType) -> str:
        """Get namespace for a specific memory type and character"""
        if memory_type == MemoryType.PERSONAL:
            return f"personal_{character_name.lower()}"
        elif memory_type == MemoryType.CREATIVE:
            return f"creative_{character_name.lower()}"
        elif memory_type == MemoryType.COMMUNITY:
            return "community_knowledge"
        else:
            return f"{memory_type.value}_{character_name.lower()}"
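Illustrative outputs of this mapping, assuming a character named "Aria" and that MemoryType.EXPERIENCE has the value "experience" (both are assumptions):

mgr = VectorStoreManager()  # assumed default construction
mgr._get_namespace_for_memory_type("Aria", MemoryType.PERSONAL)    # "personal_aria"
mgr._get_namespace_for_memory_type("Aria", MemoryType.CREATIVE)    # "creative_aria"
mgr._get_namespace_for_memory_type("Aria", MemoryType.COMMUNITY)   # "community_knowledge" (shared)
mgr._get_namespace_for_memory_type("Aria", MemoryType.EXPERIENCE)  # "experience_aria" (generic branch)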

    async def query_community_knowledge(self, query: str, limit: int = 5) -> List[VectorMemory]:
        """Query community knowledge base"""
        try:
@@ -347,7 +547,7 @@ class VectorStoreManager:
        for memory_id, metadata in zip(all_memories['ids'], all_memories['metadatas']):
            # Calculate age in days
            timestamp = datetime.fromisoformat(metadata['timestamp'])
-            age_days = (datetime.utcnow() - timestamp).days
+            age_days = (datetime.now(timezone.utc) - timestamp).days

            # Apply decay
            current_importance = metadata['importance']
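The subtraction above only works once both operands are timezone-aware. For the decay step that follows, a self-contained sketch of one plausible rule; the half-life formula is illustrative, since the repository's actual decay function is not shown in this hunk:

from datetime import datetime, timezone

def decayed_importance(importance: float, stored_iso: str, half_life_days: float = 30.0) -> float:
    # Assumes stored_iso carries an offset, e.g. "2024-05-01T12:00:00+00:00";
    # a naive string here would reintroduce the aware-vs-naive TypeError.
    timestamp = datetime.fromisoformat(stored_iso)
    age_days = (datetime.now(timezone.utc) - timestamp).days
    return importance * 0.5 ** (age_days / half_life_days)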
@@ -385,7 +585,7 @@ class VectorStoreManager:
            # Return zero embedding as fallback
            return [0.0] * 384  # MiniLM embedding size

-    def _get_collection_for_memory(self, memory: VectorMemory) -> Optional[chromadb.Collection]:
+    def _get_collection_for_memory(self, memory: VectorMemory) -> Optional[Any]:
        """Get appropriate collection for memory"""
        if memory.memory_type == MemoryType.COMMUNITY:
            return self.community_collection
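The 384-entry zero vector matches the output width of sentence-transformers' all-MiniLM-L6-v2, which is presumably the embedding model in use (inferred from the dimension, not stated in this diff):

from sentence_transformers import SentenceTransformer

model = SentenceTransformer("all-MiniLM-L6-v2")          # 384-dimensional embeddings
embedding = model.encode("a memory to embed").tolist()   # list[float], like the fallback
assert len(embedding) == 384

The Optional[chromadb.Collection] to Optional[Any] loosening is what lets the same getter hand back either a ChromaDB collection or a Qdrant handle now that the backend is chosen at runtime.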
@@ -394,7 +594,7 @@ class VectorStoreManager:
        else:
            return self.personal_collections.get(memory.character_name)

-    def _get_collection_for_type(self, character_name: str, memory_type: MemoryType) -> Optional[chromadb.Collection]:
+    def _get_collection_for_type(self, character_name: str, memory_type: MemoryType) -> Optional[Any]:
        """Get collection for specific memory type and character"""
        if memory_type == MemoryType.COMMUNITY:
            return self.community_collection
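If Optional[Any] feels too loose, a structural Protocol covering just the operations the manager calls would restore type checking without reintroducing the chromadb dependency. A sketch, not part of this commit:

from typing import Any, Dict, List, Optional, Protocol

class QueryableCollection(Protocol):
    # The only surface the query paths actually use from a collection.
    def query(self, query_embeddings: List[List[float]], n_results: int,
              where: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: ...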
@@ -473,7 +673,7 @@ class VectorStoreManager:
                metadata={
                    "consolidated": True,
                    "original_count": len(cluster),
-                    "consolidation_date": datetime.utcnow().isoformat()
+                    "consolidation_date": datetime.now(timezone.utc).isoformat()
                }
            )
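The utcnow() replacements throughout this commit all exist for the same reason: datetime.utcnow() returns a naive value that raises TypeError when compared or subtracted against the timezone-aware values coming back from timestamptz columns:

from datetime import datetime, timezone

naive = datetime.utcnow()           # tzinfo is None
aware = datetime.now(timezone.utc)  # tzinfo is timezone.utc
print(naive.tzinfo, aware.tzinfo)   # None UTC
# aware - naive                     # TypeError: can't subtract offset-naive and offset-aware datetimes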

@@ -28,9 +28,12 @@ class DiscordConfig(BaseModel):
 class LLMConfig(BaseModel):
     base_url: str = "http://localhost:11434"
     model: str = "llama2"
-    timeout: int = 30
-    max_tokens: int = 512
+    timeout: int = 300
+    max_tokens: int = 2000
+    temperature: float = 0.8
+    max_prompt_length: int = 6000
+    max_history_messages: int = 5
+    max_memories: int = 5

 class ConversationConfig(BaseModel):
     min_delay_seconds: int = 30
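A sketch of how these fields might be populated from LLM_*-prefixed environment variables; this loader is hypothetical and stands in for whatever mapping install.py actually writes:

import os

def llm_config_from_env() -> LLMConfig:
    # Hypothetical mapping; variable names mirror the new field names.
    return LLMConfig(
        base_url=os.getenv("LLM_BASE_URL", "http://localhost:11434"),
        model=os.getenv("LLM_MODEL", "llama2"),
        timeout=int(os.getenv("LLM_TIMEOUT", "300")),
        max_tokens=int(os.getenv("LLM_MAX_TOKENS", "2000")),
        temperature=float(os.getenv("LLM_TEMPERATURE", "0.8")),
        max_prompt_length=int(os.getenv("LLM_MAX_PROMPT_LENGTH", "6000")),
        max_history_messages=int(os.getenv("LLM_MAX_HISTORY_MESSAGES", "5")),
        max_memories=int(os.getenv("LLM_MAX_MEMORIES", "5")),
    )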

@@ -3,7 +3,7 @@ from loguru import logger
 from typing import Dict, Any
 import sys
 import traceback
-from datetime import datetime
+from datetime import datetime, timezone

 class InterceptHandler(logging.Handler):
     """Intercept standard logging and route to loguru"""
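For reference, the conventional way a handler like this is installed so that stdlib logging (and the libraries built on it) flows through loguru. This is the standard loguru recipe, not necessarily this module's exact bootstrap code:

import logging

logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
logging.getLogger("discord").info("routed through loguru now")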
@@ -123,6 +123,6 @@ def log_system_health(component: str, status: str, metrics: Dict[str, Any] = None):
        f"System health - {component}: {status}",
        extra={
            "metrics": metrics or {},
-            "timestamp": datetime.utcnow().isoformat()
+            "timestamp": datetime.now(timezone.utc).isoformat()
        }
    )
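A hypothetical call, to show the shape of the structured record this emits; the component name and metric keys are illustrative:

log_system_health("qdrant", "healthy", {"collections": 4, "query_latency_ms": 12})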