Local context management system for AI:

- Immutable log (blockchain-style)
- Versioned, improvable algorithms
- Model-agnostic (Anthropic, OpenAI, Ollama)
- Metrics and A/B testing system

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
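
The configuration file below covers the database, the context-assembly algorithm, the providers, metrics, and experiments, but none of the logic behind the bullet points above. Purely as an illustration of what an immutable, blockchain-style log can mean in practice (every name in this sketch is hypothetical; nothing here is taken from the repository), entries can be chained by hash so that altering any past record invalidates every later one:

```python
import hashlib
import json
from dataclasses import dataclass


@dataclass(frozen=True)
class LogEntry:
    """One append-only record; prev_hash chains it to the previous entry."""
    index: int
    payload: dict
    prev_hash: str

    def hash(self) -> str:
        # Hash a canonical JSON encoding of the whole entry.
        raw = json.dumps(
            {"index": self.index, "payload": self.payload, "prev_hash": self.prev_hash},
            sort_keys=True,
        )
        return hashlib.sha256(raw.encode("utf-8")).hexdigest()


class ImmutableLog:
    """Minimal hash-chained log: append-only, verifiable, never edited in place."""

    def __init__(self) -> None:
        self._entries: list[LogEntry] = []

    def append(self, payload: dict) -> LogEntry:
        prev_hash = self._entries[-1].hash() if self._entries else "0" * 64
        entry = LogEntry(index=len(self._entries), payload=payload, prev_hash=prev_hash)
        self._entries.append(entry)
        return entry

    def verify(self) -> bool:
        # Recompute the chain; any tampered entry breaks every later prev_hash.
        expected = "0" * 64
        for entry in self._entries:
            if entry.prev_hash != expected:
                return False
            expected = entry.hash()
        return True
```

Where such a log would actually live (for instance in the PostgreSQL database configured below) is not answered by this file.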
# Context Manager - Default configuration

database:
  host: ${PGHOST:localhost}
  port: ${PGPORT:5432}
  name: ${PGDATABASE:context_manager}
  user: ${PGUSER:postgres}
  password: ${PGPASSWORD:}
  pool:
    min_connections: 1
    max_connections: 10

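# The ${VAR:default} placeholders (above and in the ollama block below) are
# resolved from environment variables, with the text after the colon as the
# fallback; the loader itself is not part of this file.
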
algorithm:
  default:
    max_tokens: 4000
    sources:
      system_prompts: true
      context_blocks: true
      memory: true
      knowledge: true
      history: true
      ambient: true
    weights:
      priority: 0.4
      relevance: 0.3
      recency: 0.2
      frequency: 0.1
    history_config:
      max_messages: 20
      summarize_after: 10
      include_system: false
    memory_config:
      max_items: 15
      min_importance: 30
    knowledge_config:
      max_items: 5
      require_keyword_match: true

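# The priority/relevance/recency/frequency weights above sum to 1.0; each
# source can be toggled independently of the weights.
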
providers:
  anthropic:
    model: claude-sonnet-4-20250514
    max_tokens: 4096
  openai:
    model: gpt-4
    max_tokens: 4096
  ollama:
    host: ${OLLAMA_HOST:localhost}
    port: ${OLLAMA_PORT:11434}
    model: llama3

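# Anthropic and OpenAI are hosted APIs; Ollama defaults to a local server on
# localhost:11434 and can be pointed elsewhere via OLLAMA_HOST / OLLAMA_PORT.
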
metrics:
  auto_evaluate: false
  evaluation_model: null  # model used for automatic evaluation
  retention_days: 90

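# Automatic evaluation is off by default and evaluation_model presumably has
# to be set before enabling it; metric data is kept for 90 days.
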
experiments:
  default_traffic_split: 0.5
  min_samples: 100
  max_samples: 1000
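
This file does not show how it is consumed. Purely as a sketch, assuming the ${VAR:default} placeholders mean "environment variable with a fallback" and assuming PyYAML is available (the file name config.yaml and the helper names below are made up), a loader could look like this:

```python
import os
import re

import yaml  # PyYAML: an assumed dependency, not confirmed by this file

_PLACEHOLDER = re.compile(r"\$\{([A-Z_][A-Z0-9_]*):([^}]*)\}")


def _substitute(value):
    """Replace ${VAR:default} with os.environ['VAR'], falling back to default."""
    if isinstance(value, str):
        return _PLACEHOLDER.sub(lambda m: os.environ.get(m.group(1), m.group(2)), value)
    if isinstance(value, dict):
        return {key: _substitute(item) for key, item in value.items()}
    if isinstance(value, list):
        return [_substitute(item) for item in value]
    return value


def load_config(path="config.yaml"):  # hypothetical file name
    with open(path, encoding="utf-8") as handle:
        return _substitute(yaml.safe_load(handle))


if __name__ == "__main__":
    cfg = load_config()
    # Substituted values stay strings here (e.g. the port); a real loader
    # would coerce types where the schema expects numbers.
    print(cfg["database"]["host"], cfg["providers"]["ollama"]["port"])
```

Substituted values come back as strings in this sketch; type coercion, validation, and merging with per-environment overrides are left out.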