# Changelog (from commit message):
# - Introduction of dynamic Processors
# - Introduction of caching system
# - Introduction of a better template manager
# - Adaptation of ModelVariables to support dynamic Processors / Retrievers / Specialists
# - Start adaptation of chat client
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional

from flask import current_app

from config.logging_config import TuningLogger
from eveai_chat_workers.specialists.specialist_typing import SpecialistArguments, SpecialistResult
class BaseSpecialist(ABC):
    """Abstract base class for all specialists.

    Holds the tenant/specialist/session identity of a specialist run and a
    tuning logger used for diagnostic output whenever tuning mode is enabled.
    Concrete subclasses implement `type` and `execute`.
    """

    def __init__(self, tenant_id: int, specialist_id: int, session_id: str):
        """Initialize identity fields and set up the tuning logger.

        Args:
            tenant_id: Identifier of the tenant this specialist serves.
            specialist_id: Identifier of this specialist configuration.
            session_id: Identifier of the chat session being handled.

        Raises:
            Exception: Re-raised from `_setup_tuning_logger` if the tuning
                logger cannot be constructed.
        """
        self.tenant_id = tenant_id
        self.specialist_id = specialist_id
        self.session_id = session_id
        # Tuning is off by default; callers/config are expected to flip it on.
        self.tuning = False
        self.tuning_logger: Optional[TuningLogger] = None
        self._setup_tuning_logger()

    @property
    @abstractmethod
    def type(self) -> str:
        """The type of the specialist."""

    def _setup_tuning_logger(self) -> None:
        """Create the TuningLogger bound to this tenant/specialist pair.

        Raises:
            Exception: Re-raised after logging if logger construction fails,
                so initialization does not silently continue without tuning.
        """
        try:
            self.tuning_logger = TuningLogger(
                'tuning',
                tenant_id=self.tenant_id,
                specialist_id=self.specialist_id,
            )
            # Verify logger is working with a test message
            if self.tuning:
                self.tuning_logger.log_tuning('specialist', "Tuning logger initialized")
        except Exception as e:
            current_app.logger.error(f"Failed to setup tuning logger: {str(e)}")
            raise

    def _log_tuning(self, message: str, data: Optional[Dict[str, Any]] = None) -> None:
        """Emit a tuning log entry when tuning is enabled and a logger exists.

        Logging failures are swallowed (after being reported to the app log)
        so tuning diagnostics can never break specialist execution.

        Args:
            message: Human-readable tuning message.
            data: Optional structured payload attached to the entry.
        """
        if self.tuning and self.tuning_logger:
            try:
                self.tuning_logger.log_tuning('specialist', message, data)
            except Exception as e:
                # Fixed label: this class is a Specialist, not a Processor
                # (the old message was a copy-paste slip from BaseProcessor).
                current_app.logger.error(f"Specialist: Error in tuning logging: {e}")

    @abstractmethod
    def execute(self, arguments: SpecialistArguments) -> SpecialistResult:
        """Execute the specialist's logic and return its result."""