- adding usage to specialist execution
- Correcting implementation of usage
- Removed some obsolete debug statements
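
For context, here is a minimal sketch of how a specialist execution might report its usage through the updated class. The metric keys (`total_tokens`, `prompt_tokens`, `completion_tokens`, `time_elapsed`, `interaction_type`) and the context-manager behaviour come from the diff below; the constructor arguments, the `span` method name (inferred from the log messages), and the specialist call itself are hypothetical:

# Hypothetical call site inside a specialist task; BusinessEvent's constructor
# signature is assumed, the metric keys match what log_llm_metrics() reads.
with BusinessEvent(event_type='specialist_execution', tenant_id=tenant_id) as event:
    with event.span('run_specialist'):
        result, usage = run_specialist(prompt)  # hypothetical LLM-backed call
        event.log_llm_metrics({
            'total_tokens': usage.total_tokens,
            'prompt_tokens': usage.prompt_tokens,
            'completion_tokens': usage.completion_tokens,
            'time_elapsed': usage.time_elapsed,
            'interaction_type': 'specialist',
        })
# On exit the buffered entries are flushed to the 'persist_business_events' Celery task.
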
@@ -2,13 +2,14 @@ import os
import uuid
from contextlib import contextmanager
from datetime import datetime
from typing import Dict, Any, Optional
from typing import Dict, Any, Optional, List
from datetime import datetime as dt, timezone as tz
import logging

from .business_event_context import BusinessEventContext
from common.models.entitlements import BusinessEventLog
from common.extensions import db
from .celery_utils import current_celery


class BusinessEvent:
@@ -38,6 +39,7 @@ class BusinessEvent:
            'call_count': 0,
            'interaction_type': None
        }
        self._log_buffer = []

    def update_attribute(self, attribute: str, value: any):
        if hasattr(self, attribute):
@@ -80,7 +82,7 @@ class BusinessEvent:
        self.span_name = span_name
        self.parent_span_id = parent_span_id

        self.log(f"Starting span {span_name}")
        self.log(f"Start")

        try:
            yield
@@ -88,7 +90,7 @@ class BusinessEvent:
            if self.llm_metrics['call_count'] > 0:
                self.log_final_metrics()
                self.reset_llm_metrics()
            self.log(f"Ending span {span_name}")
            self.log(f"End")
            # Restore the previous span info
            if self.spans:
                self.span_id, self.span_name, self.parent_span_id = self.spans.pop()
@@ -98,8 +100,8 @@ class BusinessEvent:
                self.parent_span_id = None

    def log(self, message: str, level: str = 'info'):
        logger = logging.getLogger('business_events')
        log_data = {
            'timestamp': dt.now(tz=tz.utc),
            'event_type': self.event_type,
            'tenant_id': self.tenant_id,
            'trace_id': self.trace_id,
@@ -111,34 +113,16 @@ class BusinessEvent:
            'chat_session_id': self.chat_session_id,
            'interaction_id': self.interaction_id,
            'environment': self.environment,
            'message': message,
        }
        # log to Graylog
        getattr(logger, level)(message, extra=log_data)

        # Log to database
        event_log = BusinessEventLog(
            timestamp=dt.now(tz=tz.utc),
            event_type=self.event_type,
            tenant_id=self.tenant_id,
            trace_id=self.trace_id,
            span_id=self.span_id,
            span_name=self.span_name,
            parent_span_id=self.parent_span_id,
            document_version_id=self.document_version_id,
            document_version_file_size=self.document_version_file_size,
            chat_session_id=self.chat_session_id,
            interaction_id=self.interaction_id,
            environment=self.environment,
            message=message
        )
        db.session.add(event_log)
        db.session.commit()
        self._log_buffer.append(log_data)

    def log_llm_metrics(self, metrics: dict, level: str = 'info'):
        self.update_llm_metrics(metrics)
        message = "LLM Metrics"
        logger = logging.getLogger('business_events')
        log_data = {
            'timestamp': dt.now(tz=tz.utc),
            'event_type': self.event_type,
            'tenant_id': self.tenant_id,
            'trace_id': self.trace_id,
@@ -155,38 +139,15 @@ class BusinessEvent:
            'llm_metrics_completion_tokens': metrics['completion_tokens'],
            'llm_metrics_total_time': metrics['time_elapsed'],
            'llm_interaction_type': metrics['interaction_type'],
            'message': message,
        }
        # log to Graylog
        getattr(logger, level)(message, extra=log_data)

        # Log to database
        event_log = BusinessEventLog(
            timestamp=dt.now(tz=tz.utc),
            event_type=self.event_type,
            tenant_id=self.tenant_id,
            trace_id=self.trace_id,
            span_id=self.span_id,
            span_name=self.span_name,
            parent_span_id=self.parent_span_id,
            document_version_id=self.document_version_id,
            document_version_file_size=self.document_version_file_size,
            chat_session_id=self.chat_session_id,
            interaction_id=self.interaction_id,
            environment=self.environment,
            llm_metrics_total_tokens=metrics['total_tokens'],
            llm_metrics_prompt_tokens=metrics['prompt_tokens'],
            llm_metrics_completion_tokens=metrics['completion_tokens'],
            llm_metrics_total_time=metrics['time_elapsed'],
            llm_interaction_type=metrics['interaction_type'],
            message=message
        )
        db.session.add(event_log)
        db.session.commit()
        self._log_buffer.append(log_data)

    def log_final_metrics(self, level: str = 'info'):
        logger = logging.getLogger('business_events')
        message = "Final LLM Metrics"
        log_data = {
            'timestamp': dt.now(tz=tz.utc),
            'event_type': self.event_type,
            'tenant_id': self.tenant_id,
            'trace_id': self.trace_id,
@@ -204,34 +165,65 @@ class BusinessEvent:
            'llm_metrics_total_time': self.llm_metrics['total_time'],
            'llm_metrics_call_count': self.llm_metrics['call_count'],
            'llm_interaction_type': self.llm_metrics['interaction_type'],
            'message': message,
        }
        # log to Graylog
        getattr(logger, level)(message, extra=log_data)
        self._log_buffer.append(log_data)

        # Log to database
        event_log = BusinessEventLog(
            timestamp=dt.now(tz=tz.utc),
            event_type=self.event_type,
            tenant_id=self.tenant_id,
            trace_id=self.trace_id,
            span_id=self.span_id,
            span_name=self.span_name,
            parent_span_id=self.parent_span_id,
            document_version_id=self.document_version_id,
            document_version_file_size=self.document_version_file_size,
            chat_session_id=self.chat_session_id,
            interaction_id=self.interaction_id,
            environment=self.environment,
            llm_metrics_total_tokens=self.llm_metrics['total_tokens'],
            llm_metrics_prompt_tokens=self.llm_metrics['prompt_tokens'],
            llm_metrics_completion_tokens=self.llm_metrics['completion_tokens'],
            llm_metrics_total_time=self.llm_metrics['total_time'],
            llm_metrics_call_count=self.llm_metrics['call_count'],
            llm_interaction_type=self.llm_metrics['interaction_type'],
            message=message
        )
        db.session.add(event_log)
        db.session.commit()
    @staticmethod
    def _direct_db_persist(log_entries: List[Dict[str, Any]]):
        """Fallback method to directly persist logs to DB if async fails"""
        try:
            db_entries = []
            for entry in log_entries:
                event_log = BusinessEventLog(
                    timestamp=entry.pop('timestamp'),
                    event_type=entry.pop('event_type'),
                    tenant_id=entry.pop('tenant_id'),
                    trace_id=entry.pop('trace_id'),
                    span_id=entry.pop('span_id', None),
                    span_name=entry.pop('span_name', None),
                    parent_span_id=entry.pop('parent_span_id', None),
                    document_version_id=entry.pop('document_version_id', None),
                    document_version_file_size=entry.pop('document_version_file_size', None),
                    chat_session_id=entry.pop('chat_session_id', None),
                    interaction_id=entry.pop('interaction_id', None),
                    environment=entry.pop('environment', None),
                    llm_metrics_total_tokens=entry.pop('llm_metrics_total_tokens', None),
                    llm_metrics_prompt_tokens=entry.pop('llm_metrics_prompt_tokens', None),
                    llm_metrics_completion_tokens=entry.pop('llm_metrics_completion_tokens', None),
                    llm_metrics_total_time=entry.pop('llm_metrics_total_time', None),
                    llm_metrics_call_count=entry.pop('llm_metrics_call_count', None),
                    llm_interaction_type=entry.pop('llm_interaction_type', None),
                    message=entry.pop('message', None)
                )
                db_entries.append(event_log)

            # Bulk insert
            db.session.bulk_save_objects(db_entries)
            db.session.commit()
        except Exception as e:
            logger = logging.getLogger('business_events')
            logger.error(f"Failed to persist logs directly to DB: {e}")
            db.session.rollback()

    def _flush_log_buffer(self):
        """Flush the log buffer to the database via a Celery task"""
        if self._log_buffer:
            try:
                # Send to Celery task
                current_celery.send_task(
                    'persist_business_events',
                    args=[self._log_buffer],
                    queue='entitlements'  # Or dedicated log queue
                )
            except Exception as e:
                # Fallback to direct DB write in case of issues with Celery
                logger = logging.getLogger('business_events')
                logger.error(f"Failed to send logs to Celery. Falling back to direct DB: {e}")
                self._direct_db_persist(self._log_buffer)

            # Clear the buffer after sending
            self._log_buffer = []

    def __enter__(self):
        self.log(f'Starting Trace for {self.event_type}')
@@ -242,4 +234,5 @@ class BusinessEvent:
        self.log_final_metrics()
        self.reset_llm_metrics()
        self.log(f'Ending Trace for {self.event_type}')
        self._flush_log_buffer()
        return BusinessEventContext(self).__exit__(exc_type, exc_val, exc_tb)
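
The `_flush_log_buffer` method above hands the buffer to a `persist_business_events` task on the `entitlements` queue, but that task itself is not part of this commit. A minimal sketch of what the worker side might look like, assuming the buffered keys line up with the `BusinessEventLog` columns the same way `_direct_db_persist` maps them (the `shared_task` registration and module placement are assumptions):

# Hypothetical worker-side consumer for the buffered business-event entries.
# Only the task name ('persist_business_events') comes from this diff.
from typing import Any, Dict, List

from celery import shared_task

from common.extensions import db
from common.models.entitlements import BusinessEventLog


@shared_task(name='persist_business_events')
def persist_business_events(log_entries: List[Dict[str, Any]]):
    """Bulk-insert buffered business event log entries."""
    try:
        # Assumes each buffered dict's keys match BusinessEventLog columns;
        # timestamps may arrive as ISO strings after Celery serialization.
        db.session.bulk_save_objects(
            [BusinessEventLog(**entry) for entry in log_entries]
        )
        db.session.commit()
    except Exception:
        db.session.rollback()
        raise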