- Adding Prometheus and Grafana services in development

- Adding Prometheus metrics to the business events
- Ensure asynchronous behaviour of crewai specialists.
- Adapt business events to work in mixed synchronous / asynchronous contexts
- Extend business events with specialist information
- Started adding a Grafana dashboard (to be completed)
This commit is contained in:
Josako
2025-03-24 16:39:22 +01:00
parent 238bdb58f4
commit b6ee7182de
25 changed files with 1337 additions and 83 deletions

View File

@@ -123,10 +123,10 @@ class RAGFlow(EveAICrewAIFlow[RAGFlowState]):
return ""
@listen(process_inputs)
def execute_rag(self):
async def execute_rag(self):
inputs = self.state.input.model_dump()
try:
crew_output = self.rag_crew.kickoff(inputs=inputs)
crew_output = await self.rag_crew.kickoff_async(inputs=inputs)
self.specialist_executor.log_tuning("RAG Crew Output", crew_output.model_dump())
output_pydantic = crew_output.pydantic
if not output_pydantic:
@@ -139,13 +139,13 @@ class RAGFlow(EveAICrewAIFlow[RAGFlowState]):
self.exception_raised = True
raise e
def kickoff(self, inputs=None):
with current_event.create_span("RAG Specialist Execution"):
async def execute_async(self, inputs=None):
async with current_event.create_span_async("RAG Specialist Execution"):
self.specialist_executor.log_tuning("Inputs retrieved", inputs)
self.state.input = RAGSpecialistInput.model_validate(inputs)
self.specialist.update_progress("EveAI Flow Start", {"name": "RAG"})
try:
result = super().kickoff()
result = await super().kickoff_async()
except Exception as e:
current_app.logger.error(f"Error kicking of Flow: {str(e)}")

View File

@@ -1,3 +1,4 @@
import asyncio
import json
from os import wait
from typing import Optional, List
@@ -136,8 +137,7 @@ class SpecialistExecutor(CrewAIBaseSpecialistExecutor):
"nr_of_questions": self.specialist.configuration.get('nr_of_questions', ''),
"identification": arguments.identification,
}
# crew_results = self.rag_crew.kickoff(inputs=flow_inputs)
# current_app.logger.debug(f"Test Crew Output received: {crew_results}")
flow_results = self.flow.kickoff(inputs=flow_inputs)
flow_state = self.flow.state
@@ -214,10 +214,10 @@ class SPINFlow(EveAICrewAIFlow[SPINFlowState]):
return ""
@listen(process_inputs)
def execute_rag(self):
async def execute_rag(self):
inputs = self.state.input.model_dump()
try:
crew_output = self.rag_crew.kickoff(inputs=inputs)
crew_output = await self.rag_crew.kickoff_async(inputs=inputs)
self.specialist_executor.log_tuning("RAG Crew Output", crew_output.model_dump())
output_pydantic = crew_output.pydantic
if not output_pydantic:
@@ -231,10 +231,11 @@ class SPINFlow(EveAICrewAIFlow[SPINFlowState]):
raise e
@listen(process_inputs)
def execute_spin(self):
async def execute_spin(self):
inputs = self.state.input.model_dump()
try:
crew_output = self.spin_crew.kickoff(inputs=inputs)
crew_output = await self.spin_crew.kickoff_async(inputs=inputs)
current_app.logger.info(f"SPIN Crew Executed, output: {crew_output.model_dump()}")
self.specialist_executor.log_tuning("Spin Crew Output", crew_output.model_dump())
output_pydantic = crew_output.pydantic
if not output_pydantic:
@@ -248,10 +249,10 @@ class SPINFlow(EveAICrewAIFlow[SPINFlowState]):
raise e
@listen(process_inputs)
def execute_identification(self):
async def execute_identification(self):
inputs = self.state.input.model_dump()
try:
crew_output = self.identification_crew.kickoff(inputs=inputs)
crew_output = await self.identification_crew.kickoff_async(inputs=inputs)
self.specialist_executor.log_tuning("Identification Crew Output", crew_output.model_dump())
output_pydantic = crew_output.pydantic
if not output_pydantic:
@@ -265,7 +266,7 @@ class SPINFlow(EveAICrewAIFlow[SPINFlowState]):
raise e
@listen(and_(execute_rag, execute_spin, execute_identification))
def consolidate(self):
async def consolidate(self):
inputs = self.state.input.model_dump()
if self.state.rag_output:
inputs["prepared_answers"] = self.state.rag_output.answer
@@ -277,7 +278,7 @@ class SPINFlow(EveAICrewAIFlow[SPINFlowState]):
current_app.logger.debug(f"Additional Questions: {additional_questions}")
inputs["additional_questions"] = additional_questions
try:
crew_output = self.rag_consolidation_crew.kickoff(inputs=inputs)
crew_output = await self.rag_consolidation_crew.kickoff_async(inputs=inputs)
self.specialist_executor.log_tuning("RAG Consolidation Crew Output", crew_output.model_dump())
output_pydantic = crew_output.pydantic
if not output_pydantic:
@@ -290,13 +291,16 @@ class SPINFlow(EveAICrewAIFlow[SPINFlowState]):
self.exception_raised = True
raise e
def kickoff(self, inputs=None):
with current_event.create_span("SPIN Specialist Execution"):
async def execute_async(self, inputs=None):
current_app.logger.debug(f"Async kickoff {self.name}")
async with current_event.create_span_async("SPIN Specialist Execution"):
self.specialist_executor.log_tuning("Inputs retrieved", inputs)
self.state.input = SPINSpecialistInput.model_validate(inputs)
self.specialist.update_progress("EveAI Flow Start", {"name": "SPIN"})
try:
result = super().kickoff()
current_app.logger.debug(f"Async super kickoff {self.name}")
result = await super().kickoff_async()
current_app.logger.debug(f"Async super kickoff {self.name} ended")
except Exception as e:
current_app.logger.error(f"Error kicking of Flow: {str(e)}")

View File

@@ -28,13 +28,13 @@ class BaseSpecialistExecutor(ABC):
@abstractmethod
def type(self) -> str:
"""The type of the specialist"""
pass
raise NotImplementedError
@property
@abstractmethod
def type_version(self) -> str:
"""The type version of the specialist"""
pass
raise NotImplementedError
def _initialize_retrievers(self) -> List[BaseRetriever]:
"""Initialize all retrievers associated with this specialist"""
@@ -96,7 +96,7 @@ class BaseSpecialistExecutor(ABC):
@abstractmethod
def execute_specialist(self, arguments: SpecialistArguments) -> SpecialistResult:
"""Execute the specialist's logic"""
pass
raise NotImplementedError
def get_specialist_class(specialist_type: str, type_version: str):

View File

@@ -1,5 +1,7 @@
import asyncio
import json
import time
from abc import abstractmethod
from crewai import Agent, Task, Crew, Flow
from crewai.agents.parser import AgentAction, AgentFinish
@@ -21,8 +23,6 @@ class EveAICrewAIAgent(Agent):
super().__init__(**kwargs)
self.specialist = specialist
self.name = name
self.specialist.log_tuning("Initializing EveAICrewAIAgent", {"name": name})
self.specialist.update_progress("EveAI Agent Initialisation", {"name": self.name})
def execute_task(
self,
@@ -40,28 +40,30 @@ class EveAICrewAIAgent(Agent):
Returns:
Output of the agent
"""
with current_event.create_span(f"Task Execution {task.name} by {self.name}"):
self.specialist.log_tuning("EveAI Agent Task Start",
{"name": self.name,
'task': task.name,
})
self.specialist.update_progress("EveAI Agent Task Start",
{"name": self.name,
'task': task.name,
})
current_app.logger.debug(f"Task Execution {task.name} by {self.name}")
# with current_event.create_span(f"Task Execution {task.name} by {self.name}"):
self.specialist.log_tuning("EveAI Agent Task Start",
{"name": self.name,
'task': task.name,
})
self.specialist.update_progress("EveAI Agent Task Start",
{"name": self.name,
'task': task.name,
})
result = super().execute_task(task, context, tools)
result = super().execute_task(task, context, tools)
self.specialist.log_tuning("EveAI Agent Task Complete",
{"name": self.name,
'task': task.name,
'result': result,
})
self.specialist.update_progress("EveAI Agent Task Complete",
{"name": self.name,
'task': task.name,
})
self.specialist.log_tuning("EveAI Agent Task Complete",
{"name": self.name,
'task': task.name,
'result': result,
})
self.specialist.update_progress("EveAI Agent Task Complete",
{"name": self.name,
'task': task.name,
})
current_app.logger.debug(f"Task Execution Ended {task.name} by {self.name}")
return result
@@ -76,8 +78,6 @@ class EveAICrewAITask(Task):
# current_app.logger.debug(f"Task pydantic class for {name}: {"class", self.output_pydantic}")
self.specialist = specialist
self.name = name
self.specialist.log_tuning("Initializing EveAICrewAITask", {"name": name})
self.specialist.update_progress("EveAI Task Initialisation", {"name": name})
class EveAICrewAICrew(Crew):
@@ -89,12 +89,10 @@ class EveAICrewAICrew(Crew):
super().__init__(**kwargs)
self.specialist = specialist
self.name = name
self.specialist.log_tuning("Initializing EveAICrewAICrew", {"name": self.name})
self.specialist.update_progress("EveAI Crew Initialisation", {"name": self.name})
def kickoff(
self,
inputs: Optional[Dict[str, Any]] = None,
self,
inputs: Optional[Dict[str, Any]] = None,
) -> CrewOutput:
with current_event.create_span(f"Crew {self.name} kickoff"):
start_time = time.time()
@@ -111,6 +109,26 @@ class EveAICrewAICrew(Crew):
return results
async def kickoff_async(
self,
inputs: Optional[Dict[str, Any]] = None,
) -> CrewOutput:
async with current_event.create_span_async(f"Crew {self.name} kickoff"):
start_time = time.time()
results = await super().kickoff_async(inputs)
end_time = time.time()
metrics = {
"total_tokens": self.usage_metrics.total_tokens,
"prompt_tokens": self.usage_metrics.prompt_tokens,
"completion_tokens": self.usage_metrics.completion_tokens,
"time_elapsed": end_time - start_time,
"interaction_type": "Crew Execution"
}
current_event.log_llm_metrics(metrics)
return results
class EveAICrewAIFlow(Flow):
specialist: Any = Field(default=None, exclude=True)
name: str = Field(default=None, exclude=True)
@@ -123,10 +141,14 @@ class EveAICrewAIFlow(Flow):
self.specialist.log_tuning("Initializing EveAICrewAIFlow", {"name": self.name})
self.specialist.update_progress("EveAI Flow Initialisation", {"name": self.name})
def kickoff(self, inputs=None):
result = asyncio.run(self.execute_async(inputs=inputs))
@abstractmethod
async def execute_async(self, inputs=None):
raise NotImplementedError
class EveAIFlowState(BaseModel):
"""Base class for all EveAI flow states"""
pass

View File

@@ -90,6 +90,7 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
def _config_task_agents(self):
"""Configure the task agents by adding task-agent combinations. Use _add_task_agent()
"""
raise NotImplementedError
@property
def task_agents(self) -> Dict[str, str]:
@@ -103,6 +104,7 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
@abstractmethod
def _config_pydantic_outputs(self):
"""Configure the task pydantic outputs by adding task-output combinations. Use _add_pydantic_output()"""
raise NotImplementedError
@property
def task_pydantic_outputs(self):
@@ -203,6 +205,7 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
def _instantiate_specialist(self):
"""Instantiate a crew (or flow) to set up the complete specialist, using the assets (agents, tasks, tools).
The assets can be retrieved using their type name in lower case, e.g. rag_agent"""
raise NotImplementedError
def _detail_question(self, language: str, question: str) -> str:
"""Detail question based on conversation history"""
@@ -211,9 +214,7 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
# Get LLM and template
llm = self.model_variables.get_llm(temperature=0.3)
template = cache_manager.prompts_config_cache.get_config('history').get('content', '')
current_app.logger.debug(f"History Template: {template}")
language_template = create_language_template(template, language)
current_app.logger.debug(f"History Language Template: {template}")
# Create prompt
history_prompt = ChatPromptTemplate.from_template(language_template)
@@ -226,7 +227,6 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
)
# Execute chain
current_app.logger.debug(f"Formatted History: {self.formatted_history}")
detailed_question = chain.invoke({
"history": self.formatted_history,
"question": question
@@ -254,7 +254,6 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
"num_retrievers": len(self.retrievers),
"all arguments": arguments.model_dump(),
})
current_app.logger.debug(f"Retrieving context from arguments: {arguments}")
original_query = arguments.query
detailed_query = self._detail_question(arguments.language, original_query)
@@ -289,7 +288,6 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
retriever_args = RetrieverArguments(**current_retriever_args)
# Each retriever gets its own specific arguments
current_app.logger.debug(f"Retrieving context {retriever_id} with arguments {retriever_args}")
retriever_result = retriever.retrieve(retriever_args)
all_context.extend(retriever_result)
@@ -326,7 +324,7 @@ class CrewAIBaseSpecialistExecutor(BaseSpecialistExecutor):
@abstractmethod
def execute(self, arguments: SpecialistArguments, formatted_context: str, citations: List[int]) -> SpecialistResult:
pass
raise NotImplementedError
def execute_specialist(self, arguments: SpecialistArguments) -> SpecialistResult:
# Detail the incoming query