- Add configuration of agents, tasks, tools, and specialists in the context of the SPIN specialist
- Correct startup of applications using gevent — introduce startup scripts (eveai_app) and a caching manager for all configurations
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -46,3 +46,4 @@ scripts/__pycache__/run_eveai_app.cpython-312.pyc
|
||||
/docker/eveai_logs/
|
||||
/integrations/Wordpress/eveai_sync.zip
|
||||
/integrations/Wordpress/eveai-chat.zip
|
||||
/db_backups/
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# Example:
|
||||
# *.log
|
||||
# tmp/
|
||||
db_backups/
|
||||
logs/
|
||||
nginx/static/assets/fonts/
|
||||
nginx/static/assets/img/
|
||||
|
||||
@@ -7,5 +7,6 @@ eveai_entitlements/
|
||||
eveai_workers/
|
||||
instance/
|
||||
integrations/
|
||||
migrations/
|
||||
nginx/
|
||||
scripts/
|
||||
13
.repopackignore_eveai_app_startup
Normal file
13
.repopackignore_eveai_app_startup
Normal file
@@ -0,0 +1,13 @@
|
||||
eveai_api/
|
||||
eveai_beat/
|
||||
eveai_chat/
|
||||
eveai_chat_workers/
|
||||
eveai_entitlements/
|
||||
eveai_workers/
|
||||
eveai_app/templates/
|
||||
eveai_app/views/
|
||||
instance/
|
||||
integrations/
|
||||
migrations/
|
||||
nginx/
|
||||
scripts/
|
||||
@@ -35,4 +35,6 @@ simple_encryption = SimpleEncryption()
|
||||
minio_client = MinioClient()
|
||||
metrics = PrometheusMetrics.for_app_factory()
|
||||
template_manager = TemplateManager()
|
||||
# Caching classes
|
||||
cache_manager = EveAICacheManager()
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ class Specialist(db.Model):
|
||||
name = db.Column(db.String(50), nullable=False)
|
||||
description = db.Column(db.Text, nullable=True)
|
||||
type = db.Column(db.String(50), nullable=False, default="STANDARD_RAG")
|
||||
type_version = db.Column(db.String(20), nullable=True, default="1.0.0")
|
||||
tuning = db.Column(db.Boolean, nullable=True, default=False)
|
||||
configuration = db.Column(JSONB, nullable=True)
|
||||
arguments = db.Column(JSONB, nullable=True)
|
||||
@@ -32,6 +33,10 @@ class Specialist(db.Model):
|
||||
# Relationship to retrievers through the association table
|
||||
retrievers = db.relationship('SpecialistRetriever', backref='specialist', lazy=True,
|
||||
cascade="all, delete-orphan")
|
||||
agents = db.relationship('EveAIAgent', backref='specialist', lazy=True)
|
||||
tasks = db.relationship('EveAITask', backref='specialist', lazy=True)
|
||||
tools = db.relationship('EveAITool', backref='specialist', lazy=True)
|
||||
dispatchers = db.relationship('SpecialistDispatcher', backref='specialist', lazy=True)
|
||||
|
||||
# Versioning Information
|
||||
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
|
||||
@@ -40,6 +45,84 @@ class Specialist(db.Model):
|
||||
updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
|
||||
|
||||
|
||||
class EveAIAgent(db.Model):
    """Agent definition belonging to a Specialist.

    The role/goal/backstory columns suggest a crew-style LLM agent
    profile — NOTE(review): confirm against the specialist runtime code.
    """
    id = db.Column(db.Integer, primary_key=True)
    # Owning specialist (backref 'specialist' is defined on Specialist.agents).
    specialist_id = db.Column(db.Integer, db.ForeignKey(Specialist.id), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    description = db.Column(db.Text, nullable=True)
    # Type identifier and the configuration version it was built from.
    type = db.Column(db.String(50), nullable=False, default="STANDARD_RAG")
    type_version = db.Column(db.String(20), nullable=True, default="1.0.0")
    role = db.Column(db.Text, nullable=True)
    goal = db.Column(db.Text, nullable=True)
    backstory = db.Column(db.Text, nullable=True)
    tuning = db.Column(db.Boolean, nullable=True, default=False)
    # Free-form JSON blobs: type-specific configuration and call arguments.
    configuration = db.Column(JSONB, nullable=True)
    arguments = db.Column(JSONB, nullable=True)

    # Versioning Information
    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
    updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
    updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
|
||||
|
||||
|
||||
class EveAITask(db.Model):
    """Task definition belonging to a Specialist.

    Mirrors EveAIAgent's shape plus task-specific fields
    (expected_output, context, asynchronous).
    """
    id = db.Column(db.Integer, primary_key=True)
    # Owning specialist (backref 'specialist' is defined on Specialist.tasks).
    specialist_id = db.Column(db.Integer, db.ForeignKey(Specialist.id), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    description = db.Column(db.Text, nullable=True)
    # Type identifier and the configuration version it was built from.
    type = db.Column(db.String(50), nullable=False, default="STANDARD_RAG")
    type_version = db.Column(db.String(20), nullable=True, default="1.0.0")
    # Textual description of what the task should produce.
    expected_output = db.Column(db.Text, nullable=True)
    tuning = db.Column(db.Boolean, nullable=True, default=False)
    # Free-form JSON blobs: type-specific configuration, call arguments,
    # and task context. NOTE(review): exact context schema not visible here.
    configuration = db.Column(JSONB, nullable=True)
    arguments = db.Column(JSONB, nullable=True)
    context = db.Column(JSONB, nullable=True)
    # Whether the task is executed asynchronously.
    asynchronous = db.Column(db.Boolean, nullable=True, default=False)

    # Versioning Information
    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
    updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
    updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
|
||||
|
||||
|
||||
class EveAITool(db.Model):
    """Tool definition belonging to a Specialist."""
    id = db.Column(db.Integer, primary_key=True)
    # Owning specialist (backref 'specialist' is defined on Specialist.tools).
    specialist_id = db.Column(db.Integer, db.ForeignKey(Specialist.id), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    description = db.Column(db.Text, nullable=True)
    # Type identifier and the configuration version it was built from.
    type = db.Column(db.String(50), nullable=False, default="STANDARD_RAG")
    type_version = db.Column(db.String(20), nullable=True, default="1.0.0")
    tuning = db.Column(db.Boolean, nullable=True, default=False)
    # Free-form JSON blobs: type-specific configuration and call arguments.
    configuration = db.Column(JSONB, nullable=True)
    arguments = db.Column(JSONB, nullable=True)

    # Versioning Information
    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
    updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
    updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
|
||||
|
||||
|
||||
class Dispatcher(db.Model):
    """Dispatcher definition; linked to Specialists via SpecialistDispatcher.

    Unlike agents/tasks/tools, a dispatcher has no direct specialist_id —
    the association table carries the link.
    """
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    description = db.Column(db.Text, nullable=True)
    # Type identifier and the configuration version it was built from.
    type = db.Column(db.String(50), nullable=False, default="STANDARD_RAG")
    type_version = db.Column(db.String(20), nullable=True, default="1.0.0")
    tuning = db.Column(db.Boolean, nullable=True, default=False)
    # Free-form JSON blobs: type-specific configuration and call arguments.
    configuration = db.Column(JSONB, nullable=True)
    arguments = db.Column(JSONB, nullable=True)

    # Versioning Information
    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
    updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
    updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
|
||||
|
||||
|
||||
|
||||
class Interaction(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
chat_session_id = db.Column(db.Integer, db.ForeignKey(ChatSession.id), nullable=False)
|
||||
@@ -71,3 +154,14 @@ class SpecialistRetriever(db.Model):
|
||||
retriever_id = db.Column(db.Integer, db.ForeignKey(Retriever.id, ondelete='CASCADE'), primary_key=True)
|
||||
|
||||
retriever = db.relationship("Retriever", backref="specialist_retrievers")
|
||||
|
||||
|
||||
class SpecialistDispatcher(db.Model):
    """Association table linking Specialists to Dispatchers (composite PK).

    Rows are removed when either side is deleted (ondelete='CASCADE').
    """
    specialist_id = db.Column(db.Integer, db.ForeignKey(Specialist.id, ondelete='CASCADE'), primary_key=True)
    dispatcher_id = db.Column(db.Integer, db.ForeignKey(Dispatcher.id, ondelete='CASCADE'), primary_key=True)

    # Convenience access to the linked Dispatcher from the association row.
    dispatcher = db.relationship("Dispatcher", backref="specialist_dispatchers")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
105
common/utils/cache/base.py
vendored
105
common/utils/cache/base.py
vendored
@@ -1,57 +1,78 @@
|
||||
# common/utils/cache/base.py
|
||||
|
||||
from typing import Any, Dict, List, Optional, TypeVar, Generic, Type
|
||||
from dataclasses import dataclass
|
||||
from flask import Flask
|
||||
from dogpile.cache import CacheRegion
|
||||
|
||||
T = TypeVar('T')
|
||||
T = TypeVar('T') # Generic type parameter for cached data
|
||||
|
||||
|
||||
@dataclass
class CacheKey:
    """Composite cache key built from named components.

    Produces a deterministic string form so that the same set of
    component values always maps to the same cache entry.

    Attributes:
        components: Mapping of key component names to their values.

    Example:
        str(CacheKey({'tenant_id': 123, 'user_id': 456}))
        -> "tenant_id=123:user_id=456"
    """
    components: Dict[str, Any]

    def __str__(self) -> str:
        """Render components as "k=v" pairs, sorted by key, joined by ':'."""
        parts = [f"{name}={value}" for name, value in sorted(self.components.items())]
        return ":".join(parts)
|
||||
|
||||
|
||||
class CacheInvalidationManager:
    """Routes model-change notifications to subscribed cache handlers."""

    def __init__(self):
        # model name -> list of (handler, required key fields)
        self._subscribers = {}

    def subscribe(self, model: str, handler: 'CacheHandler', key_fields: List[str]):
        """Register *handler* to be invalidated when *model* changes."""
        self._subscribers.setdefault(model, []).append((handler, key_fields))

    def notify_change(self, model: str, **identifiers):
        """Invalidate every subscriber whose required key fields are all present."""
        for handler, key_fields in self._subscribers.get(model, []):
            has_all_keys = all(field in identifiers for field in key_fields)
            if has_all_keys:
                handler.invalidate_by_model(model, **identifiers)
|
||||
|
||||
|
||||
class CacheHandler(Generic[T]):
|
||||
"""Base cache handler implementation"""
|
||||
"""
|
||||
Base cache handler implementation providing structured caching functionality.
|
||||
Uses generics to ensure type safety of cached data.
|
||||
|
||||
Type Parameters:
|
||||
T: Type of data being cached
|
||||
|
||||
Attributes:
|
||||
region (CacheRegion): Dogpile cache region for storage
|
||||
prefix (str): Prefix for all cache keys managed by this handler
|
||||
"""
|
||||
|
||||
def __init__(self, region: CacheRegion, prefix: str):
    """Bind the handler to a dogpile cache region and a key prefix."""
    self.region = region
    self.prefix = prefix
    # Names of the components every generated cache key must supply;
    # populated via configure_keys().
    self._key_components = []
|
||||
|
||||
def configure_keys(self, *components: str):
    """
    Configure required components for cache key generation.

    Args:
        *components: Required key component names (e.g. 'tenant_id').

    Returns:
        self for method chaining
    """
    self._key_components = components
    return self
|
||||
|
||||
def subscribe_to_model(self, model: str, key_fields: List[str]):
    """
    Subscribe this handler to invalidation notifications for *model*.

    Args:
        model: Model name to watch for changes.
        key_fields: Identifier fields required to rebuild this handler's key.

    Returns:
        self for method chaining
    """
    invalidation_manager.subscribe(model, self, key_fields)
    return self
|
||||
|
||||
def generate_key(self, **identifiers) -> str:
|
||||
"""
|
||||
Generate a cache key from provided identifiers.
|
||||
|
||||
Args:
|
||||
**identifiers: Key-value pairs for key components
|
||||
|
||||
Returns:
|
||||
Formatted cache key string
|
||||
|
||||
Raises:
|
||||
ValueError: If required components are missing
|
||||
"""
|
||||
missing = set(self._key_components) - set(identifiers.keys())
|
||||
if missing:
|
||||
raise ValueError(f"Missing key components: {missing}")
|
||||
@@ -60,6 +81,16 @@ class CacheHandler(Generic[T]):
|
||||
return f"{self.prefix}:{str(key)}"
|
||||
|
||||
def get(self, creator_func, **identifiers) -> T:
|
||||
"""
|
||||
Get or create a cached value.
|
||||
|
||||
Args:
|
||||
creator_func: Function to create value if not cached
|
||||
**identifiers: Key components for cache key
|
||||
|
||||
Returns:
|
||||
Cached or newly created value
|
||||
"""
|
||||
cache_key = self.generate_key(**identifiers)
|
||||
|
||||
def creator():
|
||||
@@ -75,15 +106,25 @@ class CacheHandler(Generic[T]):
|
||||
return self.from_cache_data(cached_data, **identifiers)
|
||||
|
||||
def invalidate(self, **identifiers):
    """
    Invalidate a specific cache entry.

    Args:
        **identifiers: Key components for the cache entry

    Raises:
        ValueError: Propagated from generate_key when required
        components are missing.
    """
    cache_key = self.generate_key(**identifiers)
    self.region.delete(cache_key)
|
||||
|
||||
def invalidate_by_model(self, model: str, **identifiers):
    """
    Invalidate cache entry based on model changes.

    Args:
        model: Changed model name
        **identifiers: Model instance identifiers
    """
    try:
        self.invalidate(**identifiers)
    except ValueError:
        # The notification did not carry all required key components;
        # nothing to invalidate for this handler.
        pass
|
||||
|
||||
|
||||
# Create global invalidation manager
|
||||
invalidation_manager = CacheInvalidationManager()
|
||||
306
common/utils/cache/config_cache.py
vendored
Normal file
306
common/utils/cache/config_cache.py
vendored
Normal file
@@ -0,0 +1,306 @@
|
||||
from typing import Dict, Any, Optional
|
||||
from pathlib import Path
|
||||
import yaml
|
||||
from packaging import version
|
||||
import os
|
||||
from flask import current_app
|
||||
|
||||
from common.utils.cache.base import CacheHandler
|
||||
|
||||
from config.type_defs import agent_types, task_types, tool_types, specialist_types
|
||||
|
||||
|
||||
class BaseConfigCacheHandler(CacheHandler[Dict[str, Any]]):
|
||||
"""Base handler for configuration caching"""
|
||||
|
||||
def __init__(self, region, config_type: str):
|
||||
"""
|
||||
Args:
|
||||
region: Cache region
|
||||
config_type: Type of configuration (agents, tasks, etc.)
|
||||
"""
|
||||
super().__init__(region, f'config_{config_type}')
|
||||
self.config_type = config_type
|
||||
self._types_module = None # Set by subclasses
|
||||
self._config_dir = None # Set by subclasses
|
||||
|
||||
def configure_keys_for_operation(self, operation: str):
|
||||
"""Configure required keys based on operation"""
|
||||
match operation:
|
||||
case 'get_types':
|
||||
self.configure_keys('type_name') # Only require type_name for type definitions
|
||||
case 'get_versions':
|
||||
self.configure_keys('type_name') # Only type_name needed for version tree
|
||||
case 'get_config':
|
||||
self.configure_keys('type_name', 'version') # Both needed for specific config
|
||||
case _:
|
||||
raise ValueError(f"Unknown operation: {operation}")
|
||||
|
||||
def _load_version_tree(self, type_name: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Load version tree for a specific type without loading full configurations
|
||||
|
||||
Args:
|
||||
type_name: Name of configuration type
|
||||
|
||||
Returns:
|
||||
Dict containing available versions and their metadata
|
||||
"""
|
||||
type_path = Path(self._config_dir) / type_name
|
||||
if not type_path.exists():
|
||||
raise ValueError(f"No configuration found for type {type_name}")
|
||||
|
||||
version_files = list(type_path.glob('*.yaml'))
|
||||
if not version_files:
|
||||
raise ValueError(f"No versions found for type {type_name}")
|
||||
|
||||
versions = {}
|
||||
latest_version = None
|
||||
latest_version_obj = None
|
||||
|
||||
for file_path in version_files:
|
||||
ver = file_path.stem # Get version from filename
|
||||
try:
|
||||
ver_obj = version.parse(ver)
|
||||
# Only load minimal metadata for version tree
|
||||
with open(file_path) as f:
|
||||
yaml_data = yaml.safe_load(f)
|
||||
metadata = yaml_data.get('metadata', {})
|
||||
versions[ver] = {
|
||||
'metadata': metadata,
|
||||
'file_path': str(file_path)
|
||||
}
|
||||
|
||||
# Track latest version
|
||||
if latest_version_obj is None or ver_obj > latest_version_obj:
|
||||
latest_version = ver
|
||||
latest_version_obj = ver_obj
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Error loading version {ver}: {e}")
|
||||
continue
|
||||
|
||||
current_app.logger.debug(f"Loaded versions for {type_name}: {versions}")
|
||||
current_app.logger.debug(f"Loaded versions for {type_name}: {latest_version}")
|
||||
return {
|
||||
'versions': versions,
|
||||
'latest_version': latest_version
|
||||
}
|
||||
|
||||
def to_cache_data(self, instance: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Convert the data to a cacheable format"""
|
||||
# For configuration data, we can just return the dictionary as is
|
||||
# since it's already in a serializable format
|
||||
return instance
|
||||
|
||||
def from_cache_data(self, data: Dict[str, Any], **kwargs) -> Dict[str, Any]:
|
||||
"""Convert cached data back to usable format"""
|
||||
# Similarly, we can return the data directly since it's already
|
||||
# in the format we need
|
||||
return data
|
||||
|
||||
def should_cache(self, value: Dict[str, Any]) -> bool:
|
||||
"""
|
||||
Validate if the value should be cached
|
||||
|
||||
Args:
|
||||
value: The value to be cached
|
||||
|
||||
Returns:
|
||||
bool: True if the value should be cached
|
||||
"""
|
||||
if not isinstance(value, dict):
|
||||
return False
|
||||
|
||||
# For type definitions
|
||||
if 'name' in value and 'description' in value:
|
||||
return True
|
||||
|
||||
# For configurations
|
||||
if 'versions' in value and 'latest_version' in value:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _load_type_definitions(self) -> Dict[str, Dict[str, str]]:
|
||||
"""Load type definitions from the corresponding type_defs module"""
|
||||
if not self._types_module:
|
||||
raise ValueError("_types_module must be set by subclass")
|
||||
|
||||
type_definitions = {
|
||||
type_id: {
|
||||
'name': info['name'],
|
||||
'description': info['description']
|
||||
}
|
||||
for type_id, info in self._types_module.items()
|
||||
}
|
||||
|
||||
current_app.logger.debug(f"Loaded type definitions: {type_definitions}")
|
||||
|
||||
return type_definitions
|
||||
|
||||
def _load_specific_config(self, type_name: str, version_str: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Load a specific configuration version
|
||||
|
||||
Args:
|
||||
type_name: Type name
|
||||
version_str: Version string
|
||||
|
||||
Returns:
|
||||
Configuration data
|
||||
"""
|
||||
version_tree = self.get_versions(type_name)
|
||||
versions = version_tree['versions']
|
||||
|
||||
if version_str == 'latest':
|
||||
version_str = version_tree['latest_version']
|
||||
|
||||
if version_str not in versions:
|
||||
raise ValueError(f"Version {version_str} not found for {type_name}")
|
||||
|
||||
file_path = versions[version_str]['file_path']
|
||||
|
||||
try:
|
||||
with open(file_path) as f:
|
||||
config = yaml.safe_load(f)
|
||||
current_app.logger.debug(f"Loaded config for {type_name}{version_str}: {config}")
|
||||
return config
|
||||
except Exception as e:
|
||||
raise ValueError(f"Error loading config from {file_path}: {e}")
|
||||
|
||||
def get_versions(self, type_name: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Get version tree for a type
|
||||
|
||||
Args:
|
||||
type_name: Type to get versions for
|
||||
|
||||
Returns:
|
||||
Dict with version information
|
||||
"""
|
||||
self.configure_keys_for_operation('get_versions')
|
||||
return self.get(
|
||||
lambda type_name: self._load_version_tree(type_name),
|
||||
type_name=type_name
|
||||
)
|
||||
|
||||
def get_latest_version(self, type_name: str) -> str:
|
||||
"""
|
||||
Get the latest version for a given type name.
|
||||
|
||||
Args:
|
||||
type_name: Name of the configuration type
|
||||
|
||||
Returns:
|
||||
Latest version string
|
||||
|
||||
Raises:
|
||||
ValueError: If type not found or no versions available
|
||||
"""
|
||||
version_tree = self.get_versions(type_name)
|
||||
if not version_tree or 'latest_version' not in version_tree:
|
||||
raise ValueError(f"No versions found for {type_name}")
|
||||
|
||||
return version_tree['latest_version']
|
||||
|
||||
def get_latest_patch_version(self, type_name: str, major_minor: str) -> str:
|
||||
"""
|
||||
Get the latest patch version for a given major.minor version.
|
||||
|
||||
Args:
|
||||
type_name: Name of the configuration type
|
||||
major_minor: Major.minor version (e.g. "1.0")
|
||||
|
||||
Returns:
|
||||
Latest patch version string (e.g. "1.0.3")
|
||||
|
||||
Raises:
|
||||
ValueError: If type not found or no matching versions
|
||||
"""
|
||||
version_tree = self.get_versions(type_name)
|
||||
if not version_tree or 'versions' not in version_tree:
|
||||
raise ValueError(f"No versions found for {type_name}")
|
||||
|
||||
# Filter versions that match the major.minor prefix
|
||||
matching_versions = [
|
||||
ver for ver in version_tree['versions'].keys()
|
||||
if ver.startswith(major_minor + '.')
|
||||
]
|
||||
|
||||
if not matching_versions:
|
||||
raise ValueError(f"No versions found for {type_name} with prefix {major_minor}")
|
||||
|
||||
# Return highest matching version
|
||||
latest_patch = max(matching_versions, key=version.parse)
|
||||
return latest_patch
|
||||
|
||||
def get_types(self) -> Dict[str, Dict[str, str]]:
|
||||
"""Get dictionary of available types with name and description"""
|
||||
self.configure_keys_for_operation('get_types')
|
||||
result = self.get(
|
||||
lambda type_name: self._load_type_definitions(),
|
||||
type_name=f'{self.config_type}_types',
|
||||
)
|
||||
return result
|
||||
|
||||
def get_config(self, type_name: str, version: Optional[str] = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Get configuration for a specific type and version
|
||||
If version not specified, returns latest
|
||||
|
||||
Args:
|
||||
type_name: Configuration type name
|
||||
version: Optional specific version to retrieve
|
||||
|
||||
Returns:
|
||||
Configuration data
|
||||
"""
|
||||
self.configure_keys_for_operation('get_config')
|
||||
version_str = version or 'latest'
|
||||
|
||||
return self.get(
|
||||
lambda type_name, version: self._load_specific_config(type_name, version),
|
||||
type_name=type_name,
|
||||
version=version_str
|
||||
)
|
||||
|
||||
|
||||
class AgentConfigCacheHandler(BaseConfigCacheHandler):
    """Handler for agent configurations."""

    # Attribute name under which the cache manager exposes this handler.
    handler_name = 'agent_config_cache'

    def __init__(self, region):
        super().__init__(region, 'agents')
        # Type registry and on-disk YAML directory for agent definitions.
        self._types_module = agent_types.AGENT_TYPES
        self._config_dir = os.path.join('config', 'agents')
|
||||
|
||||
|
||||
class TaskConfigCacheHandler(BaseConfigCacheHandler):
    """Handler for task configurations."""

    # Attribute name under which the cache manager exposes this handler.
    handler_name = 'task_config_cache'

    def __init__(self, region):
        super().__init__(region, 'tasks')
        # Type registry and on-disk YAML directory for task definitions.
        self._types_module = task_types.TASK_TYPES
        self._config_dir = os.path.join('config', 'tasks')
|
||||
|
||||
|
||||
class ToolConfigCacheHandler(BaseConfigCacheHandler):
    """Handler for tool configurations."""

    # Attribute name under which the cache manager exposes this handler.
    handler_name = 'tool_config_cache'

    def __init__(self, region):
        super().__init__(region, 'tools')
        # Type registry and on-disk YAML directory for tool definitions.
        self._types_module = tool_types.TOOL_TYPES
        self._config_dir = os.path.join('config', 'tools')
|
||||
|
||||
|
||||
class SpecialistConfigCacheHandler(BaseConfigCacheHandler):
    """Handler for specialist configurations."""

    # Attribute name under which the cache manager exposes this handler.
    handler_name = 'specialist_config_cache'

    def __init__(self, region):
        super().__init__(region, 'specialists')
        # Type registry and on-disk YAML directory for specialist definitions.
        self._types_module = specialist_types.SPECIALIST_TYPES
        self._config_dir = os.path.join('config', 'specialists')
|
||||
34
common/utils/cache/eveai_cache_manager.py
vendored
34
common/utils/cache/eveai_cache_manager.py
vendored
@@ -3,6 +3,8 @@ from typing import Type
|
||||
from flask import Flask
|
||||
|
||||
from common.utils.cache.base import CacheHandler
|
||||
from common.utils.cache.regions import create_cache_regions
|
||||
from common.utils.cache.config_cache import AgentConfigCacheHandler
|
||||
|
||||
|
||||
class EveAICacheManager:
|
||||
@@ -11,29 +13,39 @@ class EveAICacheManager:
|
||||
def __init__(self):
    # Cache regions keyed by region name (populated by init_app).
    self._regions = {}
    # Registered handler classes mapped to their region name.
    self._handlers = {}
    # Instantiated handlers keyed by handler_name (served via __getattr__).
    self._handler_instances = {}
|
||||
|
||||
def init_app(self, app: Flask):
    """Create the cache regions and wire up all registered handlers.

    Args:
        app: Flask application whose config supplies the Redis settings.
    """
    # Local import mirrors the module-level one; presumably kept to avoid
    # circular imports at startup — TODO confirm.
    from common.utils.cache.regions import create_cache_regions
    self._regions = create_cache_regions(app)

    # Expose each region as an attribute, e.g. self.eveai_model_region.
    for region_name, region in self._regions.items():
        setattr(self, f"{region_name}_region", region)

    # Instantiate every registered handler with its region and expose it
    # under its handler_name (both as an attribute and via __getattr__).
    for handler_class, region_name in self._handlers.items():
        region = self._regions[region_name]
        handler_instance = handler_class(region)
        handler_name = getattr(handler_class, 'handler_name', None)
        if handler_name:
            app.logger.debug(f"{handler_name} is registered")
            setattr(self, handler_name, handler_instance)
            # Keep _handler_instances in sync so __getattr__ lookups and
            # register_handler-created instances behave identically.
            self._handler_instances[handler_name] = handler_instance

    # Join the names explicitly; logging dict_keys() directly would print
    # the dict_keys(...) repr instead of a readable list.
    app.logger.info('Cache regions initialized: ' + ', '.join(self._regions.keys()))
|
||||
|
||||
def register_handler(self, handler_class: Type['CacheHandler'], region: str):
    """Register a cache handler class with its region.

    Safe to call before init_app(): instantiation is deferred until the
    region actually exists (init_app creates instances for every
    registered handler anyway).

    Args:
        handler_class: Handler class; must define a ``handler_name``
            class attribute.
        region: Name of the cache region the handler uses.

    Raises:
        ValueError: If handler_class lacks a handler_name attribute.
    """
    if not hasattr(handler_class, 'handler_name'):
        raise ValueError("Cache handler must define handler_name class attribute")
    self._handlers[handler_class] = region

    # Fixed: the original indexed self._regions unconditionally, which
    # raised KeyError when register_handler ran at import time, before
    # init_app() had created the regions (e.g. the module-level
    # ModelVariablesCacheHandler registration).
    if region in self._regions:
        handler_instance = handler_class(self._regions[region])
        self._handler_instances[handler_class.handler_name] = handler_instance
|
||||
|
||||
def invalidate_region(self, region_name: str):
    """Invalidate every entry in one cache region.

    Args:
        region_name: Name of the region to wipe.

    Raises:
        ValueError: If the region name is not known.
    """
    region = self._regions.get(region_name)
    if region is None:
        raise ValueError(f"Unknown cache region: {region_name}")
    region.invalidate()
|
||||
|
||||
def __getattr__(self, name):
    """Resolve registered cache handlers looked up by their handler_name.

    Only called when normal attribute lookup fails; falls back to the
    handler instances registered via register_handler().
    """
    handlers = object.__getattribute__(self, '_handler_instances')
    try:
        return handlers[name]
    except KeyError:
        raise AttributeError(f"'EveAICacheManager' object has no attribute '{name}'") from None
|
||||
|
||||
13
common/utils/cache/regions.py
vendored
13
common/utils/cache/regions.py
vendored
@@ -1,4 +1,5 @@
|
||||
# common/utils/cache/regions.py
|
||||
import time
|
||||
|
||||
from dogpile.cache import make_region
|
||||
from urllib.parse import urlparse
|
||||
@@ -36,6 +37,7 @@ def create_cache_regions(app):
|
||||
"""Initialize all cache regions with app config"""
|
||||
redis_config = get_redis_config(app)
|
||||
regions = {}
|
||||
startup_time = int(time.time())
|
||||
|
||||
# Region for model-related caching (ModelVariables etc)
|
||||
model_region = make_region(name='eveai_model').configure(
|
||||
@@ -61,5 +63,16 @@ def create_cache_regions(app):
|
||||
)
|
||||
regions['eveai_workers'] = eveai_workers_region
|
||||
|
||||
eveai_config_region = make_region(name='eveai_config').configure(
|
||||
'dogpile.cache.redis',
|
||||
arguments={
|
||||
**redis_config,
|
||||
'redis_expiration_time': None, # No expiration in Redis
|
||||
'key_mangler': lambda key: f"startup_{startup_time}:{key}" # Prefix all keys
|
||||
},
|
||||
replace_existing_backend=True
|
||||
)
|
||||
regions['eveai_config'] = eveai_config_region
|
||||
|
||||
return regions
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@ from common.models.document import Document, DocumentVersion, Catalog
|
||||
from common.extensions import db, minio_client
|
||||
from common.utils.celery_utils import current_celery
|
||||
from flask import current_app
|
||||
from flask_security import current_user
|
||||
import requests
|
||||
from urllib.parse import urlparse, unquote, urlunparse
|
||||
import os
|
||||
@@ -16,6 +15,7 @@ from .config_field_types import normalize_json_field
|
||||
from .eveai_exceptions import (EveAIInvalidLanguageException, EveAIDoubleURLException, EveAIUnsupportedFileType,
|
||||
EveAIInvalidCatalog, EveAIInvalidDocument, EveAIInvalidDocumentVersion, EveAIException)
|
||||
from ..models.user import Tenant
|
||||
from common.utils.model_logging_utils import set_logging_information, update_logging_information
|
||||
|
||||
|
||||
def create_document_stack(api_input, file, filename, extension, tenant_id):
|
||||
@@ -136,35 +136,6 @@ def upload_file_for_version(doc_vers, file, extension, tenant_id):
|
||||
raise
|
||||
|
||||
|
||||
def set_logging_information(obj, timestamp):
|
||||
obj.created_at = timestamp
|
||||
obj.updated_at = timestamp
|
||||
|
||||
user_id = get_current_user_id()
|
||||
if user_id:
|
||||
obj.created_by = user_id
|
||||
obj.updated_by = user_id
|
||||
|
||||
|
||||
def update_logging_information(obj, timestamp):
|
||||
obj.updated_at = timestamp
|
||||
|
||||
user_id = get_current_user_id()
|
||||
if user_id:
|
||||
obj.updated_by = user_id
|
||||
|
||||
|
||||
def get_current_user_id():
|
||||
try:
|
||||
if current_user and current_user.is_authenticated:
|
||||
return current_user.id
|
||||
else:
|
||||
return None
|
||||
except Exception:
|
||||
# This will catch any errors if current_user is not available (e.g., in API context)
|
||||
return None
|
||||
|
||||
|
||||
def get_extension_from_content_type(content_type):
|
||||
content_type_map = {
|
||||
'text/html': 'html',
|
||||
|
||||
30
common/utils/model_logging_utils.py
Normal file
30
common/utils/model_logging_utils.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from flask_security import current_user
|
||||
|
||||
|
||||
def set_logging_information(obj, timestamp):
    """Initialise audit fields on a freshly created model instance.

    Sets created_at/updated_at to *timestamp* and, when a user is
    available, created_by/updated_by to that user's id.

    Args:
        obj: Model instance exposing the audit columns.
        timestamp: Timestamp recorded for both creation and update.
    """
    obj.created_at = timestamp
    obj.updated_at = timestamp

    actor_id = get_current_user_id()
    if actor_id:
        obj.created_by = actor_id
        obj.updated_by = actor_id
|
||||
|
||||
|
||||
def update_logging_information(obj, timestamp):
    """Refresh audit fields on an updated model instance.

    Sets updated_at to *timestamp* and, when a user is available,
    updated_by to that user's id.

    Args:
        obj: Model instance exposing the audit columns.
        timestamp: Timestamp recorded for the update.
    """
    obj.updated_at = timestamp

    actor_id = get_current_user_id()
    if actor_id:
        obj.updated_by = actor_id
|
||||
|
||||
|
||||
def get_current_user_id():
    """Return the authenticated user's id, or None when unavailable.

    Any error while touching current_user (e.g. outside a request
    context, as in API or worker code) is deliberately treated as
    "no user".
    """
    try:
        if current_user and current_user.is_authenticated:
            return current_user.id
    except Exception:
        # current_user may not exist at all outside a request context.
        return None
    return None
|
||||
@@ -227,62 +227,6 @@ class ModelVariables:
|
||||
raise
|
||||
|
||||
|
||||
class ModelVariablesCacheHandler(CacheHandler[ModelVariables]):
|
||||
handler_name = 'model_vars_cache' # Used to access handler instance from cache_manager
|
||||
|
||||
def __init__(self, region):
|
||||
super().__init__(region, 'model_variables')
|
||||
self.configure_keys('tenant_id')
|
||||
self.subscribe_to_model('Tenant', ['tenant_id'])
|
||||
|
||||
def to_cache_data(self, instance: ModelVariables) -> Dict[str, Any]:
|
||||
return {
|
||||
'tenant_id': instance.tenant_id,
|
||||
'variables': instance._variables,
|
||||
'last_updated': dt.now(tz=tz.utc).isoformat()
|
||||
}
|
||||
|
||||
def from_cache_data(self, data: Dict[str, Any], tenant_id: int, **kwargs) -> ModelVariables:
|
||||
instance = ModelVariables(tenant_id, data.get('variables'))
|
||||
return instance
|
||||
|
||||
def should_cache(self, value: Dict[str, Any]) -> bool:
|
||||
required_fields = {'tenant_id', 'variables'}
|
||||
return all(field in value for field in required_fields)
|
||||
|
||||
|
||||
# Register the handler with the cache manager
|
||||
cache_manager.register_handler(ModelVariablesCacheHandler, 'eveai_model')
|
||||
|
||||
|
||||
# Helper function to get cached model variables
|
||||
def get_model_variables(tenant_id: int) -> ModelVariables:
    """
    Get ModelVariables instance for a tenant, either from cache or newly created.

    Args:
        tenant_id: The tenant's ID

    Returns:
        ModelVariables: Instance with either cached or fresh data (the lambda
        is the creator invoked by the cache on a miss).
    """
    # NOTE(review): removed an unreachable `return ModelVariables(tenant_id=tenant_id)`
    # that followed this return, plus the commented-out duplicate implementation.
    return cache_manager.model_vars_cache.get(
        lambda tenant_id: ModelVariables(tenant_id),  # creator on cache miss
        tenant_id=tenant_id
    )
|
||||
|
||||
192
common/utils/specialist_utils.py
Normal file
192
common/utils/specialist_utils.py
Normal file
@@ -0,0 +1,192 @@
|
||||
from datetime import datetime as dt, timezone as tz
|
||||
from typing import Optional, Dict, Any
|
||||
from flask import current_app
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from common.extensions import db, cache_manager
|
||||
from common.models.interaction import (
|
||||
Specialist, EveAIAgent, EveAITask, EveAITool
|
||||
)
|
||||
from common.utils.model_logging_utils import set_logging_information, update_logging_information
|
||||
|
||||
|
||||
def initialize_specialist(specialist_id: int, specialist_type: str, specialist_version: str):
    """
    Initialize an agentic specialist by creating all its components based on configuration.

    Args:
        specialist_id: ID of the specialist to initialize
        specialist_type: Type of the specialist
        specialist_version: Version of the specialist type to use

    Raises:
        ValueError: If specialist not found or invalid configuration
        SQLAlchemyError: If database operations fail
    """
    config = cache_manager.specialist_config_cache.get_config(specialist_type, specialist_version)
    if not config:
        raise ValueError(f"No configuration found for {specialist_type} version {specialist_version}")

    framework = config['framework']
    if framework == 'langchain':
        # Langchain requires no additional items; all configuration lives on the specialist.
        pass

    specialist = Specialist.query.get(specialist_id)
    if not specialist:
        raise ValueError(f"Specialist with ID {specialist_id} not found")

    if framework == 'crewai':
        initialize_crewai_specialist(specialist, config)
|
||||
|
||||
|
||||
def initialize_crewai_specialist(specialist: Specialist, config: Dict[str, Any]):
    """Create the crewai components (agents, tasks, tools) declared in *config*.

    All components are staged on the session and committed in one transaction;
    any failure rolls the whole initialization back and re-raises.
    """
    timestamp = dt.now(tz=tz.utc)

    # Each optional config section maps to the factory that stages that component type.
    component_factories = (
        ('agents', _create_agent),
        ('tasks', _create_task),
        ('tools', _create_tool),
    )

    try:
        for section, factory in component_factories:
            for item in config.get(section, []):
                factory(
                    specialist.id,
                    item['type'],
                    item['version'],
                    item.get('name'),
                    item.get('description'),
                    timestamp,
                )

        db.session.commit()
        current_app.logger.info(f"Successfully initialized crewai specialist {specialist.id}")

    except SQLAlchemyError as e:
        db.session.rollback()
        current_app.logger.error(f"Database error initializing crewai specialist {specialist.id}: {str(e)}")
        raise
    except Exception as e:
        db.session.rollback()
        current_app.logger.error(f"Error initializing crewai specialist {specialist.id}: {str(e)}")
        raise
|
||||
|
||||
|
||||
def _create_agent(
    specialist_id: int,
    agent_type: str,
    agent_version: str,
    name: Optional[str] = None,
    description: Optional[str] = None,
    timestamp: Optional[dt] = None
) -> EveAIAgent:
    """Create and stage an EveAIAgent built from the cached type configuration.

    Explicit *name*/*description* win over the type defaults; role/goal/backstory
    and tuning data start empty. The agent is added to the session but not committed.
    """
    stamp = dt.now(tz=tz.utc) if timestamp is None else timestamp

    # Type defaults (name/description) come from the configuration cache.
    defaults = cache_manager.agent_config_cache.get_config(agent_type, agent_version)

    agent = EveAIAgent(
        specialist_id=specialist_id,
        name=name or defaults.get('name', agent_type),
        description=description or defaults.get('description', ''),
        type=agent_type,
        type_version=agent_version,
        role=None,
        goal=None,
        backstory=None,
        tuning=False,
        configuration=None,
        arguments=None
    )

    set_logging_information(agent, stamp)
    db.session.add(agent)
    return agent
|
||||
|
||||
|
||||
def _create_task(
    specialist_id: int,
    task_type: str,
    task_version: str,
    name: Optional[str] = None,
    description: Optional[str] = None,
    timestamp: Optional[dt] = None
) -> EveAITask:
    """Create and stage an EveAITask built from the cached type configuration.

    Explicit *name*/*description* win over the type defaults; output, tuning and
    context fields start empty. The task is added to the session but not committed.
    """
    stamp = dt.now(tz=tz.utc) if timestamp is None else timestamp

    # Type defaults (name/description) come from the configuration cache.
    defaults = cache_manager.task_config_cache.get_config(task_type, task_version)

    task = EveAITask(
        specialist_id=specialist_id,
        name=name or defaults.get('name', task_type),
        description=description or defaults.get('description', ''),
        type=task_type,
        type_version=task_version,
        expected_output=None,
        tuning=False,
        configuration=None,
        arguments=None,
        context=None,
        asynchronous=False,
    )

    set_logging_information(task, stamp)
    db.session.add(task)
    return task
|
||||
|
||||
|
||||
def _create_tool(
    specialist_id: int,
    tool_type: str,
    tool_version: str,
    name: Optional[str] = None,
    description: Optional[str] = None,
    timestamp: Optional[dt] = None
) -> EveAITool:
    """Create and stage an EveAITool built from the cached type configuration.

    Explicit *name*/*description* win over the type defaults; tuning data starts
    empty. The tool is added to the session but not committed.
    """
    stamp = dt.now(tz=tz.utc) if timestamp is None else timestamp

    # Type defaults (name/description) come from the configuration cache.
    defaults = cache_manager.tool_config_cache.get_config(tool_type, tool_version)

    tool = EveAITool(
        specialist_id=specialist_id,
        name=name or defaults.get('name', tool_type),
        description=description or defaults.get('description', ''),
        type=tool_type,
        type_version=tool_version,
        tuning=False,
        configuration=None,
        arguments=None,
    )

    set_logging_information(tool, stamp)
    db.session.add(tool)
    return tool
|
||||
46
common/utils/startup_eveai.py
Normal file
46
common/utils/startup_eveai.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import time
|
||||
|
||||
from redis import Redis
|
||||
|
||||
from common.extensions import cache_manager
|
||||
|
||||
|
||||
def perform_startup_actions(app):
    """Run the one-time application startup actions (currently only cache invalidation)."""
    perform_startup_invalidation(app)
|
||||
|
||||
|
||||
def perform_startup_invalidation(app):
    """
    Perform cache invalidation only once during startup using a persistent marker (also called flag or semaphore
    - see docs).
    Uses a combination of a Redis lock and a marker key to ensure invalidation happens
    exactly once per deployment, even when multiple workers start concurrently.
    Errors are logged but never propagate, so startup is never blocked.
    """
    redis_client = Redis.from_url(app.config['REDIS_BASE_URI'])
    startup_time = int(time.time())
    marker_key = 'startup_invalidation_completed'
    lock_key = 'startup_invalidation_lock'

    try:
        # First try to get the lock (auto-expires after 30s so a crashed holder can't block forever)
        lock = redis_client.lock(lock_key, timeout=30)
        if lock.acquire(blocking=False):
            try:
                # Check if invalidation was already performed
                if not redis_client.get(marker_key):
                    # Perform invalidation
                    cache_manager.invalidate_region('eveai_config')
                    # Set marker with a 5-minute expiry (longer than any reasonable startup
                    # sequence). NOTE: the previous comment claimed 1 hour; 300s is 5 minutes.
                    redis_client.setex(marker_key, 300, str(startup_time))
                    app.logger.info("Startup cache invalidation completed")
                else:
                    app.logger.info("Startup cache invalidation already performed")
            finally:
                lock.release()
        else:
            app.logger.info("Another process is handling startup invalidation")

    except Exception as e:
        # In case of error, we don't want to block the application startup.
        app.logger.error(f"Error during startup invalidation: {e}")
|
||||
17
config/agents/EMAIL_CONTENT_AGENT/1.0.0.yaml
Normal file
17
config/agents/EMAIL_CONTENT_AGENT/1.0.0.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
version: "1.0.0"
|
||||
name: "Email Content Agent"
|
||||
role: >
|
||||
Email Content Writer
|
||||
goal: >
|
||||
Craft a highly personalized email that resonates with the {end_user_role}'s context and identification (personal and
|
||||
company if available).
|
||||
{custom_goal}
|
||||
backstory: >
|
||||
You are an expert in writing compelling, personalized emails that capture the {end_user_role}'s attention and drive
|
||||
engagement. You are perfectly multilingual, and can write the mail in the native language of the {end_user_role}.
|
||||
{custom_backstory}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "An Agent that writes engaging emails."
|
||||
changes: "Initial version"
|
||||
16
config/agents/EMAIL_ENGAGEMENT_AGENT/1.0.0.yaml
Normal file
16
config/agents/EMAIL_ENGAGEMENT_AGENT/1.0.0.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
version: "1.0.0"
|
||||
name: "Email Engagement Agent"
|
||||
role: >
|
||||
Engagement Optimization Specialist {custom_role}
|
||||
goal: >
|
||||
You ensure that the email includes strong CTAs and strategically placed engagement hooks that encourage the
|
||||
{end_user_role} to take immediate action. {custom_goal}
|
||||
backstory: >
|
||||
You specialize in optimizing content to ensure that it not only resonates with the recipient but also encourages them
|
||||
to take the desired action.
|
||||
{custom_backstory}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
  description: "An Agent that ensures the email is engaging and leads to the maximal desired action"
|
||||
changes: "Initial version"
|
||||
15
config/agents/IDENTIFICATION_AGENT/1.0.0.yaml
Normal file
15
config/agents/IDENTIFICATION_AGENT/1.0.0.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
version: "1.0.0"
|
||||
name: "Identification Agent"
|
||||
role: >
|
||||
Identification Administrative force. {custom_role}
|
||||
goal: >
|
||||
You are an administrative force that tries to gather identification information of an end-user through conversation.
|
||||
{custom_goal}
|
||||
backstory: >
|
||||
  You are an administrative force for {company}. Your task is to identify the person in a conversation, so he or she
|
||||
can easily be contacted later on. {custom_backstory}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "An Agent that gathers identification information"
|
||||
changes: "Initial version"
|
||||
19
config/agents/RAG_AGENT/1.0.0.yaml
Normal file
19
config/agents/RAG_AGENT/1.0.0.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
version: "1.0.0"
|
||||
name: "Rag Agent"
|
||||
role: >
|
||||
{company} Spokesperson. {custom_role}
|
||||
goal: >
|
||||
You get questions by a human correspondent, and give answers based on a given context, taking into account the history
|
||||
of the current conversation. {custom_goal}
|
||||
backstory: >
|
||||
You are the primary contact for {company}. You are known by {name}, and can be addressed by this name, or you. You are
|
||||
a very good communicator, and adapt to the style used by the human asking for information (e.g. formal or informal).
|
||||
You always stay correct and polite, whatever happens. And you ensure no discriminating language is used.
|
||||
You are perfectly multilingual in all known languages, and do your best to answer questions in {language}, whatever
|
||||
language the context provided to you is in.
|
||||
{custom_backstory}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "An Agent that does RAG based on a user's question, RAG content & history"
|
||||
changes: "Initial version"
|
||||
22
config/agents/SPIN_DETECTION_AGENT/1.0.0.yaml
Normal file
22
config/agents/SPIN_DETECTION_AGENT/1.0.0.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
version: "1.0.0"
|
||||
name: "SPIN Sales Assistant"
|
||||
role: >
|
||||
Sales Assistant for {company} on {products}. {custom_role}
|
||||
goal: >
|
||||
Your main job is to help your sales specialist to analyze an ongoing conversation with a customer, and detect
|
||||
SPIN-related information. {custom_goal}
|
||||
backstory: >
|
||||
You are a sales assistant for {company} on {products}. You are known by {name}, and can be addressed by this name, or you. You are
|
||||
  trained to understand and analyse ongoing conversations. You are proficient in detecting SPIN-related information in a
|
||||
conversation.
|
||||
SPIN stands for:
|
||||
- Situation questions & information - Understanding the customer's current context
|
||||
- Problem questions & information - Uncovering challenges and pain points
|
||||
- Implication questions & information - Exploring consequences of those problems
|
||||
- Need-payoff questions & information - Helping customers realize value of solutions
|
||||
{custom_backstory}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "An Agent that detects SPIN information in an ongoing conversation"
|
||||
changes: "Initial version"
|
||||
25
config/agents/SPIN_SALES_SPECIALIST_AGENT/1.0.0.yaml
Normal file
25
config/agents/SPIN_SALES_SPECIALIST_AGENT/1.0.0.yaml
Normal file
@@ -0,0 +1,25 @@
|
||||
version: "1.0.0"
|
||||
name: "SPIN Sales Specialist"
|
||||
role: >
|
||||
Sales Specialist for {company} on {products}. {custom_role}
|
||||
goal: >
|
||||
Your main job is to do sales using the SPIN selling methodology in a first conversation with a potential customer.
|
||||
{custom_goal}
|
||||
backstory: >
|
||||
You are a sales specialist for {company} on {products}. You are known by {name}, and can be addressed by this name,
|
||||
or you. You have an assistant that provides you with already detected SPIN-information in an ongoing conversation. You
|
||||
decide on follow-up questions for more in-depth information to ensure we get the required information that may lead to
|
||||
selling {products}.
|
||||
SPIN stands for:
|
||||
- Situation questions & information - Understanding the customer's current context
|
||||
- Problem questions & information - Uncovering challenges and pain points
|
||||
- Implication questions & information - Exploring consequences of those problems
|
||||
- Need-payoff questions & information - Helping customers realize value of solutions
|
||||
{custom_backstory}
|
||||
You are acquainted with the following product information:
|
||||
{product_information}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "An Agent that asks for Follow-up questions for SPIN-process"
|
||||
changes: "Initial version"
|
||||
@@ -105,6 +105,7 @@ class Config(object):
|
||||
# JWT settings
|
||||
JWT_SECRET_KEY = environ.get('JWT_SECRET_KEY')
|
||||
JWT_ACCESS_TOKEN_EXPIRES = timedelta(hours=1) # Set token expiry to 1 hour
|
||||
JWT_ACCESS_TOKEN_EXPIRES_DEPLOY = timedelta(hours=24) # Set long-lived token for deployment
|
||||
|
||||
# API Encryption
|
||||
API_ENCRYPTION_KEY = environ.get('API_ENCRYPTION_KEY')
|
||||
@@ -165,7 +166,7 @@ class DevConfig(Config):
|
||||
DEVELOPMENT = True
|
||||
DEBUG = True
|
||||
FLASK_DEBUG = True
|
||||
EXPLAIN_TEMPLATE_LOADING = False
|
||||
EXPLAIN_TEMPLATE_LOADING = True
|
||||
|
||||
# Database Settings
|
||||
DB_HOST = environ.get('DB_HOST', 'localhost')
|
||||
|
||||
125
config/specialists/SPIN_SPECIALIST/1.0.0.yaml
Normal file
125
config/specialists/SPIN_SPECIALIST/1.0.0.yaml
Normal file
@@ -0,0 +1,125 @@
|
||||
version: 1.0.0
|
||||
name: "Spin Sales Specialist"
|
||||
framework: "crewai"
|
||||
configuration:
|
||||
name:
|
||||
name: "name"
|
||||
type: "str"
|
||||
description: "The name the specialist is called upon."
|
||||
required: true
|
||||
company:
|
||||
name: "company"
|
||||
type: "str"
|
||||
description: "The name of your company. If not provided, your tenant's name will be used."
|
||||
required: false
|
||||
products:
|
||||
name: "products"
|
||||
type: "List[str]"
|
||||
description: "The products or services you're providing"
|
||||
required: false
|
||||
product_information:
|
||||
name: "product_information"
|
||||
type: "text"
|
||||
description: "Information on the products you are selling, such as ICP (Ideal Customer Profile), Pitch, ..."
|
||||
required: false
|
||||
  engagement_options:
    name: "engagement_options"
    type: "text"
    description: "Engagement options such as email, phone number, booking link, ..."
    required: false
tenant_language:
|
||||
name: "tenant_language"
|
||||
type: "str"
|
||||
description: "The language code used for internal information. If not provided, the tenant's default language will be used"
|
||||
required: false
|
||||
arguments:
|
||||
language:
|
||||
name: "Language"
|
||||
type: "str"
|
||||
description: "Language code to be used for receiving questions and giving answers"
|
||||
required: true
|
||||
query:
|
||||
name: "query"
|
||||
type: "str"
|
||||
description: "Query or response to process"
|
||||
required: true
|
||||
identification:
|
||||
name: "identification"
|
||||
type: "text"
|
||||
description: "Initial identification information when available"
|
||||
required: false
|
||||
results:
|
||||
detailed_query:
|
||||
name: "detailed_query"
|
||||
type: "str"
|
||||
description: "The query detailed with the Chat Session History."
|
||||
required: true
|
||||
answer:
|
||||
name: "answer"
|
||||
type: "str"
|
||||
description: "Answer to the query"
|
||||
required: true
|
||||
citations:
|
||||
name: "citations"
|
||||
type: "List[str]"
|
||||
description: "List of citations"
|
||||
required: false
|
||||
insufficient_info:
|
||||
name: "insufficient_info"
|
||||
type: "bool"
|
||||
description: "Whether or not the query is insufficient info"
|
||||
required: true
|
||||
spin:
|
||||
situation_information:
|
||||
name: "situation_information"
|
||||
type: "List[str]"
|
||||
description: "A list of situation descriptions"
|
||||
required: false
|
||||
problem_information:
|
||||
name: "problem_information"
|
||||
type: "List[str]"
|
||||
description: "A list of problems"
|
||||
required: false
|
||||
implication_information:
|
||||
name: "implication_information"
|
||||
type: "List[str]"
|
||||
description: "A list of implications"
|
||||
required: false
|
||||
needs_information:
|
||||
name: "needs_information"
|
||||
type: "List[str]"
|
||||
description: "A list of needs"
|
||||
required: false
|
||||
agents:
  - type: "RAG_AGENT"
    version: "1.0.0"
    name: "Default RAG Agent" # Just added as an example. Overwrites the default agent name.
    description: "An Agent that does RAG based on a user's question, RAG content & history" # Just added as an example. Overwrites the default agent description.
  - type: "SPIN_DETECTION_AGENT"
    version: "1.0.0"
  - type: "SPIN_SALES_SPECIALIST_AGENT"
    version: "1.0.0"
  - type: "IDENTIFICATION_AGENT"
    version: "1.0.0"
  - type: "EMAIL_CONTENT_AGENT"
    version: "1.0.0"
  - type: "EMAIL_ENGAGEMENT_AGENT"
    version: "1.0.0"
|
||||
tasks:
  - type: "RAG_TASK"
    version: "1.0.0"
  - type: "SPIN_DETECT_TASK"
    version: "1.0.0"
  - type: "SPIN_QUESTIONS_TASK"
    version: "1.0.0"
  - type: "IDENTIFICATION_DETECTION_TASK"
    version: "1.0.0"
  - type: "IDENTIFICATION_QUESTIONS_TASK"
    version: "1.0.0"
  - type: "EMAIL_LEAD_DRAFTING_TASK"
    version: "1.0.0"
  - type: "EMAIL_LEAD_ENGAGEMENT_TASK"
    version: "1.0.0"
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
changes: "Initial version"
|
||||
51
config/specialists/STANDARD_RAG/1.0.0.yaml
Normal file
51
config/specialists/STANDARD_RAG/1.0.0.yaml
Normal file
@@ -0,0 +1,51 @@
|
||||
version: 1.0.0
|
||||
name: "Standard RAG Specialist"
|
||||
framework: "langchain"
|
||||
configuration:
|
||||
specialist_context:
|
||||
name: "Specialist Context"
|
||||
type: "text"
|
||||
description: "The context to be used by the specialist."
|
||||
required: false
|
||||
temperature:
|
||||
name: "Temperature"
|
||||
type: "number"
|
||||
description: "The inference temperature to be used by the specialist."
|
||||
required: false
|
||||
default: 0.3
|
||||
arguments:
|
||||
language:
|
||||
name: "Language"
|
||||
type: "str"
|
||||
description: "Language code to be used for receiving questions and giving answers"
|
||||
required: true
|
||||
query:
|
||||
name: "query"
|
||||
type: "str"
|
||||
description: "Query to answer"
|
||||
required: true
|
||||
results:
|
||||
detailed_query:
|
||||
name: "detailed_query"
|
||||
type: "str"
|
||||
description: "The query detailed with the Chat Session History."
|
||||
required: true
|
||||
answer:
|
||||
name: "answer"
|
||||
type: "str"
|
||||
description: "Answer to the query"
|
||||
required: true
|
||||
citations:
|
||||
name: "citations"
|
||||
type: "List[str]"
|
||||
description: "List of citations"
|
||||
required: false
|
||||
insufficient_info:
|
||||
name: "insufficient_info"
|
||||
type: "bool"
|
||||
description: "Whether or not the query is insufficient info"
|
||||
required: true
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
changes: "Initial version"
|
||||
34
config/tasks/EMAIL_LEAD_DRAFTING_TASK/1.0.0.yaml
Normal file
34
config/tasks/EMAIL_LEAD_DRAFTING_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,34 @@
|
||||
version: "1.0.0"
|
||||
name: "Email Lead Draft Creation"
|
||||
description: >
|
||||
Craft a highly personalized email using the lead's name, job title, company information, and any relevant personal or
|
||||
company achievements when available. The email should speak directly to the lead's interests and the needs
|
||||
of their company.
|
||||
This mail is the consequence of a first conversation. You have information available from that conversation in the
|
||||
- SPIN-context (in between triple %)
|
||||
- personal and company information (in between triple $)
|
||||
Information might be missing however, as it might not be gathered in that first conversation.
|
||||
Don't use any salutations or closing remarks, nor too complex sentences.
|
||||
|
||||
Our Company and Product:
|
||||
- Company Name: {company}
|
||||
- Products: {products}
|
||||
- Product information: {product_information}
|
||||
|
||||
{customer_role}'s Identification:
|
||||
$$${Identification}$$$
|
||||
|
||||
SPIN context:
|
||||
%%%{SPIN}%%%
|
||||
|
||||
{custom_description}
|
||||
expected_output: >
|
||||
A personalized email draft that:
|
||||
- Addresses the lead by name
|
||||
- Acknowledges their role and company
|
||||
- Highlights how {company} can meet their specific needs or interests
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "Email Drafting Task towards a Lead"
|
||||
changes: "Initial version"
|
||||
28
config/tasks/EMAIL_LEAD_ENGAGEMENT_TASK/1.0.0.yaml
Normal file
28
config/tasks/EMAIL_LEAD_ENGAGEMENT_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
version: "1.0.0"
|
||||
name: "Email Lead Engagement Creation"
|
||||
description: >
|
||||
Review a personalized email and optimize it with strong CTAs and engagement hooks. Keep in mind that this email is
|
||||
the consequence of a first conversation.
|
||||
  Don't use any salutations or closing remarks, nor too complex sentences. Keep it short and to the point.
|
||||
Ensure the email encourages the lead to schedule a meeting or take
|
||||
another desired action immediately.
|
||||
|
||||
Our Company and Product:
|
||||
- Company Name: {company}
|
||||
- Products: {products}
|
||||
- Product information: {product_information}
|
||||
|
||||
Engagement options:
|
||||
{engagement_options}
|
||||
|
||||
{custom_description}
|
||||
expected_output: >
|
||||
An optimized email ready for sending, complete with:
|
||||
- Strong CTAs
|
||||
- Strategically placed engagement hooks that encourage immediate action
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "Make an Email draft more engaging"
|
||||
changes: "Initial version"
|
||||
16
config/tasks/IDENTIFICATION_DETECTION_TASK/1.0.0.yaml
Normal file
16
config/tasks/IDENTIFICATION_DETECTION_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
version: "1.0.0"
|
||||
name: "Identification Gathering"
|
||||
description: >
|
||||
Detect and pass on identification information in the ongoing conversation, from within the following information:
|
||||
{question}
|
||||
Add to or refine the following already gathered identification information (between triple $)
|
||||
$$${Identification}$$$
|
||||
{custom_description}
|
||||
expected_output: >
|
||||
Identification information such as name, email, phone number, company, role, company website, ...
|
||||
{custom_expected_output}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "A Task that gathers identification information from a conversation"
|
||||
changes: "Initial version"
|
||||
15
config/tasks/IDENTIFICATION_QUESTIONS_TASK/1.0.0.yaml
Normal file
15
config/tasks/IDENTIFICATION_QUESTIONS_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
version: "1.0.0"
|
||||
name: "Define Identification Questions"
|
||||
description: >
|
||||
Ask questions to complete or confirm the identification information gathered.
|
||||
Current Identification Information:
|
||||
$$${Identification}$$$
|
||||
{custom_description}
|
||||
expected_output: >
|
||||
Top 2 questions to ask in order to complete identification.
|
||||
{custom_expected_output}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "A Task to define identification (person & company) questions"
|
||||
changes: "Initial version"
|
||||
23
config/tasks/RAG_TASK/1.0.0.yaml
Normal file
23
config/tasks/RAG_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,23 @@
|
||||
version: "1.0.0"
|
||||
name: "RAG Task"
|
||||
description: >
|
||||
Answer the question based on the following context, delimited between triple backquotes, and taking into account
|
||||
the history of the discussion, in between triple %
|
||||
{custom_description}
|
||||
Use the following {language} in your communication, and cite the sources used at the end of the full conversation.
|
||||
If the question cannot be answered using the given context, say "I have insufficient information to answer this question."
|
||||
Context:
|
||||
```{context}```
|
||||
History:
|
||||
%%%{history}%%%
|
||||
Question:
|
||||
{question}
|
||||
expected_output: >
|
||||
An answer to the question asked formatted in markdown, without '```'.
|
||||
A list of sources used in generating the answer.
|
||||
An indication (True or False) of your ability to provide an answer.
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "A Task that gives RAG-based answers"
|
||||
changes: "Initial version"
|
||||
28
config/tasks/SPIN_DETECT_TASK/1.0.0.yaml
Normal file
28
config/tasks/SPIN_DETECT_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
version: "1.0.0"
|
||||
name: "SPIN Information Detection"
|
||||
description: >
|
||||
Detect the SPIN-context, taking into account the history of the discussion (in between triple %) with main focus on
|
||||
the latest reply (which can contain answers on previously asked questions by the user). Do not remove elements from
|
||||
the known SPIN (in between triple $) analysis unless explicitly stated by the end user in the latest reply. In all other cases, refine the
|
||||
current SPIN analysis or add elements to it.
|
||||
{custom_description}
|
||||
Use the following {tenant_language} to define the SPIN-elements. If no additional information can be added, just
|
||||
return the already known SPIN.
|
||||
History:
|
||||
%%%{history}%%%
|
||||
Known SPIN:
|
||||
$$${SPIN}$$$
|
||||
Latest reply:
|
||||
{question}
|
||||
expected_output: >
|
||||
The SPIN analysis, comprised of:
|
||||
- Situation information - Information to understanding the customer's current context, as a markdown list without '```'.
|
||||
- Problem information - Information on uncovering the customer's challenges and pain points, as a markdown list without '```'.
|
||||
- Implication information - Exploration of the consequences of those problems, as a markdown list without '```'.
|
||||
- Need-payoff information - Helping customers realize value of solutions and defining their direct needs, as a markdown list without '```'.
|
||||
{custom_expected_output}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "A Task that performs SPIN Information Detection"
|
||||
changes: "Initial version"
|
||||
26
config/tasks/SPIN_QUESTIONS_TASK/1.0.0.yaml
Normal file
26
config/tasks/SPIN_QUESTIONS_TASK/1.0.0.yaml
Normal file
@@ -0,0 +1,26 @@
|
||||
version: "1.0.0"
|
||||
name: "SPIN Question Identification"
|
||||
description: >
|
||||
Define, taking into account the history of the discussion (in between triple %), the latest reply and the currently
|
||||
known SPIN-elements (in between triple $), the top questions that need to be asked to understand the full SPIN context
|
||||
of the customer. If you think this user could be a potential customer, please indicate so.
|
||||
{custom_description}
|
||||
Use the following {tenant_language} to define the SPIN-elements. If you have a full SPIN context, just skip and don't
|
||||
ask for more information or confirmation.
|
||||
History:
|
||||
%%%{history}%%%
|
||||
Known SPIN:
|
||||
$$${SPIN}$$$
|
||||
Latest reply:
|
||||
{question}
|
||||
expected_output: >
|
||||
The SPIN questions:
|
||||
- At max {nr_of_spin_questions} questions to complete the SPIN-context of the customer, as a markdown list without '```'.
|
||||
Potential Customer Indication:
|
||||
- An indication if - given the current SPIN - this could be a good customer (True) or not (False).
|
||||
{custom_expected_output}
|
||||
metadata:
|
||||
author: "Josako"
|
||||
date_added: "2025-01-08"
|
||||
description: "A Task that identifies questions to complete the SPIN context in a conversation"
|
||||
changes: "Initial version"
|
||||
27
config/type_defs/agent_types.py
Normal file
27
config/type_defs/agent_types.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Agent Types
|
||||
AGENT_TYPES = {
|
||||
"EMAIL_CONTENT_AGENT": {
|
||||
"name": "Email Content Agent",
|
||||
"description": "An Agent that writes engaging emails.",
|
||||
},
|
||||
"EMAIL_ENGAGEMENT_AGENT": {
|
||||
"name": "Email Engagement Agent",
|
||||
"description": "An Agent that ensures the email is engaging and lead to maximal desired action",
|
||||
},
|
||||
"IDENTIFICATION_AGENT": {
|
||||
"name": "Identification Agent",
|
||||
"description": "An Agent that gathers identification information",
|
||||
},
|
||||
"RAG_AGENT": {
|
||||
"name": "Rag Agent",
|
||||
"description": "An Agent that does RAG based on a user's question, RAG content & history",
|
||||
},
|
||||
"SPIN_DETECTION_AGENT": {
|
||||
"name": "SPIN Sales Assistant",
|
||||
"description": "An Agent that detects SPIN information in an ongoing conversation",
|
||||
},
|
||||
"SPIN_SALES_SPECIALIST_AGENT": {
|
||||
"name": "SPIN Sales Specialist",
|
||||
"description": "An Agent that asks for Follow-up questions for SPIN-process",
|
||||
},
|
||||
}
|
||||
@@ -8,4 +8,8 @@ SERVICE_TYPES = {
|
||||
"name": "DOCAPI",
|
||||
"description": "Service allows to use document API functionality.",
|
||||
},
|
||||
"DEPLOY_API": {
|
||||
"name": "DEPLOY_API",
|
||||
"description": "Service allows to use deployment API functionality.",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,60 +3,9 @@ SPECIALIST_TYPES = {
|
||||
"STANDARD_RAG": {
|
||||
"name": "Q&A RAG Specialist",
|
||||
"description": "Standard Q&A through RAG Specialist",
|
||||
"configuration": {
|
||||
"specialist_context": {
|
||||
"name": "Specialist Context",
|
||||
"type": "text",
|
||||
"description": "The context to be used by the specialist.",
|
||||
"required": False,
|
||||
},
|
||||
"temperature": {
|
||||
"name": "Temperature",
|
||||
"type": "number",
|
||||
"description": "The inference temperature to be used by the specialist.",
|
||||
"required": False,
|
||||
"default": 0.3
|
||||
}
|
||||
},
|
||||
"arguments": {
|
||||
"language": {
|
||||
"name": "Language",
|
||||
"type": "str",
|
||||
"description": "Language code to be used for receiving questions and giving answers",
|
||||
"required": True,
|
||||
},
|
||||
"query": {
|
||||
"name": "query",
|
||||
"type": "str",
|
||||
"description": "Query to answer",
|
||||
"required": True,
|
||||
}
|
||||
},
|
||||
"results": {
|
||||
"detailed_query": {
|
||||
"name": "detailed_query",
|
||||
"type": "str",
|
||||
"description": "The query detailed with the Chat Session History.",
|
||||
"required": True,
|
||||
},
|
||||
"answer": {
|
||||
"name": "answer",
|
||||
"type": "str",
|
||||
"description": "Answer to the query",
|
||||
"required": True,
|
||||
},
|
||||
"citations": {
|
||||
"name": "citations",
|
||||
"type": "List[str]",
|
||||
"description": "List of citations",
|
||||
"required": False,
|
||||
},
|
||||
"insufficient_info": {
|
||||
"name": "insufficient_info",
|
||||
"type": "bool",
|
||||
"description": "Whether or not the query is insufficient info",
|
||||
"required": True,
|
||||
},
|
||||
}
|
||||
},
|
||||
"SPIN_SPECIALIST": {
|
||||
"name": "Spin Sales Specialist",
|
||||
"description": "A specialist that allows to answer user queries, try to get SPIN-information and Identification",
|
||||
}
|
||||
}
|
||||
31
config/type_defs/task_types.py
Normal file
31
config/type_defs/task_types.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# Agent Types
|
||||
TASK_TYPES = {
|
||||
"EMAIL_LEAD_DRAFTING_TASK": {
|
||||
"name": "Email Lead Draft Creation",
|
||||
"description": "Email Drafting Task towards a Lead",
|
||||
},
|
||||
"EMAIL_LEAD_ENGAGEMENT_TASK": {
|
||||
"name": "Email Lead Engagement Creation",
|
||||
"description": "Make an Email draft more engaging",
|
||||
},
|
||||
"IDENTIFICATION_DETECTION_TASK": {
|
||||
"name": "Identification Gathering",
|
||||
"description": "A Task that gathers identification information from a conversation",
|
||||
},
|
||||
"IDENTIFICATION_QUESTIONS_TASK": {
|
||||
"name": "Define Identification Questions",
|
||||
"description": "A Task to define identification (person & company) questions",
|
||||
},
|
||||
"RAG_TASK": {
|
||||
"name": "RAG Task",
|
||||
"description": "A Task that gives RAG-based answers",
|
||||
},
|
||||
"SPIN_DETECT_TASK": {
|
||||
"name": "SPIN Information Detection",
|
||||
"description": "A Task that performs SPIN Information Detection",
|
||||
},
|
||||
"SPIN_QUESTIONS_TASK": {
|
||||
"name": "SPIN Question Identification",
|
||||
"description": "A Task that identifies questions to complete the SPIN context in a conversation",
|
||||
},
|
||||
}
|
||||
4
config/type_defs/tool_types.py
Normal file
4
config/type_defs/tool_types.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# Agent Types
|
||||
TOOL_TYPES = {
|
||||
|
||||
}
|
||||
@@ -36,6 +36,7 @@ x-common-variables: &common-variables
|
||||
MINIO_SECRET_KEY: minioadmin
|
||||
NGINX_SERVER_NAME: 'localhost http://macstudio.ask-eve-ai-local.com/'
|
||||
LANGCHAIN_API_KEY: "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"
|
||||
SERPER_API_KEY: "e4c553856d0e6b5a171ec5e6b69d874285b9badf"
|
||||
|
||||
services:
|
||||
nginx:
|
||||
|
||||
@@ -40,6 +40,7 @@ x-common-variables: &common-variables
|
||||
MINIO_SECRET_KEY: 2PEZAD1nlpAmOyDV0TUTuJTQw1qVuYLF3A7GMs0D
|
||||
NGINX_SERVER_NAME: 'evie.askeveai.com mxz536.stackhero-network.com'
|
||||
LANGCHAIN_API_KEY: "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"
|
||||
SERPER_API_KEY: "e4c553856d0e6b5a171ec5e6b69d874285b9badf"
|
||||
|
||||
networks:
|
||||
eveai-network:
|
||||
|
||||
@@ -13,6 +13,7 @@ import common.models.interaction
|
||||
import common.models.entitlements
|
||||
import common.models.document
|
||||
from common.utils.nginx_utils import prefixed_url_for
|
||||
from common.utils.startup_eveai import perform_startup_actions
|
||||
from config.logging_config import LOGGING
|
||||
from common.utils.security import set_tenant_session_data
|
||||
from .errors import register_error_handlers
|
||||
@@ -73,6 +74,9 @@ def create_app(config_file=None):
|
||||
# Register Error Handlers
|
||||
register_error_handlers(app)
|
||||
|
||||
# Register Cache Handlers
|
||||
register_cache_handlers(app)
|
||||
|
||||
# Debugging settings
|
||||
if app.config['DEBUG'] is True:
|
||||
app.logger.setLevel(logging.DEBUG)
|
||||
@@ -103,7 +107,19 @@ def create_app(config_file=None):
|
||||
# Register template filters
|
||||
register_filters(app)
|
||||
|
||||
app.logger.info("EveAI App Server Started Successfully")
|
||||
# Let all initialization complete before using cache
|
||||
# @app.before_first_request
|
||||
# def initialize_cache_data():
|
||||
# # Cache testing
|
||||
# agent_types = cache_manager.agent_config_cache.get_types()
|
||||
# app.logger.debug(f"Agent types: {agent_types}")
|
||||
# agent_config = cache_manager.agent_config_cache.get_config('RAG_AGENT')
|
||||
# app.logger.debug(f"Agent config: {agent_config}")
|
||||
|
||||
# Perform startup actions such as cache invalidation
|
||||
perform_startup_actions(app)
|
||||
|
||||
app.logger.info(f"EveAI App Server Started Successfully (PID: {os.getpid()})")
|
||||
app.logger.info("-------------------------------------------------------------------------------------------------")
|
||||
return app
|
||||
|
||||
@@ -123,7 +139,6 @@ def register_extensions(app):
|
||||
metrics.init_app(app)
|
||||
|
||||
|
||||
# Register Blueprints
|
||||
def register_blueprints(app):
|
||||
from .views.user_views import user_bp
|
||||
app.register_blueprint(user_bp)
|
||||
@@ -143,3 +158,13 @@ def register_blueprints(app):
|
||||
app.register_blueprint(healthz_bp)
|
||||
init_healtz(app)
|
||||
|
||||
|
||||
def register_cache_handlers(app):
|
||||
from common.utils.cache.config_cache import (
|
||||
AgentConfigCacheHandler, TaskConfigCacheHandler, ToolConfigCacheHandler, SpecialistConfigCacheHandler)
|
||||
|
||||
cache_manager.register_handler(AgentConfigCacheHandler, 'eveai_config')
|
||||
cache_manager.register_handler(TaskConfigCacheHandler, 'eveai_config')
|
||||
cache_manager.register_handler(ToolConfigCacheHandler, 'eveai_config')
|
||||
cache_manager.register_handler(SpecialistConfigCacheHandler, 'eveai_config')
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import jinja2
|
||||
from flask import render_template, request, jsonify, redirect, current_app
|
||||
from flask_login import current_user
|
||||
from common.utils.nginx_utils import prefixed_url_for
|
||||
@@ -47,3 +48,15 @@ def register_error_handlers(app):
|
||||
app.register_error_handler(403, not_authorised_error)
|
||||
app.register_error_handler(KeyError, key_error_handler)
|
||||
|
||||
@app.errorhandler(jinja2.TemplateNotFound)
|
||||
def template_not_found(error):
|
||||
app.logger.error(f'Template not found: {error.name}')
|
||||
app.logger.error(f'Search Paths: {app.jinja_loader.list_templates()}')
|
||||
return f'Template not found: {error.name}. Check logs for details.', 404
|
||||
|
||||
@app.errorhandler(jinja2.TemplateSyntaxError)
|
||||
def template_syntax_error(error):
|
||||
app.logger.error(f'Template syntax error: {error.message}')
|
||||
app.logger.error(f'In template {error.filename}, line {error.lineno}')
|
||||
return f'Template syntax error: {error.message}', 500
|
||||
|
||||
|
||||
32
eveai_app/templates/interaction/component.html
Normal file
32
eveai_app/templates/interaction/component.html
Normal file
@@ -0,0 +1,32 @@
|
||||
{% extends 'base.html' %}
|
||||
{% from "macros.html" import render_field %}
|
||||
|
||||
{% block title %}{{ title }}{% endblock %}
|
||||
|
||||
{% block content_title %}{{ title }}{% endblock %}
|
||||
{% block content_description %}{{ description }}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<form method="post">
|
||||
{{ form.hidden_tag() }}
|
||||
{% set disabled_fields = [] %}
|
||||
{% set exclude_fields = [] %}
|
||||
{% for field in form.get_static_fields() %}
|
||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||
{% endfor %}
|
||||
{% if form.get_dynamic_fields is defined %}
|
||||
{% for collection_name, fields in form.get_dynamic_fields().items() %}
|
||||
{% if fields|length > 0 %}
|
||||
<h4 class="mt-4">{{ collection_name }}</h4>
|
||||
{% endif %}
|
||||
{% for field in fields %}
|
||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
<button type="submit" class="btn btn-primary">{{ submit_text }}</button>
|
||||
</form>
|
||||
{% endblock %}
|
||||
|
||||
{% block content_footer %}
|
||||
{% endblock %}
|
||||
23
eveai_app/templates/interaction/component_list.html
Normal file
23
eveai_app/templates/interaction/component_list.html
Normal file
@@ -0,0 +1,23 @@
|
||||
{% extends 'base.html' %}
|
||||
{% from 'macros.html' import render_selectable_table, render_pagination %}
|
||||
|
||||
{% block title %}{{ title }}{% endblock %}
|
||||
|
||||
{% block content_title %}{{ title }}{% endblock %}
|
||||
{% block content_description %}View {{ component_type }}s for Tenant{% endblock %}
|
||||
{% block content_class %}<div class="col-xl-12 col-lg-5 col-md-7 mx-auto"></div>{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<form method="POST" action="{{ url_for(action_url) }}">
|
||||
{{ render_selectable_table(headers=["ID", "Name", "Type"], rows=rows, selectable=True, id=table_id) }}
|
||||
<div class="form-group mt-3">
|
||||
<button type="submit" name="action" value="edit_{{component_type|lower}}" class="btn btn-primary">Edit {{ component_type }}</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block content_footer %}
|
||||
{{ render_pagination(pagination, pagination_endpoint) }}
|
||||
{% endblock %}
|
||||
@@ -102,6 +102,16 @@
|
||||
{'name': 'Add Specialist', 'url': '/interaction/specialist', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'All Specialists', 'url': '/interaction/specialists', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'Chat Sessions', 'url': '/interaction/chat_sessions', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'Add PreProcessor', 'url': '/interaction/preprocessor', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'All PreProcessors', 'url': '/interaction/preprocessors', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'Add Compiler', 'url': '/interaction/compiler', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'All Compilers', 'url': '/interaction/compilers', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'Add Dispatcher', 'url': '/interaction/dispatcher', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'All Dispatchers', 'url': '/interaction/dispatchers', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'Add PostProcessor', 'url': '/interaction/postprocessor', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'All PostProcessors', 'url': '/interaction/postprocessors', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'Add Reasoner', 'url': '/interaction/reasoner', 'roles': ['Super User', 'Tenant Admin']},
|
||||
{'name': 'All Reasoners', 'url': '/interaction/reasoners', 'roles': ['Super User', 'Tenant Admin']},
|
||||
]) }}
|
||||
{% endif %}
|
||||
{% if current_user.is_authenticated %}
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import (StringField, BooleanField, SelectField, TextAreaField)
|
||||
from wtforms.validators import DataRequired, Length
|
||||
from wtforms.validators import DataRequired, Length, Optional
|
||||
|
||||
from wtforms_sqlalchemy.fields import QuerySelectMultipleField
|
||||
|
||||
from common.models.document import Retriever
|
||||
from common.models.interaction import EveAITool
|
||||
from common.extensions import cache_manager
|
||||
|
||||
from config.type_defs.specialist_types import SPECIALIST_TYPES
|
||||
from .dynamic_form_base import DynamicFormBase
|
||||
|
||||
|
||||
@@ -14,6 +15,10 @@ def get_retrievers():
|
||||
return Retriever.query.all()
|
||||
|
||||
|
||||
def get_tools():
|
||||
return EveAITool.query.all()
|
||||
|
||||
|
||||
class SpecialistForm(FlaskForm):
|
||||
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
|
||||
description = TextAreaField('Description', validators=[DataRequired()])
|
||||
@@ -32,8 +37,9 @@ class SpecialistForm(FlaskForm):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
types_dict = cache_manager.specialist_config_cache.get_types()
|
||||
# Dynamically populate the 'type' field using the constructor
|
||||
self.type.choices = [(key, value['name']) for key, value in SPECIALIST_TYPES.items()]
|
||||
self.type.choices = [(key, value['name']) for key, value in types_dict.items()]
|
||||
|
||||
|
||||
class EditSpecialistForm(DynamicFormBase):
|
||||
@@ -52,4 +58,59 @@ class EditSpecialistForm(DynamicFormBase):
|
||||
tuning = BooleanField('Enable Retrieval Tuning', default=False)
|
||||
|
||||
|
||||
class BaseComponentForm(DynamicFormBase):
|
||||
"""Base form for all processing components"""
|
||||
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
|
||||
description = TextAreaField('Description', validators=[Optional()])
|
||||
type = SelectField('Type', validators=[DataRequired()])
|
||||
tuning = BooleanField('Enable Tuning', default=False)
|
||||
|
||||
def __init__(self, *args, type_config=None, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if type_config:
|
||||
self.type.choices = [(key, value['name']) for key, value in type_config.items()]
|
||||
|
||||
|
||||
# Edit forms that support dynamic fields
|
||||
class BaseEditComponentForm(DynamicFormBase):
|
||||
name = StringField('Name', validators=[DataRequired()])
|
||||
description = TextAreaField('Description', validators=[Optional()])
|
||||
type = StringField('Type', validators=[DataRequired()], render_kw={'readonly': True})
|
||||
tuning = BooleanField('Enable Tuning', default=False)
|
||||
|
||||
|
||||
class EveAIAgentForm(BaseComponentForm):
|
||||
role = TextAreaField('Role', validators=[DataRequired()])
|
||||
goal = TextAreaField('Goal', validators=[DataRequired()])
|
||||
backstory = TextAreaField('Backstory', validators=[DataRequired()])
|
||||
|
||||
tools = QuerySelectMultipleField(
|
||||
'Tools',
|
||||
query_factory=get_tools,
|
||||
get_label='name',
|
||||
allow_blank=True,
|
||||
description='Select one or more tools that can be used this agent'
|
||||
)
|
||||
|
||||
def __init__(self, *args, type_config=None, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if type_config:
|
||||
self.type.choices = [(key, value['name']) for key, value in type_config.items()]
|
||||
|
||||
|
||||
class EditEveAIAgentForm(BaseEditComponentForm):
|
||||
role = StringField('Role', validators=[DataRequired()])
|
||||
goal = StringField('Goal', validators=[DataRequired()])
|
||||
backstory = StringField('Backstory', validators=[DataRequired()])
|
||||
|
||||
tools = QuerySelectMultipleField(
|
||||
'Tools',
|
||||
query_factory=get_tools,
|
||||
get_label='name',
|
||||
allow_blank=True,
|
||||
description='Select one or more tools that can be used this agent'
|
||||
)
|
||||
|
||||
|
||||
class EveAITaskForm(BaseComponentForm):
|
||||
expected_output = TextAreaField('Expected Output', validators=[DataRequired()])
|
||||
|
||||
@@ -7,14 +7,19 @@ from sqlalchemy import desc
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from common.models.document import Embedding, DocumentVersion, Retriever
|
||||
from common.models.interaction import ChatSession, Interaction, InteractionEmbedding, Specialist, SpecialistRetriever
|
||||
from common.extensions import db
|
||||
from common.utils.document_utils import set_logging_information, update_logging_information
|
||||
from config.type_defs.specialist_types import SPECIALIST_TYPES
|
||||
from common.models.interaction import (ChatSession, Interaction, InteractionEmbedding, Specialist, SpecialistRetriever)
|
||||
|
||||
from common.extensions import db, cache_manager
|
||||
from common.utils.model_logging_utils import set_logging_information, update_logging_information
|
||||
|
||||
from common.utils.middleware import mw_before_request
|
||||
from common.utils.nginx_utils import prefixed_url_for
|
||||
from common.utils.view_assistants import form_validation_failed, prepare_table_for_macro
|
||||
from .interaction_forms import SpecialistForm, EditSpecialistForm
|
||||
from common.utils.specialist_utils import initialize_specialist
|
||||
|
||||
from config.type_defs.specialist_types import SPECIALIST_TYPES
|
||||
|
||||
from .interaction_forms import (SpecialistForm, EditSpecialistForm)
|
||||
|
||||
interaction_bp = Blueprint('interaction_bp', __name__, url_prefix='/interaction')
|
||||
|
||||
@@ -135,6 +140,7 @@ def specialist():
|
||||
new_specialist.name = form.name.data
|
||||
new_specialist.description = form.description.data
|
||||
new_specialist.type = form.type.data
|
||||
new_specialist.type_version = cache_manager.specialist_config_cache.get_latest_version(new_specialist.type)
|
||||
new_specialist.tuning = form.tuning.data
|
||||
|
||||
set_logging_information(new_specialist, dt.now(tz.utc))
|
||||
@@ -156,6 +162,9 @@ def specialist():
|
||||
flash('Specialist successfully added!', 'success')
|
||||
current_app.logger.info(f'Specialist {new_specialist.name} successfully added for tenant {tenant_id}!')
|
||||
|
||||
# Initialize the newly create specialist
|
||||
initialize_specialist(new_specialist.id, new_specialist.type, new_specialist.type_version)
|
||||
|
||||
return redirect(prefixed_url_for('interaction_bp.edit_specialist', specialist_id=new_specialist.id))
|
||||
|
||||
except Exception as e:
|
||||
@@ -173,7 +182,8 @@ def edit_specialist(specialist_id):
|
||||
specialist = Specialist.query.get_or_404(specialist_id)
|
||||
form = EditSpecialistForm(request.form, obj=specialist)
|
||||
|
||||
configuration_config = SPECIALIST_TYPES[specialist.type]["configuration"]
|
||||
specialist_config = cache_manager.specialist_config_cache.get_config(specialist.type, specialist.type_version)
|
||||
configuration_config = specialist_config.get('configuration')
|
||||
form.add_dynamic_fields("configuration", configuration_config, specialist.configuration)
|
||||
|
||||
if request.method == 'GET':
|
||||
@@ -257,3 +267,4 @@ def handle_specialist_selection():
|
||||
return redirect(prefixed_url_for('interaction_bp.edit_specialist', specialist_id=specialist_id))
|
||||
|
||||
return redirect(prefixed_url_for('interaction_bp.specialists'))
|
||||
|
||||
|
||||
@@ -36,7 +36,6 @@ def create_app(config_file=None):
|
||||
|
||||
@app.before_request
|
||||
def check_cors():
|
||||
app.logger.debug('Checking CORS')
|
||||
if request.method == 'OPTIONS':
|
||||
app.logger.debug("Handling OPTIONS request")
|
||||
return '', 200 # Allow OPTIONS to pass through
|
||||
|
||||
@@ -9,8 +9,6 @@ from common.extensions import db, template_manager, cache_manager
|
||||
from config.logging_config import LOGGING
|
||||
from config.config import get_config
|
||||
|
||||
from . import specialists, retrievers
|
||||
|
||||
|
||||
def create_app(config_file=None):
|
||||
app = Flask(__name__)
|
||||
@@ -30,6 +28,8 @@ def create_app(config_file=None):
|
||||
app.logger.info('Starting up eveai_chat_workers...')
|
||||
register_extensions(app)
|
||||
|
||||
from . import specialists, retrievers
|
||||
|
||||
celery = make_celery(app.name, app.config)
|
||||
init_celery(celery, app)
|
||||
|
||||
|
||||
@@ -8,8 +8,6 @@ from common.extensions import db, minio_client, template_manager, cache_manager
|
||||
import config.logging_config as logging_config
|
||||
from config.config import get_config
|
||||
|
||||
from . import processors
|
||||
|
||||
|
||||
def create_app(config_file=None):
|
||||
app = Flask(__name__)
|
||||
@@ -28,6 +26,8 @@ def create_app(config_file=None):
|
||||
|
||||
register_extensions(app)
|
||||
|
||||
from . import processors
|
||||
|
||||
celery = make_celery(app.name, app.config)
|
||||
init_celery(celery, app)
|
||||
|
||||
@@ -47,4 +47,3 @@ def register_extensions(app):
|
||||
|
||||
|
||||
app, celery = create_app()
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
{
|
||||
"name": "eveai_integration",
|
||||
"version": "1.0.0",
|
||||
"version": "1.0.5",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "eveai_integration",
|
||||
"version": "1.0.0",
|
||||
"version": "1.0.5",
|
||||
"dependencies": {
|
||||
"zapier-platform-core": "15.19.0"
|
||||
"zapier-platform-core": "16.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "^29.6.0"
|
||||
@@ -1632,12 +1632,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/dotenv": {
|
||||
"version": "12.0.4",
|
||||
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-12.0.4.tgz",
|
||||
"integrity": "sha512-oWdqbSywffzH1l4WXKPHWA0TWYpqp7IyLfqjipT4upoIFS0HPMqtNotykQpD4iIg0BqtNmdgPCh2WMvMt7yTiw==",
|
||||
"version": "16.4.6",
|
||||
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.6.tgz",
|
||||
"integrity": "sha512-JhcR/+KIjkkjiU8yEpaB/USlzVi3i5whwOjpIRNGi9svKEXZSe+Qp6IWAjFjv+2GViAoDRCUv/QLNziQxsLqDg==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://dotenvx.com"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-to-chromium": {
|
||||
@@ -1806,9 +1809,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
|
||||
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
@@ -3049,9 +3052,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/node-fetch": {
|
||||
"version": "2.6.7",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
|
||||
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
@@ -3950,23 +3953,23 @@
|
||||
}
|
||||
},
|
||||
"node_modules/zapier-platform-core": {
|
||||
"version": "15.19.0",
|
||||
"resolved": "https://registry.npmjs.org/zapier-platform-core/-/zapier-platform-core-15.19.0.tgz",
|
||||
"integrity": "sha512-KWyvuZ+QnooOknFl3AW6HObbXdz1FZYclN46EwUeQEd1xIjHwr+WdQwOYPJJM6jwSBkh0Fb7MMVhZQQ0VmPlCg==",
|
||||
"version": "16.0.0",
|
||||
"resolved": "https://registry.npmjs.org/zapier-platform-core/-/zapier-platform-core-16.0.0.tgz",
|
||||
"integrity": "sha512-HqB+6foAF5/1b2gb07slVPRDPnx2sdy34QGv+sfW2JFzLN16y5KgGFmFStdSDred3Z41WffzZuqXQH/+gj59Ow==",
|
||||
"license": "SEE LICENSE IN LICENSE",
|
||||
"dependencies": {
|
||||
"@zapier/secret-scrubber": "^1.1.1",
|
||||
"bluebird": "3.7.2",
|
||||
"content-disposition": "0.5.4",
|
||||
"dotenv": "12.0.4 ",
|
||||
"form-data": "4.0.0",
|
||||
"dotenv": "16.4.6",
|
||||
"form-data": "4.0.1",
|
||||
"lodash": "4.17.21",
|
||||
"mime-types": "2.1.35",
|
||||
"node-abort-controller": "3.1.1",
|
||||
"node-fetch": "2.6.7",
|
||||
"node-fetch": "2.7.0",
|
||||
"oauth-sign": "0.9.0",
|
||||
"semver": "7.5.2",
|
||||
"zapier-platform-schema": "15.19.0"
|
||||
"semver": "7.6.3",
|
||||
"zapier-platform-schema": "16.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16",
|
||||
@@ -3986,26 +3989,11 @@
|
||||
"undici-types": "~6.19.2"
|
||||
}
|
||||
},
|
||||
"node_modules/zapier-platform-core/node_modules/lru-cache": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
|
||||
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"yallist": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/zapier-platform-core/node_modules/semver": {
|
||||
"version": "7.5.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.2.tgz",
|
||||
"integrity": "sha512-SoftuTROv/cRjCze/scjGyiDtcUyxw1rgYQSZY7XTmtR5hX+dm76iDbTH8TkLPHCQmlbQVSSbNZCPM2hb0knnQ==",
|
||||
"version": "7.6.3",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
|
||||
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"lru-cache": "^6.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
@@ -4020,16 +4008,10 @@
|
||||
"license": "MIT",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/zapier-platform-core/node_modules/yallist": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
||||
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/zapier-platform-schema": {
|
||||
"version": "15.19.0",
|
||||
"resolved": "https://registry.npmjs.org/zapier-platform-schema/-/zapier-platform-schema-15.19.0.tgz",
|
||||
"integrity": "sha512-c3lkrIm3/KaAOXGgayhQtXiH7ODtn1lxBTK5marU30yuyO+7WMCpxW+H79yVrfeiDyR7Dhy7LyKtoKw7pEWq+Q==",
|
||||
"version": "16.0.0",
|
||||
"resolved": "https://registry.npmjs.org/zapier-platform-schema/-/zapier-platform-schema-16.0.0.tgz",
|
||||
"integrity": "sha512-ZL96L4IHRIp2ErIU0RbnmfIa/UEQ3DStvQVrWPQLV6uHQy8wKIcQX9ETcKY7o9YaiLfeFduPEPXvVlWJ4cYWwQ==",
|
||||
"license": "SEE LICENSE IN LICENSE",
|
||||
"dependencies": {
|
||||
"jsonschema": "1.2.2",
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,115 @@
|
||||
"""Introduction of CrewAI abtractions
|
||||
|
||||
Revision ID: 07d08b2cf0b3
|
||||
Revises: 4bf121f340e5
|
||||
Create Date: 2025-01-14 09:08:14.828800
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import pgvector
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '07d08b2cf0b3'
|
||||
down_revision = '4bf121f340e5'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('dispatcher',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=50), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('type', sa.String(length=50), nullable=False),
|
||||
sa.Column('tuning', sa.Boolean(), nullable=True),
|
||||
sa.Column('configuration', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('arguments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', sa.Integer(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_by', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['created_by'], ['public.user.id'], ),
|
||||
sa.ForeignKeyConstraint(['updated_by'], ['public.user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_table('eve_ai_agent',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('specialist_id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=50), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('type', sa.String(length=50), nullable=False),
|
||||
sa.Column('role', sa.Text(), nullable=True),
|
||||
sa.Column('goal', sa.Text(), nullable=True),
|
||||
sa.Column('backstory', sa.Text(), nullable=True),
|
||||
sa.Column('tuning', sa.Boolean(), nullable=True),
|
||||
sa.Column('configuration', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('arguments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', sa.Integer(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_by', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['created_by'], ['public.user.id'], ),
|
||||
sa.ForeignKeyConstraint(['specialist_id'], ['specialist.id'], ),
|
||||
sa.ForeignKeyConstraint(['updated_by'], ['public.user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_table('eve_ai_task',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('specialist_id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=50), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('type', sa.String(length=50), nullable=False),
|
||||
sa.Column('expected_output', sa.Text(), nullable=True),
|
||||
sa.Column('tuning', sa.Boolean(), nullable=True),
|
||||
sa.Column('configuration', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('arguments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('context', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('asynchronous', sa.Boolean(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', sa.Integer(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_by', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['created_by'], ['public.user.id'], ),
|
||||
sa.ForeignKeyConstraint(['specialist_id'], ['specialist.id'], ),
|
||||
sa.ForeignKeyConstraint(['updated_by'], ['public.user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_table('eve_ai_tool',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('specialist_id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=50), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('type', sa.String(length=50), nullable=False),
|
||||
sa.Column('tuning', sa.Boolean(), nullable=True),
|
||||
sa.Column('configuration', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('arguments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', sa.Integer(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_by', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['created_by'], ['public.user.id'], ),
|
||||
sa.ForeignKeyConstraint(['specialist_id'], ['specialist.id'], ),
|
||||
sa.ForeignKeyConstraint(['updated_by'], ['public.user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_table('specialist_dispatcher',
|
||||
sa.Column('specialist_id', sa.Integer(), nullable=False),
|
||||
sa.Column('dispatcher_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['dispatcher_id'], ['dispatcher.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['specialist_id'], ['specialist.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('specialist_id', 'dispatcher_id')
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Tables are dropped in reverse creation order so that foreign-key
    # dependents disappear before the tables they reference.
    for table_name in ('specialist_dispatcher', 'eve_ai_tool', 'eve_ai_task',
                       'eve_ai_agent', 'dispatcher'):
        op.drop_table(table_name)
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,37 @@
|
||||
"""Add version information for configuration
|
||||
|
||||
Revision ID: 1e8ed0bd9662
|
||||
Revises: 07d08b2cf0b3
|
||||
Create Date: 2025-01-14 14:03:42.866613
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import pgvector
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1e8ed0bd9662'
|
||||
down_revision = '07d08b2cf0b3'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Every configurable AI entity gains a version string so stored
    # configurations can be matched against the code that understands them.
    versioned_tables = ('dispatcher', 'eve_ai_agent', 'eve_ai_task',
                        'eve_ai_tool', 'specialist')
    for table_name in versioned_tables:
        op.add_column(
            table_name,
            sa.Column('type_version', sa.String(length=20), nullable=True),
        )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Remove the version column again, mirroring upgrade() in reverse order.
    for table_name in ('specialist', 'eve_ai_tool', 'eve_ai_task',
                       'eve_ai_agent', 'dispatcher'):
        op.drop_column(table_name, 'type_version')
    # ### end Alembic commands ###
|
||||
102
scripts/db_backup.sh
Executable file
102
scripts/db_backup.sh
Executable file
@@ -0,0 +1,102 @@
|
||||
#!/bin/bash

# Configuration
#
# SECURITY NOTE(review): these database credentials were previously
# hard-coded in this script (and therefore stored in version control).
# Each value can now be overridden via an identically named environment
# variable; the fallback defaults keep existing invocations working, but
# the plain-text secrets below should be rotated and removed from the
# repository.
DEV_DB_HOST="${DEV_DB_HOST:-localhost}"
DEV_DB_PORT="${DEV_DB_PORT:-5432}"
DEV_DB_NAME="${DEV_DB_NAME:-eveai}"
DEV_DB_USER="${DEV_DB_USER:-luke}"
DEV_DB_PASSWORD="${DEV_DB_PASSWORD:-Skywalker!}"

PROD_DB_HOST="${PROD_DB_HOST:-bswnz4.stackhero-network.com}"
PROD_DB_PORT="${PROD_DB_PORT:-5945}"
PROD_DB_NAME="${PROD_DB_NAME:-eveai}"
PROD_DB_USER="${PROD_DB_USER:-luke_skywalker}"
PROD_DB_PASSWORD="${PROD_DB_PASSWORD:-2MK&1rHmWEydE2rFuJLq*ls%tdkPAk2}"

# Create backup directory if it doesn't exist
BACKUP_DIR="${BACKUP_DIR:-./db_backups}"
mkdir -p "$BACKUP_DIR"

# Get current date for backup file name
DATE=$(date +"%Y%m%d_%H%M%S")
|
||||
|
||||
# Function to check if pg_dump is available
|
||||
# Abort early when the PostgreSQL client tools are not on PATH.
check_pg_dump() {
    if command -v pg_dump > /dev/null 2>&1; then
        return 0
    fi
    echo "Error: pg_dump is not installed. Please install PostgreSQL client tools."
    exit 1
}
|
||||
|
||||
# Function to create backup
|
||||
# Dump one database to ${BACKUP_DIR} and gzip the result.
#
# Arguments:
#   $1  environment label used in the file name ("dev"/"prod")
#   $2  host   $3  port   $4  database   $5  user   $6  password
#
# Exits the whole script with status 1 when pg_dump fails.
create_backup() {
    local env=$1
    local host=$2
    local port=$3
    local db=$4
    local user=$5
    local password=$6

    # Set backup filename
    local backup_file="${BACKUP_DIR}/eveai_${env}_backup_${DATE}.sql"

    echo "Creating backup for ${env} environment..."
    echo "Backing up to: ${backup_file}"

    # Pass the password via the environment so it never appears on the
    # command line (which would be visible in `ps` output).
    export PGPASSWORD="$password"

    # Create backup using pg_dump.  Testing the command directly is more
    # robust than inspecting $? on a later line, where an intervening
    # command could silently clobber the exit status.
    if pg_dump -h "$host" -p "$port" -U "$user" -d "$db" -F p > "$backup_file"; then
        echo "Backup completed successfully!"
        echo "Backup file: ${backup_file}"

        # Create compressed version
        gzip -f "$backup_file"
        echo "Compressed backup created: ${backup_file}.gz"
    else
        echo "Error: Backup failed!"
        rm -f "$backup_file"  # Clean up failed backup file
        unset PGPASSWORD      # don't leak the secret past this function
        exit 1
    fi

    # Unset PGPASSWORD for security
    unset PGPASSWORD
}
|
||||
|
||||
# Function to display usage
|
||||
# Print invocation help and terminate with a non-zero status.
# Called whenever the command-line arguments are missing or invalid.
usage() {
    echo "Usage: $0 [dev|prod]"
    echo " dev - backup development database"
    echo " prod - backup production database"
    exit 1
}
|
||||
|
||||
# Main script logic
|
||||
# Entry point: validate the arguments and dispatch to create_backup.
main() {
    # Check for pg_dump
    check_pg_dump

    # Exactly one argument (the target environment) is required.
    if [ $# -ne 1 ]; then
        usage
    fi

    if [ "$1" = "dev" ]; then
        create_backup "dev" "$DEV_DB_HOST" "$DEV_DB_PORT" "$DEV_DB_NAME" "$DEV_DB_USER" "$DEV_DB_PASSWORD"
    elif [ "$1" = "prod" ]; then
        create_backup "prod" "$PROD_DB_HOST" "$PROD_DB_PORT" "$PROD_DB_NAME" "$PROD_DB_USER" "$PROD_DB_PASSWORD"
    else
        usage
    fi
}

# Run main function
main "$@"
|
||||
173
scripts/db_restore.sh
Executable file
173
scripts/db_restore.sh
Executable file
@@ -0,0 +1,173 @@
|
||||
#!/bin/bash

# Configuration
#
# SECURITY NOTE(review): these database credentials were previously
# hard-coded in this script (and therefore stored in version control).
# Each value can now be overridden via an identically named environment
# variable; the fallback defaults keep existing invocations working, but
# the plain-text secrets below should be rotated and removed from the
# repository.
DEV_DB_HOST="${DEV_DB_HOST:-localhost}"
DEV_DB_PORT="${DEV_DB_PORT:-5432}"
DEV_DB_NAME="${DEV_DB_NAME:-eveai}"
DEV_DB_USER="${DEV_DB_USER:-luke}"
DEV_DB_PASSWORD="${DEV_DB_PASSWORD:-Skywalker!}"

PROD_DB_HOST="${PROD_DB_HOST:-bswnz4.stackhero-network.com}"
PROD_DB_PORT="${PROD_DB_PORT:-5945}"
PROD_DB_NAME="${PROD_DB_NAME:-eveai}"
PROD_DB_USER="${PROD_DB_USER:-luke_skywalker}"
PROD_DB_PASSWORD="${PROD_DB_PASSWORD:-2MK&1rHmWEydE2rFuJLq*ls%tdkPAk2}"

# Backup directory
BACKUP_DIR="${BACKUP_DIR:-./db_backups}"
|
||||
|
||||
# Function to check if psql is available
|
||||
# Abort early when the PostgreSQL client tools are not on PATH.
check_psql() {
    if command -v psql > /dev/null 2>&1; then
        return 0
    fi
    echo "Error: psql is not installed. Please install PostgreSQL client tools."
    exit 1
}
|
||||
|
||||
# Function to list available backups
|
||||
# Show the compressed backups currently available for restore.
list_backups() {
    echo "Available backups:"
    if ! ls -lh "${BACKUP_DIR}"/*.gz 2>/dev/null; then
        echo "No backups found in ${BACKUP_DIR}"
    fi
}
|
||||
|
||||
# Function to restore backup
|
||||
# Restore a gzipped SQL backup into the dev or prod database.
#
# Arguments:
#   $1  environment ("dev" or "prod")
#   $2  path to a .gz backup produced by db_backup.sh
#
# The target database is DROPPED and recreated, so an explicit "yes"
# confirmation is required.  Exits the script with status 1 on any failure.
restore_backup() {
    local env=$1
    local backup_file=$2
    local host=""
    local port=""
    local db=""
    local user=""
    local password=""

    # Set environment-specific variables
    case "$env" in
        "dev")
            host="$DEV_DB_HOST"
            port="$DEV_DB_PORT"
            db="$DEV_DB_NAME"
            user="$DEV_DB_USER"
            password="$DEV_DB_PASSWORD"
            ;;
        "prod")
            host="$PROD_DB_HOST"
            port="$PROD_DB_PORT"
            db="$PROD_DB_NAME"
            user="$PROD_DB_USER"
            password="$PROD_DB_PASSWORD"
            ;;
        *)
            echo "Invalid environment specified"
            exit 1
            ;;
    esac

    # Check if backup file exists
    if [ ! -f "$backup_file" ]; then
        echo "Error: Backup file not found: $backup_file"
        exit 1
    fi

    # Safety confirmation
    echo "WARNING: This will overwrite the current $env database!"
    echo "Database: $db"
    echo "Host: $host"
    echo "Port: $port"
    read -p "Are you sure you want to proceed? (type 'yes' to confirm): " confirm
    if [ "$confirm" != "yes" ]; then
        echo "Restore cancelled."
        exit 1
    fi

    echo "Starting restore process..."

    # Create temporary directory for uncompressed backup.  Declare and
    # assign separately: `local x=$(cmd)` would mask a mktemp failure
    # behind local's own (always zero) exit status.
    local temp_dir
    temp_dir=$(mktemp -d) || { echo "Error: could not create temporary directory"; exit 1; }
    local temp_file="${temp_dir}/temp_backup.sql"

    # Uncompress backup file; a truncated or corrupt archive must not
    # lead to a partial restore.
    echo "Uncompressing backup file..."
    if ! gunzip -c "$backup_file" > "$temp_file"; then
        echo "Error: could not uncompress $backup_file"
        rm -rf "$temp_dir"
        exit 1
    fi

    # Set PGPASSWORD environment variable
    export PGPASSWORD="$password"

    # Terminate existing connections (best effort; errors are ignored).
    echo "Terminating existing database connections..."
    psql -h "$host" -p "$port" -U "$user" -d "postgres" -c "
        SELECT pg_terminate_backend(pid)
        FROM pg_stat_activity
        WHERE datname = '$db'
        AND pid <> pg_backend_pid();" >/dev/null 2>&1

    # Drop and recreate database.
    # NOTE(review): DROP DATABASE ... WITH (FORCE) requires PostgreSQL 13+.
    echo "Dropping and recreating database..."
    psql -h "$host" -p "$port" -U "$user" -d "postgres" -c "DROP DATABASE IF EXISTS $db WITH (FORCE);"
    psql -h "$host" -p "$port" -U "$user" -d "postgres" -c "CREATE DATABASE $db;"

    # Restore backup.  ON_ERROR_STOP makes psql exit non-zero on the first
    # SQL error; without it psql returns 0 even when the restore failed,
    # so the success check below would always report success.
    echo "Restoring backup..."
    if psql -v ON_ERROR_STOP=1 -h "$host" -p "$port" -U "$user" -d "$db" < "$temp_file"; then
        echo "Restore completed successfully!"
    else
        echo "Error: Restore failed!"
        # Clean up
        rm -rf "$temp_dir"
        unset PGPASSWORD
        exit 1
    fi

    # Clean up
    rm -rf "$temp_dir"
    unset PGPASSWORD
}
|
||||
|
||||
# Function to display usage
|
||||
# Print invocation help (including an example restore call) and terminate
# with a non-zero status.  Called on any argument error.
usage() {
    echo "Usage: $0 [dev|prod] [backup_file|list]"
    echo " dev <backup_file> - restore development database from backup file"
    echo " prod <backup_file> - restore production database from backup file"
    echo " list - list available backup files"
    echo ""
    echo "Example:"
    echo " $0 dev ./db_backups/eveai_dev_backup_20250116_123456.sql.gz"
    exit 1
}
|
||||
|
||||
# Main script logic
|
||||
# Entry point: parse arguments, handle the "list" command, and dispatch
# restore requests to restore_backup.
main() {
    # Check for psql
    check_psql

    # At least one argument is required.
    if [ $# -lt 1 ]; then
        usage
    fi

    # Handle list command
    if [ "$1" = "list" ]; then
        list_backups
        exit 0
    fi

    # A restore needs both the environment and the backup file.
    if [ $# -ne 2 ]; then
        usage
    fi

    # Handle restore commands
    if [ "$1" = "dev" ] || [ "$1" = "prod" ]; then
        restore_backup "$1" "$2"
    else
        usage
    fi
}

# Run main function
main "$@"
|
||||
@@ -3,7 +3,7 @@
|
||||
rm -f *repo.txt
|
||||
|
||||
# Define the list of components
|
||||
components=("docker" "eveai_api" "eveai_app" "eveai_beat" "eveai_chat" "eveai_chat_workers" "eveai_entitlements" "eveai_workers" "nginx" "full" "integrations")
|
||||
components=("docker" "eveai_api" "eveai_app" "eveai_app_startup" "eveai_beat" "eveai_chat" "eveai_chat_workers" "eveai_entitlements" "eveai_workers" "nginx" "full" "integrations")
|
||||
|
||||
# Get the current date and time in the format YYYY-MM-DD_HH-MM
|
||||
timestamp=$(date +"%Y-%m-%d_%H-%M")
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from gevent import monkey
|
||||
monkey.patch_all()
|
||||
|
||||
from eveai_api import create_app
|
||||
|
||||
app = create_app()
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from gevent import monkey
|
||||
monkey.patch_all()
|
||||
|
||||
from eveai_app import create_app
|
||||
|
||||
app = create_app()
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from gevent import monkey
|
||||
monkey.patch_all()
|
||||
|
||||
from eveai_chat import create_app
|
||||
|
||||
app = create_app()
|
||||
|
||||
@@ -12,4 +12,4 @@ export FLASK_APP=${PROJECT_DIR}/scripts/run_eveai_app.py # Adjust the path to y
|
||||
chown -R appuser:appuser /app/logs
|
||||
|
||||
# Start Flask app
|
||||
gunicorn -w 4 -k gevent -b 0.0.0.0:5003 scripts.run_eveai_api:app
|
||||
gunicorn -w 1 -k geventwebsocket.gunicorn.workers.GeventWebSocketWorker -b 0.0.0.0:5001 --worker-connections 100 scripts.run_eveai_api:app
|
||||
|
||||
@@ -48,4 +48,5 @@ echo "Initializing initial tenant and user..."
|
||||
python ${PROJECT_DIR}/scripts/initialize_data.py # Adjust the path to your initialization script
|
||||
|
||||
# Start Flask app
|
||||
gunicorn -w 4 -k gevent -b 0.0.0.0:5001 scripts.run_eveai_app:app
|
||||
# gunicorn -w 1 -k gevent -b 0.0.0.0:5001 --worker-connections 100 scripts.run_eveai_app:app
|
||||
gunicorn -w 1 -k geventwebsocket.gunicorn.workers.GeventWebSocketWorker -b 0.0.0.0:5001 --worker-connections 100 scripts.run_eveai_app:app
|
||||
|
||||
@@ -13,4 +13,4 @@ chown -R appuser:appuser /app/logs
|
||||
echo "Starting EveAI Chat"
|
||||
|
||||
# Start Flask app
|
||||
gunicorn -w 4 -k geventwebsocket.gunicorn.workers.GeventWebSocketWorker -b 0.0.0.0:5002 scripts.run_eveai_chat:app
|
||||
gunicorn -w 1 -k geventwebsocket.gunicorn.workers.GeventWebSocketWorker -b 0.0.0.0:5001 --worker-connections 100 scripts.run_eveai_chat:app
|
||||
|
||||
Reference in New Issue
Block a user