5 Commits

Author SHA1 Message Date
Josako
5ffad160b1 - Prepared Release 1.0.10-alfa 2024-10-08 09:18:59 +02:00
Josako
d6a7743f26 - Minor corrections to entitlement changes and upgrades
- started new eveai_entitlements component (not finished)
2024-10-08 09:12:16 +02:00
Josako
9782e31ae5 - Refined entitlements to work with MiB for both embeddings and storage
- Improved DocumentVersion storage attributes to reflect Minio settings
- Added size to DocumentVersions to easily calculate usage
- License / LicenseTier forms and views added
2024-10-07 14:17:44 +02:00
Josako
f638860e90 - Improvements on audio processing to limit CPU and memory usage
- Removed Portkey from the equation, and defined explicit monitoring using Langchain native code
- Optimization of Business Event logging
2024-10-02 14:12:16 +02:00
Josako
b700cfac64 - Improvements on audio processing to limit CPU and memory usage
- Removed Portkey from the equation, and defined explicit monitoring using Langchain native code
- Optimization of Business Event logging
2024-10-02 14:11:46 +02:00
47 changed files with 2318 additions and 306 deletions

6
.idea/sqldialects.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="PROJECT" dialect="PostgreSQL" />
</component>
</project>

View File

@@ -25,6 +25,38 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Security
- In case of vulnerabilities.
## [1.0.10-alfa]
### Added
- BusinessEventLog monitoring using Langchain native code
### Changed
- Allow longer audio files (or video) to be uploaded and processed
- Storage and Embedding usage now expressed in MiB instead of tokens (more logical)
- Views for License / LicenseTier
### Removed
- Portkey removed for monitoring usage
## [1.0.9-alfa] - 2024-10-01
### Added
- Business Event tracing (eveai_workers & eveai_chat_workers)
- Flower Container added for monitoring
### Changed
- Healthcheck improvements
- model_utils turned into a class with lazy loading
### Deprecated
- For soon-to-be removed features.
### Removed
- For now removed features.
### Fixed
- Set default language when registering Documents or URLs.
## [1.0.8-alfa] - 2024-09-12
### Added

View File

@@ -0,0 +1,49 @@
import time
from langchain.callbacks.base import BaseCallbackHandler
from typing import Dict, Any, List
from langchain.schema import LLMResult
from common.utils.business_event_context import current_event
from flask import current_app
class LLMMetricsHandler(BaseCallbackHandler):
def __init__(self):
self.total_tokens: int = 0
self.prompt_tokens: int = 0
self.completion_tokens: int = 0
self.start_time: float = 0
self.end_time: float = 0
self.total_time: float = 0
def reset(self):
self.total_tokens = 0
self.prompt_tokens = 0
self.completion_tokens = 0
self.start_time = 0
self.end_time = 0
self.total_time = 0
def on_llm_start(self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any) -> None:
self.start_time = time.time()
def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
self.end_time = time.time()
self.total_time = self.end_time - self.start_time
usage = (response.llm_output or {}).get('token_usage', {})  # llm_output can be None for some providers
self.prompt_tokens += usage.get('prompt_tokens', 0)
self.completion_tokens += usage.get('completion_tokens', 0)
self.total_tokens = self.prompt_tokens + self.completion_tokens
metrics = self.get_metrics()
current_event.log_llm_metrics(metrics)
self.reset() # Reset for the next call
def get_metrics(self) -> Dict[str, int | float]:
return {
'total_tokens': self.total_tokens,
'prompt_tokens': self.prompt_tokens,
'completion_tokens': self.completion_tokens,
'time_elapsed': self.total_time,
'interaction_type': 'LLM',
}
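
A minimal usage sketch for the handler above (not part of this diff). It mirrors how model_utils later attaches the handler through the callbacks argument; the model name is illustrative, and an active BusinessEvent context is assumed so current_event has somewhere to log.
from langchain_openai import ChatOpenAI
from common.langchain.llm_metrics_handler import LLMMetricsHandler

# Assumed setup: OPENAI_API_KEY is set and a BusinessEvent trace is active.
metrics_handler = LLMMetricsHandler()
llm = ChatOpenAI(model='gpt-4o-mini', callbacks=[metrics_handler])

# on_llm_start / on_llm_end run around the call; token usage and timing are
# pushed to current_event.log_llm_metrics() and the handler resets itself.
answer = llm.invoke('Summarize the 1.0.10-alfa changes in one sentence.')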

View File

@@ -0,0 +1,51 @@
from langchain_openai import OpenAIEmbeddings
from typing import List, Any
import time
from common.utils.business_event_context import current_event
class TrackedOpenAIEmbeddings(OpenAIEmbeddings):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def embed_documents(self, texts: list[str]) -> list[list[float]]:
start_time = time.time()
result = super().embed_documents(texts)
end_time = time.time()
# Estimate token usage (OpenAI uses tiktoken for this)
import tiktoken
enc = tiktoken.encoding_for_model(self.model)
total_tokens = sum(len(enc.encode(text)) for text in texts)
metrics = {
'total_tokens': total_tokens,
'prompt_tokens': total_tokens, # For embeddings, all tokens are prompt tokens
'completion_tokens': 0,
'time_elapsed': end_time - start_time,
'interaction_type': 'Embedding',
}
current_event.log_llm_metrics(metrics)
return result
def embed_query(self, text: str) -> List[float]:
start_time = time.time()
result = super().embed_query(text)
end_time = time.time()
# Estimate token usage
import tiktoken
enc = tiktoken.encoding_for_model(self.model)
total_tokens = len(enc.encode(text))
metrics = {
'total_tokens': total_tokens,
'prompt_tokens': total_tokens,
'completion_tokens': 0,
'time_elapsed': end_time - start_time,
'interaction_type': 'Embedding',
}
current_event.log_llm_metrics(metrics)
return result
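
A usage sketch for the tracked embeddings wrapper (not part of this diff). The model name and texts are illustrative; OPENAI_API_KEY and an active BusinessEvent context are assumed.
from common.langchain.tracked_openai_embeddings import TrackedOpenAIEmbeddings

embeddings = TrackedOpenAIEmbeddings(model='text-embedding-3-small')
# Each call estimates token usage with tiktoken and reports it, tagged as
# 'Embedding', to the active business event.
vectors = embeddings.embed_documents(['first chunk', 'second chunk'])
query_vector = embeddings.embed_query('what changed in this release?')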

View File

@@ -0,0 +1,27 @@
import time
from common.utils.business_event_context import current_event
def tracked_transcribe(client, *args, **kwargs):
start_time = time.time()
# Extract the file and model from kwargs if present, otherwise use defaults
file = kwargs.get('file')
model = kwargs.get('model', 'whisper-1')
duration = kwargs.pop('duration', 600)
result = client.audio.transcriptions.create(*args, **kwargs)
end_time = time.time()
# Token usage for transcriptions is actually the duration in seconds we pass, as the whisper model is priced per second transcribed
metrics = {
'total_tokens': duration,
'prompt_tokens': 0, # For transcriptions, all tokens are considered "completion"
'completion_tokens': duration,
'time_elapsed': end_time - start_time,
'interaction_type': 'ASR',
}
current_event.log_llm_metrics(metrics)
return result
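
A usage sketch for tracked_transcribe (not part of this diff). The file path is illustrative; duration is the known length of the audio in seconds, which the wrapper logs as the billed unit before passing the remaining arguments to the OpenAI transcription API.
from openai import OpenAI
from common.langchain.tracked_transcribe import tracked_transcribe

client = OpenAI()  # assumes OPENAI_API_KEY in the environment
with open('recording.mp3', 'rb') as audio_file:
    transcript = tracked_transcribe(
        client,
        file=audio_file,
        model='whisper-1',
        duration=600,  # seconds of audio; popped before the API call
    )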

View File

@@ -28,9 +28,10 @@ class DocumentVersion(db.Model):
id = db.Column(db.Integer, primary_key=True)
doc_id = db.Column(db.Integer, db.ForeignKey(Document.id), nullable=False)
url = db.Column(db.String(200), nullable=True)
file_location = db.Column(db.String(255), nullable=True)
file_name = db.Column(db.String(200), nullable=True)
bucket_name = db.Column(db.String(255), nullable=True)
object_name = db.Column(db.String(200), nullable=True)
file_type = db.Column(db.String(20), nullable=True)
file_size = db.Column(db.Float, nullable=True)
language = db.Column(db.String(2), nullable=False)
user_context = db.Column(db.Text, nullable=True)
system_context = db.Column(db.Text, nullable=True)

View File

@@ -0,0 +1,110 @@
from common.extensions import db
class BusinessEventLog(db.Model):
__bind_key__ = 'public'
__table_args__ = {'schema': 'public'}
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, nullable=False)
event_type = db.Column(db.String(50), nullable=False)
tenant_id = db.Column(db.Integer, nullable=False)
trace_id = db.Column(db.String(50), nullable=False)
span_id = db.Column(db.String(50))
span_name = db.Column(db.String(50))
parent_span_id = db.Column(db.String(50))
document_version_id = db.Column(db.Integer)
document_version_file_size = db.Column(db.Float)
chat_session_id = db.Column(db.String(50))
interaction_id = db.Column(db.Integer)
environment = db.Column(db.String(20))
llm_metrics_total_tokens = db.Column(db.Integer)
llm_metrics_prompt_tokens = db.Column(db.Integer)
llm_metrics_completion_tokens = db.Column(db.Integer)
llm_metrics_total_time = db.Column(db.Float)
llm_metrics_call_count = db.Column(db.Integer)
llm_interaction_type = db.Column(db.String(20))
message = db.Column(db.Text)
license_usage_id = db.Column(db.Integer, db.ForeignKey('public.license_usage.id'), nullable=True)
license_usage = db.relationship('LicenseUsage', backref='events')
class License(db.Model):
__bind_key__ = 'public'
__table_args__ = {'schema': 'public'}
id = db.Column(db.Integer, primary_key=True)
tenant_id = db.Column(db.Integer, db.ForeignKey('public.tenant.id'), nullable=False)
tier_id = db.Column(db.Integer, db.ForeignKey('public.license_tier.id'), nullable=False)  # e.g. 'small', 'medium', 'custom'
start_date = db.Column(db.Date, nullable=False)
end_date = db.Column(db.Date, nullable=True)
currency = db.Column(db.String(20), nullable=False)
yearly_payment = db.Column(db.Boolean, nullable=False, default=False)
basic_fee = db.Column(db.Float, nullable=False)
max_storage_mb = db.Column(db.Integer, nullable=False)
additional_storage_price = db.Column(db.Float, nullable=False)
additional_storage_bucket = db.Column(db.Integer, nullable=False)
included_embedding_mb = db.Column(db.Integer, nullable=False)
additional_embedding_price = db.Column(db.Numeric(10, 4), nullable=False)
additional_embedding_bucket = db.Column(db.Integer, nullable=False)
included_interaction_tokens = db.Column(db.Integer, nullable=False)
additional_interaction_token_price = db.Column(db.Numeric(10, 4), nullable=False)
additional_interaction_bucket = db.Column(db.Integer, nullable=False)
overage_embedding = db.Column(db.Float, nullable=False, default=0)
overage_interaction = db.Column(db.Float, nullable=False, default=0)
tenant = db.relationship('Tenant', back_populates='licenses')
license_tier = db.relationship('LicenseTier', back_populates='licenses')
usages = db.relationship('LicenseUsage', order_by='LicenseUsage.period_start_date', back_populates='license')
class LicenseTier(db.Model):
__bind_key__ = 'public'
__table_args__ = {'schema': 'public'}
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), nullable=False)
version = db.Column(db.String(50), nullable=False)
start_date = db.Column(db.Date, nullable=False)
end_date = db.Column(db.Date, nullable=True)
basic_fee_d = db.Column(db.Float, nullable=True)
basic_fee_e = db.Column(db.Float, nullable=True)
max_storage_mb = db.Column(db.Integer, nullable=False)
additional_storage_price_d = db.Column(db.Numeric(10, 4), nullable=False)
additional_storage_price_e = db.Column(db.Numeric(10, 4), nullable=False)
additional_storage_bucket = db.Column(db.Integer, nullable=False)
included_embedding_mb = db.Column(db.Integer, nullable=False)
additional_embedding_price_d = db.Column(db.Numeric(10, 4), nullable=False)
additional_embedding_price_e = db.Column(db.Numeric(10, 4), nullable=False)
additional_embedding_bucket = db.Column(db.Integer, nullable=False)
included_interaction_tokens = db.Column(db.Integer, nullable=False)
additional_interaction_token_price_d = db.Column(db.Numeric(10, 4), nullable=False)
additional_interaction_token_price_e = db.Column(db.Numeric(10, 4), nullable=False)
additional_interaction_bucket = db.Column(db.Integer, nullable=False)
standard_overage_embedding = db.Column(db.Float, nullable=False, default=0)
standard_overage_interaction = db.Column(db.Float, nullable=False, default=0)
licenses = db.relationship('License', back_populates='license_tier')
class LicenseUsage(db.Model):
__bind_key__ = 'public'
__table_args__ = {'schema': 'public'}
id = db.Column(db.Integer, primary_key=True)
license_id = db.Column(db.Integer, db.ForeignKey('public.license.id'), nullable=False)
tenant_id = db.Column(db.Integer, db.ForeignKey('public.tenant.id'), nullable=False)
storage_mb_used = db.Column(db.Integer, default=0)
embedding_mb_used = db.Column(db.Integer, default=0)
embedding_prompt_tokens_used = db.Column(db.Integer, default=0)
embedding_completion_tokens_used = db.Column(db.Integer, default=0)
embedding_total_tokens_used = db.Column(db.Integer, default=0)
interaction_prompt_tokens_used = db.Column(db.Integer, default=0)
interaction_completion_tokens_used = db.Column(db.Integer, default=0)
interaction_total_tokens_used = db.Column(db.Integer, default=0)
period_start_date = db.Column(db.Date, nullable=False)
period_end_date = db.Column(db.Date, nullable=False)
license = db.relationship('License', back_populates='usages')
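
A hedged sketch of how these models might be queried for the usage record covering today; the helper below is illustrative and assumes one LicenseUsage row per billing period.
from datetime import date
from common.models.entitlements import LicenseUsage

def usage_for_today(tenant_id: int) -> LicenseUsage | None:
    # Illustrative lookup: the row whose billing period spans today.
    today = date.today()
    return (LicenseUsage.query
            .filter(LicenseUsage.tenant_id == tenant_id,
                    LicenseUsage.period_start_date <= today,
                    LicenseUsage.period_end_date >= today)
            .first())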

View File

@@ -1,21 +0,0 @@
from common.extensions import db
class BusinessEventLog(db.Model):
__bind_key__ = 'public'
__table_args__ = {'schema': 'public'}
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, nullable=False)
event_type = db.Column(db.String(50), nullable=False)
tenant_id = db.Column(db.Integer, nullable=False)
trace_id = db.Column(db.String(50), nullable=False)
span_id = db.Column(db.String(50))
span_name = db.Column(db.String(50))
parent_span_id = db.Column(db.String(50))
document_version_id = db.Column(db.Integer)
chat_session_id = db.Column(db.String(50))
interaction_id = db.Column(db.Integer)
environment = db.Column(db.String(20))
message = db.Column(db.Text)
# Add any other fields relevant for invoicing or warnings

View File

@@ -1,8 +1,12 @@
from datetime import date
from common.extensions import db
from flask_security import UserMixin, RoleMixin
from sqlalchemy.dialects.postgresql import ARRAY
import sqlalchemy as sa
from common.models.entitlements import License
class Tenant(db.Model):
"""Tenant model"""
@@ -50,9 +54,6 @@ class Tenant(db.Model):
fallback_algorithms = db.Column(ARRAY(sa.String(50)), nullable=True)
# Licensing Information
license_start_date = db.Column(db.Date, nullable=True)
license_end_date = db.Column(db.Date, nullable=True)
allowed_monthly_interactions = db.Column(db.Integer, nullable=True)
encrypted_chat_api_key = db.Column(db.String(500), nullable=True)
encrypted_api_key = db.Column(db.String(500), nullable=True)
@@ -60,9 +61,25 @@ class Tenant(db.Model):
embed_tuning = db.Column(db.Boolean, nullable=True, default=False)
rag_tuning = db.Column(db.Boolean, nullable=True, default=False)
# Entitlements
currency = db.Column(db.String(20), nullable=True)
usage_email = db.Column(db.String(255), nullable=True)
storage_dirty = db.Column(db.Boolean, nullable=True, default=False)
# Relations
users = db.relationship('User', backref='tenant')
domains = db.relationship('TenantDomain', backref='tenant')
licenses = db.relationship('License', back_populates='tenant')
license_usages = db.relationship('LicenseUsage', backref='tenant')
@property
def current_license(self):
today = date.today()
return License.query.filter(
License.tenant_id == self.id,
License.start_date <= today,
(License.end_date.is_(None) | (License.end_date >= today))
).order_by(License.start_date.desc()).first()
def __repr__(self):
return f"<Tenant {self.id}: {self.name}>"
@@ -91,11 +108,10 @@ class Tenant(db.Model):
'chat_RAG_temperature': self.chat_RAG_temperature,
'chat_no_RAG_temperature': self.chat_no_RAG_temperature,
'fallback_algorithms': self.fallback_algorithms,
'license_start_date': self.license_start_date,
'license_end_date': self.license_end_date,
'allowed_monthly_interactions': self.allowed_monthly_interactions,
'embed_tuning': self.embed_tuning,
'rag_tuning': self.rag_tuning,
'currency': self.currency,
'usage_email': self.usage_email,
}

View File

@@ -8,7 +8,7 @@ from portkey_ai import Portkey, Config
import logging
from .business_event_context import BusinessEventContext
from common.models.monitoring import BusinessEventLog
from common.models.entitlements import BusinessEventLog
from common.extensions import db
@@ -25,11 +25,20 @@ class BusinessEvent:
self.span_name = None
self.parent_span_id = None
self.document_version_id = kwargs.get('document_version_id')
self.document_version_file_size = kwargs.get('document_version_file_size')
self.chat_session_id = kwargs.get('chat_session_id')
self.interaction_id = kwargs.get('interaction_id')
self.environment = os.environ.get("FLASK_ENV", "development")
self.span_counter = 0
self.spans = []
self.llm_metrics = {
'total_tokens': 0,
'prompt_tokens': 0,
'completion_tokens': 0,
'total_time': 0,
'call_count': 0,
'interaction_type': None
}
def update_attribute(self, attribute: str, value: any):
if hasattr(self, attribute):
@@ -37,6 +46,22 @@ class BusinessEvent:
else:
raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{attribute}'")
def update_llm_metrics(self, metrics: dict):
self.llm_metrics['total_tokens'] += metrics['total_tokens']
self.llm_metrics['prompt_tokens'] += metrics['prompt_tokens']
self.llm_metrics['completion_tokens'] += metrics['completion_tokens']
self.llm_metrics['total_time'] += metrics['time_elapsed']
self.llm_metrics['call_count'] += 1
self.llm_metrics['interaction_type'] = metrics['interaction_type']
def reset_llm_metrics(self):
self.llm_metrics['total_tokens'] = 0
self.llm_metrics['prompt_tokens'] = 0
self.llm_metrics['completion_tokens'] = 0
self.llm_metrics['total_time'] = 0
self.llm_metrics['call_count'] = 0
self.llm_metrics['interaction_type'] = None
@contextmanager
def create_span(self, span_name: str):
# The create_span method is designed to be used as a context manager. We want to perform some actions when
@@ -61,6 +86,9 @@ class BusinessEvent:
try:
yield
finally:
if self.llm_metrics['call_count'] > 0:
self.log_final_metrics()
self.reset_llm_metrics()
self.log(f"Ending span {span_name}")
# Restore the previous span info
if self.spans:
@@ -80,9 +108,10 @@ class BusinessEvent:
'span_name': self.span_name,
'parent_span_id': self.parent_span_id,
'document_version_id': self.document_version_id,
'document_version_file_size': self.document_version_file_size,
'chat_session_id': self.chat_session_id,
'interaction_id': self.interaction_id,
'environment': self.environment
'environment': self.environment,
}
# log to Graylog
getattr(logger, level)(message, extra=log_data)
@@ -97,6 +126,7 @@ class BusinessEvent:
span_name=self.span_name,
parent_span_id=self.parent_span_id,
document_version_id=self.document_version_id,
document_version_file_size=self.document_version_file_size,
chat_session_id=self.chat_session_id,
interaction_id=self.interaction_id,
environment=self.environment,
@@ -105,10 +135,112 @@ class BusinessEvent:
db.session.add(event_log)
db.session.commit()
def log_llm_metrics(self, metrics: dict, level: str = 'info'):
self.update_llm_metrics(metrics)
message = "LLM Metrics"
logger = logging.getLogger('business_events')
log_data = {
'event_type': self.event_type,
'tenant_id': self.tenant_id,
'trace_id': self.trace_id,
'span_id': self.span_id,
'span_name': self.span_name,
'parent_span_id': self.parent_span_id,
'document_version_id': self.document_version_id,
'document_version_file_size': self.document_version_file_size,
'chat_session_id': self.chat_session_id,
'interaction_id': self.interaction_id,
'environment': self.environment,
'llm_metrics_total_tokens': metrics['total_tokens'],
'llm_metrics_prompt_tokens': metrics['prompt_tokens'],
'llm_metrics_completion_tokens': metrics['completion_tokens'],
'llm_metrics_total_time': metrics['time_elapsed'],
'llm_interaction_type': metrics['interaction_type'],
}
# log to Graylog
getattr(logger, level)(message, extra=log_data)
# Log to database
event_log = BusinessEventLog(
timestamp=dt.now(tz=tz.utc),
event_type=self.event_type,
tenant_id=self.tenant_id,
trace_id=self.trace_id,
span_id=self.span_id,
span_name=self.span_name,
parent_span_id=self.parent_span_id,
document_version_id=self.document_version_id,
document_version_file_size=self.document_version_file_size,
chat_session_id=self.chat_session_id,
interaction_id=self.interaction_id,
environment=self.environment,
llm_metrics_total_tokens=metrics['total_tokens'],
llm_metrics_prompt_tokens=metrics['prompt_tokens'],
llm_metrics_completion_tokens=metrics['completion_tokens'],
llm_metrics_total_time=metrics['time_elapsed'],
llm_interaction_type=metrics['interaction_type'],
message=message
)
db.session.add(event_log)
db.session.commit()
def log_final_metrics(self, level: str = 'info'):
logger = logging.getLogger('business_events')
message = "Final LLM Metrics"
log_data = {
'event_type': self.event_type,
'tenant_id': self.tenant_id,
'trace_id': self.trace_id,
'span_id': self.span_id,
'span_name': self.span_name,
'parent_span_id': self.parent_span_id,
'document_version_id': self.document_version_id,
'document_version_file_size': self.document_version_file_size,
'chat_session_id': self.chat_session_id,
'interaction_id': self.interaction_id,
'environment': self.environment,
'llm_metrics_total_tokens': self.llm_metrics['total_tokens'],
'llm_metrics_prompt_tokens': self.llm_metrics['prompt_tokens'],
'llm_metrics_completion_tokens': self.llm_metrics['completion_tokens'],
'llm_metrics_total_time': self.llm_metrics['total_time'],
'llm_metrics_call_count': self.llm_metrics['call_count'],
'llm_interaction_type': self.llm_metrics['interaction_type'],
}
# log to Graylog
getattr(logger, level)(message, extra=log_data)
# Log to database
event_log = BusinessEventLog(
timestamp=dt.now(tz=tz.utc),
event_type=self.event_type,
tenant_id=self.tenant_id,
trace_id=self.trace_id,
span_id=self.span_id,
span_name=self.span_name,
parent_span_id=self.parent_span_id,
document_version_id=self.document_version_id,
document_version_file_size=self.document_version_file_size,
chat_session_id=self.chat_session_id,
interaction_id=self.interaction_id,
environment=self.environment,
llm_metrics_total_tokens=self.llm_metrics['total_tokens'],
llm_metrics_prompt_tokens=self.llm_metrics['prompt_tokens'],
llm_metrics_completion_tokens=self.llm_metrics['completion_tokens'],
llm_metrics_total_time=self.llm_metrics['total_time'],
llm_metrics_call_count=self.llm_metrics['call_count'],
llm_interaction_type=self.llm_metrics['interaction_type'],
message=message
)
db.session.add(event_log)
db.session.commit()
def __enter__(self):
self.log(f'Starting Trace for {self.event_type}')
return BusinessEventContext(self).__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if self.llm_metrics['call_count'] > 0:
self.log_final_metrics()
self.reset_llm_metrics()
self.log(f'Ending Trace for {self.event_type}')
return BusinessEventContext(self).__exit__(exc_type, exc_val, exc_tb)
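
A hedged sketch of how a worker might drive BusinessEvent so the aggregated LLM metrics are flushed: per-call metrics arrive via log_llm_metrics, and log_final_metrics fires when a span or the trace closes. The module path and constructor arguments are assumptions.
from common.utils.business_events import BusinessEvent  # module path assumed

event = BusinessEvent('document_embedding', tenant_id=42,
                      document_version_id=7)  # arguments illustrative
with event:
    with event.create_span('chunking'):
        pass  # split the document into chunks
    with event.create_span('embedding'):
        pass  # TrackedOpenAIEmbeddings reports per-call metrics here
# log_final_metrics has now written aggregated totals per span and per trace.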

View File

@@ -12,6 +12,7 @@ import requests
from urllib.parse import urlparse, unquote
import os
from .eveai_exceptions import EveAIInvalidLanguageException, EveAIDoubleURLException, EveAIUnsupportedFileType
from ..models.user import Tenant
def create_document_stack(api_input, file, filename, extension, tenant_id):
@@ -81,19 +82,19 @@ def create_version_for_document(document, url, language, user_context, user_meta
set_logging_information(new_doc_vers, dt.now(tz.utc))
mark_tenant_storage_dirty(document.tenant_id)
return new_doc_vers
def upload_file_for_version(doc_vers, file, extension, tenant_id):
doc_vers.file_type = extension
doc_vers.file_name = doc_vers.calc_file_name()
doc_vers.file_location = doc_vers.calc_file_location()
# Normally, the tenant bucket should exist. But let's be on the safe side if a migration took place.
minio_client.create_tenant_bucket(tenant_id)
try:
minio_client.upload_document_file(
bn, on, size = minio_client.upload_document_file(
tenant_id,
doc_vers.doc_id,
doc_vers.language,
@@ -101,6 +102,10 @@ def upload_file_for_version(doc_vers, file, extension, tenant_id):
doc_vers.file_name,
file
)
doc_vers.bucket_name = bn
doc_vers.object_name = on
doc_vers.file_size_mb = size / 1048576  # Convert bytes to MiB (1024 * 1024)
db.session.commit()
current_app.logger.info(f'Successfully saved document to MinIO for tenant {tenant_id} for '
f'document version {doc_vers.id} while uploading file.')
@@ -336,3 +341,10 @@ def refresh_document(doc_id):
}
return refresh_document_with_info(doc_id, api_input)
# Function triggered when a document_version is created or updated
def mark_tenant_storage_dirty(tenant_id):
tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
tenant.storage_dirty = True
db.session.commit()
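
For reference, the divisor used above corresponds to MiB rather than MB, matching the changelog; a trivial helper making the unit explicit (illustrative, not part of this diff):
def bytes_to_mib(size_in_bytes: int) -> float:
    # 1 MiB = 1024 * 1024 bytes = 1048576 bytes
    return size_in_bytes / (1024 * 1024)

assert bytes_to_mib(1048576) == 1.0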

View File

@@ -34,3 +34,10 @@ class EveAIUnsupportedFileType(EveAIException):
super().__init__(message, status_code, payload)
class EveAINoLicenseForTenant(EveAIException):
"""Raised when no active license for a tenant is provided"""
def __init__(self, message="No license for tenant found", status_code=400, payload=None):
super().__init__(message, status_code, payload)

View File

@@ -50,7 +50,7 @@ class MinioClient:
self.client.put_object(
bucket_name, object_name, io.BytesIO(file_data), len(file_data)
)
return True
return bucket_name, object_name, len(file_data)
except S3Error as err:
raise Exception(f"Error occurred while uploading file: {err}")

View File

@@ -11,6 +11,9 @@ from openai import OpenAI
from portkey_ai import createHeaders, PORTKEY_GATEWAY_URL
from portkey_ai.langchain.portkey_langchain_callback_handler import LangchainCallbackHandler
from common.langchain.llm_metrics_handler import LLMMetricsHandler
from common.langchain.tracked_openai_embeddings import TrackedOpenAIEmbeddings
from common.langchain.tracked_transcribe import tracked_transcribe
from common.models.document import EmbeddingSmallOpenAI, EmbeddingLargeOpenAI
from common.models.user import Tenant
from config.model_config import MODEL_CONFIG
@@ -48,6 +51,8 @@ class ModelVariables(MutableMapping):
self._transcription_client = None
self._prompt_templates = {}
self._embedding_db_model = None
self.llm_metrics_handler = LLMMetricsHandler()
self._transcription_client = None
def _initialize_variables(self):
variables = {}
@@ -89,26 +94,20 @@ class ModelVariables(MutableMapping):
if variables['tool_calling_supported']:
variables['cited_answer_cls'] = CitedAnswer
variables['max_compression_duration'] = current_app.config['MAX_COMPRESSION_DURATION']
variables['max_transcription_duration'] = current_app.config['MAX_TRANSCRIPTION_DURATION']
variables['compression_cpu_limit'] = current_app.config['COMPRESSION_CPU_LIMIT']
variables['compression_process_delay'] = current_app.config['COMPRESSION_PROCESS_DELAY']
return variables
@property
def embedding_model(self):
portkey_metadata = self.get_portkey_metadata()
portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
provider=self._variables['embedding_provider'],
metadata=portkey_metadata,
trace_id=current_event.trace_id,
span_id=current_event.span_id,
span_name=current_event.span_name,
parent_span_id=current_event.parent_span_id
)
api_key = os.getenv('OPENAI_API_KEY')
model = self._variables['embedding_model']
self._embedding_model = OpenAIEmbeddings(api_key=api_key,
self._embedding_model = TrackedOpenAIEmbeddings(api_key=api_key,
model=model,
base_url=PORTKEY_GATEWAY_URL,
default_headers=portkey_headers)
)
self._embedding_db_model = EmbeddingSmallOpenAI \
if model == 'text-embedding-3-small' \
else EmbeddingLargeOpenAI
@@ -117,49 +116,22 @@ class ModelVariables(MutableMapping):
@property
def llm(self):
portkey_headers = self.get_portkey_headers_for_llm()
api_key = self.get_api_key_for_llm()
self._llm = ChatOpenAI(api_key=api_key,
model=self._variables['llm_model'],
temperature=self._variables['RAG_temperature'],
base_url=PORTKEY_GATEWAY_URL,
default_headers=portkey_headers)
callbacks=[self.llm_metrics_handler])
return self._llm
@property
def llm_no_rag(self):
portkey_headers = self.get_portkey_headers_for_llm()
api_key = self.get_api_key_for_llm()
self._llm_no_rag = ChatOpenAI(api_key=api_key,
model=self._variables['llm_model'],
temperature=self._variables['RAG_temperature'],
base_url=PORTKEY_GATEWAY_URL,
default_headers=portkey_headers)
callbacks=[self.llm_metrics_handler])
return self._llm_no_rag
def get_portkey_headers_for_llm(self):
portkey_metadata = self.get_portkey_metadata()
portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
metadata=portkey_metadata,
provider=self._variables['llm_provider'],
trace_id=current_event.trace_id,
span_id=current_event.span_id,
span_name=current_event.span_name,
parent_span_id=current_event.parent_span_id
)
return portkey_headers
def get_portkey_metadata(self):
environment = os.getenv('FLASK_ENV', 'development')
portkey_metadata = {'tenant_id': str(self.tenant.id),
'environment': environment,
'trace_id': current_event.trace_id,
'span_id': current_event.span_id,
'span_name': current_event.span_name,
'parent_span_id': current_event.parent_span_id,
}
return portkey_metadata
def get_api_key_for_llm(self):
if self._variables['llm_provider'] == 'openai':
api_key = os.getenv('OPENAI_API_KEY')
@@ -168,57 +140,16 @@ class ModelVariables(MutableMapping):
return api_key
# def _initialize_llm(self):
#
#
# if self._variables['llm_provider'] == 'openai':
# portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
# metadata=portkey_metadata,
# provider='openai')
#
# self._llm = ChatOpenAI(api_key=api_key,
# model=self._variables['llm_model'],
# temperature=self._variables['RAG_temperature'],
# base_url=PORTKEY_GATEWAY_URL,
# default_headers=portkey_headers)
# self._llm_no_rag = ChatOpenAI(api_key=api_key,
# model=self._variables['llm_model'],
# temperature=self._variables['no_RAG_temperature'],
# base_url=PORTKEY_GATEWAY_URL,
# default_headers=portkey_headers)
# self._variables['tool_calling_supported'] = self._variables['llm_model'] in ['gpt-4o', 'gpt-4o-mini']
# elif self._variables['llm_provider'] == 'anthropic':
# api_key = os.getenv('ANTHROPIC_API_KEY')
# llm_model_ext = os.getenv('ANTHROPIC_LLM_VERSIONS', {}).get(self._variables['llm_model'])
# self._llm = ChatAnthropic(api_key=api_key,
# model=llm_model_ext,
# temperature=self._variables['RAG_temperature'])
# self._llm_no_rag = ChatAnthropic(api_key=api_key,
# model=llm_model_ext,
# temperature=self._variables['RAG_temperature'])
# self._variables['tool_calling_supported'] = True
# else:
# raise ValueError(f"Invalid chat provider: {self._variables['llm_provider']}")
@property
def transcription_client(self):
environment = os.getenv('FLASK_ENV', 'development')
portkey_metadata = self.get_portkey_metadata()
portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
metadata=portkey_metadata,
provider='openai',
trace_id=current_event.trace_id,
span_id=current_event.span_id,
span_name=current_event.span_name,
parent_span_id=current_event.parent_span_id
)
api_key = os.getenv('OPENAI_API_KEY')
self._transcription_client = OpenAI(api_key=api_key,
base_url=PORTKEY_GATEWAY_URL,
default_headers=portkey_headers)
self._transcription_client = OpenAI(api_key=api_key)
self._variables['transcription_model'] = 'whisper-1'
return self._transcription_client
def transcribe(self, *args, **kwargs):
return tracked_transcribe(self._transcription_client, *args, **kwargs)
@property
def embedding_db_model(self):
if self._embedding_db_model is None:

View File

@@ -1,99 +0,0 @@
import requests
import json
from typing import Optional
# Define a function to make the GET request
def get_metadata_grouped_data(
api_key: str,
metadata_key: str,
time_of_generation_min: Optional[str] = None,
time_of_generation_max: Optional[str] = None,
total_units_min: Optional[int] = None,
total_units_max: Optional[int] = None,
cost_min: Optional[float] = None,
cost_max: Optional[float] = None,
prompt_token_min: Optional[int] = None,
prompt_token_max: Optional[int] = None,
completion_token_min: Optional[int] = None,
completion_token_max: Optional[int] = None,
status_code: Optional[str] = None,
weighted_feedback_min: Optional[float] = None,
weighted_feedback_max: Optional[float] = None,
virtual_keys: Optional[str] = None,
configs: Optional[str] = None,
workspace_slug: Optional[str] = None,
api_key_ids: Optional[str] = None,
current_page: Optional[int] = 1,
page_size: Optional[int] = 20,
metadata: Optional[str] = None,
ai_org_model: Optional[str] = None,
trace_id: Optional[str] = None,
span_id: Optional[str] = None,
):
url = f"https://api.portkey.ai/v1/analytics/groups/metadata/{metadata_key}"
# Set up query parameters
params = {
"time_of_generation_min": time_of_generation_min,
"time_of_generation_max": time_of_generation_max,
"total_units_min": total_units_min,
"total_units_max": total_units_max,
"cost_min": cost_min,
"cost_max": cost_max,
"prompt_token_min": prompt_token_min,
"prompt_token_max": prompt_token_max,
"completion_token_min": completion_token_min,
"completion_token_max": completion_token_max,
"status_code": status_code,
"weighted_feedback_min": weighted_feedback_min,
"weighted_feedback_max": weighted_feedback_max,
"virtual_keys": virtual_keys,
"configs": configs,
"workspace_slug": workspace_slug,
"api_key_ids": api_key_ids,
"current_page": current_page,
"page_size": page_size,
"metadata": metadata,
"ai_org_model": ai_org_model,
"trace_id": trace_id,
"span_id": span_id,
}
# Remove any keys with None values
params = {k: v for k, v in params.items() if v is not None}
# Set up the headers
headers = {
"Authorization": f"Bearer {api_key}",
"Content-Type": "application/json"
}
# Make the GET request
response = requests.get(url, headers=headers, params=params)
# Check for successful response
if response.status_code == 200:
return response.json() # Return JSON data
else:
response.raise_for_status() # Raise an exception for errors
# Example usage
# Replace 'your_api_key' and 'your_metadata_key' with actual values
api_key = 'your_api_key'
metadata_key = 'your_metadata_key'
try:
data = get_metadata_grouped_data(
api_key=api_key,
metadata_key=metadata_key,
time_of_generation_min="2024-08-23T15:50:23+05:30",
time_of_generation_max="2024-09-23T15:50:23+05:30",
total_units_min=100,
total_units_max=1000,
cost_min=10,
cost_max=100,
status_code="200,201"
)
print(json.dumps(data, indent=4))
except Exception as e:
print(f"Error occurred: {str(e)}")

View File

@@ -44,7 +44,7 @@ def form_validation_failed(request, form):
for fieldName, errorMessages in form.errors.items():
for err in errorMessages:
flash(f"Error in {fieldName}: {err}", 'danger')
current_app.logger.debug(f"Error in {fieldName}: {err}", 'danger')
current_app.logger.debug(f"Error in {fieldName}: {err}")
def form_to_dict(form):

View File

@@ -59,6 +59,9 @@ class Config(object):
# supported languages
SUPPORTED_LANGUAGES = ['en', 'fr', 'nl', 'de', 'es']
# supported currencies
SUPPORTED_CURRENCIES = ['€', '$']
# supported embedding models and LLMs
SUPPORTED_EMBEDDINGS = ['openai.text-embedding-3-small', 'openai.text-embedding-3-large', 'mistral.mistral-embed']
SUPPORTED_LLMS = ['openai.gpt-4o', 'anthropic.claude-3-5-sonnet', 'openai.gpt-4o-mini']
@@ -147,6 +150,15 @@ class Config(object):
TENANT_TYPES = ['Active', 'Demo', 'Inactive', 'Test']
# The maximum number of seconds allowed for audio compression (to save resources)
MAX_COMPRESSION_DURATION = 60*10 # 10 minutes
# The maximum number of seconds allowed for transcribing audio
MAX_TRANSCRIPTION_DURATION = 60*10 # 10 minutes
# Maximum CPU usage for a compression task
COMPRESSION_CPU_LIMIT = 50
# Delay between compressing chunks in seconds
COMPRESSION_PROCESS_DELAY = 1
class DevConfig(Config):
DEVELOPMENT = True
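
A hedged sketch of how the new compression settings might be consumed when processing audio in chunks; the chunk iterable and compress_chunk callable are assumptions, and COMPRESSION_CPU_LIMIT would be applied by whatever spawns the actual encoder.
import time
from flask import current_app

def compress_in_chunks(chunks, compress_chunk):
    max_duration = current_app.config['MAX_COMPRESSION_DURATION']
    delay = current_app.config['COMPRESSION_PROCESS_DELAY']
    started = time.time()
    for chunk in chunks:
        if time.time() - started > max_duration:
            raise TimeoutError('Compression exceeded MAX_COMPRESSION_DURATION')
        compress_chunk(chunk)  # assumed callable doing the actual encoding
        time.sleep(delay)      # brief pause between chunks to relieve the CPU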

View File

@@ -10,7 +10,7 @@ from common.extensions import (db, migrate, bootstrap, security, mail, login_man
minio_client, simple_encryption, metrics)
from common.models.user import User, Role, Tenant, TenantDomain
import common.models.interaction
import common.models.monitoring
import common.models.entitlements
import common.models.document
from common.utils.nginx_utils import prefixed_url_for
from config.logging_config import LOGGING
@@ -134,6 +134,8 @@ def register_blueprints(app):
app.register_blueprint(security_bp)
from .views.interaction_views import interaction_bp
app.register_blueprint(interaction_bp)
from .views.entitlements_views import entitlements_bp
app.register_blueprint(entitlements_bp)
from .views.healthz_views import healthz_bp, init_healtz
app.register_blueprint(healthz_bp)
init_healtz(app)

View File

@@ -0,0 +1,71 @@
{% extends 'base.html' %}
{% from "macros.html" import render_field, render_included_field %}
{% block title %}Edit License for Current Tenant{% endblock %}
{% block content_title %}Edit License for Current Tenant{% endblock %}
{% block content_description %}Edit a License based on the selected License Tier for the current Tenant{% endblock %}
{% block content %}
<form method="post">
{{ form.hidden_tag() }}
{% set main_fields = ['start_date', 'end_date', 'currency', 'yearly_payment', 'basic_fee'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=['currency'], include_fields=main_fields) }}
{% endfor %}
<!-- Nav Tabs -->
<div class="row mt-5">
<div class="col-lg-12">
<div class="nav-wrapper position-relative end-0">
<ul class="nav nav-pills nav-fill p-1" role="tablist">
<li class="nav-item" role="presentation">
<a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#storage-tab" role="tab" aria-controls="model-info" aria-selected="true">
Storage
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-tab" role="tab" aria-controls="license-info" aria-selected="false">
Embedding
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#interaction-tab" role="tab" aria-controls="chunking" aria-selected="false">
Interaction
</a>
</li>
</ul>
</div>
<div class="tab-content tab-space">
<!-- Storage Tab -->
<div class="tab-pane fade show active" id="storage-tab" role="tabpanel">
{% set storage_fields = ['max_storage_mb', 'additional_storage_price', 'additional_storage_bucket'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=storage_fields) }}
{% endfor %}
</div>
<!-- Embedding Tab -->
<div class="tab-pane fade" id="embedding-tab" role="tabpanel">
{% set embedding_fields = ['included_embedding_mb', 'additional_embedding_price', 'additional_embedding_bucket'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=embedding_fields) }}
{% endfor %}
</div>
<!-- Interaction Tab -->
<div class="tab-pane fade" id="interaction-tab" role="tabpanel">
{% set interaction_fields = ['included_interaction_tokens', 'additional_interaction_token_price', 'additional_interaction_bucket'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=interaction_fields) }}
{% endfor %}
</div>
</div>
</div>
</div>
<button type="submit" class="btn btn-primary">Save License</button>
</form>
{% endblock %}
{% block content_footer %}
{% endblock %}

View File

@@ -0,0 +1,71 @@
{% extends 'base.html' %}
{% from "macros.html" import render_field, render_included_field %}
{% block title %}Create or Edit License for Current Tenant{% endblock %}
{% block content_title %}Create or Edit License for Current Tenant{% endblock %}
{% block content_description %}Create or Edit a new License based on the selected License Tier for the current Tenant{% endblock %}
{% block content %}
<form method="post">
{{ form.hidden_tag() }}
{% set main_fields = ['start_date', 'end_date', 'currency', 'yearly_payment', 'basic_fee'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=ext_disabled_fields + ['currency'], include_fields=main_fields) }}
{% endfor %}
<!-- Nav Tabs -->
<div class="row mt-5">
<div class="col-lg-12">
<div class="nav-wrapper position-relative end-0">
<ul class="nav nav-pills nav-fill p-1" role="tablist">
<li class="nav-item" role="presentation">
<a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#storage-tab" role="tab" aria-controls="model-info" aria-selected="true">
Storage
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-tab" role="tab" aria-controls="license-info" aria-selected="false">
Embedding
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#interaction-tab" role="tab" aria-controls="chunking" aria-selected="false">
Interaction
</a>
</li>
</ul>
</div>
<div class="tab-content tab-space">
<!-- Storage Tab -->
<div class="tab-pane fade show active" id="storage-tab" role="tabpanel">
{% set storage_fields = ['max_storage_mb', 'additional_storage_price', 'additional_storage_bucket'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=ext_disabled_fields, include_fields=storage_fields) }}
{% endfor %}
</div>
<!-- Embedding Tab -->
<div class="tab-pane fade" id="embedding-tab" role="tabpanel">
{% set embedding_fields = ['included_embedding_mb', 'additional_embedding_price', 'additional_embedding_bucket', 'overage_embedding'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=ext_disabled_fields, include_fields=embedding_fields) }}
{% endfor %}
</div>
<!-- Interaction Tab -->
<div class="tab-pane fade" id="interaction-tab" role="tabpanel">
{% set interaction_fields = ['included_interaction_tokens', 'additional_interaction_token_price', 'additional_interaction_bucket', 'overage_interaction'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=ext_disabled_fields, include_fields=interaction_fields) }}
{% endfor %}
</div>
</div>
</div>
</div>
<button type="submit" class="btn btn-primary">Save License</button>
</form>
{% endblock %}
{% block content_footer %}
{% endblock %}

View File

@@ -0,0 +1,71 @@
{% extends 'base.html' %}
{% from "macros.html" import render_field, render_included_field %}
{% block title %}Register or Edit License Tier{% endblock %}
{% block content_title %}Register or Edit License Tier{% endblock %}
{% block content_description %}Register or Edit License Tier{% endblock %}
{% block content %}
<form method="post">
{{ form.hidden_tag() }}
{% set main_fields = ['name', 'version', 'start_date', 'end_date', 'basic_fee_d', 'basic_fee_e'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=main_fields) }}
{% endfor %}
<!-- Nav Tabs -->
<div class="row mt-5">
<div class="col-lg-12">
<div class="nav-wrapper position-relative end-0">
<ul class="nav nav-pills nav-fill p-1" role="tablist">
<li class="nav-item" role="presentation">
<a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#storage-tab" role="tab" aria-controls="model-info" aria-selected="true">
Storage
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-tab" role="tab" aria-controls="license-info" aria-selected="false">
Embedding
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#interaction-tab" role="tab" aria-controls="chunking" aria-selected="false">
Interaction
</a>
</li>
</ul>
</div>
<div class="tab-content tab-space">
<!-- Storage Tab -->
<div class="tab-pane fade show active" id="storage-tab" role="tabpanel">
{% set storage_fields = ['max_storage_mb', 'additional_storage_price_d', 'additional_storage_price_e', 'additional_storage_bucket'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=storage_fields) }}
{% endfor %}
</div>
<!-- Embedding Tab -->
<div class="tab-pane fade" id="embedding-tab" role="tabpanel">
{% set embedding_fields = ['included_embedding_mb', 'additional_embedding_price_d', 'additional_embedding_price_e', 'additional_embedding_bucket', 'standard_overage_embedding'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=embedding_fields) }}
{% endfor %}
</div>
<!-- Interaction Tab -->
<div class="tab-pane fade" id="interaction-tab" role="tabpanel">
{% set interaction_fields = ['included_interaction_tokens', 'additional_interaction_token_price_d', 'additional_interaction_token_price_e', 'additional_interaction_bucket', 'standard_overage_interaction'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=interaction_fields) }}
{% endfor %}
</div>
</div>
</div>
</div>
<button type="submit" class="btn btn-primary">Save License Tier</button>
</form>
{% endblock %}
{% block content_footer %}
{% endblock %}

View File

@@ -0,0 +1,24 @@
{% extends 'base.html' %}
{% from "macros.html" import render_selectable_table, render_pagination, render_field %}
{% block title %}License Tier Selection{% endblock %}
{% block content_title %}Select a License Tier{% endblock %}
{% block content_description %}Select a License Tier to continue{% endblock %}
{% block content %}
<!-- License Tier Selection Form -->
<form method="POST" action="{{ url_for('entitlements_bp.handle_license_tier_selection') }}">
{{ render_selectable_table(headers=["ID", "Name", "Version", "Start Date", "End Date"], rows=rows, selectable=True, id="licenseTierTable") }}
<div class="form-group mt-3">
<button type="submit" name="action" value="edit_license_tier" class="btn btn-primary">Edit License Tier</button>
<button type="submit" name="action" value="create_license_for_tenant" class="btn btn-secondary">Create License for Current Tenant</button>
</div>
</form>
{% endblock %}
{% block content_footer %}
{{ render_pagination(pagination, 'user_bp.select_tenant') }}
{% endblock %}

View File

@@ -94,6 +94,12 @@
{'name': 'Chat Sessions', 'url': '/interaction/chat_sessions', 'roles': ['Super User', 'Tenant Admin']},
]) }}
{% endif %}
{% if current_user.is_authenticated %}
{{ dropdown('Administration', 'settings', [
{'name': 'License Tier Registration', 'url': '/entitlements/license_tier', 'roles': ['Super User']},
{'name': 'All License Tiers', 'url': '/entitlements/view_license_tiers', 'roles': ['Super User']},
]) }}
{% endif %}
{% if current_user.is_authenticated %}
{{ dropdown(current_user.user_name, 'person', [
{'name': 'Session Defaults', 'url': '/session_defaults', 'roles': ['Super User', 'Tenant Admin']},

View File

@@ -1,21 +1,219 @@
{% extends 'base.html' %}
{% from "macros.html" import render_field %}
{% from "macros.html" import render_field, render_included_field %}
{% block title %}Tenant Registration{% endblock %}
{% block title %}Create or Edit Tenant{% endblock %}
{% block content_title %}Register Tenant{% endblock %}
{% block content_description %}Add a new tenant to EveAI{% endblock %}
{% block content_title %}Create or Edit Tenant{% endblock %}
{% block content_description %}Create or Edit Tenant{% endblock %}
{% block content %}
<form method="post">
{{ form.hidden_tag() }}
{% set disabled_fields = [] %}
{% set exclude_fields = [] %}
<!-- Main Tenant Information -->
{% set main_fields = ['name', 'website', 'default_language', 'allowed_languages', 'rag_context', 'type'] %}
{% for field in form %}
{{ render_field(field, disabled_fields, exclude_fields) }}
{{ render_included_field(field, disabled_fields=[], include_fields=main_fields) }}
{% endfor %}
<button type="submit" class="btn btn-primary">Register Tenant</button>
<!-- Nav Tabs -->
<div class="row mt-5">
<div class="col-lg-12">
<div class="nav-wrapper position-relative end-0">
<ul class="nav nav-pills nav-fill p-1" role="tablist">
<li class="nav-item" role="presentation">
<a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#model-info-tab" role="tab" aria-controls="model-info" aria-selected="true">
Model Information
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#license-info-tab" role="tab" aria-controls="license-info" aria-selected="false">
License Information
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#chunking-tab" role="tab" aria-controls="chunking" aria-selected="false">
Chunking
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-search-tab" role="tab" aria-controls="html-chunking" aria-selected="false">
Embedding Search
</a>
</li>
<li class="nav-item">
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#tuning-tab" role="tab" aria-controls="html-chunking" aria-selected="false">
Tuning
</a>
</li>
</ul>
</div>
<div class="tab-content tab-space">
<!-- Model Information Tab -->
<div class="tab-pane fade show active" id="model-info-tab" role="tabpanel">
{% set model_fields = ['embedding_model', 'llm_model'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=model_fields) }}
{% endfor %}
</div>
<!-- License Information Tab -->
<div class="tab-pane fade" id="license-info-tab" role="tabpanel">
{% set license_fields = ['currency', 'usage_email', ] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=license_fields) }}
{% endfor %}
<!-- Register API Key Button -->
<button type="button" class="btn btn-primary" onclick="generateNewChatApiKey()">Register Chat API Key</button>
<button type="button" class="btn btn-primary" onclick="generateNewApiKey()">Register API Key</button>
<!-- API Key Display Field -->
<div id="chat-api-key-field" style="display:none;">
<label for="chat-api-key">Chat API Key:</label>
<input type="text" id="chat-api-key" class="form-control" readonly>
<button type="button" id="copy-chat-button" class="btn btn-primary">Copy to Clipboard</button>
<p id="copy-chat-message" style="display:none;color:green;">Chat API key copied to clipboard</p>
</div>
<div id="api-key-field" style="display:none;">
<label for="api-key">API Key:</label>
<input type="text" id="api-key" class="form-control" readonly>
<button type="button" id="copy-api-button" class="btn btn-primary">Copy to Clipboard</button>
<p id="copy-message" style="display:none;color:green;">API key copied to clipboard</p>
</div>
</div>
<!-- Chunking Settings Tab -->
<div class="tab-pane fade" id="chunking-tab" role="tabpanel">
{% set html_fields = ['html_tags', 'html_end_tags', 'html_included_elements', 'html_excluded_elements', 'html_excluded_classes', 'min_chunk_size', 'max_chunk_size'] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=html_fields) }}
{% endfor %}
</div>
<!-- Embedding Search Settings Tab -->
<div class="tab-pane fade" id="embedding-search-tab" role="tabpanel">
{% set es_fields = ['es_k', 'es_similarity_threshold', ] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=es_fields) }}
{% endfor %}
</div>
<!-- Tuning Settings Tab -->
<div class="tab-pane fade" id="tuning-tab" role="tabpanel">
{% set tuning_fields = ['embed_tuning', 'rag_tuning', ] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=[], include_fields=tuning_fields) }}
{% endfor %}
</div>
</div>
</div>
</div>
<button type="submit" class="btn btn-primary">Save Tenant</button>
</form>
{% endblock %}
{% block content_footer %} {% endblock %}
{% block content_footer %}
{% endblock %}
{% block scripts %}
<script>
// Function to generate a new Chat API Key
function generateNewChatApiKey() {
generateApiKey('/admin/user/generate_chat_api_key', '#chat-api-key', '#chat-api-key-field');
}
// Function to generate a new general API Key
function generateNewApiKey() {
generateApiKey('/admin/user/generate_api_api_key', '#api-key', '#api-key-field');
}
// Reusable function to handle API key generation
function generateApiKey(url, inputSelector, fieldSelector) {
$.ajax({
url: url,
type: 'POST',
contentType: 'application/json',
success: function(response) {
$(inputSelector).val(response.api_key);
$(fieldSelector).show();
},
error: function(error) {
alert('Error generating new API key: ' + error.responseText);
}
});
}
// Function to copy text to clipboard
function copyToClipboard(selector, messageSelector) {
const element = document.querySelector(selector);
if (element) {
const text = element.value;
if (navigator.clipboard && navigator.clipboard.writeText) {
navigator.clipboard.writeText(text).then(function() {
showCopyMessage(messageSelector);
}).catch(function(error) {
alert('Failed to copy text: ' + error);
});
} else {
fallbackCopyToClipboard(text, messageSelector);
}
} else {
console.error('Element not found for selector:', selector);
}
}
// Fallback method for copying text to clipboard
function fallbackCopyToClipboard(text, messageSelector) {
const textArea = document.createElement('textarea');
textArea.value = text;
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
try {
document.execCommand('copy');
showCopyMessage(messageSelector);
} catch (err) {
alert('Fallback: Oops, unable to copy: ' + err);
}
document.body.removeChild(textArea);
}
// Function to show copy confirmation message
function showCopyMessage(messageSelector) {
const message = document.querySelector(messageSelector);
if (message) {
message.style.display = 'block';
setTimeout(function() {
message.style.display = 'none';
}, 2000);
}
}
// Event listeners for copy buttons
document.getElementById('copy-chat-button').addEventListener('click', function() {
copyToClipboard('#chat-api-key', '#copy-chat-message');
});
document.getElementById('copy-api-button').addEventListener('click', function() {
copyToClipboard('#api-key', '#copy-message');
});
</script>
<script>
// JavaScript to detect user's timezone
document.addEventListener('DOMContentLoaded', (event) => {
// Detect timezone
const userTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
// Send timezone to the server via a POST request
fetch('/set_user_timezone', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ timezone: userTimezone })
}).then(response => {
if (response.ok) {
console.log('Timezone sent to server successfully');
} else {
console.error('Failed to send timezone to server');
}
});
});
</script>
{% endblock %}

View File

@@ -16,7 +16,7 @@
{% endfor %}
<!-- Nav Tabs -->
<div class="row">
<div class="row mt-5">
<div class="col-lg-12">
<div class="nav-wrapper position-relative end-0">
<ul class="nav nav-pills nav-fill p-1" role="tablist">
@@ -57,7 +57,7 @@
</div>
<!-- License Information Tab -->
<div class="tab-pane fade" id="license-info-tab" role="tabpanel">
{% set license_fields = ['license_start_date', 'license_end_date', 'allowed_monthly_interactions', ] %}
{% set license_fields = ['currency', 'usage_email', ] %}
{% for field in form %}
{{ render_included_field(field, disabled_fields=license_fields, include_fields=license_fields) }}
{% endfor %}

View File

@@ -0,0 +1,76 @@
from flask import current_app
from flask_wtf import FlaskForm
from wtforms import (StringField, PasswordField, BooleanField, SubmitField, EmailField, IntegerField, DateField,
SelectField, SelectMultipleField, FieldList, FormField, FloatField, TextAreaField)
from wtforms.validators import DataRequired, Length, Email, NumberRange, Optional, ValidationError, InputRequired
import pytz
class LicenseTierForm(FlaskForm):
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
version = StringField('Version', validators=[DataRequired(), Length(max=50)])
start_date = DateField('Start Date', id='form-control datepicker', validators=[DataRequired()])
end_date = DateField('End Date', id='form-control datepicker', validators=[Optional()])
basic_fee_d = FloatField('Basic Fee ($)', validators=[InputRequired(), NumberRange(min=0)])
basic_fee_e = FloatField('Basic Fee (€)', validators=[InputRequired(), NumberRange(min=0)])
max_storage_mb = IntegerField('Max Storage (MiB)', validators=[DataRequired(), NumberRange(min=1)])
additional_storage_price_d = FloatField('Additional Storage Fee ($)',
validators=[InputRequired(), NumberRange(min=0)])
additional_storage_price_e = FloatField('Additional Storage Fee (€)',
validators=[InputRequired(), NumberRange(min=0)])
additional_storage_bucket = IntegerField('Additional Storage Bucket Size (MiB)',
validators=[DataRequired(), NumberRange(min=1)])
included_embedding_mb = IntegerField('Included Embeddings (MiB)',
validators=[DataRequired(), NumberRange(min=1)])
additional_embedding_price_d = FloatField('Additional Embedding Fee ($)',
validators=[InputRequired(), NumberRange(min=0)])
additional_embedding_price_e = FloatField('Additional Embedding Fee (€)',
validators=[InputRequired(), NumberRange(min=0)])
additional_embedding_bucket = IntegerField('Additional Embedding Bucket Size (MiB)',
validators=[DataRequired(), NumberRange(min=1)])
included_interaction_tokens = IntegerField('Included Interaction Tokens',
validators=[DataRequired(), NumberRange(min=1)])
additional_interaction_token_price_d = FloatField('Additional Interaction Token Fee ($)',
validators=[InputRequired(), NumberRange(min=0)])
additional_interaction_token_price_e = FloatField('Additional Interaction Token Fee (€)',
validators=[InputRequired(), NumberRange(min=0)])
additional_interaction_bucket = IntegerField('Additional Interaction Bucket Size',
validators=[DataRequired(), NumberRange(min=1)])
standard_overage_embedding = FloatField('Standard Overage Embedding (%)',
validators=[InputRequired(), NumberRange(min=0)],  # InputRequired accepts an explicit 0
default=0)
standard_overage_interaction = FloatField('Standard Overage Interaction (%)',
validators=[InputRequired(), NumberRange(min=0)],
default=0)
class LicenseForm(FlaskForm):
start_date = DateField('Start Date', id='form-control datepicker', validators=[DataRequired()])
end_date = DateField('End Date', id='form-control datepicker', validators=[DataRequired()])
currency = StringField('Currency', validators=[Optional(), Length(max=20)])
yearly_payment = BooleanField('Yearly Payment', default=False)  # DataRequired would force the box to be ticked
basic_fee = FloatField('Basic Fee', validators=[InputRequired(), NumberRange(min=0)])
max_storage_mb = IntegerField('Max Storage (MiB)', validators=[DataRequired(), NumberRange(min=1)])
additional_storage_price = FloatField('Additional Storage Fee',
validators=[InputRequired(), NumberRange(min=0)])
additional_storage_bucket = IntegerField('Additional Storage Bucket Size (MiB)',
validators=[DataRequired(), NumberRange(min=1)])
included_embedding_mb = IntegerField('Included Embeddings (MiB)',
validators=[DataRequired(), NumberRange(min=1)])
additional_embedding_price = FloatField('Additional Embedding Fee',
validators=[InputRequired(), NumberRange(min=0)])
additional_embedding_bucket = IntegerField('Additional Embedding Bucket Size (MiB)',
validators=[DataRequired(), NumberRange(min=1)])
included_interaction_tokens = IntegerField('Included Interaction Tokens',
validators=[DataRequired(), NumberRange(min=1)])
additional_interaction_token_price = FloatField('Additional Interaction Token Fee',
validators=[InputRequired(), NumberRange(min=0)])
additional_interaction_bucket = IntegerField('Additional Interaction Bucket Size',
validators=[DataRequired(), NumberRange(min=1)])
overage_embedding = FloatField('Overage Embedding (%)',
validators=[InputRequired(), NumberRange(min=0)],
default=0)
overage_interaction = FloatField('Overage Interaction (%)',
validators=[InputRequired(), NumberRange(min=0)],
default=0)

View File

@@ -0,0 +1,217 @@
import uuid
from datetime import datetime as dt, timezone as tz
from flask import request, redirect, flash, render_template, Blueprint, session, current_app, jsonify
from flask_security import hash_password, roles_required, roles_accepted, current_user
from itsdangerous import URLSafeTimedSerializer
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import or_
import ast
from common.models.entitlements import License, LicenseTier, LicenseUsage, BusinessEventLog
from common.extensions import db, security, minio_client, simple_encryption
from common.utils.security_utils import send_confirmation_email, send_reset_email
from .entitlements_forms import LicenseTierForm, LicenseForm
from common.utils.database import Database
from common.utils.view_assistants import prepare_table_for_macro, form_validation_failed
from common.utils.simple_encryption import generate_api_key
from common.utils.nginx_utils import prefixed_url_for
entitlements_bp = Blueprint('entitlements_bp', __name__, url_prefix='/entitlements')
@entitlements_bp.route('/license_tier', methods=['GET', 'POST'])
@roles_accepted('Super User')
def license_tier():
form = LicenseTierForm()
if form.validate_on_submit():
current_app.logger.info("Adding License Tier")
new_license_tier = LicenseTier()
form.populate_obj(new_license_tier)
try:
db.session.add(new_license_tier)
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
current_app.logger.error(f'Failed to add license tier to database. Error: {str(e)}')
flash(f'Failed to add license tier to database. Error: {str(e)}', 'danger')
return render_template('entitlements/license_tier.html', form=form)
current_app.logger.info(f"Successfully created license tier {new_license_tier.id}")
flash(f"Successfully created tenant license tier {new_license_tier.id}")
return redirect(prefixed_url_for('entitlements_bp.view_license_tiers'))
else:
form_validation_failed(request, form)
return render_template('entitlements/license_tier.html', form=form)
@entitlements_bp.route('/view_license_tiers', methods=['GET', 'POST'])
@roles_required('Super User')
def view_license_tiers():
page = request.args.get('page', 1, type=int)
per_page = request.args.get('per_page', 10, type=int)
today = dt.now(tz.utc)
query = LicenseTier.query.filter(
or_(
LicenseTier.end_date == None,
LicenseTier.end_date >= today
)
).order_by(LicenseTier.start_date.desc(), LicenseTier.id)
pagination = query.paginate(page=page, per_page=per_page, error_out=False)
license_tiers = pagination.items
rows = prepare_table_for_macro(license_tiers, [('id', ''), ('name', ''), ('version', ''), ('start_date', ''),
('end_date', '')])
return render_template('entitlements/view_license_tiers.html', rows=rows, pagination=pagination)
@entitlements_bp.route('/handle_license_tier_selection', methods=['POST'])
@roles_required('Super User')
def handle_license_tier_selection():
license_tier_identification = request.form['selected_row']
license_tier_id = ast.literal_eval(license_tier_identification).get('value')
the_license_tier = LicenseTier.query.get(license_tier_id)
action = request.form['action']
match action:
case 'edit_license_tier':
return redirect(prefixed_url_for('entitlements_bp.edit_license_tier',
license_tier_id=license_tier_id))
case 'create_license_for_tenant':
return redirect(prefixed_url_for('entitlements_bp.create_license',
license_tier_id=license_tier_id))
# Add more conditions for other actions
return redirect(prefixed_url_for('entitlements_bp.view_license_tiers'))
@entitlements_bp.route('/license_tier/<int:license_tier_id>', methods=['GET', 'POST'])
@roles_accepted('Super User')
def edit_license_tier(license_tier_id):
license_tier = LicenseTier.query.get_or_404(license_tier_id) # This will return a 404 if no license tier is found
form = LicenseTierForm(obj=license_tier)
if form.validate_on_submit():
# Populate the license_tier with form data
form.populate_obj(license_tier)
try:
db.session.add(license_tier)
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
current_app.logger.error(f'Failed to edit License Tier. Error: {str(e)}')
flash(f'Failed to edit License Tier. Error: {str(e)}', 'danger')
return render_template('entitlements/license_tier.html', form=form, license_tier_id=license_tier.id)
flash('License Tier updated successfully.', 'success')
return redirect(
prefixed_url_for('entitlements_bp.edit_license_tier', license_tier_id=license_tier_id))
else:
form_validation_failed(request, form)
return render_template('entitlements/license_tier.html', form=form, license_tier_id=license_tier.id)
@entitlements_bp.route('/create_license/<int:license_tier_id>', methods=['GET', 'POST'])
@roles_accepted('Super User')
def create_license(license_tier_id):
form = LicenseForm()
tenant_id = session.get('tenant').get('id')
currency = session.get('tenant').get('currency')
if request.method == 'GET':
# Fetch the LicenseTier
license_tier = LicenseTier.query.get_or_404(license_tier_id)
# Prefill the form with LicenseTier data
# Currency depending data
if currency == '$':
form.basic_fee.data = license_tier.basic_fee_d
form.additional_storage_price.data = license_tier.additional_storage_price_d
form.additional_embedding_price.data = license_tier.additional_embedding_price_d
form.additional_interaction_token_price.data = license_tier.additional_interaction_token_price_d
elif currency == '€':
form.basic_fee.data = license_tier.basic_fee_e
form.additional_storage_price.data = license_tier.additional_storage_price_e
form.additional_embedding_price.data = license_tier.additional_embedding_price_e
form.additional_interaction_token_price.data = license_tier.additional_interaction_token_price_e
else:
current_app.logger.error(f'Invalid currency {currency} for tenant {tenant_id} while creating license.')
flash(f"Invalid currency {currency} for tenant {tenant_id} while creating license. "
f"Check tenant's currency and try again.", 'danger')
return redirect(prefixed_url_for('user_bp.edit_tenant', tenant_id=tenant_id))
# General data
form.currency.data = currency
form.max_storage_mb.data = license_tier.max_storage_mb
form.additional_storage_bucket.data = license_tier.additional_storage_bucket
form.included_embedding_mb.data = license_tier.included_embedding_mb
form.additional_embedding_bucket.data = license_tier.additional_embedding_bucket
form.included_interaction_tokens.data = license_tier.included_interaction_tokens
form.additional_interaction_bucket.data = license_tier.additional_interaction_bucket
form.overage_embedding.data = license_tier.standard_overage_embedding
form.overage_interaction.data = license_tier.standard_overage_interaction
else: # POST
# Create a new License instance
new_license = License(
tenant_id=tenant_id,
tier_id=license_tier_id,
)
current_app.logger.debug(f"Currency data in form: {form.currency.data}")
if form.validate_on_submit():
# Update the license with form data
form.populate_obj(new_license)
# Currency is set again here, because disabled form fields are not submitted with the request
new_license.currency = currency
try:
db.session.add(new_license)
db.session.commit()
flash('License created successfully', 'success')
return redirect(prefixed_url_for('entitlements_bp.edit_license', license_id=new_license.id))
except Exception as e:
db.session.rollback()
flash(f'Error creating license: {str(e)}', 'error')
else:
form_validation_failed(request, form)
return render_template('entitlements/license.html', form=form)
@entitlements_bp.route('/license/<int:license_id>', methods=['GET', 'POST'])
@roles_accepted('Super User')
def edit_license(license_id):
license = License.query.get_or_404(license_id)  # This will return a 404 if no license is found
form = LicenseForm(obj=license)
disabled_fields = []
if len(license.usages) > 0: # There already are usage records linked to this license
# Define which fields should be disabled
disabled_fields = [field.name for field in form if field.name != 'end_date']
if form.validate_on_submit():
# Populate the license with form data
form.populate_obj(license)
try:
db.session.add(license)
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
current_app.logger.error(f'Failed to edit License. Error: {str(e)}')
flash(f'Failed to edit License. Error: {str(e)}', 'danger')
return render_template('entitlements/license.html', form=form)
flash('License updated successfully.', 'success')
return redirect(
prefixed_url_for('entitlements_bp.edit_license', license_id=license_id))
else:
form_validation_failed(request, form)
return render_template('entitlements/license.html', form=form, license_id=license.id,
ext_disabled_fields=disabled_fields)
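The blueprint above is only defined in this file; a minimal sketch of how it could be wired into the web application factory is shown below. The factory helper and import path are assumptions, not taken from this diff.

# Hypothetical wiring; adjust the import path to wherever these entitlement views live.
from eveai_app.views.entitlements_views import entitlements_bp  # assumed module path

def register_blueprints(app):
    # Routes become available under /entitlements via the blueprint's url_prefix.
    app.register_blueprint(entitlements_bp)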

View File

@@ -14,6 +14,9 @@ class TenantForm(FlaskForm):
# language fields
default_language = SelectField('Default Language', choices=[], validators=[DataRequired()])
allowed_languages = SelectMultipleField('Allowed Languages', choices=[], validators=[DataRequired()])
# invoicing fields
currency = SelectField('Currency', choices=[], validators=[DataRequired()])
usage_email = EmailField('Usage Email', validators=[DataRequired(), Email()])
# Timezone
timezone = SelectField('Timezone', choices=[], validators=[DataRequired()])
# RAG context
@@ -23,10 +26,6 @@ class TenantForm(FlaskForm):
# LLM fields
embedding_model = SelectField('Embedding Model', choices=[], validators=[DataRequired()])
llm_model = SelectField('Large Language Model', choices=[], validators=[DataRequired()])
# license fields
license_start_date = DateField('License Start Date', id='form-control datepicker', validators=[Optional()])
license_end_date = DateField('License End Date', id='form-control datepicker', validators=[Optional()])
allowed_monthly_interactions = IntegerField('Allowed Monthly Interactions', validators=[NumberRange(min=0)])
# Embedding variables
html_tags = StringField('HTML Tags', validators=[DataRequired()],
default='p, h1, h2, h3, h4, h5, h6, li')
@@ -59,6 +58,8 @@ class TenantForm(FlaskForm):
# initialise language fields
self.default_language.choices = [(lang, lang.lower()) for lang in current_app.config['SUPPORTED_LANGUAGES']]
self.allowed_languages.choices = [(lang, lang.lower()) for lang in current_app.config['SUPPORTED_LANGUAGES']]
# initialise currency field
self.currency.choices = [(curr, curr) for curr in current_app.config['SUPPORTED_CURRENCIES']]
# initialise timezone
self.timezone.choices = [(tz, tz) for tz in pytz.all_timezones]
# initialise LLM fields

View File

@@ -47,18 +47,6 @@ def tenant():
# Handle the required attributes
new_tenant = Tenant()
form.populate_obj(new_tenant)
# new_tenant = Tenant(name=form.name.data,
# website=form.website.data,
# default_language=form.default_language.data,
# allowed_languages=form.allowed_languages.data,
# timezone=form.timezone.data,
# embedding_model=form.embedding_model.data,
# llm_model=form.llm_model.data,
# license_start_date=form.license_start_date.data,
# license_end_date=form.license_end_date.data,
# allowed_monthly_interactions=form.allowed_monthly_interactions.data,
# embed_tuning=form.embed_tuning.data,
# rag_tuning=form.rag_tuning.data)
# Handle Embedding Variables
new_tenant.html_tags = [tag.strip() for tag in form.html_tags.data.split(',')] if form.html_tags.data else []
@@ -87,7 +75,7 @@ def tenant():
db.session.commit()
except SQLAlchemyError as e:
current_app.logger.error(f'Failed to add tenant to database. Error: {str(e)}')
flash(f'Failed to add tenant to database. Error: {str(e)}')
flash(f'Failed to add tenant to database. Error: {str(e)}', 'danger')
return render_template('user/tenant.html', form=form)
current_app.logger.info(f"Successfully created tenant {new_tenant.id} in Database")
@@ -152,7 +140,7 @@ def edit_tenant(tenant_id):
current_app.logger.debug(f'Tenant update failed with errors: {form.errors}')
form_validation_failed(request, form)
return render_template('user/edit_tenant.html', form=form, tenant_id=tenant_id)
return render_template('user/tenant.html', form=form, tenant_id=tenant_id)
@user_bp.route('/user', methods=['GET', 'POST'])

View File

@@ -0,0 +1,44 @@
import logging
import logging.config
from flask import Flask
import os
from common.utils.celery_utils import make_celery, init_celery
from common.extensions import db, minio_client
from config.logging_config import LOGGING
from config.config import get_config
def create_app(config_file=None):
app = Flask(__name__)
environment = os.getenv('FLASK_ENV', 'development')
match environment:
case 'development':
app.config.from_object(get_config('dev'))
case 'production':
app.config.from_object(get_config('prod'))
case _:
app.config.from_object(get_config('dev'))
logging.config.dictConfig(LOGGING)
register_extensions(app)
celery = make_celery(app.name, app.config)
init_celery(celery, app)
from . import tasks
app.logger.info("EveAI Entitlements Server Started Successfully")
app.logger.info("-------------------------------------------------------------------------------------------------")
return app, celery
def register_extensions(app):
db.init_app(app)
app, celery = create_app()

eveai_entitlements/tasks.py
View File

@@ -0,0 +1,226 @@
import io
import os
from datetime import datetime as dt, timezone as tz, datetime
from celery import states
from dateutil.relativedelta import relativedelta
from flask import current_app
from sqlalchemy import or_, and_, text
from sqlalchemy.exc import SQLAlchemyError
from common.extensions import db
from common.models.user import Tenant
from common.models.entitlements import BusinessEventLog, LicenseUsage, License
from common.utils.celery_utils import current_celery
from common.utils.eveai_exceptions import EveAINoLicenseForTenant
# Healthcheck task
@current_celery.task(name='ping', queue='entitlements')
def ping():
return 'pong'
@current_celery.task(name='update_usages', queue='entitlements')
def update_usages():
current_timestamp = dt.now(tz.utc)
tenant_ids = get_all_tenant_ids()
for tenant_id in tenant_ids:
tenant = Tenant.query.get(tenant_id)
if tenant.storage_dirty:
recalculate_storage_for_tenant(tenant)
check_and_create_license_usage_for_tenant(tenant_id)
logs = get_logs_for_processing(tenant_id, current_timestamp)
if not logs:
continue # If there are no logs to be processed, continue to the next tenant
# Get the min and max timestamp from the logs
min_timestamp = min(log.timestamp for log in logs)
max_timestamp = max(log.timestamp for log in logs)
# Retrieve relevant LicenseUsage records
license_usages = get_relevant_license_usages(db.session, tenant_id, min_timestamp, max_timestamp)
# Split logs based on LicenseUsage periods
logs_by_usage = split_logs_by_license_usage(logs, license_usages)
# Now you can process logs for each LicenseUsage
for license_usage_id, logs in logs_by_usage.items():
process_logs_for_license_usage(tenant_id, license_usage_id, logs)
def get_all_tenant_ids():
tenant_ids = db.session.query(Tenant.id).all()
return [tenant_id[0] for tenant_id in tenant_ids] # Extract the id from each single-column result tuple
def check_and_create_license_usage_for_tenant(tenant_id):
current_date = dt.now(tz.utc).date()
license_usages = (db.session.query(LicenseUsage)
.filter_by(tenant_id=tenant_id)
.filter(and_(LicenseUsage.period_start_date <= current_date,
LicenseUsage.period_end_date >= current_date))
.all())
if not license_usages:
active_license = (db.session.query(License).filter_by(tenant_id=tenant_id)
.filter(and_(License.start_date <= current_date,
License.end_date >= current_date))
.one_or_none())
if not active_license:
current_app.logger.error(f"No License defined for {tenant_id}. "
f"Impossible to calculate license usage.")
raise EveAINoLicenseForTenant(message=f"No License defined for {tenant_id}. "
f"Impossible to calculate license usage.")
start_date, end_date = calculate_valid_period(current_date, active_license.start_date)
new_license_usage = LicenseUsage(period_start_date=start_date,
period_end_date=end_date,
license_id=active_license.id,
tenant_id=tenant_id
)
try:
db.session.add(new_license_usage)
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
current_app.logger.error(f"Error trying to create new license usage for tenant {tenant_id}. "
f"Error: {str(e)}")
raise e
def calculate_valid_period(given_date, original_start_date):
# Ensure both dates are of datetime.date type
if isinstance(given_date, datetime):
given_date = given_date.date()
if isinstance(original_start_date, datetime):
original_start_date = original_start_date.date()
# Step 1: Find the most recent start_date less than or equal to given_date
start_date = original_start_date
while start_date <= given_date:
next_start_date = start_date + relativedelta(months=1)
if next_start_date > given_date:
break
start_date = next_start_date
# Step 2: Calculate the end_date for this period
end_date = start_date + relativedelta(months=1, days=-1)
# Ensure the given date falls within the period
if start_date <= given_date <= end_date:
return start_date, end_date
else:
raise ValueError("Given date does not fall within a valid period.")
def get_logs_for_processing(tenant_id, end_time_stamp):
return (db.session.query(BusinessEventLog).filter(
BusinessEventLog.tenant_id == tenant_id,
BusinessEventLog.license_usage_id == None,
BusinessEventLog.timestamp <= end_time_stamp,
).all())
def get_relevant_license_usages(session, tenant_id, min_timestamp, max_timestamp):
# Fetch LicenseUsage records where the log timestamps fall between period_start_date and period_end_date
return session.query(LicenseUsage).filter(
LicenseUsage.tenant_id == tenant_id,
LicenseUsage.period_start_date <= max_timestamp,
LicenseUsage.period_end_date >= min_timestamp
).order_by(LicenseUsage.period_start_date).all()
def split_logs_by_license_usage(logs, license_usages):
# Dictionary to hold logs categorized by LicenseUsage
logs_by_usage = {lu.id: [] for lu in license_usages}
for log in logs:
# Find the corresponding LicenseUsage for each log based on the timestamp
for license_usage in license_usages:
if license_usage.period_start_date <= log.timestamp <= license_usage.period_end_date:
logs_by_usage[license_usage.id].append(log)
break
return logs_by_usage
def process_logs_for_license_usage(tenant_id, license_usage_id, logs):
# Retrieve the LicenseUsage record
license_usage = db.session.query(LicenseUsage).filter_by(id=license_usage_id).first()
if not license_usage:
raise ValueError(f"LicenseUsage with id {license_usage_id} not found.")
# Initialize variables to accumulate usage data
embedding_mb_used = 0
embedding_prompt_tokens_used = 0
embedding_completion_tokens_used = 0
embedding_total_tokens_used = 0
interaction_prompt_tokens_used = 0
interaction_completion_tokens_used = 0
interaction_total_tokens_used = 0
# Process each log
for log in logs:
# Case for 'Create Embeddings' event
if log.event_type == 'Create Embeddings':
if log.message == 'Starting Trace for Create Embeddings':
embedding_mb_used += log.document_version_file_size
elif log.message == 'Final LLM Metrics':
embedding_prompt_tokens_used += log.llm_metrics_prompt_tokens
embedding_completion_tokens_used += log.llm_metrics_completion_tokens
embedding_total_tokens_used += log.llm_metrics_total_tokens
# Case for 'Ask Question' event
elif log.event_type == 'Ask Question':
if log.message == 'Final LLM Metrics':
interaction_prompt_tokens_used += log.llm_metrics_prompt_tokens
interaction_completion_tokens_used += log.llm_metrics_completion_tokens
interaction_total_tokens_used += log.llm_metrics_total_tokens
# Mark the log as processed by setting the license_usage_id
log.license_usage_id = license_usage_id
# Update the LicenseUsage record with the accumulated values
license_usage.embedding_mb_used += embedding_mb_used
license_usage.embedding_prompt_tokens_used += embedding_prompt_tokens_used
license_usage.embedding_completion_tokens_used += embedding_completion_tokens_used
license_usage.embedding_total_tokens_used += embedding_total_tokens_used
license_usage.interaction_prompt_tokens_used += interaction_prompt_tokens_used
license_usage.interaction_completion_tokens_used += interaction_completion_tokens_used
license_usage.interaction_total_tokens_used += interaction_total_tokens_used
# Commit the updates to the LicenseUsage and log records
try:
db.session.add(license_usage)
db.session.add_all(logs)
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
current_app.logger.error(f"Error trying to update license usage and logs for tenant {tenant_id}. ")
raise e
def recalculate_storage_for_tenant(tenant):
# Sum the file sizes (stored in MiB) over the tenant's document_version table
total_storage = db.session.execute(text(f"""
SELECT SUM(file_size)
FROM "{tenant.id}".document_version
""")).scalar()
# Update the LicenseUsage with the recalculated storage
license_usage = db.session.query(LicenseUsage).filter_by(tenant_id=tenant.id).first()
license_usage.storage_mb_used = total_storage or 0 # file_size is already stored in MiB
# Reset the dirty flag after recalculating
tenant.storage_dirty = False
# Commit the changes
try:
db.session.add(tenant)
db.session.add(license_usage)
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
current_app.logger.error(f"Error trying to update tenant {tenant.id} for Dirty Storage. ")

View File

@@ -1,6 +1,8 @@
import io
import os
import time
import psutil
from pydub import AudioSegment
import tempfile
from common.extensions import minio_client
@@ -16,6 +18,11 @@ class AudioProcessor(TranscriptionProcessor):
self.transcription_client = model_variables['transcription_client']
self.transcription_model = model_variables['transcription_model']
self.ffmpeg_path = 'ffmpeg'
self.max_compression_duration = model_variables['max_compression_duration']
self.max_transcription_duration = model_variables['max_transcription_duration']
self.compression_cpu_limit = model_variables.get('compression_cpu_limit', 50) # CPU usage limit in percentage
self.compression_process_delay = model_variables.get('compression_process_delay', 0.1) # Delay between processing chunks in seconds
self.file_type = document_version.file_type
def _get_transcription(self):
file_data = minio_client.download_document_file(
@@ -26,68 +33,121 @@ class AudioProcessor(TranscriptionProcessor):
self.document_version.file_name
)
with current_event.create_span("Audio Processing"):
with current_event.create_span("Audio Compression"):
compressed_audio = self._compress_audio(file_data)
with current_event.create_span("Transcription Generation"):
with current_event.create_span("Audio Transcription"):
transcription = self._transcribe_audio(compressed_audio)
return transcription
def _compress_audio(self, audio_data):
self._log("Compressing audio")
with tempfile.NamedTemporaryFile(delete=False, suffix=f'.{self.document_version.file_type}') as temp_input:
temp_input.write(audio_data)
temp_input.flush()
# Use a unique filename for the output to avoid conflicts
output_filename = f'compressed_{os.urandom(8).hex()}.mp3'
output_path = os.path.join(tempfile.gettempdir(), output_filename)
with tempfile.NamedTemporaryFile(delete=False, suffix=f'.{self.document_version.file_type}') as temp_file:
temp_file.write(audio_data)
temp_file_path = temp_file.name
try:
result = subprocess.run(
[self.ffmpeg_path, '-y', '-i', temp_input.name, '-b:a', '64k', '-f', 'mp3', output_path],
capture_output=True,
text=True,
check=True
self._log("Creating AudioSegment from file")
audio_info = AudioSegment.from_file(temp_file_path, format=self.document_version.file_type)
self._log("Finished creating AudioSegment from file")
total_duration = len(audio_info)
self._log(f"Audio duration: {total_duration / 1000} seconds")
segment_length = self.max_compression_duration * 1000 # Convert to milliseconds
total_chunks = (total_duration + segment_length - 1) // segment_length
compressed_segments = AudioSegment.empty()
for i in range(total_chunks):
self._log(f"Compressing segment {i + 1} of {total_chunks}")
start_time = i * segment_length
end_time = min((i + 1) * segment_length, total_duration)
chunk = AudioSegment.from_file(
temp_file_path,
format=self.document_version.file_type,
start_second=start_time / 1000,
duration=(end_time - start_time) / 1000
)
with open(output_path, 'rb') as f:
compressed_data = f.read()
compressed_chunk = self._compress_segment(chunk)
compressed_segments += compressed_chunk
time.sleep(self.compression_process_delay)
# Save compressed audio to MinIO
compressed_filename = f"{self.document_version.id}_compressed.mp3"
with io.BytesIO() as compressed_buffer:
compressed_segments.export(compressed_buffer, format="mp3")
compressed_buffer.seek(0)
minio_client.upload_document_file(
self.tenant.id,
self.document_version.doc_id,
self.document_version.language,
self.document_version.id,
compressed_filename,
compressed_data
compressed_buffer.read()
)
self._log(f"Saved compressed audio to MinIO: {compressed_filename}")
return compressed_data
except subprocess.CalledProcessError as e:
error_message = f"Compression failed: {e.stderr}"
self._log(error_message, level='error')
raise Exception(error_message)
return compressed_segments
except Exception as e:
self._log(f"Error during audio processing: {str(e)}", level='error')
raise
finally:
# Clean up temporary files
os.unlink(temp_input.name)
if os.path.exists(output_path):
os.unlink(output_path)
os.unlink(temp_file_path) # Ensure the temporary file is deleted
def _compress_segment(self, audio_segment):
with io.BytesIO() as segment_buffer:
audio_segment.export(segment_buffer, format="wav")
segment_buffer.seek(0)
with io.BytesIO() as output_buffer:
command = [
'nice', '-n', '19',
'ffmpeg',
'-i', 'pipe:0',
'-ar', '16000',
'-ac', '1',
'-b:a', '32k',
'-filter:a', 'loudnorm',
'-f', 'mp3',
'pipe:1'
]
process = psutil.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate(input=segment_buffer.read())
if process.returncode != 0:
self._log(f"FFmpeg error: {stderr.decode()}", level='error')
raise Exception("FFmpeg compression failed")
output_buffer.write(stdout)
output_buffer.seek(0)
compressed_segment = AudioSegment.from_mp3(output_buffer)
return compressed_segment
def _transcribe_audio(self, audio_data):
self._log("Starting audio transcription")
audio = AudioSegment.from_file(io.BytesIO(audio_data), format="mp3")
# audio = AudioSegment.from_file(io.BytesIO(audio_data), format="mp3")
audio = audio_data
segment_length = 10 * 60 * 1000 # 10 minutes in milliseconds
segment_length = self.max_transcription_duration * 1000 # calculate milliseconds
transcriptions = []
total_chunks = len(audio) // segment_length + 1
for i, chunk in enumerate(audio[::segment_length]):
self._log(f'Processing chunk {i + 1} of {len(audio) // segment_length + 1}')
self._log(f'Processing chunk {i + 1} of {total_chunks}')
segment_duration = 0
if i == total_chunks - 1:
segment_duration = (len(audio) % segment_length) // 1000
else:
segment_duration = self.max_transcription_duration
with tempfile.NamedTemporaryFile(suffix=".mp3", delete=False) as temp_audio:
chunk.export(temp_audio.name, format="mp3")
@@ -103,11 +163,12 @@ class AudioProcessor(TranscriptionProcessor):
audio_file.seek(0) # Reset file pointer to the beginning
self._log("Calling transcription API")
transcription = self.transcription_client.audio.transcriptions.create(
transcription = self.model_variables.transcribe(
file=audio_file,
model=self.transcription_model,
language=self.document_version.language,
response_format='verbose_json',
duration=segment_duration,
)
self._log("Transcription API call completed")

View File

@@ -36,8 +36,14 @@ def ping():
@current_celery.task(name='create_embeddings', queue='embeddings')
def create_embeddings(tenant_id, document_version_id):
# Retrieve document version to process
document_version = DocumentVersion.query.get(document_version_id)
if document_version is None:
raise Exception(f'Document version {document_version_id} not found')
# BusinessEvent creates a context, which is why we need to use it with a with block
with BusinessEvent('Create Embeddings', tenant_id, document_version_id=document_version_id):
with BusinessEvent('Create Embeddings', tenant_id,
document_version_id=document_version_id,
document_version_file_size=document_version.file_size):
current_app.logger.info(f'Creating embeddings for tenant {tenant_id} on document version {document_version_id}')
try:
# Retrieve Tenant for which we are processing
@@ -52,11 +58,6 @@ def create_embeddings(tenant_id, document_version_id):
model_variables = select_model_variables(tenant)
current_app.logger.debug(f'Model variables: {model_variables}')
# Retrieve document version to process
document_version = DocumentVersion.query.get(document_version_id)
if document_version is None:
raise Exception(f'Document version {document_version_id} not found')
except Exception as e:
current_app.logger.error(f'Create Embeddings request received '
f'for non existing document version {document_version_id} '
@@ -171,9 +172,11 @@ def embed_markdown(tenant, model_variables, document_version, markdown, title):
model_variables['max_chunk_size'])
# Enrich chunks
with current_event.create_span("Enrich Chunks"):
enriched_chunks = enrich_chunks(tenant, model_variables, document_version, title, chunks)
# Create embeddings
with current_event.create_span("Create Embeddings"):
embeddings = embed_chunks(tenant, model_variables, document_version, enriched_chunks)
# Update document version and save embeddings
@@ -194,7 +197,6 @@ def embed_markdown(tenant, model_variables, document_version, markdown, title):
def enrich_chunks(tenant, model_variables, document_version, title, chunks):
current_event.log("Starting Enriching Chunks Processing")
current_app.logger.debug(f'Enriching chunks for tenant {tenant.id} '
f'on document version {document_version.id}')
@@ -227,7 +229,6 @@ def enrich_chunks(tenant, model_variables, document_version, title, chunks):
current_app.logger.debug(f'Finished enriching chunks for tenant {tenant.id} '
f'on document version {document_version.id}')
current_event.log("Finished Enriching Chunks Processing")
return enriched_chunks
@@ -261,7 +262,6 @@ def summarize_chunk(tenant, model_variables, document_version, chunk):
def embed_chunks(tenant, model_variables, document_version, chunks):
current_event.log("Starting Embedding Chunks Processing")
current_app.logger.debug(f'Embedding chunks for tenant {tenant.id} '
f'on document version {document_version.id}')
embedding_model = model_variables['embedding_model']

View File

@@ -0,0 +1,40 @@
"""Add LLM metrics information to business events
Revision ID: 16f70b210557
Revises: 829094f07d44
Create Date: 2024-10-01 09:46:49.372953
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '16f70b210557'
down_revision = '829094f07d44'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.add_column(sa.Column('llm_metrics_total_tokens', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('llm_metrics_prompt_tokens', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('llm_metrics_completion_tokens', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('llm_metrics_total_time', sa.Float(), nullable=True))
batch_op.add_column(sa.Column('llm_metrics_call_count', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_column('llm_metrics_call_count')
batch_op.drop_column('llm_metrics_total_time')
batch_op.drop_column('llm_metrics_completion_tokens')
batch_op.drop_column('llm_metrics_prompt_tokens')
batch_op.drop_column('llm_metrics_total_tokens')
# ### end Alembic commands ###

View File

@@ -0,0 +1,68 @@
"""Adding interaction type (LLM, Embedding, ASR) to Business Events
Revision ID: 254932fe7fe3
Revises: 16f70b210557
Create Date: 2024-10-02 10:04:58.705839
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '254932fe7fe3'
down_revision = '16f70b210557'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.add_column(sa.Column('llm_interaction_type', sa.String(length=20), nullable=True))
with op.batch_alter_table('roles_users', schema=None) as batch_op:
batch_op.drop_constraint('roles_users_user_id_fkey', type_='foreignkey')
batch_op.drop_constraint('roles_users_role_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'user', ['user_id'], ['id'], referent_schema='public', ondelete='CASCADE')
batch_op.create_foreign_key(None, 'role', ['role_id'], ['id'], referent_schema='public', ondelete='CASCADE')
with op.batch_alter_table('tenant_domain', schema=None) as batch_op:
batch_op.drop_constraint('tenant_domain_created_by_fkey', type_='foreignkey')
batch_op.drop_constraint('tenant_domain_updated_by_fkey', type_='foreignkey')
batch_op.drop_constraint('tenant_domain_tenant_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'user', ['created_by'], ['id'], referent_schema='public')
batch_op.create_foreign_key(None, 'tenant', ['tenant_id'], ['id'], referent_schema='public')
batch_op.create_foreign_key(None, 'user', ['updated_by'], ['id'], referent_schema='public')
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_constraint('user_tenant_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'tenant', ['tenant_id'], ['id'], referent_schema='public')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('user_tenant_id_fkey', 'tenant', ['tenant_id'], ['id'])
with op.batch_alter_table('tenant_domain', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('tenant_domain_tenant_id_fkey', 'tenant', ['tenant_id'], ['id'])
batch_op.create_foreign_key('tenant_domain_updated_by_fkey', 'user', ['updated_by'], ['id'])
batch_op.create_foreign_key('tenant_domain_created_by_fkey', 'user', ['created_by'], ['id'])
with op.batch_alter_table('roles_users', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('roles_users_role_id_fkey', 'role', ['role_id'], ['id'], ondelete='CASCADE')
batch_op.create_foreign_key('roles_users_user_id_fkey', 'user', ['user_id'], ['id'], ondelete='CASCADE')
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_column('llm_interaction_type')
# ### end Alembic commands ###

View File

@@ -0,0 +1,40 @@
"""Add entitlement information to Tenant
Revision ID: 48714f1baac5
Revises: f201bfd23152
Create Date: 2024-10-03 14:49:53.922320
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '48714f1baac5'
down_revision = 'f201bfd23152'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.add_column(sa.Column('yearly_payment', sa.Boolean(), nullable=False))
with op.batch_alter_table('tenant', schema=None) as batch_op:
batch_op.add_column(sa.Column('currency', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('usage_email', sa.String(length=255), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant', schema=None) as batch_op:
batch_op.drop_column('usage_email')
batch_op.drop_column('currency')
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.drop_column('yearly_payment')
# ### end Alembic commands ###

View File

@@ -0,0 +1,106 @@
"""Introducing new licensing approach
Revision ID: 560d08d91e5b
Revises: 254932fe7fe3
Create Date: 2024-10-02 15:29:05.963865
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '560d08d91e5b'
down_revision = '254932fe7fe3'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('license_tier',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('version', sa.String(length=50), nullable=False),
sa.Column('start_date', sa.Date(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('basic_fee_d', sa.Float(), nullable=True),
sa.Column('basic_fee_e', sa.Float(), nullable=True),
sa.Column('max_storage_tokens', sa.Integer(), nullable=False),
sa.Column('additional_storage_token_price_d', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_storage_token_price_e', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('included_embedding_tokens', sa.Integer(), nullable=False),
sa.Column('additional_embedding_token_price_d', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_embedding_token_price_e', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_embedding_bucket', sa.Integer(), nullable=False),
sa.Column('included_interaction_tokens', sa.Integer(), nullable=False),
sa.Column('additional_interaction_token_price_d', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_interaction_token_price_e', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_interaction_bucket', sa.Integer(), nullable=False),
sa.Column('allow_overage', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
schema='public'
)
op.create_table('license',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tenant_id', sa.Integer(), nullable=False),
sa.Column('tier_id', sa.Integer(), nullable=False),
sa.Column('start_date', sa.Date(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('currency', sa.String(length=20), nullable=False),
sa.Column('basic_fee', sa.Float(), nullable=False),
sa.Column('max_storage_tokens', sa.Integer(), nullable=False),
sa.Column('additional_storage_token_price', sa.Float(), nullable=False),
sa.Column('included_embedding_tokens', sa.Integer(), nullable=False),
sa.Column('additional_embedding_token_price', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_embedding_bucket', sa.Integer(), nullable=False),
sa.Column('included_interaction_tokens', sa.Integer(), nullable=False),
sa.Column('additional_interaction_token_price', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('additional_interaction_bucket', sa.Integer(), nullable=False),
sa.Column('allow_overage', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['tenant_id'], ['public.tenant.id'], ),
sa.ForeignKeyConstraint(['tier_id'], ['public.license_tier.id'], ),
sa.PrimaryKeyConstraint('id'),
schema='public'
)
op.create_table('license_usage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('license_id', sa.Integer(), nullable=False),
sa.Column('tenant_id', sa.Integer(), nullable=False),
sa.Column('storage_tokens_used', sa.Integer(), nullable=True),
sa.Column('embedding_tokens_used', sa.Integer(), nullable=True),
sa.Column('interaction_tokens_used', sa.Integer(), nullable=True),
sa.Column('period_start_date', sa.Date(), nullable=False),
sa.Column('period_end_date', sa.Date(), nullable=False),
sa.ForeignKeyConstraint(['license_id'], ['public.license.id'], ),
sa.ForeignKeyConstraint(['tenant_id'], ['public.tenant.id'], ),
sa.PrimaryKeyConstraint('id'),
schema='public'
)
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.add_column(sa.Column('license_usage_id', sa.Integer(), nullable=True))
batch_op.create_foreign_key(None, 'license_usage', ['license_usage_id'], ['id'], referent_schema='public')
with op.batch_alter_table('tenant', schema=None) as batch_op:
batch_op.drop_column('license_end_date')
batch_op.drop_column('allowed_monthly_interactions')
batch_op.drop_column('license_start_date')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant', schema=None) as batch_op:
batch_op.add_column(sa.Column('license_start_date', sa.DATE(), autoincrement=False, nullable=True))
batch_op.add_column(sa.Column('allowed_monthly_interactions', sa.INTEGER(), autoincrement=False, nullable=True))
batch_op.add_column(sa.Column('license_end_date', sa.DATE(), autoincrement=False, nullable=True))
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_column('license_usage_id')
op.drop_table('license_usage', schema='public')
op.drop_table('license', schema='public')
op.drop_table('license_tier', schema='public')
# ### end Alembic commands ###

View File

@@ -0,0 +1,38 @@
"""Adapt LicenseUsage to accept mb iso tokoens for Storage and Embeddings
Revision ID: 6a7743d08106
Revises: 9429f244f1a5
Create Date: 2024-10-07 06:04:39.424243
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6a7743d08106'
down_revision = '9429f244f1a5'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.add_column(sa.Column('storage_mb_used', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('embedding_mb_used', sa.Integer(), nullable=True))
batch_op.drop_column('storage_tokens_used')
batch_op.drop_column('embedding_tokens_used')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.add_column(sa.Column('embedding_tokens_used', sa.INTEGER(), autoincrement=False, nullable=True))
batch_op.add_column(sa.Column('storage_tokens_used', sa.INTEGER(), autoincrement=False, nullable=True))
batch_op.drop_column('embedding_mb_used')
batch_op.drop_column('storage_mb_used')
# ### end Alembic commands ###

View File

@@ -0,0 +1,56 @@
"""LicenseUsage and Tenant updates
Revision ID: 8fdd7f2965c1
Revises: 6a7743d08106
Create Date: 2024-10-08 06:33:50.297396
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8fdd7f2965c1'
down_revision = '6a7743d08106'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.add_column(sa.Column('document_version_file_size', sa.Float(), nullable=True))
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.add_column(sa.Column('embedding_prompt_tokens_used', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('embedding_completion_tokens_used', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('embedding_total_tokens_used', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('interaction_prompt_tokens_used', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('interaction_completion_tokens_used', sa.Integer(), nullable=True))
batch_op.add_column(sa.Column('interaction_total_tokens_used', sa.Integer(), nullable=True))
batch_op.drop_column('interaction_tokens_used')
with op.batch_alter_table('tenant', schema=None) as batch_op:
batch_op.add_column(sa.Column('storage_dirty', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant', schema=None) as batch_op:
batch_op.drop_column('storage_dirty')
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.add_column(sa.Column('interaction_tokens_used', sa.INTEGER(), autoincrement=False, nullable=True))
batch_op.drop_column('interaction_total_tokens_used')
batch_op.drop_column('interaction_completion_tokens_used')
batch_op.drop_column('interaction_prompt_tokens_used')
batch_op.drop_column('embedding_total_tokens_used')
batch_op.drop_column('embedding_completion_tokens_used')
batch_op.drop_column('embedding_prompt_tokens_used')
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_column('document_version_file_size')
# ### end Alembic commands ###

View File

@@ -0,0 +1,86 @@
"""Moved storage and embedding licensing to Mb iso tokens
Revision ID: 9429f244f1a5
Revises: 48714f1baac5
Create Date: 2024-10-04 08:07:47.976861
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9429f244f1a5'
down_revision = '48714f1baac5'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.add_column(sa.Column('max_storage_mb', sa.Integer(), nullable=False))
batch_op.add_column(sa.Column('additional_storage_price', sa.Float(), nullable=False))
batch_op.add_column(sa.Column('included_embedding_mb', sa.Integer(), nullable=False))
batch_op.add_column(sa.Column('additional_embedding_price', sa.Numeric(precision=10, scale=4), nullable=False))
batch_op.add_column(sa.Column('overage_embedding', sa.Float(), nullable=False))
batch_op.add_column(sa.Column('overage_interaction', sa.Float(), nullable=False))
batch_op.drop_column('additional_storage_token_price')
batch_op.drop_column('additional_embedding_token_price')
batch_op.drop_column('max_storage_tokens')
batch_op.drop_column('allow_overage')
batch_op.drop_column('included_embedding_tokens')
with op.batch_alter_table('license_tier', schema=None) as batch_op:
batch_op.add_column(sa.Column('max_storage_mb', sa.Integer(), nullable=False))
batch_op.add_column(sa.Column('additional_storage_price_d', sa.Numeric(precision=10, scale=4), nullable=False))
batch_op.add_column(sa.Column('additional_storage_price_e', sa.Numeric(precision=10, scale=4), nullable=False))
batch_op.add_column(sa.Column('included_embedding_mb', sa.Integer(), nullable=False))
batch_op.add_column(sa.Column('additional_embedding_price_d', sa.Numeric(precision=10, scale=4), nullable=False))
batch_op.add_column(sa.Column('additional_embedding_price_e', sa.Numeric(precision=10, scale=4), nullable=False))
batch_op.add_column(sa.Column('standard_overage_embedding', sa.Float(), nullable=False))
batch_op.add_column(sa.Column('standard_overage_interaction', sa.Float(), nullable=False))
batch_op.drop_column('max_storage_tokens')
batch_op.drop_column('additional_storage_token_price_e')
batch_op.drop_column('allow_overage')
batch_op.drop_column('additional_embedding_token_price_d')
batch_op.drop_column('included_embedding_tokens')
batch_op.drop_column('additional_embedding_token_price_e')
batch_op.drop_column('additional_storage_token_price_d')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license_tier', schema=None) as batch_op:
batch_op.add_column(sa.Column('additional_storage_token_price_d', sa.NUMERIC(precision=10, scale=4), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('additional_embedding_token_price_e', sa.NUMERIC(precision=10, scale=4), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('included_embedding_tokens', sa.INTEGER(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('additional_embedding_token_price_d', sa.NUMERIC(precision=10, scale=4), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('allow_overage', sa.BOOLEAN(), autoincrement=False, nullable=True))
batch_op.add_column(sa.Column('additional_storage_token_price_e', sa.NUMERIC(precision=10, scale=4), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('max_storage_tokens', sa.INTEGER(), autoincrement=False, nullable=False))
batch_op.drop_column('standard_overage_interaction')
batch_op.drop_column('standard_overage_embedding')
batch_op.drop_column('additional_embedding_price_e')
batch_op.drop_column('additional_embedding_price_d')
batch_op.drop_column('included_embedding_mb')
batch_op.drop_column('additional_storage_price_e')
batch_op.drop_column('additional_storage_price_d')
batch_op.drop_column('max_storage_mb')
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.add_column(sa.Column('included_embedding_tokens', sa.INTEGER(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('allow_overage', sa.BOOLEAN(), autoincrement=False, nullable=True))
batch_op.add_column(sa.Column('max_storage_tokens', sa.INTEGER(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('additional_embedding_token_price', sa.NUMERIC(precision=10, scale=4), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('additional_storage_token_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False))
batch_op.drop_column('overage_interaction')
batch_op.drop_column('overage_embedding')
batch_op.drop_column('additional_embedding_price')
batch_op.drop_column('included_embedding_mb')
batch_op.drop_column('additional_storage_price')
batch_op.drop_column('max_storage_mb')
# ### end Alembic commands ###

View File

@@ -0,0 +1,58 @@
"""Corrections on Licensing models
Revision ID: d616ea937a6a
Revises: 560d08d91e5b
Create Date: 2024-10-02 15:39:27.668741
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd616ea937a6a'
down_revision = '560d08d91e5b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_constraint('business_event_log_license_usage_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'license_usage', ['license_usage_id'], ['id'], referent_schema='public')
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.drop_constraint('license_tenant_id_fkey', type_='foreignkey')
batch_op.drop_constraint('license_tier_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'license_tier', ['tier_id'], ['id'], referent_schema='public')
batch_op.create_foreign_key(None, 'tenant', ['tenant_id'], ['id'], referent_schema='public')
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.drop_constraint('license_usage_license_id_fkey', type_='foreignkey')
batch_op.drop_constraint('license_usage_tenant_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'tenant', ['tenant_id'], ['id'], referent_schema='public')
batch_op.create_foreign_key(None, 'license', ['license_id'], ['id'], referent_schema='public')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('license_usage_tenant_id_fkey', 'tenant', ['tenant_id'], ['id'])
batch_op.create_foreign_key('license_usage_license_id_fkey', 'license', ['license_id'], ['id'])
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('license_tier_id_fkey', 'license_tier', ['tier_id'], ['id'])
batch_op.create_foreign_key('license_tenant_id_fkey', 'tenant', ['tenant_id'], ['id'])
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('business_event_log_license_usage_id_fkey', 'license_usage', ['license_usage_id'], ['id'])
# ### end Alembic commands ###

View File

@@ -0,0 +1,66 @@
"""Add buckets to License information
Revision ID: f201bfd23152
Revises: d616ea937a6a
Create Date: 2024-10-03 09:44:32.867470
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f201bfd23152'
down_revision = 'd616ea937a6a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_constraint('business_event_log_license_usage_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'license_usage', ['license_usage_id'], ['id'], referent_schema='public')
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.add_column(sa.Column('additional_storage_bucket', sa.Integer(), nullable=False))
batch_op.drop_constraint('license_tier_id_fkey', type_='foreignkey')
batch_op.drop_constraint('license_tenant_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'license_tier', ['tier_id'], ['id'], referent_schema='public')
batch_op.create_foreign_key(None, 'tenant', ['tenant_id'], ['id'], referent_schema='public')
with op.batch_alter_table('license_tier', schema=None) as batch_op:
batch_op.add_column(sa.Column('additional_storage_bucket', sa.Integer(), nullable=False))
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.drop_constraint('license_usage_tenant_id_fkey', type_='foreignkey')
batch_op.drop_constraint('license_usage_license_id_fkey', type_='foreignkey')
batch_op.create_foreign_key(None, 'license', ['license_id'], ['id'], referent_schema='public')
batch_op.create_foreign_key(None, 'tenant', ['tenant_id'], ['id'], referent_schema='public')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('license_usage', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('license_usage_license_id_fkey', 'license', ['license_id'], ['id'])
batch_op.create_foreign_key('license_usage_tenant_id_fkey', 'tenant', ['tenant_id'], ['id'])
with op.batch_alter_table('license_tier', schema=None) as batch_op:
batch_op.drop_column('additional_storage_bucket')
with op.batch_alter_table('license', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('license_tenant_id_fkey', 'tenant', ['tenant_id'], ['id'])
batch_op.create_foreign_key('license_tier_id_fkey', 'license_tier', ['tier_id'], ['id'])
batch_op.drop_column('additional_storage_bucket')
with op.batch_alter_table('business_event_log', schema=None) as batch_op:
batch_op.drop_constraint(None, type_='foreignkey')
batch_op.create_foreign_key('business_event_log_license_usage_id_fkey', 'license_usage', ['license_usage_id'], ['id'])
# ### end Alembic commands ###

View File

@@ -50,7 +50,8 @@ target_db = current_app.extensions['migrate'].db
def get_public_table_names():
# TODO: This function should include the necessary functionality to automatically retrieve table names
return ['role', 'roles_users', 'tenant', 'user', 'tenant_domain']
return ['role', 'roles_users', 'tenant', 'user', 'tenant_domain', 'license_tier', 'license', 'license_usage',
'business_event_log']
PUBLIC_TABLES = get_public_table_names()

View File

@@ -0,0 +1,94 @@
"""DocumentVersion update to bucket_name, object_name and file_size to better reflet Minio reality
Revision ID: 322d3cf1f17b
Revises: 711a09a77680
Create Date: 2024-10-07 07:45:19.014017
"""
from alembic import op
import sqlalchemy as sa
import pgvector
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import Session
from sqlalchemy import text
from flask import current_app
# revision identifiers, used by Alembic.
revision = '322d3cf1f17b'
down_revision = '711a09a77680'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - Add new fields ###
op.add_column('document_version', sa.Column('bucket_name', sa.String(length=255), nullable=True))
op.add_column('document_version', sa.Column('object_name', sa.String(length=200), nullable=True))
op.add_column('document_version', sa.Column('file_size', sa.Float(), nullable=True))
# ### Upgrade values for bucket_name, object_name and file_size to reflect minio reality ###
from common.models.document import DocumentVersion
from common.extensions import minio_client
from minio.error import S3Error
# Create a connection
connection = op.get_bind()
session = Session(bind=connection)
# Get the current schema name (which should be the tenant ID)
current_schema = connection.execute(text("SELECT current_schema()")).scalar()
tenant_id = int(current_schema)
doc_versions = session.query(DocumentVersion).all()
for doc_version in doc_versions:
try:
object_name = minio_client.generate_object_name(doc_version.doc_id,
doc_version.language,
doc_version.id,
doc_version.file_name)
bucket_name = minio_client.generate_bucket_name(tenant_id)
doc_version.object_name = object_name
doc_version.bucket_name = bucket_name
try:
stat = minio_client.client.stat_object(
bucket_name=bucket_name,
object_name=object_name
)
doc_version.file_size = stat.size / 1048576  # bytes -> MiB
current_app.logger.info(f"Processed Upgrade for DocumentVersion {doc_version.id} for Tenant {tenant_id}")
except S3Error as e:
if e.code == "NoSuchKey":
current_app.logger.warning(
f"Object {doc_version.file_location} not found in bucket {doc_version.bucket_name}. Skipping.")
continue # Move to the next item
else:
raise e # Handle other types of S3 errors
except Exception as e:
session.rollback()
current_app.logger.error(f"Couldn't process upgrade for DocumentVersion {doc_version.id} for "
f"Tenant {tenant_id}. Error: {str(e)}")
try:
session.commit()
current_app.logger.info(f"Successfully updated file sizes for tenant schema {current_schema}")
except Exception as e:
session.rollback()
current_app.logger.error(f"Error committing changes for tenant schema {current_schema}: {str(e)}")
# ### commands auto generated by Alembic - Remove old fields ###
# op.drop_column('document_version', 'file_location')
# op.drop_column('document_version', 'file_name')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('document_version', sa.Column('file_name', sa.VARCHAR(length=200), autoincrement=False, nullable=True))
op.add_column('document_version', sa.Column('file_location', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
op.drop_column('document_version', 'file_size')
op.drop_column('document_version', 'object_name')
op.drop_column('document_version', 'bucket_name')
# ### end Alembic commands ###
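For context, the 1048576 divisor stores `file_size` in MiB, in line with the MiB-based usage accounting. A hedged sketch of the same bookkeeping applied when a new DocumentVersion is stored, so the back-fill above remains a one-off (the helper name and its call site are assumptions, not taken from the diff):

from common.extensions import minio_client

def populate_storage_attributes(doc_version, tenant_id, file_name):
    # Hedged sketch: derive bucket/object names the same way the migration does,
    # then record the object's size in MiB from the Minio object metadata.
    bucket_name = minio_client.generate_bucket_name(tenant_id)
    object_name = minio_client.generate_object_name(
        doc_version.doc_id, doc_version.language, doc_version.id, file_name
    )
    stat = minio_client.client.stat_object(bucket_name=bucket_name, object_name=object_name)
    doc_version.bucket_name = bucket_name
    doc_version.object_name = object_name
    doc_version.file_size = stat.size / 1048576  # bytes -> MiB
    return doc_version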

View File

@@ -0,0 +1,31 @@
"""Remove obsolete fields from DocumentVersion
Revision ID: 5a75fb6da7b8
Revises: 322d3cf1f17b
Create Date: 2024-10-08 06:49:57.349346
"""
from alembic import op
import sqlalchemy as sa
import pgvector
# revision identifiers, used by Alembic.
revision = '5a75fb6da7b8'
down_revision = '322d3cf1f17b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('document_version', 'file_name')
op.drop_column('document_version', 'file_location')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('document_version', sa.Column('file_location', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
op.add_column('document_version', sa.Column('file_name', sa.VARCHAR(length=200), autoincrement=False, nullable=True))
# ### end Alembic commands ###

View File

@@ -80,3 +80,4 @@ langsmith~=0.1.121
anthropic~=0.34.2
prometheus-client~=0.20.0
flower~=2.0.1
psutil~=6.0.0
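psutil is new in this release. A hedged illustration of the kind of check it enables in a worker before starting a memory-heavy task; the budget and helper name are assumptions, not taken from the diff:

import psutil

def memory_headroom_ok(max_rss_mib: float = 1024.0) -> bool:
    # Hedged sketch: report whether the current process is under an assumed
    # RSS budget (in MiB) before kicking off heavy processing.
    rss_mib = psutil.Process().memory_info().rss / 1048576  # bytes -> MiB
    return rss_mib < max_rss_mib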

17
scripts/repopack_eveai.sh Executable file
View File

@@ -0,0 +1,17 @@
#!/bin/bash
# Run repopack to generate the file
repopack
# Check if repopack generated the eveai_repo.txt file
if [[ -f "eveai_repo.txt" ]]; then
# Get the current timestamp in the format YYYY-DD-MM_HH-MM-SS
timestamp=$(date +"%Y-%d-%m_%H-%M-%S")
# Rename the file with the timestamp
mv eveai_repo.txt "${timestamp}_eveai_repo.txt"
echo "File renamed to ${timestamp}_eveai_repo.txt"
else
echo "Error: eveai_repo.txt not found. repopack may have failed."
fi