Compare commits
44 Commits
1.0.1-alfa ... v1.0.14-al

| Author | SHA1 | Date |
|---|---|---|
| | 88f4db1178 | |
| | 2df291ea91 | |
| | 5841525b4c | |
| | 532073d38e | |
| | 43547287b1 | |
| | aa358df28e | |
| | 30fec27488 | |
| | 5e77b478dd | |
| | 6f71259822 | |
| | 74cc7ae95e | |
| | 7f12c8b355 | |
| | 6069f5f7e5 | |
| | 3e644f1652 | |
| | 3316a8bc47 | |
| | 270479c77d | |
| | 0f4558d775 | |
| | 9f5f090f0c | |
| | 5ffad160b1 | |
| | d6a7743f26 | |
| | 9782e31ae5 | |
| | f638860e90 | |
| | b700cfac64 | |
| | 883175b8f5 | |
| | ae697df4c9 | |
| | d9cb00fcdc | |
| | ee1b0f1cfa | |
| | a740c96630 | |
| | 67bdeac434 | |
| | 1622591afd | |
| | 6cf660e622 | |
| | 9e14824249 | |
| | 76cb825660 | |
| | 341ba47d1c | |
| | 1fa33c029b | |
| | bcf7d439f3 | |
| | b9acf4d2ae | |
| | ae7bf3dbae | |
| | 914c265afe | |
| | a158655247 | |
| | bc350af247 | |
| | 6062b7646c | |
| | 122d1a18df | |
| | 2ca006d82c | |
| | a9f9b04117 | |

.gitignore (vendored, 31 changed lines)

@@ -12,3 +12,34 @@ docker/tenant_files/
 **/.DS_Store
 __pycache__
 **/__pycache__
+/.idea
+*.pyc
+*.pyc
+common/.DS_Store
+common/__pycache__/__init__.cpython-312.pyc
+common/__pycache__/extensions.cpython-312.pyc
+common/models/__pycache__/__init__.cpython-312.pyc
+common/models/__pycache__/document.cpython-312.pyc
+common/models/__pycache__/interaction.cpython-312.pyc
+common/models/__pycache__/user.cpython-312.pyc
+common/utils/.DS_Store
+common/utils/__pycache__/__init__.cpython-312.pyc
+common/utils/__pycache__/celery_utils.cpython-312.pyc
+common/utils/__pycache__/nginx_utils.cpython-312.pyc
+common/utils/__pycache__/security.cpython-312.pyc
+common/utils/__pycache__/simple_encryption.cpython-312.pyc
+common/utils/__pycache__/template_filters.cpython-312.pyc
+config/.DS_Store
+config/__pycache__/__init__.cpython-312.pyc
+config/__pycache__/config.cpython-312.pyc
+config/__pycache__/logging_config.cpython-312.pyc
+eveai_app/.DS_Store
+eveai_app/__pycache__/__init__.cpython-312.pyc
+eveai_app/__pycache__/errors.cpython-312.pyc
+eveai_chat/.DS_Store
+migrations/.DS_Store
+migrations/public/.DS_Store
+scripts/.DS_Store
+scripts/__pycache__/run_eveai_app.cpython-312.pyc
+/eveai_repo.txt
+*repo.txt

.idea/sqldialects.xml (generated, deleted, 6 lines)

@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="SqlDialectMappings">
-    <file url="PROJECT" dialect="PostgreSQL" />
-  </component>
-</project>

.repopackignore_base (new file, 20 lines)

# Add patterns to ignore here, one per line
# Example:
# *.log
# tmp/
logs/
nginx/static/assets/fonts/
nginx/static/assets/img/
nginx/static/assets/js/
nginx/static/scss/
patched_packages/
migrations/
*material*
*nucleo*
*package*
nginx/mime.types
*.gitignore*
.python-version
.repopackignore*
repopack.config.json
*repo.txt

.repopackignore_components (new file, 12 lines)

docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_docker (new file, 12 lines)

common/
config/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/

.repopackignore_eveai_api (new file, 11 lines)

docker/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_eveai_app (new file, 11 lines)

docker/
eveai_api/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_eveai_beat (new file, 11 lines)

docker/
eveai_api/
eveai_app/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_eveai_chat (new file, 11 lines)

docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_eveai_chat_workers (new file, 11 lines)

docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_entitlements/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_eveai_entitlements (new file, 11 lines)

docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_workers/
instance/
integrations/
nginx/
scripts/

.repopackignore_eveai_workers (new file, 11 lines)

docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
instance/
integrations/
nginx/
scripts/

.repopackignore_full (new file, 4 lines)

docker
integrations
nginx
scripts

.repopackignore_integrations (new file, 13 lines)

common/
config/
docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
nginx/
scripts/

.repopackignore_nginx (new file, 11 lines)

docker/
eveai_api/
eveai_app/
eveai_beat/
eveai_chat/
eveai_chat_workers/
eveai_entitlements/
eveai_workers/
instance/
integrations/
scripts/

CHANGELOG.md (new file, 190 lines)

# Changelog

All notable changes to EveAI will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Added
- For new features.

### Changed
- For changes in existing functionality.

### Deprecated
- For soon-to-be removed features.

### Removed
- For now removed features.

### Fixed
- Set default language when registering Documents or URLs.

### Security
- In case of vulnerabilities.

## [1.0.13-alfa]

### Added
- Finished Catalog introduction
- Reinitialization of WordPress site for syncing

### Changed
- Modification of WordPress Sync Component
- Cleanup of attributes in Tenant

### Fixed
- Overall bugfixes as result from the Catalog introduction

## [1.0.12-alfa]

### Added
- Added Catalog functionality

### Changed
- For changes in existing functionality.

### Deprecated
- For soon-to-be removed features.

### Removed
- For now removed features.

### Fixed
- Set default language when registering Documents or URLs.

### Security
- In case of vulnerabilities.

## [1.0.11-alfa]

### Added
- License Usage Calculation realised
- View License Usages
- Celery Beat container added
- First schedule in Celery Beat for calculating usage (hourly)

### Changed
- repopack can now split for different components

### Fixed
- Various fixes as consequence of changing file_location / file_name ==> bucket_name / object_name
- Celery Routing / Queuing updated

## [1.0.10-alfa]

### Added
- BusinessEventLog monitoring using Langchain native code

### Changed
- Allow longer audio files (or video) to be uploaded and processed
- Storage and Embedding usage now expressed in MiB iso tokens (more logical)
- Views for License / LicenseTier

### Removed
- Portkey removed for monitoring usage

## [1.0.9-alfa] - 2024/10/01

### Added
- Business Event tracing (eveai_workers & eveai_chat_workers)
- Flower Container added for monitoring

### Changed
- Healthcheck improvements
- model_utils turned into a class with lazy loading

### Deprecated
- For soon-to-be removed features.

### Removed
- For now removed features.

### Fixed
- Set default language when registering Documents or URLs.

## [1.0.8-alfa] - 2024-09-12

### Added
- Tenant type defined to allow for active, inactive, demo ... tenants
- Search and filtering functionality on Tenants
- Implementation of health checks (1st version)
- Provision for Prometheus monitoring (no implementation yet)
- Refine audio_processor and srt_processor to reduce duplicate code and support larger files
- Introduction of repopack to reason in LLMs about the code

### Fixed
- Refine audio_processor and srt_processor to reduce duplicate code and support larger files

## [1.0.7-alfa] - 2024-09-12

### Added
- Full Document API allowing for creation, updating and invalidation of documents.
- Metadata fields (JSON) added to DocumentVersion, allowing end-users to add structured information
- Wordpress plugin eveai_sync to synchronize Wordpress content with EveAI

### Fixed
- Maximal deduplication of code between views and api in document_utils.py

## [1.0.6-alfa] - 2024-09-03

### Fixed
- Problems with tenant scheme migrations - may have to be revisited
- Correction of default language settings when uploading docs or URLs
- Addition of a CHANGELOG.md file

## [1.0.5-alfa] - 2024-09-02

### Added
- Allow chatwidget to connect to multiple servers (e.g. development and production)
- Start implementation of API
- Add API-key functionality to tenants
- Deduplication of API and Document view code
- Allow URL addition to accept all types of files, not just HTML
- Allow new file types upload: srt, mp3, ogg, mp4
- Improve processing of different file types using Processor classes

### Removed
- Removed direct upload of Youtube URLs, due to continuous changes in Youtube website

## [1.0.4-alfa] - 2024-08-27

Skipped

## [1.0.3-alfa] - 2024-08-27

### Added
- Refinement of HTML processing - allow for excluded classes and elements.
- Allow for multiple instances of Evie on 1 website (pure + Wordpress plugin)

### Changed
- PDF Processing extracted in new PDF Processor class.
- Allow for longer and more complex PDFs to be uploaded.

## [1.0.2-alfa] - 2024-08-22

### Fixed
- Bugfix for ResetPasswordForm in config.py

## [1.0.1-alfa] - 2024-08-21

### Added
- Full Document Version Overview

### Changed
- Improvements to user creation and registration, renewal of passwords, ...

## [1.0.0-alfa] - 2024-08-16

### Added
- Initial release of the project.

### Changed
- None

### Fixed
- None

[Unreleased]: https://github.com/username/repo/compare/v1.0.0...HEAD
[1.0.0]: https://github.com/username/repo/releases/tag/v1.0.0

@@ -9,8 +9,9 @@ from flask_socketio import SocketIO
 from flask_jwt_extended import JWTManager
 from flask_session import Session
 from flask_wtf import CSRFProtect
+from flask_restx import Api
+from prometheus_flask_exporter import PrometheusMetrics
 
-from .utils.nginx_utils import prefixed_url_for
 from .utils.simple_encryption import SimpleEncryption
 from .utils.minio_utils import MinioClient
 
@@ -27,8 +28,7 @@ cors = CORS()
 socketio = SocketIO()
 jwt = JWTManager()
 session = Session()
+api_rest = Api()
-# kms_client = JosKMSClient.from_service_account_json('config/gc_sa_eveai.json')
-
 simple_encryption = SimpleEncryption()
 minio_client = MinioClient()
+metrics = PrometheusMetrics.for_app_factory()

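The two newly instantiated extensions follow the same app-factory pattern as the existing ones. A minimal sketch of how they would typically be wired up; the factory below is illustrative and not part of this diff, and it assumes the module shown above is the shared extensions module:

# Illustrative sketch only; assumes a create_app() factory exists elsewhere in the project.
from flask import Flask

from common.extensions import api_rest, metrics


def create_app() -> Flask:
    app = Flask(__name__)
    api_rest.init_app(app)   # Flask-RESTX: attaches the REST API to the app
    metrics.init_app(app)    # prometheus_flask_exporter: exposes /metrics for scraping
    return app
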
common/langchain/llm_metrics_handler.py (new file, 49 lines)

import time
from langchain.callbacks.base import BaseCallbackHandler
from typing import Dict, Any, List
from langchain.schema import LLMResult
from common.utils.business_event_context import current_event
from flask import current_app


class LLMMetricsHandler(BaseCallbackHandler):
    def __init__(self):
        self.total_tokens: int = 0
        self.prompt_tokens: int = 0
        self.completion_tokens: int = 0
        self.start_time: float = 0
        self.end_time: float = 0
        self.total_time: float = 0

    def reset(self):
        self.total_tokens = 0
        self.prompt_tokens = 0
        self.completion_tokens = 0
        self.start_time = 0
        self.end_time = 0
        self.total_time = 0

    def on_llm_start(self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any) -> None:
        self.start_time = time.time()

    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        self.end_time = time.time()
        self.total_time = self.end_time - self.start_time

        usage = response.llm_output.get('token_usage', {})
        self.prompt_tokens += usage.get('prompt_tokens', 0)
        self.completion_tokens += usage.get('completion_tokens', 0)
        self.total_tokens = self.prompt_tokens + self.completion_tokens

        metrics = self.get_metrics()
        current_event.log_llm_metrics(metrics)
        self.reset()  # Reset for the next call

    def get_metrics(self) -> Dict[str, int | float]:
        return {
            'total_tokens': self.total_tokens,
            'prompt_tokens': self.prompt_tokens,
            'completion_tokens': self.completion_tokens,
            'time_elapsed': self.total_time,
            'interaction_type': 'LLM',
        }

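Usage sketch (not part of the diff): the handler is meant to be passed as a LangChain callback so that token counts and latency flow into the current business event. The model name below is illustrative, and the call assumes it runs inside a context where current_event is populated.

from langchain_openai import ChatOpenAI

from common.langchain.llm_metrics_handler import LLMMetricsHandler

handler = LLMMetricsHandler()
llm = ChatOpenAI(model="gpt-4o-mini", callbacks=[handler])  # model name is illustrative
llm.invoke("Hello")
# on_llm_end() has logged total/prompt/completion tokens, elapsed time and
# interaction_type 'LLM' to current_event, then reset the handler for the next call.
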
common/langchain/retrievers/__init__.py (new file, empty)

@@ -1,35 +1,50 @@
 from langchain_core.retrievers import BaseRetriever
 from sqlalchemy import func, and_, or_, desc
 from sqlalchemy.exc import SQLAlchemyError
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, PrivateAttr
 from typing import Any, Dict
 from flask import current_app
 
 from common.extensions import db
 from common.models.document import Document, DocumentVersion
 from common.utils.datetime_utils import get_date_in_timezone
+from common.utils.model_utils import ModelVariables
 
 
-class EveAIRetriever(BaseRetriever):
-    model_variables: Dict[str, Any] = Field(...)
-    tenant_info: Dict[str, Any] = Field(...)
+class EveAIDefaultRagRetriever(BaseRetriever, BaseModel):
+    _catalog_id: int = PrivateAttr()
+    _model_variables: ModelVariables = PrivateAttr()
+    _tenant_info: Dict[str, Any] = PrivateAttr()
 
-    def __init__(self, model_variables: Dict[str, Any], tenant_info: Dict[str, Any]):
+    def __init__(self, catalog_id: int, model_variables: ModelVariables, tenant_info: Dict[str, Any]):
         super().__init__()
-        self.model_variables = model_variables
-        self.tenant_info = tenant_info
+        current_app.logger.debug(f'Model variables type: {type(model_variables)}')
+        self._catalog_id = catalog_id
+        self._model_variables = model_variables
+        self._tenant_info = tenant_info
+
+    @property
+    def catalog_id(self) -> int:
+        return self._catalog_id
+
+    @property
+    def model_variables(self) -> ModelVariables:
+        return self._model_variables
+
+    @property
+    def tenant_info(self) -> Dict[str, Any]:
+        return self._tenant_info
 
     def _get_relevant_documents(self, query: str):
         current_app.logger.debug(f'Retrieving relevant documents for query: {query}')
         query_embedding = self._get_query_embedding(query)
+        current_app.logger.debug(f'Model Variables Private: {type(self._model_variables)}')
+        current_app.logger.debug(f'Model Variables Property: {type(self.model_variables)}')
         db_class = self.model_variables['embedding_db_model']
         similarity_threshold = self.model_variables['similarity_threshold']
         k = self.model_variables['k']
 
-        if self.tenant_info['rag_tuning']:
+        if self.model_variables['rag_tuning']:
             try:
                 current_date = get_date_in_timezone(self.tenant_info['timezone'])
                 current_app.rag_tuning_logger.debug(f'Current date: {current_date}\n')
@@ -64,7 +79,7 @@ class EveAIRetriever(BaseRetriever):
             current_app.logger.error(f'Error generating overview: {e}')
             db.session.rollback()
 
-        if self.tenant_info['rag_tuning']:
+        if self.model_variables['rag_tuning']:
            current_app.rag_tuning_logger.debug(f'Parameters for Retrieval of documents: \n')
            current_app.rag_tuning_logger.debug(f'Similarity Threshold: {similarity_threshold}\n')
            current_app.rag_tuning_logger.debug(f'K: {k}\n')
@@ -91,20 +106,21 @@ class EveAIRetriever(BaseRetriever):
             .filter(
                 or_(Document.valid_from.is_(None), func.date(Document.valid_from) <= current_date),
                 or_(Document.valid_to.is_(None), func.date(Document.valid_to) >= current_date),
-                (1 - db_class.embedding.cosine_distance(query_embedding)) > similarity_threshold
+                (1 - db_class.embedding.cosine_distance(query_embedding)) > similarity_threshold,
+                Document.catalog_id == self._catalog_id
             )
             .order_by(desc('similarity'))
             .limit(k)
         )
 
-        if self.tenant_info['rag_tuning']:
+        if self.model_variables['rag_tuning']:
            current_app.rag_tuning_logger.debug(f'Query executed for Retrieval of documents: \n')
            current_app.rag_tuning_logger.debug(f'{query_obj.statement}\n')
            current_app.rag_tuning_logger.debug(f'---------------------------------------\n')
 
         res = query_obj.all()
 
-        if self.tenant_info['rag_tuning']:
+        if self.model_variables['rag_tuning']:
            current_app.rag_tuning_logger.debug(f'Retrieved {len(res)} relevant documents \n')
            current_app.rag_tuning_logger.debug(f'Data retrieved: \n')
            current_app.rag_tuning_logger.debug(f'{res}\n')
@@ -112,7 +128,7 @@ class EveAIRetriever(BaseRetriever):
 
         result = []
         for doc in res:
-            if self.tenant_info['rag_tuning']:
+            if self.model_variables['rag_tuning']:
                 current_app.rag_tuning_logger.debug(f'Document ID: {doc[0].id} - Distance: {doc[1]}\n')
                 current_app.rag_tuning_logger.debug(f'Chunk: \n {doc[0].chunk}\n\n')
             result.append(f'SOURCE: {doc[0].id}\n\n{doc[0].chunk}\n\n')

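The reworked constructor scopes retrieval to a single catalog. A hedged usage sketch (the catalog id and the model_variables/tenant_info objects are placeholders; in the real code they come from a ModelVariables instance and the tenant record):

retriever = EveAIDefaultRagRetriever(
    catalog_id=1,                      # placeholder catalog id
    model_variables=model_variables,   # ModelVariables instance built elsewhere
    tenant_info=tenant_info,           # dict holding at least 'timezone'
)
chunks = retriever.get_relevant_documents("How do I reset my password?")
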
common/langchain/retrievers/eveai_dossier_retriever.py (new file, 154 lines)

from langchain_core.retrievers import BaseRetriever
from sqlalchemy import func, and_, or_, desc, cast, JSON
from sqlalchemy.exc import SQLAlchemyError
from pydantic import BaseModel, Field, PrivateAttr
from typing import Any, Dict, List, Optional
from flask import current_app
from contextlib import contextmanager

from common.extensions import db
from common.models.document import Document, DocumentVersion, Catalog
from common.utils.datetime_utils import get_date_in_timezone
from common.utils.model_utils import ModelVariables


class EveAIDossierRetriever(BaseRetriever, BaseModel):
    _catalog_id: int = PrivateAttr()
    _model_variables: ModelVariables = PrivateAttr()
    _tenant_info: Dict[str, Any] = PrivateAttr()
    _active_filters: Optional[Dict[str, Any]] = PrivateAttr()

    def __init__(self, catalog_id: int, model_variables: ModelVariables, tenant_info: Dict[str, Any]):
        super().__init__()
        self._catalog_id = catalog_id
        self._model_variables = model_variables
        self._tenant_info = tenant_info
        self._active_filters = None

    @contextmanager
    def filtering(self, metadata_filters: Dict[str, Any]):
        """Context manager for temporarily setting metadata filters"""
        previous_filters = self._active_filters
        self._active_filters = metadata_filters
        try:
            yield self
        finally:
            self._active_filters = previous_filters

    def _build_metadata_filter_conditions(self, query):
        """Build SQL conditions for metadata filtering"""
        if not self._active_filters:
            return query

        conditions = []
        for field, value in self._active_filters.items():
            if value is None:
                continue

            # Handle both single values and lists of values
            if isinstance(value, (list, tuple)):
                # Multiple values - create OR condition
                or_conditions = []
                for val in value:
                    or_conditions.append(
                        cast(DocumentVersion.user_metadata[field].astext, JSON) == str(val)
                    )
                if or_conditions:
                    conditions.append(or_(*or_conditions))
            else:
                # Single value - direct comparison
                conditions.append(
                    cast(DocumentVersion.user_metadata[field].astext, JSON) == str(value)
                )

        if conditions:
            query = query.filter(and_(*conditions))

        return query

    def _get_relevant_documents(self, query: str):
        current_app.logger.debug(f'Retrieving relevant documents for dossier query: {query}')
        if self._active_filters:
            current_app.logger.debug(f'Using metadata filters: {self._active_filters}')

        query_embedding = self._get_query_embedding(query)
        db_class = self.model_variables['embedding_db_model']
        similarity_threshold = self.model_variables['similarity_threshold']
        k = self.model_variables['k']

        try:
            current_date = get_date_in_timezone(self.tenant_info['timezone'])

            # Subquery to find the latest version of each document
            subquery = (
                db.session.query(
                    DocumentVersion.doc_id,
                    func.max(DocumentVersion.id).label('latest_version_id')
                )
                .group_by(DocumentVersion.doc_id)
                .subquery()
            )

            # Build base query
            # Build base query
            query_obj = (
                db.session.query(db_class,
                                 (1 - db_class.embedding.cosine_distance(query_embedding)).label('similarity'))
                .join(DocumentVersion, db_class.doc_vers_id == DocumentVersion.id)
                .join(Document, DocumentVersion.doc_id == Document.id)
                .join(subquery, DocumentVersion.id == subquery.c.latest_version_id)
                .filter(
                    or_(Document.valid_from.is_(None), func.date(Document.valid_from) <= current_date),
                    or_(Document.valid_to.is_(None), func.date(Document.valid_to) >= current_date),
                    (1 - db_class.embedding.cosine_distance(query_embedding)) > similarity_threshold,
                    Document.catalog_id == self._catalog_id
                )
            )

            # Apply metadata filters
            query_obj = self._build_metadata_filter_conditions(query_obj)

            # Order and limit results
            query_obj = query_obj.order_by(desc('similarity')).limit(k)

            # Debug logging for RAG tuning if enabled
            if self.model_variables['rag_tuning']:
                self._log_rag_tuning(query_obj, query_embedding)

            res = query_obj.all()

            result = []
            for doc in res:
                if self.model_variables['rag_tuning']:
                    current_app.logger.debug(f'Document ID: {doc[0].id} - Distance: {doc[1]}\n')
                    current_app.logger.debug(f'Chunk: \n {doc[0].chunk}\n\n')
                result.append(f'SOURCE: {doc[0].id}\n\n{doc[0].chunk}\n\n')

        except SQLAlchemyError as e:
            current_app.logger.error(f'Error retrieving relevant documents: {e}')
            db.session.rollback()
            return []

        return result

    def _log_rag_tuning(self, query_obj, query_embedding):
        """Log debug information for RAG tuning"""
        current_app.rag_tuning_logger.debug("Debug: Query execution plan:")
        current_app.rag_tuning_logger.debug(f"{query_obj.statement}")
        if self._active_filters:
            current_app.rag_tuning_logger.debug("Debug: Active metadata filters:")
            current_app.rag_tuning_logger.debug(f"{self._active_filters}")

    def _get_query_embedding(self, query: str):
        """Get embedding for the query text"""
        embedding_model = self.model_variables['embedding_model']
        query_embedding = embedding_model.embed_query(query)
        return query_embedding

    @property
    def model_variables(self) -> ModelVariables:
        return self._model_variables

    @property
    def tenant_info(self) -> Dict[str, Any]:
        return self._tenant_info

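The filtering() context manager lets a caller scope one retrieval to a set of user_metadata values without mutating the retriever permanently. A sketch; the field names and values below are invented for illustration, and the constructor arguments are placeholders as in the previous sketch:

retriever = EveAIDossierRetriever(
    catalog_id=1,
    model_variables=model_variables,
    tenant_info=tenant_info,
)

# Only the latest document versions whose user_metadata matches the filters are considered.
with retriever.filtering({'dossier_id': '42', 'doc_kind': ['invoice', 'contract']}):
    chunks = retriever.get_relevant_documents("total amount due")
# Outside the block the previous filters (none, by default) are restored.
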
@@ -1,23 +1,31 @@
 from langchain_core.retrievers import BaseRetriever
 from sqlalchemy import asc
 from sqlalchemy.exc import SQLAlchemyError
-from pydantic import BaseModel, Field
+from pydantic import Field, BaseModel, PrivateAttr
 from typing import Any, Dict
 from flask import current_app
 
 from common.extensions import db
 from common.models.interaction import ChatSession, Interaction
-from common.utils.datetime_utils import get_date_in_timezone
+from common.utils.model_utils import ModelVariables
 
 
-class EveAIHistoryRetriever(BaseRetriever):
-    model_variables: Dict[str, Any] = Field(...)
-    session_id: str = Field(...)
+class EveAIHistoryRetriever(BaseRetriever, BaseModel):
+    _model_variables: ModelVariables = PrivateAttr()
+    _session_id: str = PrivateAttr()
 
-    def __init__(self, model_variables: Dict[str, Any], session_id: str):
+    def __init__(self, model_variables: ModelVariables, session_id: str):
         super().__init__()
-        self.model_variables = model_variables
-        self.session_id = session_id
+        self._model_variables = model_variables
+        self._session_id = session_id
+
+    @property
+    def model_variables(self) -> ModelVariables:
+        return self._model_variables
+
+    @property
+    def session_id(self) -> str:
+        return self._session_id
 
     def _get_relevant_documents(self, query: str):
         current_app.logger.debug(f'Retrieving history of interactions for query: {query}')

common/langchain/retrievers/eveai_retriever.py (new file, 40 lines)

from pydantic import BaseModel, PrivateAttr
from typing import Dict, Any

from common.utils.model_utils import ModelVariables


class EveAIRetriever(BaseModel):
    _catalog_id: int = PrivateAttr()
    _user_metadata: Dict[str, Any] = PrivateAttr()
    _system_metadata: Dict[str, Any] = PrivateAttr()
    _configuration: Dict[str, Any] = PrivateAttr()
    _tenant_info: Dict[str, Any] = PrivateAttr()
    _model_variables: ModelVariables = PrivateAttr()
    _tuning: bool = PrivateAttr()

    def __init__(self, catalog_id: int, user_metadata: Dict[str, Any], system_metadata: Dict[str, Any],
                 configuration: Dict[str, Any]):
        super().__init__()
        self._catalog_id = catalog_id
        self._user_metadata = user_metadata
        self._system_metadata = system_metadata
        self._configuration = configuration

    @property
    def catalog_id(self):
        return self._catalog_id

    @property
    def user_metadata(self):
        return self._user_metadata

    @property
    def system_metadata(self):
        return self._system_metadata

    @property
    def configuration(self):
        return self._configuration

    # Any common methods that should be shared among retrievers can go here.

common/langchain/tracked_openai_embeddings.py (new file, 51 lines)

from langchain_openai import OpenAIEmbeddings
from typing import List, Any
import time
from common.utils.business_event_context import current_event


class TrackedOpenAIEmbeddings(OpenAIEmbeddings):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def embed_documents(self, texts: list[str]) -> list[list[float]]:
        start_time = time.time()
        result = super().embed_documents(texts)
        end_time = time.time()

        # Estimate token usage (OpenAI uses tiktoken for this)
        import tiktoken
        enc = tiktoken.encoding_for_model(self.model)
        total_tokens = sum(len(enc.encode(text)) for text in texts)

        metrics = {
            'total_tokens': total_tokens,
            'prompt_tokens': total_tokens,  # For embeddings, all tokens are prompt tokens
            'completion_tokens': 0,
            'time_elapsed': end_time - start_time,
            'interaction_type': 'Embedding',
        }
        current_event.log_llm_metrics(metrics)

        return result

    def embed_query(self, text: str) -> List[float]:
        start_time = time.time()
        result = super().embed_query(text)
        end_time = time.time()

        # Estimate token usage
        import tiktoken
        enc = tiktoken.encoding_for_model(self.model)
        total_tokens = len(enc.encode(text))

        metrics = {
            'total_tokens': total_tokens,
            'prompt_tokens': total_tokens,
            'completion_tokens': 0,
            'time_elapsed': end_time - start_time,
            'interaction_type': 'Embedding',
        }
        current_event.log_llm_metrics(metrics)

        return result

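This class is a drop-in replacement for OpenAIEmbeddings that reports an estimated token count to the current business event. A usage sketch (the model name is illustrative; it assumes tiktoken and an OpenAI key are available and that current_event is active):

embeddings = TrackedOpenAIEmbeddings(model="text-embedding-3-small")  # model name is illustrative
vectors = embeddings.embed_documents(["chunk one", "chunk two"])
query_vector = embeddings.embed_query("what changed in 1.0.13?")
# Each call logs an 'Embedding' interaction with a tiktoken-based token estimate and elapsed time.
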
common/langchain/tracked_transcribe.py (new file, 27 lines)

import time
from common.utils.business_event_context import current_event


def tracked_transcribe(client, *args, **kwargs):
    start_time = time.time()

    # Extract the file and model from kwargs if present, otherwise use defaults
    file = kwargs.get('file')
    model = kwargs.get('model', 'whisper-1')
    duration = kwargs.pop('duration', 600)

    result = client.audio.transcriptions.create(*args, **kwargs)
    end_time = time.time()

    # Token usage for transcriptions is actually the duration in seconds we pass, as the whisper model is priced per second transcribed

    metrics = {
        'total_tokens': duration,
        'prompt_tokens': 0,  # For transcriptions, all tokens are considered "completion"
        'completion_tokens': duration,
        'time_elapsed': end_time - start_time,
        'interaction_type': 'ASR',
    }
    current_event.log_llm_metrics(metrics)

    return result

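tracked_transcribe wraps the OpenAI transcription call and logs the audio duration in seconds as the "token" count, since Whisper is billed per second. A usage sketch; the file name and duration value are illustrative, and client is assumed to be an openai.OpenAI instance:

from openai import OpenAI

client = OpenAI()
with open("episode.mp3", "rb") as audio_file:   # illustrative file
    transcript = tracked_transcribe(
        client,
        file=audio_file,
        model="whisper-1",
        duration=754,   # seconds; popped from kwargs before the API call, used only for usage accounting
    )
print(transcript.text)
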
common/models/README.txt (new file, 2 lines)

If models are added to the public schema (i.e. in the user domain), ensure to add their corresponding tables to the
env.py, get_public_table_names, for tenant migrations!

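The migrations env.py itself is not part of this diff, so the exact shape of get_public_table_names is not shown. As a hedged illustration only, the public-schema entitlement tables introduced in this release (their default table names follow from the model names and the foreign keys in common/models/entitlements.py) would be listed there along these lines:

# Hypothetical sketch; the real migrations/env.py may structure this differently.
def get_public_table_names():
    return [
        'tenant',
        'user',
        # public-schema entitlement tables added in this release:
        'business_event_log',
        'license',
        'license_tier',
        'license_usage',
    ]
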
@@ -1,18 +1,77 @@
 from common.extensions import db
 from .user import User, Tenant
 from pgvector.sqlalchemy import Vector
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.dialects.postgresql import ARRAY
+import sqlalchemy as sa
+
+
+class Catalog(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    name = db.Column(db.String(50), nullable=False)
+    description = db.Column(db.Text, nullable=True)
+    type = db.Column(db.String(50), nullable=False, default="DEFAULT_CATALOG")
+
+    # Embedding variables
+    html_tags = db.Column(ARRAY(sa.String(10)), nullable=True, default=['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li'])
+    html_end_tags = db.Column(ARRAY(sa.String(10)), nullable=True, default=['p', 'li'])
+    html_included_elements = db.Column(ARRAY(sa.String(50)), nullable=True)
+    html_excluded_elements = db.Column(ARRAY(sa.String(50)), nullable=True)
+    html_excluded_classes = db.Column(ARRAY(sa.String(200)), nullable=True)
+
+    min_chunk_size = db.Column(db.Integer, nullable=True, default=2000)
+    max_chunk_size = db.Column(db.Integer, nullable=True, default=3000)
+
+    # Chat variables ==> Move to Specialist?
+    chat_RAG_temperature = db.Column(db.Float, nullable=True, default=0.3)
+    chat_no_RAG_temperature = db.Column(db.Float, nullable=True, default=0.5)
+
+    # Tuning enablers
+    embed_tuning = db.Column(db.Boolean, nullable=True, default=False)
+
+    # Meta Data
+    user_metadata = db.Column(JSONB, nullable=True)
+    system_metadata = db.Column(JSONB, nullable=True)
+    configuration = db.Column(JSONB, nullable=True)
+
+    # Versioning Information
+    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
+    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
+    updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
+    updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
+
+
+class Retriever(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    name = db.Column(db.String(50), nullable=False)
+    description = db.Column(db.Text, nullable=True)
+    catalog_id = db.Column(db.Integer, db.ForeignKey('catalog.id'), nullable=True)
+    type = db.Column(db.String(50), nullable=False, default="DEFAULT_RAG")
+    tuning = db.Column(db.Boolean, nullable=True, default=False)
+
+    # Meta Data
+    user_metadata = db.Column(JSONB, nullable=True)
+    system_metadata = db.Column(JSONB, nullable=True)
+    configuration = db.Column(JSONB, nullable=True)
+
+    # Versioning Information
+    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
+    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
+    updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
+    updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
+
+
 class Document(db.Model):
     id = db.Column(db.Integer, primary_key=True)
+    # tenant_id = db.Column(db.Integer, db.ForeignKey(Tenant.id), nullable=False)
+    catalog_id = db.Column(db.Integer, db.ForeignKey(Catalog.id), nullable=True)
     name = db.Column(db.String(100), nullable=False)
-    tenant_id = db.Column(db.Integer, db.ForeignKey(Tenant.id), nullable=False)
     valid_from = db.Column(db.DateTime, nullable=True)
     valid_to = db.Column(db.DateTime, nullable=True)
 
     # Versioning Information
     created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
-    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
+    created_by = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
     updated_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now(), onupdate=db.func.now())
     updated_by = db.Column(db.Integer, db.ForeignKey(User.id))
 
@@ -27,12 +86,16 @@ class DocumentVersion(db.Model):
     id = db.Column(db.Integer, primary_key=True)
     doc_id = db.Column(db.Integer, db.ForeignKey(Document.id), nullable=False)
     url = db.Column(db.String(200), nullable=True)
-    file_location = db.Column(db.String(255), nullable=True)
-    file_name = db.Column(db.String(200), nullable=True)
+    bucket_name = db.Column(db.String(255), nullable=True)
+    object_name = db.Column(db.String(200), nullable=True)
     file_type = db.Column(db.String(20), nullable=True)
+    file_size = db.Column(db.Float, nullable=True)
     language = db.Column(db.String(2), nullable=False)
     user_context = db.Column(db.Text, nullable=True)
     system_context = db.Column(db.Text, nullable=True)
+    user_metadata = db.Column(JSONB, nullable=True)
+    system_metadata = db.Column(JSONB, nullable=True)
+    catalog_properties = db.Column(JSONB, nullable=True)
 
     # Versioning Information
     created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.now())
@@ -52,12 +115,6 @@ class DocumentVersion(db.Model):
     def __repr__(self):
         return f"<DocumentVersion {self.document_language.document_id}.{self.document_language.language}>.{self.id}>"
 
-    def calc_file_location(self):
-        return f"{self.document.tenant_id}/{self.document.id}/{self.language}"
-
-    def calc_file_name(self):
-        return f"{self.id}.{self.file_type}"
-
 
 class Embedding(db.Model):
     __tablename__ = 'embeddings'

common/models/entitlements.py (new file, 110 lines)

from common.extensions import db


class BusinessEventLog(db.Model):
    __bind_key__ = 'public'
    __table_args__ = {'schema': 'public'}

    id = db.Column(db.Integer, primary_key=True)
    timestamp = db.Column(db.DateTime, nullable=False)
    event_type = db.Column(db.String(50), nullable=False)
    tenant_id = db.Column(db.Integer, nullable=False)
    trace_id = db.Column(db.String(50), nullable=False)
    span_id = db.Column(db.String(50))
    span_name = db.Column(db.String(50))
    parent_span_id = db.Column(db.String(50))
    document_version_id = db.Column(db.Integer)
    document_version_file_size = db.Column(db.Float)
    chat_session_id = db.Column(db.String(50))
    interaction_id = db.Column(db.Integer)
    environment = db.Column(db.String(20))
    llm_metrics_total_tokens = db.Column(db.Integer)
    llm_metrics_prompt_tokens = db.Column(db.Integer)
    llm_metrics_completion_tokens = db.Column(db.Integer)
    llm_metrics_total_time = db.Column(db.Float)
    llm_metrics_call_count = db.Column(db.Integer)
    llm_interaction_type = db.Column(db.String(20))
    message = db.Column(db.Text)
    license_usage_id = db.Column(db.Integer, db.ForeignKey('public.license_usage.id'), nullable=True)
    license_usage = db.relationship('LicenseUsage', backref='events')


class License(db.Model):
    __bind_key__ = 'public'
    __table_args__ = {'schema': 'public'}

    id = db.Column(db.Integer, primary_key=True)
    tenant_id = db.Column(db.Integer, db.ForeignKey('public.tenant.id'), nullable=False)
    tier_id = db.Column(db.Integer, db.ForeignKey('public.license_tier.id'), nullable=False)  # 'small', 'medium', 'custom'
    start_date = db.Column(db.Date, nullable=False)
    end_date = db.Column(db.Date, nullable=True)
    currency = db.Column(db.String(20), nullable=False)
    yearly_payment = db.Column(db.Boolean, nullable=False, default=False)
    basic_fee = db.Column(db.Float, nullable=False)
    max_storage_mb = db.Column(db.Integer, nullable=False)
    additional_storage_price = db.Column(db.Float, nullable=False)
    additional_storage_bucket = db.Column(db.Integer, nullable=False)
    included_embedding_mb = db.Column(db.Integer, nullable=False)
    additional_embedding_price = db.Column(db.Numeric(10, 4), nullable=False)
    additional_embedding_bucket = db.Column(db.Integer, nullable=False)
    included_interaction_tokens = db.Column(db.Integer, nullable=False)
    additional_interaction_token_price = db.Column(db.Numeric(10, 4), nullable=False)
    additional_interaction_bucket = db.Column(db.Integer, nullable=False)
    overage_embedding = db.Column(db.Float, nullable=False, default=0)
    overage_interaction = db.Column(db.Float, nullable=False, default=0)

    tenant = db.relationship('Tenant', back_populates='licenses')
    license_tier = db.relationship('LicenseTier', back_populates='licenses')
    usages = db.relationship('LicenseUsage', order_by='LicenseUsage.period_start_date', back_populates='license')


class LicenseTier(db.Model):
    __bind_key__ = 'public'
    __table_args__ = {'schema': 'public'}

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    version = db.Column(db.String(50), nullable=False)
    start_date = db.Column(db.Date, nullable=False)
    end_date = db.Column(db.Date, nullable=True)
    basic_fee_d = db.Column(db.Float, nullable=True)
    basic_fee_e = db.Column(db.Float, nullable=True)
    max_storage_mb = db.Column(db.Integer, nullable=False)
    additional_storage_price_d = db.Column(db.Numeric(10, 4), nullable=False)
    additional_storage_price_e = db.Column(db.Numeric(10, 4), nullable=False)
    additional_storage_bucket = db.Column(db.Integer, nullable=False)
    included_embedding_mb = db.Column(db.Integer, nullable=False)
    additional_embedding_price_d = db.Column(db.Numeric(10, 4), nullable=False)
    additional_embedding_price_e = db.Column(db.Numeric(10, 4), nullable=False)
    additional_embedding_bucket = db.Column(db.Integer, nullable=False)
    included_interaction_tokens = db.Column(db.Integer, nullable=False)
    additional_interaction_token_price_d = db.Column(db.Numeric(10, 4), nullable=False)
    additional_interaction_token_price_e = db.Column(db.Numeric(10, 4), nullable=False)
    additional_interaction_bucket = db.Column(db.Integer, nullable=False)
    standard_overage_embedding = db.Column(db.Float, nullable=False, default=0)
    standard_overage_interaction = db.Column(db.Float, nullable=False, default=0)

    licenses = db.relationship('License', back_populates='license_tier')


class LicenseUsage(db.Model):
    __bind_key__ = 'public'
    __table_args__ = {'schema': 'public'}

    id = db.Column(db.Integer, primary_key=True)
    license_id = db.Column(db.Integer, db.ForeignKey('public.license.id'), nullable=False)
    tenant_id = db.Column(db.Integer, db.ForeignKey('public.tenant.id'), nullable=False)
    storage_mb_used = db.Column(db.Float, default=0)
    embedding_mb_used = db.Column(db.Float, default=0)
    embedding_prompt_tokens_used = db.Column(db.Integer, default=0)
    embedding_completion_tokens_used = db.Column(db.Integer, default=0)
    embedding_total_tokens_used = db.Column(db.Integer, default=0)
    interaction_prompt_tokens_used = db.Column(db.Integer, default=0)
    interaction_completion_tokens_used = db.Column(db.Integer, default=0)
    interaction_total_tokens_used = db.Column(db.Integer, default=0)
    period_start_date = db.Column(db.Date, nullable=False)
    period_end_date = db.Column(db.Date, nullable=False)

    license = db.relationship('License', back_populates='usages')

@@ -1,8 +1,11 @@
+from datetime import date
+
 from common.extensions import db
 from flask_security import UserMixin, RoleMixin
 from sqlalchemy.dialects.postgresql import ARRAY
 import sqlalchemy as sa
-from sqlalchemy import CheckConstraint
+from common.models.entitlements import License


 class Tenant(db.Model):
@@ -21,6 +24,7 @@ class Tenant(db.Model):
     website = db.Column(db.String(255), nullable=True)
     timezone = db.Column(db.String(50), nullable=True, default='UTC')
     rag_context = db.Column(db.Text, nullable=True)
+    type = db.Column(db.String(20), nullable=True, server_default='Active')

     # language information
     default_language = db.Column(db.String(2), nullable=True)
@@ -30,37 +34,52 @@ class Tenant(db.Model):
     embedding_model = db.Column(db.String(50), nullable=True)
     llm_model = db.Column(db.String(50), nullable=True)

-    # Embedding variables
-    html_tags = db.Column(ARRAY(sa.String(10)), nullable=True, default=['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li'])
-    html_end_tags = db.Column(ARRAY(sa.String(10)), nullable=True, default=['p', 'li'])
-    html_included_elements = db.Column(ARRAY(sa.String(50)), nullable=True)
-    html_excluded_elements = db.Column(ARRAY(sa.String(50)), nullable=True)
-    min_chunk_size = db.Column(db.Integer, nullable=True, default=2000)
-    max_chunk_size = db.Column(db.Integer, nullable=True, default=3000)
-
-    # Embedding search variables
-    es_k = db.Column(db.Integer, nullable=True, default=5)
-    es_similarity_threshold = db.Column(db.Float, nullable=True, default=0.7)
-
-    # Chat variables
-    chat_RAG_temperature = db.Column(db.Float, nullable=True, default=0.3)
-    chat_no_RAG_temperature = db.Column(db.Float, nullable=True, default=0.5)
+    # # Embedding variables ==> To be removed once all migrations (dev + prod) have been done
+    # html_tags = db.Column(ARRAY(sa.String(10)), nullable=True, default=['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li'])
+    # html_end_tags = db.Column(ARRAY(sa.String(10)), nullable=True, default=['p', 'li'])
+    # html_included_elements = db.Column(ARRAY(sa.String(50)), nullable=True)
+    # html_excluded_elements = db.Column(ARRAY(sa.String(50)), nullable=True)
+    # html_excluded_classes = db.Column(ARRAY(sa.String(200)), nullable=True)
+    #
+    # min_chunk_size = db.Column(db.Integer, nullable=True, default=2000)
+    # max_chunk_size = db.Column(db.Integer, nullable=True, default=3000)
+    #
+    # # Embedding search variables
+    # es_k = db.Column(db.Integer, nullable=True, default=5)
+    # es_similarity_threshold = db.Column(db.Float, nullable=True, default=0.7)
+    #
+    # # Chat variables
+    # chat_RAG_temperature = db.Column(db.Float, nullable=True, default=0.3)
+    # chat_no_RAG_temperature = db.Column(db.Float, nullable=True, default=0.5)

     fallback_algorithms = db.Column(ARRAY(sa.String(50)), nullable=True)

     # Licensing Information
-    license_start_date = db.Column(db.Date, nullable=True)
-    license_end_date = db.Column(db.Date, nullable=True)
-    allowed_monthly_interactions = db.Column(db.Integer, nullable=True)
     encrypted_chat_api_key = db.Column(db.String(500), nullable=True)
+    encrypted_api_key = db.Column(db.String(500), nullable=True)

-    # Tuning enablers
-    embed_tuning = db.Column(db.Boolean, nullable=True, default=False)
-    rag_tuning = db.Column(db.Boolean, nullable=True, default=False)
+    # # Tuning enablers
+    # embed_tuning = db.Column(db.Boolean, nullable=True, default=False)
+    # rag_tuning = db.Column(db.Boolean, nullable=True, default=False)
+
+    # Entitlements
+    currency = db.Column(db.String(20), nullable=True)
+    usage_email = db.Column(db.String(255), nullable=True)
+    storage_dirty = db.Column(db.Boolean, nullable=True, default=False)

     # Relations
     users = db.relationship('User', backref='tenant')
     domains = db.relationship('TenantDomain', backref='tenant')
+    licenses = db.relationship('License', back_populates='tenant')
+    license_usages = db.relationship('LicenseUsage', backref='tenant')
+
+    @property
+    def current_license(self):
+        today = date.today()
+        return License.query.filter(
+            License.tenant_id == self.id,
+            License.start_date <= today,
+            (License.end_date.is_(None) | (License.end_date >= today))
+        ).order_by(License.start_date.desc()).first()

     def __repr__(self):
         return f"<Tenant {self.id}: {self.name}>"
@@ -72,26 +91,14 @@ class Tenant(db.Model):
             'website': self.website,
             'timezone': self.timezone,
             'rag_context': self.rag_context,
+            'type': self.type,
             'default_language': self.default_language,
             'allowed_languages': self.allowed_languages,
             'embedding_model': self.embedding_model,
             'llm_model': self.llm_model,
-            'html_tags': self.html_tags,
-            'html_end_tags': self.html_end_tags,
-            'html_included_elements': self.html_included_elements,
-            'html_excluded_elements': self.html_excluded_elements,
-            'min_chunk_size': self.min_chunk_size,
-            'max_chunk_size': self.max_chunk_size,
-            'es_k': self.es_k,
-            'es_similarity_threshold': self.es_similarity_threshold,
-            'chat_RAG_temperature': self.chat_RAG_temperature,
-            'chat_no_RAG_temperature': self.chat_no_RAG_temperature,
             'fallback_algorithms': self.fallback_algorithms,
-            'license_start_date': self.license_start_date,
-            'license_end_date': self.license_end_date,
-            'allowed_monthly_interactions': self.allowed_monthly_interactions,
-            'embed_tuning': self.embed_tuning,
-            'rag_tuning': self.rag_tuning,
+            'currency': self.currency,
+            'usage_email': self.usage_email,
         }
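The Tenant model now resolves its active license through a date-filtered query on the new License relationship instead of the old license_start_date/license_end_date columns. A minimal sketch of how a caller might use the new current_license property; the helper name and error handling are illustrative and not part of this change (the EveAINoLicenseForTenant exception is introduced later in this change set):

    # Illustrative helper, assumes a Flask app context.
    from common.models.user import Tenant
    from common.utils.eveai_exceptions import EveAINoLicenseForTenant

    def get_active_license_or_raise(tenant_id: int):
        tenant = Tenant.query.get_or_404(tenant_id)
        active_license = tenant.current_license  # newest License whose date range covers today, or None
        if active_license is None:
            raise EveAINoLicenseForTenant()
        return active_license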
246 common/utils/business_event.py Normal file
@@ -0,0 +1,246 @@
import os
import uuid
from contextlib import contextmanager
from datetime import datetime
from typing import Dict, Any, Optional
from datetime import datetime as dt, timezone as tz
from portkey_ai import Portkey, Config
import logging

from .business_event_context import BusinessEventContext
from common.models.entitlements import BusinessEventLog
from common.extensions import db


class BusinessEvent:
    # The BusinessEvent class itself is a context manager, but it doesn't use the @contextmanager decorator.
    # Instead, it defines __enter__ and __exit__ methods explicitly. This is because we're doing something a bit more
    # complex - we're interacting with the BusinessEventContext and the _business_event_stack.

    def __init__(self, event_type: str, tenant_id: int, **kwargs):
        self.event_type = event_type
        self.tenant_id = tenant_id
        self.trace_id = str(uuid.uuid4())
        self.span_id = None
        self.span_name = None
        self.parent_span_id = None
        self.document_version_id = kwargs.get('document_version_id')
        self.document_version_file_size = kwargs.get('document_version_file_size')
        self.chat_session_id = kwargs.get('chat_session_id')
        self.interaction_id = kwargs.get('interaction_id')
        self.environment = os.environ.get("FLASK_ENV", "development")
        self.span_counter = 0
        self.spans = []
        self.llm_metrics = {
            'total_tokens': 0,
            'prompt_tokens': 0,
            'completion_tokens': 0,
            'total_time': 0,
            'call_count': 0,
            'interaction_type': None
        }

    def update_attribute(self, attribute: str, value: any):
        if hasattr(self, attribute):
            setattr(self, attribute, value)
        else:
            raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{attribute}'")

    def update_llm_metrics(self, metrics: dict):
        self.llm_metrics['total_tokens'] += metrics['total_tokens']
        self.llm_metrics['prompt_tokens'] += metrics['prompt_tokens']
        self.llm_metrics['completion_tokens'] += metrics['completion_tokens']
        self.llm_metrics['total_time'] += metrics['time_elapsed']
        self.llm_metrics['call_count'] += 1
        self.llm_metrics['interaction_type'] = metrics['interaction_type']

    def reset_llm_metrics(self):
        self.llm_metrics['total_tokens'] = 0
        self.llm_metrics['prompt_tokens'] = 0
        self.llm_metrics['completion_tokens'] = 0
        self.llm_metrics['total_time'] = 0
        self.llm_metrics['call_count'] = 0
        self.llm_metrics['interaction_type'] = None

    @contextmanager
    def create_span(self, span_name: str):
        # The create_span method is designed to be used as a context manager. We want to perform some actions when
        # entering the span (like setting the span ID and name) and some actions when exiting the span (like removing
        # these temporary attributes). The @contextmanager decorator allows us to write this method in a way that
        # clearly separates the "entry" and "exit" logic, with the yield statement in between.

        parent_span_id = self.span_id
        self.span_counter += 1
        new_span_id = str(uuid.uuid4())

        # Save the current span info
        self.spans.append((self.span_id, self.span_name, self.parent_span_id))

        # Set the new span info
        self.span_id = new_span_id
        self.span_name = span_name
        self.parent_span_id = parent_span_id

        self.log(f"Starting span {span_name}")

        try:
            yield
        finally:
            if self.llm_metrics['call_count'] > 0:
                self.log_final_metrics()
                self.reset_llm_metrics()
            self.log(f"Ending span {span_name}")
            # Restore the previous span info
            if self.spans:
                self.span_id, self.span_name, self.parent_span_id = self.spans.pop()
            else:
                self.span_id = None
                self.span_name = None
                self.parent_span_id = None

    def log(self, message: str, level: str = 'info'):
        logger = logging.getLogger('business_events')
        log_data = {
            'event_type': self.event_type,
            'tenant_id': self.tenant_id,
            'trace_id': self.trace_id,
            'span_id': self.span_id,
            'span_name': self.span_name,
            'parent_span_id': self.parent_span_id,
            'document_version_id': self.document_version_id,
            'document_version_file_size': self.document_version_file_size,
            'chat_session_id': self.chat_session_id,
            'interaction_id': self.interaction_id,
            'environment': self.environment,
        }
        # log to Graylog
        getattr(logger, level)(message, extra=log_data)

        # Log to database
        event_log = BusinessEventLog(
            timestamp=dt.now(tz=tz.utc),
            event_type=self.event_type,
            tenant_id=self.tenant_id,
            trace_id=self.trace_id,
            span_id=self.span_id,
            span_name=self.span_name,
            parent_span_id=self.parent_span_id,
            document_version_id=self.document_version_id,
            document_version_file_size=self.document_version_file_size,
            chat_session_id=self.chat_session_id,
            interaction_id=self.interaction_id,
            environment=self.environment,
            message=message
        )
        db.session.add(event_log)
        db.session.commit()

    def log_llm_metrics(self, metrics: dict, level: str = 'info'):
        self.update_llm_metrics(metrics)
        message = "LLM Metrics"
        logger = logging.getLogger('business_events')
        log_data = {
            'event_type': self.event_type,
            'tenant_id': self.tenant_id,
            'trace_id': self.trace_id,
            'span_id': self.span_id,
            'span_name': self.span_name,
            'parent_span_id': self.parent_span_id,
            'document_version_id': self.document_version_id,
            'document_version_file_size': self.document_version_file_size,
            'chat_session_id': self.chat_session_id,
            'interaction_id': self.interaction_id,
            'environment': self.environment,
            'llm_metrics_total_tokens': metrics['total_tokens'],
            'llm_metrics_prompt_tokens': metrics['prompt_tokens'],
            'llm_metrics_completion_tokens': metrics['completion_tokens'],
            'llm_metrics_total_time': metrics['time_elapsed'],
            'llm_interaction_type': metrics['interaction_type'],
        }
        # log to Graylog
        getattr(logger, level)(message, extra=log_data)

        # Log to database
        event_log = BusinessEventLog(
            timestamp=dt.now(tz=tz.utc),
            event_type=self.event_type,
            tenant_id=self.tenant_id,
            trace_id=self.trace_id,
            span_id=self.span_id,
            span_name=self.span_name,
            parent_span_id=self.parent_span_id,
            document_version_id=self.document_version_id,
            document_version_file_size=self.document_version_file_size,
            chat_session_id=self.chat_session_id,
            interaction_id=self.interaction_id,
            environment=self.environment,
            llm_metrics_total_tokens=metrics['total_tokens'],
            llm_metrics_prompt_tokens=metrics['prompt_tokens'],
            llm_metrics_completion_tokens=metrics['completion_tokens'],
            llm_metrics_total_time=metrics['time_elapsed'],
            llm_interaction_type=metrics['interaction_type'],
            message=message
        )
        db.session.add(event_log)
        db.session.commit()

    def log_final_metrics(self, level: str = 'info'):
        logger = logging.getLogger('business_events')
        message = "Final LLM Metrics"
        log_data = {
            'event_type': self.event_type,
            'tenant_id': self.tenant_id,
            'trace_id': self.trace_id,
            'span_id': self.span_id,
            'span_name': self.span_name,
            'parent_span_id': self.parent_span_id,
            'document_version_id': self.document_version_id,
            'document_version_file_size': self.document_version_file_size,
            'chat_session_id': self.chat_session_id,
            'interaction_id': self.interaction_id,
            'environment': self.environment,
            'llm_metrics_total_tokens': self.llm_metrics['total_tokens'],
            'llm_metrics_prompt_tokens': self.llm_metrics['prompt_tokens'],
            'llm_metrics_completion_tokens': self.llm_metrics['completion_tokens'],
            'llm_metrics_total_time': self.llm_metrics['total_time'],
            'llm_metrics_call_count': self.llm_metrics['call_count'],
            'llm_interaction_type': self.llm_metrics['interaction_type'],
        }
        # log to Graylog
        getattr(logger, level)(message, extra=log_data)

        # Log to database
        event_log = BusinessEventLog(
            timestamp=dt.now(tz=tz.utc),
            event_type=self.event_type,
            tenant_id=self.tenant_id,
            trace_id=self.trace_id,
            span_id=self.span_id,
            span_name=self.span_name,
            parent_span_id=self.parent_span_id,
            document_version_id=self.document_version_id,
            document_version_file_size=self.document_version_file_size,
            chat_session_id=self.chat_session_id,
            interaction_id=self.interaction_id,
            environment=self.environment,
            llm_metrics_total_tokens=self.llm_metrics['total_tokens'],
            llm_metrics_prompt_tokens=self.llm_metrics['prompt_tokens'],
            llm_metrics_completion_tokens=self.llm_metrics['completion_tokens'],
            llm_metrics_total_time=self.llm_metrics['total_time'],
            llm_metrics_call_count=self.llm_metrics['call_count'],
            llm_interaction_type=self.llm_metrics['interaction_type'],
            message=message
        )
        db.session.add(event_log)
        db.session.commit()

    def __enter__(self):
        self.log(f'Starting Trace for {self.event_type}')
        return BusinessEventContext(self).__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.llm_metrics['call_count'] > 0:
            self.log_final_metrics()
            self.reset_llm_metrics()
        self.log(f'Ending Trace for {self.event_type}')
        return BusinessEventContext(self).__exit__(exc_type, exc_val, exc_tb)
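BusinessEvent acts as its own context manager, pushes itself onto the request-local stack through BusinessEventContext, and nests child spans inside the trace via create_span. A minimal usage sketch, assuming a Flask app context and an existing tenant id; the event type, span names and metric values are illustrative only:

    # Illustrative only: event_type, span names and metric values are made up.
    from common.utils.business_event import BusinessEvent

    def embed_document(tenant_id: int, document_version_id: int):
        with BusinessEvent('embed_document', tenant_id,
                           document_version_id=document_version_id) as event:
            with event.create_span('load_document'):
                event.log('Downloading document from object storage')
                # ... fetch the file ...
            with event.create_span('create_embeddings'):
                # LLM calls report their usage back onto the active event
                event.log_llm_metrics({
                    'total_tokens': 1200, 'prompt_tokens': 1000,
                    'completion_tokens': 200, 'time_elapsed': 1.8,
                    'interaction_type': 'embedding',
                })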
25 common/utils/business_event_context.py Normal file
@@ -0,0 +1,25 @@
from werkzeug.local import LocalProxy, LocalStack

_business_event_stack = LocalStack()


def _get_current_event():
    top = _business_event_stack.top
    if top is None:
        raise RuntimeError("No business event context found. Are you sure you're in a business event?")
    return top


current_event = LocalProxy(_get_current_event)


class BusinessEventContext:
    def __init__(self, event):
        self.event = event

    def __enter__(self):
        _business_event_stack.push(self.event)
        return self.event

    def __exit__(self, exc_type, exc_val, exc_tb):
        _business_event_stack.pop()
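The LocalStack/LocalProxy pair lets code deep inside a worker task log against whichever BusinessEvent is currently active, without passing the event object around explicitly. A short sketch under that assumption; the helper function is illustrative:

    # Illustrative helper: logs against whatever BusinessEvent is on the stack.
    from common.utils.business_event_context import current_event

    def chunk_text(text: str, size: int = 2000):
        current_event.log(f'Chunking text of length {len(text)}')
        return [text[i:i + size] for i in range(0, len(text), size)]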
@@ -1,14 +1,16 @@
 from celery import Celery
 from kombu import Queue
 from werkzeug.local import LocalProxy
+from redbeat import RedBeatScheduler

 celery_app = Celery()


-def init_celery(celery, app):
+def init_celery(celery, app, is_beat=False):
     celery_app.main = app.name
     app.logger.debug(f'CELERY_BROKER_URL: {app.config["CELERY_BROKER_URL"]}')
     app.logger.debug(f'CELERY_RESULT_BACKEND: {app.config["CELERY_RESULT_BACKEND"]}')
+
     celery_config = {
         'broker_url': app.config.get('CELERY_BROKER_URL', 'redis://localhost:6379/0'),
         'result_backend': app.config.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379/0'),
@@ -17,19 +19,40 @@ def init_celery(celery, app):
         'accept_content': app.config.get('CELERY_ACCEPT_CONTENT', ['json']),
         'timezone': app.config.get('CELERY_TIMEZONE', 'UTC'),
         'enable_utc': app.config.get('CELERY_ENABLE_UTC', True),
-        'task_routes': {'eveai_worker.tasks.create_embeddings': {'queue': 'embeddings',
-                                                                 'routing_key': 'embeddings.create_embeddings'}},
     }
+
+    if is_beat:
+        # Add configurations specific to Beat scheduler
+        celery_config['beat_scheduler'] = 'redbeat.RedBeatScheduler'
+        celery_config['redbeat_lock_key'] = 'redbeat::lock'
+        celery_config['beat_max_loop_interval'] = 10  # Adjust as needed
+
     celery_app.conf.update(**celery_config)

-    # Setting up Celery task queues
-    celery_app.conf.task_queues = (
-        Queue('default', routing_key='task.#'),
-        Queue('embeddings', routing_key='embeddings.#', queue_arguments={'x-max-priority': 10}),
-        Queue('llm_interactions', routing_key='llm_interactions.#', queue_arguments={'x-max-priority': 5}),
-    )
+    # Task queues for workers only
+    if not is_beat:
+        celery_app.conf.task_queues = (
+            Queue('default', routing_key='task.#'),
+            Queue('embeddings', routing_key='embeddings.#', queue_arguments={'x-max-priority': 10}),
+            Queue('llm_interactions', routing_key='llm_interactions.#', queue_arguments={'x-max-priority': 5}),
+            Queue('entitlements', routing_key='entitlements.#', queue_arguments={'x-max-priority': 10}),
+        )
+        celery_app.conf.task_routes = {
+            'eveai_workers.*': {  # All tasks from eveai_workers module
+                'queue': 'embeddings',
+                'routing_key': 'embeddings.#',
+            },
+            'eveai_chat_workers.*': {  # All tasks from eveai_chat_workers module
+                'queue': 'llm_interactions',
+                'routing_key': 'llm_interactions.#',
+            },
+            'eveai_entitlements.*': {  # All tasks from eveai_entitlements module
+                'queue': 'entitlements',
+                'routing_key': 'entitlements.#',
+            }
+        }

-    # Ensuring tasks execute with Flask application context
+    # Ensure tasks execute with Flask context
     class ContextTask(celery.Task):
         def __call__(self, *args, **kwargs):
             with app.app_context():
@@ -37,6 +60,39 @@ def init_celery(celery, app):

     celery.Task = ContextTask

+# Original init_celery before updating for beat
+# def init_celery(celery, app):
+#     celery_app.main = app.name
+#     app.logger.debug(f'CELERY_BROKER_URL: {app.config["CELERY_BROKER_URL"]}')
+#     app.logger.debug(f'CELERY_RESULT_BACKEND: {app.config["CELERY_RESULT_BACKEND"]}')
+#     celery_config = {
+#         'broker_url': app.config.get('CELERY_BROKER_URL', 'redis://localhost:6379/0'),
+#         'result_backend': app.config.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379/0'),
+#         'task_serializer': app.config.get('CELERY_TASK_SERIALIZER', 'json'),
+#         'result_serializer': app.config.get('CELERY_RESULT_SERIALIZER', 'json'),
+#         'accept_content': app.config.get('CELERY_ACCEPT_CONTENT', ['json']),
+#         'timezone': app.config.get('CELERY_TIMEZONE', 'UTC'),
+#         'enable_utc': app.config.get('CELERY_ENABLE_UTC', True),
+#         'task_routes': {'eveai_worker.tasks.create_embeddings': {'queue': 'embeddings',
+#                                                                  'routing_key': 'embeddings.create_embeddings'}},
+#     }
+#     celery_app.conf.update(**celery_config)
+#
+#     # Setting up Celery task queues
+#     celery_app.conf.task_queues = (
+#         Queue('default', routing_key='task.#'),
+#         Queue('embeddings', routing_key='embeddings.#', queue_arguments={'x-max-priority': 10}),
+#         Queue('llm_interactions', routing_key='llm_interactions.#', queue_arguments={'x-max-priority': 5}),
+#     )
+#
+#     # Ensuring tasks execute with Flask application context
+#     class ContextTask(celery.Task):
+#         def __call__(self, *args, **kwargs):
+#             with app.app_context():
+#                 return self.run(*args, **kwargs)
+#
+#     celery.Task = ContextTask


 def make_celery(app_name, config):
     return celery_app
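With the new is_beat flag, the same init_celery helper can configure either a worker process (queues and task routes) or a RedBeat scheduler process (beat settings only). A sketch of how the two entry points might call it; the factory function names are assumptions, not part of this change:

    # Assumed factory names; only the is_beat flag differs between the two processes.
    from common.utils.celery_utils import celery_app, init_celery

    def create_worker_app(flask_app):
        init_celery(celery_app, flask_app)                # queues + task routes
        return celery_app

    def create_beat_app(flask_app):
        init_celery(celery_app, flask_app, is_beat=True)  # RedBeat scheduler settings only
        return celery_app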
@@ -23,6 +23,14 @@ def cors_after_request(response, prefix):
     current_app.logger.debug(f'request.args: {request.args}')
     current_app.logger.debug(f'request is json?: {request.is_json}')

+    # Exclude health checks from checks
+    if request.path.startswith('/healthz') or request.path.startswith('/_healthz'):
+        current_app.logger.debug('Skipping CORS headers for health checks')
+        response.headers.add('Access-Control-Allow-Origin', '*')
+        response.headers.add('Access-Control-Allow-Headers', '*')
+        response.headers.add('Access-Control-Allow-Methods', '*')
+        return response
+
     tenant_id = None
     allowed_origins = []
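The early return keeps health-probe requests from being rejected by the tenant-origin checks that follow. A sketch of how the hook is typically wired up; the registration pattern and function name are assumptions, not shown in this change:

    # Sketch only: assumes cors_after_request is registered with a fixed prefix.
    from functools import partial

    def register_cors(app, prefix=''):
        app.after_request(partial(cors_after_request, prefix=prefix))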
|||||||
359
common/utils/document_utils.py
Normal file
359
common/utils/document_utils.py
Normal file
@@ -0,0 +1,359 @@
|
|||||||
|
from datetime import datetime as dt, timezone as tz
|
||||||
|
|
||||||
|
from sqlalchemy import desc
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
from werkzeug.utils import secure_filename
|
||||||
|
from common.models.document import Document, DocumentVersion
|
||||||
|
from common.extensions import db, minio_client
|
||||||
|
from common.utils.celery_utils import current_celery
|
||||||
|
from flask import current_app
|
||||||
|
from flask_security import current_user
|
||||||
|
import requests
|
||||||
|
from urllib.parse import urlparse, unquote
|
||||||
|
import os
|
||||||
|
from .eveai_exceptions import EveAIInvalidLanguageException, EveAIDoubleURLException, EveAIUnsupportedFileType
|
||||||
|
from ..models.user import Tenant
|
||||||
|
|
||||||
|
|
||||||
|
def create_document_stack(api_input, file, filename, extension, tenant_id):
|
||||||
|
# Create the Document
|
||||||
|
catalog_id = int(api_input.get('catalog_id'))
|
||||||
|
new_doc = create_document(api_input, filename, catalog_id)
|
||||||
|
db.session.add(new_doc)
|
||||||
|
|
||||||
|
# Create the DocumentVersion
|
||||||
|
new_doc_vers = create_version_for_document(new_doc, tenant_id,
|
||||||
|
api_input.get('url', ''),
|
||||||
|
api_input.get('language', 'en'),
|
||||||
|
api_input.get('user_context', ''),
|
||||||
|
api_input.get('user_metadata'),
|
||||||
|
api_input.get('catalog_properties')
|
||||||
|
)
|
||||||
|
db.session.add(new_doc_vers)
|
||||||
|
|
||||||
|
try:
|
||||||
|
db.session.commit()
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
current_app.logger.error(f'Error adding document for tenant {tenant_id}: {e}')
|
||||||
|
db.session.rollback()
|
||||||
|
raise
|
||||||
|
|
||||||
|
current_app.logger.info(f'Document added successfully for tenant {tenant_id}, '
|
||||||
|
f'Document Version {new_doc.id}')
|
||||||
|
|
||||||
|
# Upload file to storage
|
||||||
|
upload_file_for_version(new_doc_vers, file, extension, tenant_id)
|
||||||
|
|
||||||
|
return new_doc, new_doc_vers
|
||||||
|
|
||||||
|
|
||||||
|
def create_document(form, filename, catalog_id):
|
||||||
|
new_doc = Document()
|
||||||
|
if form['name'] == '':
|
||||||
|
new_doc.name = filename.rsplit('.', 1)[0]
|
||||||
|
else:
|
||||||
|
new_doc.name = form['name']
|
||||||
|
|
||||||
|
if form['valid_from'] and form['valid_from'] != '':
|
||||||
|
new_doc.valid_from = form['valid_from']
|
||||||
|
else:
|
||||||
|
new_doc.valid_from = dt.now(tz.utc)
|
||||||
|
new_doc.catalog_id = catalog_id
|
||||||
|
set_logging_information(new_doc, dt.now(tz.utc))
|
||||||
|
|
||||||
|
return new_doc
|
||||||
|
|
||||||
|
|
||||||
|
def create_version_for_document(document, tenant_id, url, language, user_context, user_metadata, catalog_properties):
|
||||||
|
new_doc_vers = DocumentVersion()
|
||||||
|
if url != '':
|
||||||
|
new_doc_vers.url = url
|
||||||
|
|
||||||
|
if language == '':
|
||||||
|
raise EveAIInvalidLanguageException('Language is required for document creation!')
|
||||||
|
else:
|
||||||
|
new_doc_vers.language = language
|
||||||
|
|
||||||
|
if user_context != '':
|
||||||
|
new_doc_vers.user_context = user_context
|
||||||
|
|
||||||
|
if user_metadata != '' and user_metadata is not None:
|
||||||
|
new_doc_vers.user_metadata = user_metadata
|
||||||
|
|
||||||
|
if catalog_properties != '' and catalog_properties is not None:
|
||||||
|
new_doc_vers.catalog_properties = catalog_properties
|
||||||
|
|
||||||
|
new_doc_vers.document = document
|
||||||
|
|
||||||
|
set_logging_information(new_doc_vers, dt.now(tz.utc))
|
||||||
|
|
||||||
|
mark_tenant_storage_dirty(tenant_id)
|
||||||
|
|
||||||
|
return new_doc_vers
|
||||||
|
|
||||||
|
|
||||||
|
def upload_file_for_version(doc_vers, file, extension, tenant_id):
|
||||||
|
doc_vers.file_type = extension
|
||||||
|
|
||||||
|
# Normally, the tenant bucket should exist. But let's be on the safe side if a migration took place.
|
||||||
|
minio_client.create_tenant_bucket(tenant_id)
|
||||||
|
|
||||||
|
try:
|
||||||
|
bn, on, size = minio_client.upload_document_file(
|
||||||
|
tenant_id,
|
||||||
|
doc_vers.doc_id,
|
||||||
|
doc_vers.language,
|
||||||
|
doc_vers.id,
|
||||||
|
f"{doc_vers.id}.{extension}",
|
||||||
|
file
|
||||||
|
)
|
||||||
|
doc_vers.bucket_name = bn
|
||||||
|
doc_vers.object_name = on
|
||||||
|
doc_vers.file_size = size / 1048576 # Convert bytes to MB
|
||||||
|
|
||||||
|
db.session.commit()
|
||||||
|
current_app.logger.info(f'Successfully saved document to MinIO for tenant {tenant_id} for '
|
||||||
|
f'document version {doc_vers.id} while uploading file.')
|
||||||
|
except Exception as e:
|
||||||
|
db.session.rollback()
|
||||||
|
current_app.logger.error(
|
||||||
|
f'Error saving document to MinIO for tenant {tenant_id}: {e}')
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
def set_logging_information(obj, timestamp):
|
||||||
|
obj.created_at = timestamp
|
||||||
|
obj.updated_at = timestamp
|
||||||
|
|
||||||
|
user_id = get_current_user_id()
|
||||||
|
if user_id:
|
||||||
|
obj.created_by = user_id
|
||||||
|
obj.updated_by = user_id
|
||||||
|
|
||||||
|
|
||||||
|
def update_logging_information(obj, timestamp):
|
||||||
|
obj.updated_at = timestamp
|
||||||
|
|
||||||
|
user_id = get_current_user_id()
|
||||||
|
if user_id:
|
||||||
|
obj.updated_by = user_id
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_user_id():
|
||||||
|
try:
|
||||||
|
if current_user and current_user.is_authenticated:
|
||||||
|
return current_user.id
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
except Exception:
|
||||||
|
# This will catch any errors if current_user is not available (e.g., in API context)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_extension_from_content_type(content_type):
|
||||||
|
content_type_map = {
|
||||||
|
'text/html': 'html',
|
||||||
|
'application/pdf': 'pdf',
|
||||||
|
'text/plain': 'txt',
|
||||||
|
'application/msword': 'doc',
|
||||||
|
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'docx',
|
||||||
|
# Add more mappings as needed
|
||||||
|
}
|
||||||
|
return content_type_map.get(content_type, 'html') # Default to 'html' if unknown
|
||||||
|
|
||||||
|
|
||||||
|
def process_url(url, tenant_id):
|
||||||
|
response = requests.head(url, allow_redirects=True)
|
||||||
|
content_type = response.headers.get('Content-Type', '').split(';')[0]
|
||||||
|
|
||||||
|
# Determine file extension based on Content-Type
|
||||||
|
extension = get_extension_from_content_type(content_type)
|
||||||
|
|
||||||
|
# Generate filename
|
||||||
|
parsed_url = urlparse(url)
|
||||||
|
path = unquote(parsed_url.path)
|
||||||
|
filename = os.path.basename(path)
|
||||||
|
|
||||||
|
if not filename or '.' not in filename:
|
||||||
|
# Use the last part of the path or a default name
|
||||||
|
filename = path.strip('/').split('/')[-1] or 'document'
|
||||||
|
filename = secure_filename(f"{filename}.{extension}")
|
||||||
|
else:
|
||||||
|
filename = secure_filename(filename)
|
||||||
|
|
||||||
|
# Check if a document with this URL already exists
|
||||||
|
existing_doc = DocumentVersion.query.filter_by(url=url).first()
|
||||||
|
if existing_doc:
|
||||||
|
raise EveAIDoubleURLException
|
||||||
|
|
||||||
|
# Download the content
|
||||||
|
response = requests.get(url)
|
||||||
|
response.raise_for_status()
|
||||||
|
file_content = response.content
|
||||||
|
|
||||||
|
return file_content, filename, extension
|
||||||
|
|
||||||
|
|
||||||
|
def process_multiple_urls(urls, tenant_id, api_input):
|
||||||
|
results = []
|
||||||
|
for url in urls:
|
||||||
|
try:
|
||||||
|
file_content, filename, extension = process_url(url, tenant_id)
|
||||||
|
|
||||||
|
url_input = api_input.copy()
|
||||||
|
url_input.update({
|
||||||
|
'url': url,
|
||||||
|
'name': f"{api_input['name']}-{filename}" if api_input['name'] else filename
|
||||||
|
})
|
||||||
|
|
||||||
|
new_doc, new_doc_vers = create_document_stack(url_input, file_content, filename, extension, tenant_id)
|
||||||
|
task_id = start_embedding_task(tenant_id, new_doc_vers.id)
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'url': url,
|
||||||
|
'document_id': new_doc.id,
|
||||||
|
'document_version_id': new_doc_vers.id,
|
||||||
|
'task_id': task_id,
|
||||||
|
'status': 'success'
|
||||||
|
})
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f"Error processing URL {url}: {str(e)}")
|
||||||
|
results.append({
|
||||||
|
'url': url,
|
||||||
|
'status': 'error',
|
||||||
|
'message': str(e)
|
||||||
|
})
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def start_embedding_task(tenant_id, doc_vers_id):
|
||||||
|
task = current_celery.send_task('create_embeddings',
|
||||||
|
args=[tenant_id, doc_vers_id,],
|
||||||
|
queue='embeddings')
|
||||||
|
current_app.logger.info(f'Embedding creation started for tenant {tenant_id}, '
|
||||||
|
f'Document Version {doc_vers_id}. '
|
||||||
|
f'Embedding creation task: {task.id}')
|
||||||
|
return task.id
|
||||||
|
|
||||||
|
|
||||||
|
def validate_file_type(extension):
|
||||||
|
current_app.logger.debug(f'Validating file type {extension}')
|
||||||
|
current_app.logger.debug(f'Supported file types: {current_app.config["SUPPORTED_FILE_TYPES"]}')
|
||||||
|
if extension not in current_app.config['SUPPORTED_FILE_TYPES']:
|
||||||
|
raise EveAIUnsupportedFileType(f"Filetype {extension} is currently not supported. "
|
||||||
|
f"Supported filetypes: {', '.join(current_app.config['SUPPORTED_FILE_TYPES'])}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_filename_from_url(url):
|
||||||
|
parsed_url = urlparse(url)
|
||||||
|
path_parts = parsed_url.path.split('/')
|
||||||
|
filename = path_parts[-1]
|
||||||
|
if filename == '':
|
||||||
|
filename = 'index'
|
||||||
|
if not filename.endswith('.html'):
|
||||||
|
filename += '.html'
|
||||||
|
return filename
|
||||||
|
|
||||||
|
|
||||||
|
def get_documents_list(page, per_page):
|
||||||
|
query = Document.query.order_by(desc(Document.created_at))
|
||||||
|
pagination = query.paginate(page=page, per_page=per_page, error_out=False)
|
||||||
|
return pagination
|
||||||
|
|
||||||
|
|
||||||
|
def edit_document(document_id, name, valid_from, valid_to):
|
||||||
|
doc = Document.query.get_or_404(document_id)
|
||||||
|
doc.name = name
|
||||||
|
doc.valid_from = valid_from
|
||||||
|
doc.valid_to = valid_to
|
||||||
|
update_logging_information(doc, dt.now(tz.utc))
|
||||||
|
|
||||||
|
try:
|
||||||
|
db.session.add(doc)
|
||||||
|
db.session.commit()
|
||||||
|
return doc, None
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
return None, str(e)
|
||||||
|
|
||||||
|
|
||||||
|
def edit_document_version(version_id, user_context, catalog_properties):
|
||||||
|
doc_vers = DocumentVersion.query.get_or_404(version_id)
|
||||||
|
doc_vers.user_context = user_context
|
||||||
|
doc_vers.catalog_properties = catalog_properties
|
||||||
|
update_logging_information(doc_vers, dt.now(tz.utc))
|
||||||
|
|
||||||
|
try:
|
||||||
|
db.session.add(doc_vers)
|
||||||
|
db.session.commit()
|
||||||
|
return doc_vers, None
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
return None, str(e)
|
||||||
|
|
||||||
|
|
||||||
|
def refresh_document_with_info(doc_id, tenant_id, api_input):
|
||||||
|
doc = Document.query.get_or_404(doc_id)
|
||||||
|
old_doc_vers = DocumentVersion.query.filter_by(doc_id=doc_id).order_by(desc(DocumentVersion.id)).first()
|
||||||
|
|
||||||
|
if not old_doc_vers.url:
|
||||||
|
return None, "This document has no URL. Only documents with a URL can be refreshed."
|
||||||
|
|
||||||
|
new_doc_vers = create_version_for_document(
|
||||||
|
doc, tenant_id,
|
||||||
|
old_doc_vers.url,
|
||||||
|
api_input.get('language', old_doc_vers.language),
|
||||||
|
api_input.get('user_context', old_doc_vers.user_context),
|
||||||
|
api_input.get('user_metadata', old_doc_vers.user_metadata),
|
||||||
|
api_input.get('catalog_properties', old_doc_vers.catalog_properties),
|
||||||
|
)
|
||||||
|
|
||||||
|
set_logging_information(new_doc_vers, dt.now(tz.utc))
|
||||||
|
|
||||||
|
try:
|
||||||
|
db.session.add(new_doc_vers)
|
||||||
|
db.session.commit()
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
return None, str(e)
|
||||||
|
|
||||||
|
response = requests.head(old_doc_vers.url, allow_redirects=True)
|
||||||
|
content_type = response.headers.get('Content-Type', '').split(';')[0]
|
||||||
|
extension = get_extension_from_content_type(content_type)
|
||||||
|
|
||||||
|
response = requests.get(old_doc_vers.url)
|
||||||
|
response.raise_for_status()
|
||||||
|
file_content = response.content
|
||||||
|
|
||||||
|
upload_file_for_version(new_doc_vers, file_content, extension, tenant_id)
|
||||||
|
|
||||||
|
task = current_celery.send_task('create_embeddings', args=[tenant_id, new_doc_vers.id,], queue='embeddings')
|
||||||
|
current_app.logger.info(f'Embedding creation started for document {doc_id} on version {new_doc_vers.id} '
|
||||||
|
f'with task id: {task.id}.')
|
||||||
|
|
||||||
|
return new_doc_vers, task.id
|
||||||
|
|
||||||
|
|
||||||
|
# Update the existing refresh_document function to use the new refresh_document_with_info
|
||||||
|
def refresh_document(doc_id, tenant_id):
|
||||||
|
current_app.logger.info(f'Refreshing document {doc_id}')
|
||||||
|
doc = Document.query.get_or_404(doc_id)
|
||||||
|
old_doc_vers = DocumentVersion.query.filter_by(doc_id=doc_id).order_by(desc(DocumentVersion.id)).first()
|
||||||
|
|
||||||
|
api_input = {
|
||||||
|
'language': old_doc_vers.language,
|
||||||
|
'user_context': old_doc_vers.user_context,
|
||||||
|
'user_metadata': old_doc_vers.user_metadata,
|
||||||
|
'catalog_properties': old_doc_vers.catalog_properties,
|
||||||
|
}
|
||||||
|
|
||||||
|
return refresh_document_with_info(doc_id, tenant_id, api_input)
|
||||||
|
|
||||||
|
|
||||||
|
# Function triggered when a document_version is created or updated
|
||||||
|
def mark_tenant_storage_dirty(tenant_id):
|
||||||
|
tenant = db.session.query(Tenant).filter_by(id=int(tenant_id)).first()
|
||||||
|
tenant.storage_dirty = True
|
||||||
|
db.session.commit()
|
||||||
|
|
||||||
|
|
||||||
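create_document_stack ties the pieces together: it creates the Document and DocumentVersion rows, uploads the file to the tenant bucket, and leaves it to the caller to queue the embedding task. A condensed sketch of a typical caller, assuming a Flask request context; the handler name is illustrative and the api_input keys mirror the ones used above:

    # Illustrative handler around the helpers defined in common/utils/document_utils.py.
    from common.utils.document_utils import (
        create_document_stack, start_embedding_task, validate_file_type,
    )

    def handle_upload(api_input: dict, file_bytes: bytes, filename: str, tenant_id: int):
        extension = filename.rsplit('.', 1)[-1].lower()
        validate_file_type(extension)  # raises EveAIUnsupportedFileType if not allowed
        doc, doc_vers = create_document_stack(api_input, file_bytes, filename, extension, tenant_id)
        task_id = start_embedding_task(tenant_id, doc_vers.id)
        return {'document_id': doc.id, 'document_version_id': doc_vers.id, 'task_id': task_id}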
43 common/utils/eveai_exceptions.py Normal file
@@ -0,0 +1,43 @@
class EveAIException(Exception):
    """Base exception class for EveAI API"""

    def __init__(self, message, status_code=400, payload=None):
        super().__init__()
        self.message = message
        self.status_code = status_code
        self.payload = payload

    def to_dict(self):
        rv = dict(self.payload or ())
        rv['message'] = self.message
        return rv


class EveAIInvalidLanguageException(EveAIException):
    """Raised when an invalid language is provided"""

    def __init__(self, message="Language is required", status_code=400, payload=None):
        super().__init__(message, status_code, payload)


class EveAIDoubleURLException(EveAIException):
    """Raised when an existing url is provided"""

    def __init__(self, message="URL already exists", status_code=400, payload=None):
        super().__init__(message, status_code, payload)


class EveAIUnsupportedFileType(EveAIException):
    """Raised when an invalid file type is provided"""

    def __init__(self, message="Filetype is not supported", status_code=400, payload=None):
        super().__init__(message, status_code, payload)


class EveAINoLicenseForTenant(EveAIException):
    """Raised when no active license for a tenant is provided"""

    def __init__(self, message="No license for tenant found", status_code=400, payload=None):
        super().__init__(message, status_code, payload)
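Because every exception carries a message, a status code and an optional payload, a single Flask error handler can turn any of them into a JSON response. A minimal sketch, assuming the handler is registered on the app or an API blueprint:

    # Minimal sketch: converts any EveAIException subclass into a JSON error response.
    from flask import jsonify
    from common.utils.eveai_exceptions import EveAIException

    def register_error_handlers(app):
        @app.errorhandler(EveAIException)
        def handle_eveai_exception(error):
            response = jsonify(error.to_dict())
            response.status_code = error.status_code
            return response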
@@ -50,13 +50,11 @@ class MinioClient:
             self.client.put_object(
                 bucket_name, object_name, io.BytesIO(file_data), len(file_data)
             )
-            return True
+            return bucket_name, object_name, len(file_data)
         except S3Error as err:
             raise Exception(f"Error occurred while uploading file: {err}")

-    def download_document_file(self, tenant_id, document_id, language, version_id, filename):
-        bucket_name = self.generate_bucket_name(tenant_id)
-        object_name = self.generate_object_name(document_id, language, version_id, filename)
+    def download_document_file(self, tenant_id, bucket_name, object_name):
         try:
             response = self.client.get_object(bucket_name, object_name)
             return response.read()
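upload_document_file now returns the bucket name, object name and size so callers can persist them on the DocumentVersion, and download_document_file takes those stored names directly instead of re-deriving them. A short sketch under those assumptions; the wrapper function is illustrative:

    # Sketch: store the coordinates returned by upload, reuse them for download.
    from common.extensions import db, minio_client

    def store_and_fetch(doc_vers, tenant_id, extension, file_data):
        bn, on, size = minio_client.upload_document_file(
            tenant_id, doc_vers.doc_id, doc_vers.language, doc_vers.id,
            f"{doc_vers.id}.{extension}", file_data,
        )
        doc_vers.bucket_name, doc_vers.object_name = bn, on
        doc_vers.file_size = size / 1048576  # bytes to MB, as in upload_file_for_version
        db.session.commit()
        return minio_client.download_document_file(tenant_id, doc_vers.bucket_name, doc_vers.object_name)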
|||||||
@@ -5,14 +5,19 @@ from flask import current_app
|
|||||||
from langchain_openai import OpenAIEmbeddings, ChatOpenAI
|
from langchain_openai import OpenAIEmbeddings, ChatOpenAI
|
||||||
from langchain_anthropic import ChatAnthropic
|
from langchain_anthropic import ChatAnthropic
|
||||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||||
from langchain.prompts import ChatPromptTemplate
|
from typing import List, Any, Iterator
|
||||||
import ast
|
from collections.abc import MutableMapping
|
||||||
from typing import List
|
|
||||||
from openai import OpenAI
|
from openai import OpenAI
|
||||||
# from groq import Groq
|
|
||||||
from portkey_ai import createHeaders, PORTKEY_GATEWAY_URL
|
from portkey_ai import createHeaders, PORTKEY_GATEWAY_URL
|
||||||
|
from portkey_ai.langchain.portkey_langchain_callback_handler import LangchainCallbackHandler
|
||||||
|
|
||||||
from common.models.document import EmbeddingSmallOpenAI, EmbeddingLargeOpenAI
|
from common.langchain.llm_metrics_handler import LLMMetricsHandler
|
||||||
|
from common.langchain.tracked_openai_embeddings import TrackedOpenAIEmbeddings
|
||||||
|
from common.langchain.tracked_transcribe import tracked_transcribe
|
||||||
|
from common.models.document import EmbeddingSmallOpenAI, EmbeddingLargeOpenAI, Catalog
|
||||||
|
from common.models.user import Tenant
|
||||||
|
from config.model_config import MODEL_CONFIG
|
||||||
|
from common.utils.business_event_context import current_event
|
||||||
|
|
||||||
|
|
||||||
class CitedAnswer(BaseModel):
|
class CitedAnswer(BaseModel):
|
||||||
@@ -36,166 +41,205 @@ def set_language_prompt_template(cls, language_prompt):
|
|||||||
cls.__doc__ = language_prompt
|
cls.__doc__ = language_prompt
|
||||||
|
|
||||||
|
|
||||||
def select_model_variables(tenant):
|
class ModelVariables(MutableMapping):
|
||||||
embedding_provider = tenant.embedding_model.rsplit('.', 1)[0]
|
def __init__(self, tenant: Tenant, catalog_id=None):
|
||||||
embedding_model = tenant.embedding_model.rsplit('.', 1)[1]
|
self.tenant = tenant
|
||||||
|
self.catalog_id = catalog_id
|
||||||
|
self._variables = self._initialize_variables()
|
||||||
|
self._embedding_model = None
|
||||||
|
self._llm = None
|
||||||
|
self._llm_no_rag = None
|
||||||
|
self._transcription_client = None
|
||||||
|
self._prompt_templates = {}
|
||||||
|
self._embedding_db_model = None
|
||||||
|
self.llm_metrics_handler = LLMMetricsHandler()
|
||||||
|
self._transcription_client = None
|
||||||
|
|
||||||
llm_provider = tenant.llm_model.rsplit('.', 1)[0]
|
def _initialize_variables(self):
|
||||||
llm_model = tenant.llm_model.rsplit('.', 1)[1]
|
variables = {}
|
||||||
|
|
||||||
# Set model variables
|
# Get the Catalog if catalog_id is passed
|
||||||
model_variables = {}
|
if self.catalog_id:
|
||||||
if tenant.es_k:
|
catalog = Catalog.query.get_or_404(self.catalog_id)
|
||||||
model_variables['k'] = tenant.es_k
|
|
||||||
else:
|
|
||||||
model_variables['k'] = 5
|
|
||||||
|
|
||||||
if tenant.es_similarity_threshold:
|
# We initialize the variables that are available knowing the tenant.
|
||||||
model_variables['similarity_threshold'] = tenant.es_similarity_threshold
|
variables['embed_tuning'] = catalog.embed_tuning or False
|
||||||
else:
|
|
||||||
model_variables['similarity_threshold'] = 0.7
|
|
||||||
|
|
||||||
if tenant.chat_RAG_temperature:
|
# Set HTML Chunking Variables
|
||||||
model_variables['RAG_temperature'] = tenant.chat_RAG_temperature
|
variables['html_tags'] = catalog.html_tags
|
||||||
else:
|
variables['html_end_tags'] = catalog.html_end_tags
|
||||||
model_variables['RAG_temperature'] = 0.3
|
variables['html_included_elements'] = catalog.html_included_elements
|
||||||
|
variables['html_excluded_elements'] = catalog.html_excluded_elements
|
||||||
|
variables['html_excluded_classes'] = catalog.html_excluded_classes
|
||||||
|
|
||||||
if tenant.chat_no_RAG_temperature:
|
# Set Chunk Size variables
|
||||||
model_variables['no_RAG_temperature'] = tenant.chat_no_RAG_temperature
|
variables['min_chunk_size'] = catalog.min_chunk_size
|
||||||
else:
|
variables['max_chunk_size'] = catalog.max_chunk_size
|
||||||
model_variables['no_RAG_temperature'] = 0.5
|
|
||||||
|
|
||||||
# Set Tuning variables
|
# Set the RAG Context (will have to change once specialists are defined
|
||||||
if tenant.embed_tuning:
|
variables['rag_context'] = self.tenant.rag_context or " "
|
||||||
model_variables['embed_tuning'] = tenant.embed_tuning
|
# Temporary setting until we have Specialists
|
||||||
else:
|
variables['rag_tuning'] = False
|
||||||
model_variables['embed_tuning'] = False
|
variables['RAG_temperature'] = 0.3
|
||||||
|
variables['no_RAG_temperature'] = 0.5
|
||||||
|
variables['k'] = 8
|
||||||
|
variables['similarity_threshold'] = 0.4
|
||||||
|
|
||||||
if tenant.rag_tuning:
|
# Set model providers
|
||||||
model_variables['rag_tuning'] = tenant.rag_tuning
|
variables['embedding_provider'], variables['embedding_model'] = self.tenant.embedding_model.rsplit('.', 1)
|
||||||
else:
|
variables['llm_provider'], variables['llm_model'] = self.tenant.llm_model.rsplit('.', 1)
|
||||||
model_variables['rag_tuning'] = False
|
variables["templates"] = current_app.config['PROMPT_TEMPLATES'][(f"{variables['llm_provider']}."
|
||||||
|
f"{variables['llm_model']}")]
|
||||||
|
current_app.logger.info(f"Loaded prompt templates: \n")
|
||||||
|
current_app.logger.info(f"{variables['templates']}")
|
||||||
|
|
||||||
if tenant.rag_context:
|
# Set model-specific configurations
|
||||||
model_variables['rag_context'] = tenant.rag_context
|
model_config = MODEL_CONFIG.get(variables['llm_provider'], {}).get(variables['llm_model'], {})
|
||||||
else:
|
variables.update(model_config)
|
||||||
model_variables['rag_context'] = " "
|
|
||||||
|
|
||||||
# Set HTML Chunking Variables
|
variables['annotation_chunk_length'] = current_app.config['ANNOTATION_TEXT_CHUNK_LENGTH'][self.tenant.llm_model]
|
||||||
model_variables['html_tags'] = tenant.html_tags
|
|
||||||
model_variables['html_end_tags'] = tenant.html_end_tags
|
|
||||||
model_variables['html_included_elements'] = tenant.html_included_elements
|
|
||||||
model_variables['html_excluded_elements'] = tenant.html_excluded_elements
|
|
||||||
|
|
||||||
# Set Chunk Size variables
|
if variables['tool_calling_supported']:
|
||||||
model_variables['min_chunk_size'] = tenant.min_chunk_size
|
variables['cited_answer_cls'] = CitedAnswer
|
||||||
model_variables['max_chunk_size'] = tenant.max_chunk_size
|
|
||||||
|
|
||||||
environment = os.getenv('FLASK_ENV', 'development')
|
variables['max_compression_duration'] = current_app.config['MAX_COMPRESSION_DURATION']
|
||||||
portkey_metadata = {'tenant_id': str(tenant.id), 'environment': environment}
|
variables['max_transcription_duration'] = current_app.config['MAX_TRANSCRIPTION_DURATION']
|
||||||
|
variables['compression_cpu_limit'] = current_app.config['COMPRESSION_CPU_LIMIT']
|
||||||
|
variables['compression_process_delay'] = current_app.config['COMPRESSION_PROCESS_DELAY']
|
||||||
|
|
||||||
# Set Embedding variables
|
return variables
|
||||||
match embedding_provider:
|
|
||||||
case 'openai':
|
|
||||||
portkey_headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
|
|
||||||
provider='openai',
|
|
||||||
metadata=portkey_metadata)
|
|
||||||
match embedding_model:
|
|
||||||
case 'text-embedding-3-small':
|
|
||||||
api_key = current_app.config.get('OPENAI_API_KEY')
|
|
||||||
model_variables['embedding_model'] = OpenAIEmbeddings(api_key=api_key,
|
|
||||||
model='text-embedding-3-small',
|
|
||||||
base_url=PORTKEY_GATEWAY_URL,
|
|
||||||
default_headers=portkey_headers
|
|
||||||
)
|
|
||||||
model_variables['embedding_db_model'] = EmbeddingSmallOpenAI
|
|
||||||
case 'text-embedding-3-large':
|
|
||||||
api_key = current_app.config.get('OPENAI_API_KEY')
|
|
||||||
model_variables['embedding_model'] = OpenAIEmbeddings(api_key=api_key,
|
|
||||||
model='text-embedding-3-large',
|
|
||||||
base_url=PORTKEY_GATEWAY_URL,
|
|
||||||
default_headers=portkey_headers
|
|
||||||
)
|
|
||||||
model_variables['embedding_db_model'] = EmbeddingLargeOpenAI
|
|
||||||
case _:
|
|
||||||
raise Exception(f'Error setting model variables for tenant {tenant.id} '
|
|
||||||
f'error: Invalid embedding model')
|
|
||||||
case _:
|
|
||||||
raise Exception(f'Error setting model variables for tenant {tenant.id} '
|
|
||||||
f'error: Invalid embedding provider')
|
|
||||||
|
|
||||||
# Set Chat model variables
|
@property
|
||||||
match llm_provider:
|
def embedding_model(self):
|
||||||
case 'openai':
|
api_key = os.getenv('OPENAI_API_KEY')
|
||||||
portkey_headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
|
model = self._variables['embedding_model']
|
||||||
metadata=portkey_metadata,
|
self._embedding_model = TrackedOpenAIEmbeddings(api_key=api_key,
|
||||||
provider='openai')
|
model=model,
|
||||||
tool_calling_supported = False
|
)
|
||||||
api_key = current_app.config.get('OPENAI_API_KEY')
|
self._embedding_db_model = EmbeddingSmallOpenAI \
|
||||||
model_variables['llm'] = ChatOpenAI(api_key=api_key,
|
if model == 'text-embedding-3-small' \
|
||||||
model=llm_model,
|
else EmbeddingLargeOpenAI
|
||||||
temperature=model_variables['RAG_temperature'],
|
|
||||||
base_url=PORTKEY_GATEWAY_URL,
|
|
||||||
default_headers=portkey_headers)
|
|
||||||
model_variables['llm_no_rag'] = ChatOpenAI(api_key=api_key,
|
|
||||||
model=llm_model,
|
|
||||||
temperature=model_variables['no_RAG_temperature'],
|
|
||||||
base_url=PORTKEY_GATEWAY_URL,
|
|
||||||
default_headers=portkey_headers)
|
|
||||||
tool_calling_supported = False
|
|
||||||
match llm_model:
|
|
||||||
case 'gpt-4-turbo' | 'gpt-4o' | 'gpt-4o-mini':
|
|
||||||
tool_calling_supported = True
|
|
||||||
case _:
|
|
||||||
raise Exception(f'Error setting model variables for tenant {tenant.id} '
|
|
||||||
f'error: Invalid chat model')
|
|
||||||
case 'anthropic':
|
|
||||||
api_key = current_app.config.get('ANTHROPIC_API_KEY')
|
|
||||||
# Anthropic does not have the same 'generic' model names as OpenAI
|
|
||||||
llm_model_ext = current_app.config.get('ANTHROPIC_LLM_VERSIONS').get(llm_model)
|
|
||||||
model_variables['llm'] = ChatAnthropic(api_key=api_key,
|
|
||||||
model=llm_model_ext,
|
|
||||||
temperature=model_variables['RAG_temperature'])
|
|
||||||
model_variables['llm_no_rag'] = ChatAnthropic(api_key=api_key,
|
|
||||||
model=llm_model_ext,
|
|
||||||
temperature=model_variables['RAG_temperature'])
|
|
||||||
tool_calling_supported = True
|
|
||||||
case _:
|
|
||||||
raise Exception(f'Error setting model variables for tenant {tenant.id} '
|
|
||||||
f'error: Invalid chat provider')
|
|
||||||
|
|
||||||
if tool_calling_supported:
|
return self._embedding_model
|
||||||
model_variables['cited_answer_cls'] = CitedAnswer
|
|
||||||
|
|
||||||
templates = current_app.config['PROMPT_TEMPLATES'][f'{llm_provider}.{llm_model}']
|
@property
|
||||||
model_variables['summary_template'] = templates['summary']
|
def llm(self):
|
||||||
model_variables['rag_template'] = templates['rag']
|
api_key = self.get_api_key_for_llm()
|
||||||
model_variables['history_template'] = templates['history']
|
self._llm = ChatOpenAI(api_key=api_key,
|
||||||
model_variables['encyclopedia_template'] = templates['encyclopedia']
|
model=self._variables['llm_model'],
|
||||||
model_variables['transcript_template'] = templates['transcript']
|
temperature=self._variables['RAG_temperature'],
|
||||||
model_variables['html_parse_template'] = templates['html_parse']
|
callbacks=[self.llm_metrics_handler])
|
||||||
model_variables['pdf_parse_template'] = templates['pdf_parse']
|
return self._llm
|
||||||
|
|
||||||
model_variables['annotation_chunk_length'] = current_app.config['ANNOTATION_TEXT_CHUNK_LENGTH'][tenant.llm_model]
|
@property
|
||||||
|
def llm_no_rag(self):
|
||||||
|
api_key = self.get_api_key_for_llm()
|
||||||
|
self._llm_no_rag = ChatOpenAI(api_key=api_key,
|
||||||
|
model=self._variables['llm_model'],
|
||||||
|
temperature=self._variables['RAG_temperature'],
|
||||||
|
callbacks=[self.llm_metrics_handler])
|
||||||
|
return self._llm_no_rag
|
||||||
|
|
||||||
# Transcription Client Variables.
|
def get_api_key_for_llm(self):
|
||||||
# Using Groq
|
if self._variables['llm_provider'] == 'openai':
|
||||||
# api_key = current_app.config.get('GROQ_API_KEY')
|
api_key = os.getenv('OPENAI_API_KEY')
|
||||||
-            # model_variables['transcription_client'] = Groq(api_key=api_key)
-            # model_variables['transcription_model'] = 'whisper-large-v3'
-        # Using OpenAI for transcriptions
-        portkey_metadata = {'tenant_id': str(tenant.id)}
-        portkey_headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
-                                        metadata=portkey_metadata,
-                                        provider='openai'
-                                        )
-        api_key = current_app.config.get('OPENAI_API_KEY')
-        model_variables['transcription_client'] = OpenAI(api_key=api_key,
-                                                          base_url=PORTKEY_GATEWAY_URL,
-                                                          default_headers=portkey_headers)
-        model_variables['transcription_model'] = 'whisper-1'
+        else:  # self._variables['llm_provider'] == 'anthropic'
+            api_key = os.getenv('ANTHROPIC_API_KEY')
+        return api_key
+
+    @property
+    def transcription_client(self):
+        api_key = os.getenv('OPENAI_API_KEY')
+        self._transcription_client = OpenAI(api_key=api_key, )
+        self._variables['transcription_model'] = 'whisper-1'
+        return self._transcription_client
+
+    def transcribe(self, *args, **kwargs):
+        return tracked_transcribe(self._transcription_client, *args, **kwargs)
+
+    @property
+    def embedding_db_model(self):
+        if self._embedding_db_model is None:
+            self._embedding_db_model = self.get_embedding_db_model()
+        return self._embedding_db_model
+
+    def get_embedding_db_model(self):
+        current_app.logger.debug("In get_embedding_db_model")
+        if self._embedding_db_model is None:
+            self._embedding_db_model = EmbeddingSmallOpenAI \
+                if self._variables['embedding_model'] == 'text-embedding-3-small' \
+                else EmbeddingLargeOpenAI
+        current_app.logger.debug(f"Embedding DB Model: {self._embedding_db_model}")
+        return self._embedding_db_model
+
+    def get_prompt_template(self, template_name: str) -> str:
+        current_app.logger.info(f"Getting prompt template for {template_name}")
+        if template_name not in self._prompt_templates:
+            self._prompt_templates[template_name] = self._load_prompt_template(template_name)
+        return self._prompt_templates[template_name]
+
+    def _load_prompt_template(self, template_name: str) -> str:
+        # In the future, this method will make an API call to Portkey
+        # For now, we'll simulate it with a placeholder implementation
+        # You can replace this with your current prompt loading logic
+        return self._variables['templates'][template_name]
+
+    def __getitem__(self, key: str) -> Any:
+        current_app.logger.debug(f"ModelVariables: Getting {key}")
+        # Support older template names (suffix = _template)
+        if key.endswith('_template'):
+            key = key[:-len('_template')]
+            current_app.logger.debug(f"ModelVariables: Getting modified {key}")
+        if key == 'embedding_model':
+            return self.embedding_model
+        elif key == 'embedding_db_model':
+            return self.embedding_db_model
+        elif key == 'llm':
+            return self.llm
+        elif key == 'llm_no_rag':
+            return self.llm_no_rag
+        elif key == 'transcription_client':
+            return self.transcription_client
+        elif key in self._variables.get('prompt_templates', []):
+            return self.get_prompt_template(key)
+        else:
+            value = self._variables.get(key)
+            if value is not None:
+                return value
+            else:
+                raise KeyError(f'Variable {key} does not exist in ModelVariables')
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self._variables[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self._variables[key]
+
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._variables)
+
+    def __len__(self):
+        return len(self._variables)
+
+    def get(self, key: str, default: Any = None) -> Any:
+        return self.__getitem__(key) or default
+
+    def update(self, **kwargs) -> None:
+        self._variables.update(kwargs)
+
+    def items(self):
+        return self._variables.items()
+
+    def keys(self):
+        return self._variables.keys()
+
+    def values(self):
+        return self._variables.values()
+
+
+def select_model_variables(tenant, catalog_id=None):
+    model_variables = ModelVariables(tenant=tenant, catalog_id=catalog_id)
     return model_variables
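For orientation, a minimal usage sketch of the mapping interface added above; the tenant object and the key names are illustrative assumptions, not taken from this change set.

# Hypothetical usage of ModelVariables; `tenant` and the key names are assumptions.
model_variables = select_model_variables(tenant, catalog_id=None)
llm = model_variables['llm']                        # resolved lazily via the property
client = model_variables['transcription_client']    # OpenAI client built on first access
rag_prompt = model_variables['rag_template']        # legacy '_template' suffix is stripped
# Unknown keys raise KeyError('Variable <key> does not exist in ModelVariables')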
@@ -6,7 +6,6 @@ def prefixed_url_for(endpoint, **values):
    prefix = request.headers.get('X-Forwarded-Prefix', '')
    scheme = request.headers.get('X-Forwarded-Proto', request.scheme)
    host = request.headers.get('Host', request.host)
-   current_app.logger.debug(f'prefix: {prefix}, scheme: {scheme}, host: {host}')

    external = values.pop('_external', False)
    generated_url = url_for(endpoint, **values)
@@ -1,4 +1,4 @@
-from flask import flash
+from flask import flash, current_app


def prepare_table(model_objects, column_names):
@@ -44,6 +44,7 @@ def form_validation_failed(request, form):
    for fieldName, errorMessages in form.errors.items():
        for err in errorMessages:
            flash(f"Error in {fieldName}: {err}", 'danger')
+           current_app.logger.debug(f"Error in {fieldName}: {err}")


def form_to_dict(form):
config/catalog_types.py (new file, 53 lines)
@@ -0,0 +1,53 @@
# Catalog Types
CATALOG_TYPES = {
    "DEFAULT": {
        "name": "Default Catalog",
        "Description": "A Catalog with information in Evie's Library, to be considered as a whole",
        "configuration": {}
    },
    "DOSSIER": {
        "name": "Dossier Catalog",
        "Description": "A Catalog with information in Evie's Library in which several Dossiers can be stored",
        "configuration": {
            "tagging_fields": {
                "name": "Tagging Fields",
                "type": "tagging_fields",
                "description": """Define the metadata fields that will be used for tagging documents.
                Each field must have:
                - type: one of 'string', 'integer', 'float', 'date', 'enum'
                - required: boolean indicating if the field is mandatory
                - description: field description
                - allowed_values: list of values (for enum type only)
                - min_value/max_value: range limits (for numeric types only)""",
                "required": True,
                "default": {},
                "field_properties": {
                    "type": {
                        "allowed_values": ["string", "integer", "float", "date", "enum"],
                        "required": True
                    },
                    "required": {
                        "type": "boolean",
                        "default": False
                    },
                    "description": {
                        "type": "string"
                    },
                    "allowed_values": {
                        "type": "list",
                        "description": "For enum type fields only"
                    },
                    "min_value": {
                        "type": "number",
                        "description": "For numeric fields only"
                    },
                    "max_value": {
                        "type": "number",
                        "description": "For numeric fields only"
                    }
                }
            }
        },
        "document_version_configurations": ["tagging_fields"]
    },
}
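A short, hypothetical illustration of a tagging_fields value that satisfies the field_properties declared above; the concrete field names and values are invented for this sketch and are not part of the commit.

# Illustrative DOSSIER tagging_fields definition (field names/values are assumptions)
tagging_fields = {
    "case_number": {"type": "string", "required": True,
                    "description": "Dossier reference used to group documents"},
    "document_type": {"type": "enum", "required": False,
                      "description": "Kind of document stored in the dossier",
                      "allowed_values": ["contract", "invoice", "report"]},
    "year": {"type": "integer", "required": False,
             "description": "Publication year", "min_value": 2000, "max_value": 2100},
}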
@@ -3,7 +3,6 @@ from datetime import timedelta
 import redis

 from common.utils.prompt_loader import load_prompt_templates
-from eveai_app.views.security_forms import ResetPasswordForm

 basedir = path.abspath(path.dirname(__file__))

@@ -46,7 +45,6 @@ class Config(object):
    SECURITY_EMAIL_SUBJECT_PASSWORD_NOTICE = 'Your Password Has Been Reset'
    SECURITY_EMAIL_PLAINTEXT = False
    SECURITY_EMAIL_HTML = True
-   SECURITY_RESET_PASSWORD_FORM = ResetPasswordForm

    # Ensure Flask-Security-Too is handling CSRF tokens when behind a proxy
    SECURITY_CSRF_PROTECT_MECHANISMS = ['session']
@@ -55,12 +53,15 @@ class Config(object):
    WTF_CSRF_CHECK_DEFAULT = False

    # file upload settings
-   MAX_CONTENT_LENGTH = 16 * 1024 * 1024
+   MAX_CONTENT_LENGTH = 50 * 1024 * 1024
    UPLOAD_EXTENSIONS = ['.txt', '.pdf', '.png', '.jpg', '.jpeg', '.gif']

    # supported languages
    SUPPORTED_LANGUAGES = ['en', 'fr', 'nl', 'de', 'es']

+   # supported currencies
+   SUPPORTED_CURRENCIES = ['€', '$']
+
    # supported LLMs
    SUPPORTED_EMBEDDINGS = ['openai.text-embedding-3-small', 'openai.text-embedding-3-large', 'mistral.mistral-embed']
    SUPPORTED_LLMS = ['openai.gpt-4o', 'anthropic.claude-3-5-sonnet', 'openai.gpt-4o-mini']
@@ -109,6 +110,7 @@ class Config(object):

    # JWT settings
    JWT_SECRET_KEY = environ.get('JWT_SECRET_KEY')
+   JWT_ACCESS_TOKEN_EXPIRES = timedelta(hours=1)  # Set token expiry to 1 hour

    # API Encryption
    API_ENCRYPTION_KEY = environ.get('API_ENCRYPTION_KEY')
@@ -138,6 +140,25 @@ class Config(object):
    MAIL_PASSWORD = environ.get('MAIL_PASSWORD')
    MAIL_DEFAULT_SENDER = ('eveAI Admin', MAIL_USERNAME)

+   # Langsmith settings
+   LANGCHAIN_TRACING_V2 = True
+   LANGCHAIN_ENDPOINT = 'https://api.smith.langchain.com'
+   LANGCHAIN_PROJECT = "eveai"
+
+   SUPPORTED_FILE_TYPES = ['pdf', 'html', 'md', 'txt', 'mp3', 'mp4', 'ogg', 'srt']
+
+   TENANT_TYPES = ['Active', 'Demo', 'Inactive', 'Test']
+
+   # The maximum number of seconds allowed for audio compression (to save resources)
+   MAX_COMPRESSION_DURATION = 60*10  # 10 minutes
+   # The maximum number of seconds allowed for transcribing audio
+   MAX_TRANSCRIPTION_DURATION = 60*10  # 10 minutes
+   # Maximum CPU usage for a compression task
+   COMPRESSION_CPU_LIMIT = 50
+   # Delay between compressing chunks in seconds
+   COMPRESSION_PROCESS_DELAY = 1
+

class DevConfig(Config):
    DEVELOPMENT = True
@@ -1,13 +0,0 @@ (file deleted)
{
  "type": "service_account",
  "project_id": "eveai-420711",
  "private_key_id": "e666408e75793321a6134243628346722a71b3a6",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCaGTXCWpq08YD1\nOW4z+gncOlB7T/EIiEwsZgMp6pyUrNioGfiI9YN+uVR0nsUSmFf1YyerRgX7RqD5\nRc7T/OuX8iIvmloK3g7CaFezcVrjnBKcg/QsjDAt/OO3DTk4vykDlh/Kqxx73Jdv\nFH9YSV2H7ToWqIE8CTDnqe8vQS7Bq995c9fPlues31MgndRFg3CFkH0ldfZ4aGm3\n1RnBDyC+9SPQW9e7CJgNN9PWTmOT51Zyy5IRuV5OWePMQaGLVmCo5zNc/EHZEVRu\n1hxJPHL3NNmkYDY8tye8uHgjsAkv8QuwIuUSqnqjoo1/Yg+P0+9GCpePOAJRNxJS\n0YpDFWc5AgMBAAECggEACIU4/hG+bh97BD7JriFhfDDT6bg7g+pCs/hsAlxQ42jv\nOH7pyWuHJXGf5Cwx31usZAq4fcrgYnVpnyl8odIL628y9AjdI66wMuWhZnBFGJgK\nRhHcZWjW8nlXf0lBjwwFe4edzbn1AuWT5fYZ2HWDW2mthY/e8sUwqWPcWsjdifhz\nNR7V+Ia47McKXYgEKjyEObSP1NUOW24zH0DgxS52YPMwa1FoHn6+9Pr8P3TsTSO6\nh6f8tnd81DGl1UH4F5Bj/MHsQXyAMJbu44S4+rZ4Qlk+5xPp9hfCNpxWaHLIkJCg\nYXnC8UAjjyXiqyK0U0RjJf8TS1FxUI4iPepLNqp/pQKBgQDTicZnWFXmCFTnycWp\n66P3Yx0yvlKdUdfnoD/n9NdmUA3TZUlEVfb0IOm7ZFubF/zDTH87XrRiD/NVDbr8\n6bdhA1DXzraxhbfD36Hca6K74Ba4aYJsSWWwI0hL3FDSsv8c7qAIaUF2iwuHb7Y0\nRDcvZqowtQobcQC8cHLc/bI/ZwKBgQC6fMeGaU+lP6jhp9Nb/3Gz5Z1zzCu34IOo\nlgpTNZsowRKYLtjHifrEFi3XRxPKz5thMuJFniof5U4WoMYtRXy+PbgySvBpCia2\nXty05XssnLLMvLpYU5sbQvmOTe20zaIzLohRvvmqrydYIKu62NTubNeuD1L+Zr0q\nz1P5/wUgXwKBgQCW9MrRFQi3j1qHzkVwbOglsmUzwP3TpoQclw8DyIWuTZKQOMeA\nLJh+vr4NLCDzHLsT45MoGv0+vYM4PwQhV+e1I1idqLZXGMV60iv/0A/hYpjUIPch\nr38RoxwEhsRml7XWP7OUTQiaP7+Kdv3fbo6zFOB+wbLkwk90KgrOCX0aIQKBgFeK\n7esmErJjMPdFXk3om0q09nX+mWNHLOb+EDjBiGXYRM9V5oO9PQ/BzaEqh5sEXE+D\noH7H4cR5U3AB5yYnYYi41ngdf7//eO7Rl1AADhOCN9kum1eNX9mrVhU8deMTSRo3\ntNyTBwbeFF0lcRhUY5jNVW4rWW19cz3ed/B6i8CHAoGBAJ/l5rkV74Z5hg6BWNfQ\nYAg/4PLZmjnXIy5QdnWc/PYgbhn5+iVUcL9fSofFzJM1rjFnNcs3S90MGeOmfmo4\nM1WtcQFQbsCGt6+G5uEL/nf74mKUGpOqEM/XSkZ3inweWiDk3LK3iYfXCMBFouIr\n80IlzI1yMf7MVmWn3e1zPjCA\n-----END PRIVATE KEY-----\n",
  "client_email": "eveai-349@eveai-420711.iam.gserviceaccount.com",
  "client_id": "109927035346319712442",
  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
  "token_uri": "https://oauth2.googleapis.com/token",
  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/eveai-349%40eveai-420711.iam.gserviceaccount.com",
  "universe_domain": "googleapis.com"
}
@@ -12,7 +12,12 @@ env = os.environ.get('FLASK_ENV', 'development')
class CustomLogRecord(logging.LogRecord):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
-       self.component = os.environ.get('COMPONENT_NAME', 'eveai_app')  # Set default component value here
+       self.component = os.environ.get('COMPONENT_NAME', 'eveai_app')
+
+   def __setattr__(self, name, value):
+       if name not in {'event_type', 'tenant_id', 'trace_id', 'span_id', 'span_name', 'parent_span_id',
+                       'document_version_id', 'chat_session_id', 'interaction_id', 'environment'}:
+           super().__setattr__(name, value)


def custom_log_record_factory(*args, **kwargs):
@@ -32,7 +37,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/eveai_app.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -40,7 +45,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/eveai_workers.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -48,7 +53,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/eveai_chat.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -56,7 +61,31 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/eveai_chat_workers.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
+       'backupCount': 10,
+       'formatter': 'standard',
+   },
+   'file_api': {
+       'level': 'DEBUG',
+       'class': 'logging.handlers.RotatingFileHandler',
+       'filename': 'logs/eveai_api.log',
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
+       'backupCount': 10,
+       'formatter': 'standard',
+   },
+   'file_beat': {
+       'level': 'DEBUG',
+       'class': 'logging.handlers.RotatingFileHandler',
+       'filename': 'logs/eveai_beat.log',
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
+       'backupCount': 10,
+       'formatter': 'standard',
+   },
+   'file_entitlements': {
+       'level': 'DEBUG',
+       'class': 'logging.handlers.RotatingFileHandler',
+       'filename': 'logs/eveai_entitlements.log',
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -64,7 +93,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/sqlalchemy.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -72,7 +101,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/mailman.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -80,7 +109,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/security.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -88,7 +117,7 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/rag_tuning.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -96,7 +125,15 @@ LOGGING = {
        'level': 'DEBUG',
        'class': 'logging.handlers.RotatingFileHandler',
        'filename': 'logs/embed_tuning.log',
-       'maxBytes': 1024 * 1024 * 5,  # 5MB
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
+       'backupCount': 10,
+       'formatter': 'standard',
+   },
+   'file_business_events': {
+       'level': 'INFO',
+       'class': 'logging.handlers.RotatingFileHandler',
+       'filename': 'logs/business_events.log',
+       'maxBytes': 1024 * 1024 * 1,  # 1MB
        'backupCount': 10,
        'formatter': 'standard',
    },
@@ -146,6 +183,21 @@ LOGGING = {
        'level': 'DEBUG',
        'propagate': False
    },
+   'eveai_api': {  # logger for the eveai_api
+       'handlers': ['file_api', 'graylog', ] if env == 'production' else ['file_api', ],
+       'level': 'DEBUG',
+       'propagate': False
+   },
+   'eveai_beat': {  # logger for the eveai_beat
+       'handlers': ['file_beat', 'graylog', ] if env == 'production' else ['file_beat', ],
+       'level': 'DEBUG',
+       'propagate': False
+   },
+   'eveai_entitlements': {  # logger for the eveai_entitlements
+       'handlers': ['file_entitlements', 'graylog', ] if env == 'production' else ['file_entitlements', ],
+       'level': 'DEBUG',
+       'propagate': False
+   },
    'sqlalchemy.engine': {  # logger for the sqlalchemy
        'handlers': ['file_sqlalchemy', 'graylog', ] if env == 'production' else ['file_sqlalchemy', ],
        'level': 'DEBUG',
@@ -171,6 +223,11 @@ LOGGING = {
        'level': 'DEBUG',
        'propagate': False
    },
+   'business_events': {
+       'handlers': ['file_business_events', 'graylog'],
+       'level': 'DEBUG',
+       'propagate': False
+   },
    '': {  # root logger
        'handlers': ['console'],
        'level': 'WARNING',  # Set higher level for root to minimize noise
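A minimal sketch of how the new business_events logger can be obtained once this configuration is loaded; the event text is illustrative.

import logging

business_logger = logging.getLogger('business_events')  # matches the logger added above
business_logger.info("tenant onboarding completed")     # illustrative event message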
config/model_config.py (new file, 41 lines)
@@ -0,0 +1,41 @@
MODEL_CONFIG = {
    "openai": {
        "gpt-4o": {
            "tool_calling_supported": True,
            "processing_chunk_size": 10000,
            "processing_chunk_overlap": 200,
            "processing_min_chunk_size": 8000,
            "processing_max_chunk_size": 12000,
            "prompt_templates": [
                "summary", "rag", "history", "encyclopedia",
                "transcript", "html_parse", "pdf_parse"
            ]
        },
        "gpt-4o-mini": {
            "tool_calling_supported": True,
            "processing_chunk_size": 10000,
            "processing_chunk_overlap": 200,
            "processing_min_chunk_size": 8000,
            "processing_max_chunk_size": 12000,
            "prompt_templates": [
                "summary", "rag", "history", "encyclopedia",
                "transcript", "html_parse", "pdf_parse"
            ]
        },
        # Add other OpenAI models here
    },
    "anthropic": {
        "claude-3-5-sonnet": {
            "tool_calling_supported": True,
            "processing_chunk_size": 10000,
            "processing_chunk_overlap": 200,
            "processing_min_chunk_size": 8000,
            "processing_max_chunk_size": 12000,
            "prompt_templates": [
                "summary", "rag", "history", "encyclopedia",
                "transcript", "html_parse", "pdf_parse"
            ]
        },
        # Add other Anthropic models here
    },
}
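A small sketch of how MODEL_CONFIG might be consulted for a provider-qualified model name such as those in SUPPORTED_LLMS; the helper function is an assumption, not part of the commit.

from config.model_config import MODEL_CONFIG

def get_model_settings(llm_name: str) -> dict:
    # Hypothetical helper: 'openai.gpt-4o-mini' -> MODEL_CONFIG['openai']['gpt-4o-mini']
    provider, model = llm_name.split('.', 1)
    return MODEL_CONFIG[provider][model]

get_model_settings('openai.gpt-4o-mini')['processing_chunk_size']  # 10000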
@@ -15,11 +15,12 @@ html_parse: |

pdf_parse: |
  You are a top administrative aid specialized in transforming given PDF-files into markdown formatted files. The generated files will be used to generate embeddings in a RAG-system.
+ The content you get is already processed (some markdown already generated), but needs to be corrected. For large files, you may receive only portions of the full file. Consider this when processing the content.

  # Best practices are:
- - Respect wordings and language(s) used in the PDF.
+ - Respect wordings and language(s) used in the provided content.
  - The following items need to be considered: headings, paragraphs, listed items (numbered or not) and tables. Images can be neglected.
- - When headings are numbered, show the numbering and define the header level.
+ - When headings are numbered, show the numbering and define the header level. You may have to correct current header levels, as preprocessing is known to make errors.
  - A new item is started when a <return> is found before a full line is reached. In order to know the number of characters in a line, please check the document and the context within the document (e.g. an image could limit the number of characters temporarily).
  - Paragraphs are to be stripped of newlines so they become easily readable.
  - Be careful of encoding of the text. Everything needs to be human readable.
@@ -64,11 +65,13 @@ encyclopedia: |

transcript: |
  You are a top administrative assistant specialized in transforming given transcriptions into markdown formatted files. The generated files will be used to generate embeddings in a RAG-system. The transcriptions originate from podcast, videos and similar material.
+ You may receive information in different chunks. If you're not receiving the first chunk, you'll get the last part of the previous chunk, including its title in between triple $. Consider this last part and the title as the start of the new chunk.
+
  # Best practices and steps are:
  - Respect wordings and language(s) used in the transcription. Main language is {language}.
  - Sometimes, the transcript contains speech of several people participating in a conversation. Although these are not obvious from reading the file, try to detect when other people are speaking.
- - Divide the transcript into several logical parts. Ensure questions and their answers are in the same logical part.
+ - Divide the transcript into several logical parts. Ensure questions and their answers are in the same logical part. Don't make logical parts too small. They should contain at least 7 or 8 sentences.
  - annotate the text to identify these logical parts using headings in {language}.
  - improve errors in the transcript given the context, but do not change the meaning and intentions of the transcription.
@@ -76,4 +79,6 @@ transcript: |

  The transcript is between triple backquotes.

+ $$${previous_part}$$$
+
  ```{transcript}```
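The transcript template now receives the tail of the previous chunk between triple $ signs; a rough sketch of how a caller could fill those placeholders (the function, the 500-character tail, and the variable names are assumptions, not part of this commit).

def build_transcript_prompt(template: str, transcript_chunk: str,
                            previous_chunk: str = "", language: str = "en") -> str:
    # Hand the end of the previous chunk to the model so chunks can be stitched together.
    previous_part = previous_chunk[-500:] if previous_chunk else ""
    return template.format(language=language,
                           previous_part=previous_part,
                           transcript=transcript_chunk)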
config/retriever_types.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Retriever Types
RETRIEVER_TYPES = {
    "DEFAULT_RAG": {
        "name": "Default RAG",
        "description": "Retrieving all embeddings conform the query",
        "configuration": {
            "es_k": {
                "name": "es_k",
                "type": "int",
                "description": "K-value to retrieve embeddings (max embeddings retrieved)",
                "required": True,
                "default": 8,
            },
            "es_similarity_threshold": {
                "name": "es_similarity_threshold",
                "type": "float",
                "description": "Similarity threshold for retrieving embeddings",
                "required": True,
                "default": 0.3,
            },
        }
    }
}
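A brief sketch of how the defaults declared in RETRIEVER_TYPES could be merged with tenant-specific overrides; the helper name is an assumption.

from config.retriever_types import RETRIEVER_TYPES

def resolve_retriever_config(retriever_type: str, overrides: dict | None = None) -> dict:
    # Start from the declared defaults, then apply any overrides.
    spec = RETRIEVER_TYPES[retriever_type]["configuration"]
    config = {name: field["default"] for name, field in spec.items()}
    config.update(overrides or {})
    return config

resolve_retriever_config("DEFAULT_RAG", {"es_k": 12})
# -> {'es_k': 12, 'es_similarity_threshold': 0.3}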
@@ -141,7 +141,7 @@ if [ $# -eq 0 ]; then
    SERVICES=()
    while IFS= read -r line; do
        SERVICES+=("$line")
-   done < <(yq e '.services | keys | .[]' compose_dev.yaml | grep -E '^(nginx|eveai_)')
+   done < <(yq e '.services | keys | .[]' compose_dev.yaml | grep -E '^(nginx|eveai_|flower)')
else
    SERVICES=("$@")
fi
@@ -158,7 +158,7 @@ docker buildx use eveai_builder

# Loop through services
for SERVICE in "${SERVICES[@]}"; do
-   if [[ "$SERVICE" == "nginx" || "$SERVICE" == eveai_* ]]; then
+   if [[ "$SERVICE" == "nginx" || "$SERVICE" == eveai_* || "$SERVICE" == "flower" ]]; then
        if process_service "$SERVICE"; then
            echo "Successfully processed $SERVICE"
        else
@@ -169,4 +169,5 @@ for SERVICE in "${SERVICES[@]}"; do
    fi
done

-echo "All specified services processed."
+echo -e "\033[35mAll specified services processed.\033[0m"
+echo -e "\033[35mFinished at $(date +"%d/%m/%Y %H:%M:%S")\033[0m"
@@ -22,6 +22,8 @@ x-common-variables: &common-variables
  MAIL_PASSWORD: '$$6xsWGbNtx$$CFMQZqc*'
  MAIL_SERVER: mail.flow-it.net
  MAIL_PORT: 465
+ REDIS_URL: redis
+ REDIS_PORT: '6379'
  OPENAI_API_KEY: 'sk-proj-8R0jWzwjL7PeoPyMhJTZT3BlbkFJLb6HfRB2Hr9cEVFWEhU7'
  GROQ_API_KEY: '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'
  ANTHROPIC_API_KEY: 'sk-ant-api03-c2TmkzbReeGhXBO5JxNH6BJNylRDonc9GmZd0eRbrvyekec2'
@@ -32,6 +34,7 @@ x-common-variables: &common-variables
  MINIO_ACCESS_KEY: minioadmin
  MINIO_SECRET_KEY: minioadmin
  NGINX_SERVER_NAME: 'localhost http://macstudio.ask-eve-ai-local.com/'
+ LANGCHAIN_API_KEY: "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"


networks:
@@ -57,6 +60,9 @@ services:
      - ../nginx/sites-enabled:/etc/nginx/sites-enabled
      - ../nginx/static:/etc/nginx/static
      - ../nginx/public:/etc/nginx/public
+     - ../integrations/Wordpress/eveai-chat-widget/css/eveai-chat-style.css:/etc/nginx/static/css/eveai-chat-style.css
+     - ../integrations/Wordpress/eveai-chat-widget/js/eveai-chat-widget.js:/etc/nginx/static/js/eveai-chat-widget.js
+     - ../integrations/Wordpress/eveai-chat-widget/js/eveai-sdk.js:/etc/nginx/static/js/eveai-sdk.js
      - ./logs/nginx:/var/log/nginx
    depends_on:
      - eveai_app
@@ -93,12 +99,11 @@ services:
      minio:
        condition: service_healthy
    healthcheck:
-     test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
+     test: ["CMD", "curl", "-f", "http://localhost:5001/healthz/ready"]
-     interval: 10s
+     interval: 30s
-     timeout: 5s
+     timeout: 1s
-     retries: 5
+     retries: 3
-#    entrypoint: ["scripts/entrypoint.sh"]
+     start_period: 30s
-#    command: ["scripts/start_eveai_app.sh"]
    networks:
      - eveai-network

@@ -110,8 +115,6 @@ services:
      platforms:
        - linux/amd64
        - linux/arm64
-#    ports:
-#      - 5001:5001
    environment:
      <<: *common-variables
      COMPONENT_NAME: eveai_workers
@@ -129,13 +132,6 @@ services:
        condition: service_healthy
      minio:
        condition: service_healthy
-#    healthcheck:
-#      test: [ "CMD", "curl", "-f", "http://localhost:5001/health" ]
-#      interval: 10s
-#      timeout: 5s
-#      retries: 5
-#    entrypoint: [ "sh", "-c", "scripts/entrypoint.sh" ]
-#    command: [ "sh", "-c", "scripts/start_eveai_workers.sh" ]
    networks:
      - eveai-network

@@ -165,12 +161,11 @@ services:
      redis:
        condition: service_healthy
    healthcheck:
-     test: [ "CMD", "curl", "-f", "http://localhost:5002/health" ] # Adjust based on your health endpoint
+     test: [ "CMD", "curl", "-f", "http://localhost:5002/healthz/ready" ] # Adjust based on your health endpoint
-     interval: 10s
+     interval: 30s
-     timeout: 5s
+     timeout: 1s
-     retries: 5
+     retries: 3
-#    entrypoint: [ "sh", "-c", "scripts/entrypoint.sh" ]
+     start_period: 30s
-#    command: ["sh", "-c", "scripts/start_eveai_chat.sh"]
    networks:
      - eveai-network

@@ -182,8 +177,6 @@ services:
      platforms:
        - linux/amd64
        - linux/arm64
-#    ports:
-#      - 5001:5001
    environment:
      <<: *common-variables
      COMPONENT_NAME: eveai_chat_workers
@@ -199,16 +192,98 @@ services:
        condition: service_healthy
      redis:
        condition: service_healthy
-#    healthcheck:
-#      test: [ "CMD", "curl", "-f", "http://localhost:5001/health" ]
-#      interval: 10s
-#      timeout: 5s
-#      retries: 5
-#    entrypoint: [ "sh", "-c", "scripts/entrypoint.sh" ]
-#    command: [ "sh", "-c", "scripts/start_eveai_chat_workers.sh" ]
    networks:
      - eveai-network

+  eveai_api:
+    image: josakola/eveai_api:latest
+    build:
+      context: ..
+      dockerfile: ./docker/eveai_api/Dockerfile
+      platforms:
+        - linux/amd64
+        - linux/arm64
+    ports:
+      - 5003:5003
+    environment:
+      <<: *common-variables
+      COMPONENT_NAME: eveai_api
+    volumes:
+      - ../eveai_api:/app/eveai_api
+      - ../common:/app/common
+      - ../config:/app/config
+      - ../scripts:/app/scripts
+      - ../patched_packages:/app/patched_packages
+      - eveai_logs:/app/logs
+    depends_on:
+      db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      minio:
+        condition: service_healthy
+    healthcheck:
+      test: [ "CMD", "curl", "-f", "http://localhost:5003/healthz/ready" ]
+      interval: 30s
+      timeout: 1s
+      retries: 3
+      start_period: 30s
+    networks:
+      - eveai-network
+
+  eveai_beat:
+    image: josakola/eveai_beat:latest
+    build:
+      context: ..
+      dockerfile: ./docker/eveai_beat/Dockerfile
+      platforms:
+        - linux/amd64
+        - linux/arm64
+    environment:
+      <<: *common-variables
+      COMPONENT_NAME: eveai_beat
+    volumes:
+      - ../eveai_beat:/app/eveai_beat
+      - ../common:/app/common
+      - ../config:/app/config
+      - ../scripts:/app/scripts
+      - ../patched_packages:/app/patched_packages
+      - eveai_logs:/app/logs
+    depends_on:
+      redis:
+        condition: service_healthy
+    networks:
+      - eveai-network
+
+  eveai_entitlements:
+    image: josakola/eveai_entitlements:latest
+    build:
+      context: ..
+      dockerfile: ./docker/eveai_entitlements/Dockerfile
+      platforms:
+        - linux/amd64
+        - linux/arm64
+    environment:
+      <<: *common-variables
+      COMPONENT_NAME: eveai_entitlements
+    volumes:
+      - ../eveai_entitlements:/app/eveai_entitlements
+      - ../common:/app/common
+      - ../config:/app/config
+      - ../scripts:/app/scripts
+      - ../patched_packages:/app/patched_packages
+      - eveai_logs:/app/logs
+    depends_on:
+      db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      minio:
+        condition: service_healthy
+    networks:
+      - eveai-network
+
  db:
    hostname: db
    image: ankane/pgvector
@@ -245,6 +320,22 @@ services:
    networks:
      - eveai-network

+  flower:
+    image: josakola/flower:latest
+    build:
+      context: ..
+      dockerfile: ./docker/flower/Dockerfile
+    environment:
+      <<: *common-variables
+    volumes:
+      - ../scripts:/app/scripts
+    ports:
+      - "5555:5555"
+    depends_on:
+      - redis
+    networks:
+      - eveai-network
+
  minio:
    image: minio/minio
    ports:
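The compose healthchecks above now probe /healthz/ready instead of /health; a minimal Flask sketch of such a readiness endpoint, assuming the services expose it roughly like this (the blueprint name and checks are illustrative).

from flask import Blueprint, jsonify

healthz_bp = Blueprint("healthz", __name__, url_prefix="/healthz")

@healthz_bp.route("/ready")
def ready():
    # Real readiness checks (database, redis, minio) would go here.
    return jsonify({"status": "ready"}), 200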
@@ -21,11 +21,13 @@ x-common-variables: &common-variables
  MAIL_USERNAME: 'evie_admin@askeveai.com'
  MAIL_PASSWORD: 's5D%R#y^v!s&6Z^i0k&'
  MAIL_SERVER: mail.askeveai.com
- MAIL_PORT: 465
+ MAIL_PORT: '465'
  REDIS_USER: eveai
  REDIS_PASS: 'jHliZwGD36sONgbm0fc6SOpzLbknqq4RNF8K'
  REDIS_URL: 8bciqc.stackhero-network.com
  REDIS_PORT: '9961'
+ FLOWER_USER: 'Felucia'
+ FLOWER_PASSWORD: 'Jungles'
OPENAI_API_KEY: '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'
GROQ_API_KEY: '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'
ANTHROPIC_API_KEY: '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'
@@ -38,6 +40,7 @@ x-common-variables: &common-variables
  MINIO_ACCESS_KEY: 04JKmQln8PQpyTmMiCPc
  MINIO_SECRET_KEY: 2PEZAD1nlpAmOyDV0TUTuJTQw1qVuYLF3A7GMs0D
  NGINX_SERVER_NAME: 'evie.askeveai.com mxz536.stackhero-network.com'
+ LANGCHAIN_API_KEY: "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"

networks:
  eveai-network:
@@ -53,10 +56,6 @@ services:
    environment:
      <<: *common-variables
    volumes:
-#     - ../nginx:/etc/nginx
-#     - ../nginx/sites-enabled:/etc/nginx/sites-enabled
-#     - ../nginx/static:/etc/nginx/static
-#     - ../nginx/public:/etc/nginx/public
      - eveai_logs:/var/log/nginx
    labels:
      - "traefik.enable=true"
@@ -81,7 +80,7 @@ services:
    volumes:
      - eveai_logs:/app/logs
    healthcheck:
-     test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
+     test: ["CMD", "curl", "-f", "http://localhost:5001/healthz/ready"]
      interval: 10s
      timeout: 5s
      retries: 5
@@ -91,18 +90,11 @@ services:
  eveai_workers:
    platform: linux/amd64
    image: josakola/eveai_workers:latest
-#    ports:
-#      - 5001:5001
    environment:
      <<: *common-variables
      COMPONENT_NAME: eveai_workers
    volumes:
      - eveai_logs:/app/logs
-#    healthcheck:
-#      test: [ "CMD", "curl", "-f", "http://localhost:5001/health" ]
-#      interval: 10s
-#      timeout: 5s
-#      retries: 5
    networks:
      - eveai-network

@@ -117,7 +109,7 @@ services:
    volumes:
      - eveai_logs:/app/logs
    healthcheck:
-     test: [ "CMD", "curl", "-f", "http://localhost:5002/health" ] # Adjust based on your health endpoint
+     test: [ "CMD", "curl", "-f", "http://localhost:5002/healthz/ready" ] # Adjust based on your health endpoint
      interval: 10s
      timeout: 5s
      retries: 5
@@ -127,28 +119,64 @@ services:
  eveai_chat_workers:
    platform: linux/amd64
    image: josakola/eveai_chat_workers:latest
-#    ports:
-#      - 5001:5001
    environment:
      <<: *common-variables
      COMPONENT_NAME: eveai_chat_workers
    volumes:
      - eveai_logs:/app/logs
-#    healthcheck:
-#      test: [ "CMD", "curl", "-f", "http://localhost:5001/health" ]
-#      interval: 10s
-#      timeout: 5s
-#      retries: 5
+    networks:
+      - eveai-network
+
+  eveai_api:
+    platform: linux/amd64
+    image: josakola/eveai_api:latest
+    ports:
+      - 5003:5003
+    environment:
+      <<: *common-variables
+      COMPONENT_NAME: eveai_api
+    volumes:
+      - eveai_logs:/app/logs
+    healthcheck:
+      test: [ "CMD", "curl", "-f", "http://localhost:5003/healthz/ready" ]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+    networks:
+      - eveai-network
+
+  eveai_beat:
+    platform: linux/amd64
+    image: josakola/eveai_beat:latest
+    environment:
+      <<: *common-variables
+      COMPONENT_NAME: eveai_beat
+    volumes:
+      - eveai_logs:/app/logs
+    networks:
+      - eveai-network
+
+  eveai_entitlements:
+    platform: linux/amd64
+    image: josakola/eveai_entitlements:latest
+    environment:
+      <<: *common-variables
+      COMPONENT_NAME: eveai_entitlements
+    volumes:
+      - eveai_logs:/app/logs
+    networks:
+      - eveai-network
+
+  flower:
+    image: josakola/flower:latest
+    environment:
+      <<: *common-variables
+    ports:
+      - "5555:5555"
    networks:
      - eveai-network

volumes:
  eveai_logs:
-#  minio_data:
-#  db-data:
-#  redis-data:
-#  tenant-files:
-#secrets:
-#  db-password:
-#    file: ./db/password.txt
docker/eveai_api/Dockerfile (new file, 70 lines)
@@ -0,0 +1,70 @@
ARG PYTHON_VERSION=3.12.3
FROM python:${PYTHON_VERSION}-slim as base

# Prevents Python from writing pyc files.
ENV PYTHONDONTWRITEBYTECODE=1

# Keeps Python from buffering stdout and stderr to avoid situations where
# the application crashes without emitting any logs due to buffering.
ENV PYTHONUNBUFFERED=1

# Create directory for patched packages and set permissions
RUN mkdir -p /app/patched_packages && \
    chmod 777 /app/patched_packages

# Ensure patches are applied to the application.
ENV PYTHONPATH=/app/patched_packages:$PYTHONPATH

WORKDIR /app

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/bin/bash" \
    --no-create-home \
    --uid "${UID}" \
    appuser

# Install necessary packages and build tools
RUN apt-get update && apt-get install -y \
    build-essential \
    gcc \
    postgresql-client \
    curl \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Create logs directory and set permissions
RUN mkdir -p /app/logs && chown -R appuser:appuser /app/logs

# Download dependencies as a separate step to take advantage of Docker's caching.
# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds.
# Leverage a bind mount to requirements.txt to avoid having to copy them into
# into this layer.

COPY requirements.txt /app/
RUN python -m pip install -r /app/requirements.txt

# Copy the source code into the container.
COPY eveai_api /app/eveai_api
COPY common /app/common
COPY config /app/config
COPY scripts /app/scripts
COPY patched_packages /app/patched_packages

# Set permissions for entrypoint script
RUN chmod 777 /app/scripts/entrypoint.sh

# Set ownership of the application directory to the non-privileged user
RUN chown -R appuser:appuser /app

# Expose the port that the application listens on.
EXPOSE 5003

# Set entrypoint and command
ENTRYPOINT ["/app/scripts/entrypoint.sh"]
CMD ["/app/scripts/start_eveai_api.sh"]
@@ -34,6 +34,7 @@ RUN apt-get update && apt-get install -y \
    build-essential \
    gcc \
    postgresql-client \
+   curl \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
docker/eveai_beat/Dockerfile (new file, 65 lines)
@@ -0,0 +1,65 @@
ARG PYTHON_VERSION=3.12.3
FROM python:${PYTHON_VERSION}-slim as base

# Prevents Python from writing pyc files.
ENV PYTHONDONTWRITEBYTECODE=1

# Keeps Python from buffering stdout and stderr to avoid situations where
# the application crashes without emitting any logs due to buffering.
ENV PYTHONUNBUFFERED=1

# Create directory for patched packages and set permissions
RUN mkdir -p /app/patched_packages && \
    chmod 777 /app/patched_packages

# Ensure patches are applied to the application.
ENV PYTHONPATH=/app/patched_packages:$PYTHONPATH

WORKDIR /app

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/bin/bash" \
    --no-create-home \
    --uid "${UID}" \
    appuser

# Install necessary packages and build tools
#RUN apt-get update && apt-get install -y \
#    build-essential \
#    gcc \
#    && apt-get clean \
#    && rm -rf /var/lib/apt/lists/*

# Create logs directory and set permissions
RUN mkdir -p /app/logs && chown -R appuser:appuser /app/logs

# Install Python dependencies.

# Download dependencies as a separate step to take advantage of Docker's caching.
# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds.
# Leverage a bind mount to requirements.txt to avoid having to copy them into
# into this layer.

COPY requirements.txt /app/
RUN python -m pip install -r /app/requirements.txt

# Copy the source code into the container.
COPY eveai_beat /app/eveai_beat
COPY common /app/common
COPY config /app/config
COPY scripts /app/scripts
COPY patched_packages /app/patched_packages
COPY --chown=root:root scripts/entrypoint_no_db.sh /app/scripts/

# Set ownership of the application directory to the non-privileged user
RUN chown -R appuser:appuser /app

# Set entrypoint and command
ENTRYPOINT ["/app/scripts/entrypoint_no_db.sh"]
CMD ["/app/scripts/start_eveai_beat.sh"]
@@ -34,6 +34,7 @@ RUN apt-get update && apt-get install -y \
    build-essential \
    gcc \
    postgresql-client \
+   curl \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

@@ -45,7 +46,7 @@ RUN mkdir -p /app/logs && chown -R appuser:appuser /app/logs
# Leverage a bind mount to requirements.txt to avoid having to copy them into
# into this layer.

-COPY ../../requirements.txt /app/
+COPY requirements.txt /app/
RUN python -m pip install -r requirements.txt

# Copy the source code into the container.
docker/eveai_entitlements/Dockerfile (new file, 69 lines)
@@ -0,0 +1,69 @@
ARG PYTHON_VERSION=3.12.3
FROM python:${PYTHON_VERSION}-slim as base

# Prevents Python from writing pyc files.
ENV PYTHONDONTWRITEBYTECODE=1

# Keeps Python from buffering stdout and stderr to avoid situations where
# the application crashes without emitting any logs due to buffering.
ENV PYTHONUNBUFFERED=1

# Create directory for patched packages and set permissions
RUN mkdir -p /app/patched_packages && \
    chmod 777 /app/patched_packages

# Ensure patches are applied to the application.
ENV PYTHONPATH=/app/patched_packages:$PYTHONPATH

WORKDIR /app

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/bin/bash" \
    --no-create-home \
    --uid "${UID}" \
    appuser

# Install necessary packages and build tools
RUN apt-get update && apt-get install -y \
    build-essential \
    gcc \
    postgresql-client \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Create logs directory and set permissions
RUN mkdir -p /app/logs && chown -R appuser:appuser /app/logs

# Install Python dependencies.

# Download dependencies as a separate step to take advantage of Docker's caching.
# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds.
# Leverage a bind mount to requirements.txt to avoid having to copy them into
# into this layer.

COPY requirements.txt /app/
RUN python -m pip install -r /app/requirements.txt

# Copy the source code into the container.
COPY eveai_entitlements /app/eveai_entitlements
COPY common /app/common
COPY config /app/config
COPY scripts /app/scripts
COPY patched_packages /app/patched_packages
COPY --chown=root:root scripts/entrypoint.sh /app/scripts/

# Set permissions for entrypoint script
RUN chmod 777 /app/scripts/entrypoint.sh

# Set ownership of the application directory to the non-privileged user
RUN chown -R appuser:appuser /app

# Set entrypoint and command
ENTRYPOINT ["/app/scripts/entrypoint.sh"]
CMD ["/app/scripts/start_eveai_entitlements.sh"]
docker/flower/Dockerfile (new file, 34 lines)
@@ -0,0 +1,34 @@
ARG PYTHON_VERSION=3.12.3
FROM python:${PYTHON_VERSION}-slim as base

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

WORKDIR /app

ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/bin/bash" \
    --no-create-home \
    --uid "${UID}" \
    appuser

RUN apt-get update && apt-get install -y \
    build-essential \
    gcc \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

COPY requirements.txt /app/
RUN pip install --no-cache-dir -r requirements.txt

COPY . /app
COPY scripts/start_flower.sh /app/start_flower.sh
RUN chmod a+x /app/start_flower.sh

USER appuser

CMD ["/app/start_flower.sh"]
@@ -10,6 +10,9 @@ COPY ../../nginx/mime.types /etc/nginx/mime.types
# Copy static & public files
RUN mkdir -p /etc/nginx/static /etc/nginx/public
COPY ../../nginx/static /etc/nginx/static
+COPY ../../integrations/Wordpress/eveai-chat-widget/css/eveai-chat-style.css /etc/nginx/static/css/
+COPY ../../integrations/Wordpress/eveai-chat-widget/js/eveai-chat-widget.js /etc/nginx/static/js/
+COPY ../../integrations/Wordpress/eveai-chat-widget/js/eveai-sdk.js /etc/nginx/static/js
COPY ../../nginx/public /etc/nginx/public

# Copy site-specific configurations
62
docker/release_and_tag_eveai.sh
Executable file
62
docker/release_and_tag_eveai.sh
Executable file
@@ -0,0 +1,62 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Initialize variables
|
||||||
|
RELEASE_VERSION=""
|
||||||
|
RELEASE_MESSAGE=""
|
||||||
|
DOCKER_ACCOUNT="josakola" # Your Docker account name
|
||||||
|
|
||||||
|
# Parse input arguments
|
||||||
|
while getopts r:m: flag
|
||||||
|
do
|
||||||
|
case "${flag}" in
|
||||||
|
r) RELEASE_VERSION=${OPTARG};;
|
||||||
|
m) RELEASE_MESSAGE=${OPTARG};;
|
||||||
|
*)
|
||||||
|
echo "Usage: $0 -r <release_version> -m <release_message>"
|
||||||
|
exit 1 ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Ensure both version and message are provided
|
||||||
|
if [ -z "$RELEASE_VERSION" ]; then
|
||||||
|
echo "Error: Release version not provided. Use -r <release_version>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$RELEASE_MESSAGE" ]; then
|
||||||
|
echo "Error: Release message not provided. Use -m <release_message>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Path to your docker-compose file
|
||||||
|
DOCKER_COMPOSE_FILE="compose_dev.yaml"
|
||||||
|
|
||||||
|
# Get all the images defined in docker-compose
|
||||||
|
IMAGES=$(docker compose -f $DOCKER_COMPOSE_FILE config | grep 'image:' | awk '{ print $2 }')
|
||||||
|
|
||||||
|
# Start tagging only relevant images
|
||||||
|
for DOCKER_IMAGE in $IMAGES; do
|
||||||
|
# Check if the image belongs to your Docker account and ends with :latest
|
||||||
|
if [[ $DOCKER_IMAGE == $DOCKER_ACCOUNT* && $DOCKER_IMAGE == *:latest ]]; then
|
||||||
|
# Remove the ":latest" tag to use the base image name
|
||||||
|
BASE_IMAGE=${DOCKER_IMAGE%:latest}
|
||||||
|
|
||||||
|
echo "Tagging Docker image: $BASE_IMAGE with version: $RELEASE_VERSION"
|
||||||
|
|
||||||
|
# Tag the 'latest' image with the new release version
|
||||||
|
docker tag $DOCKER_IMAGE $BASE_IMAGE:$RELEASE_VERSION
|
||||||
|
|
||||||
|
# Push the newly tagged image to Docker Hub
|
||||||
|
docker push $BASE_IMAGE:$RELEASE_VERSION
|
||||||
|
else
|
||||||
|
echo "Skipping image: $DOCKER_IMAGE (not part of $DOCKER_ACCOUNT or not tagged as latest)"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Tag the Git repository with the release version
|
||||||
|
echo "Tagging Git repository with version: $RELEASE_VERSION"
|
||||||
|
git tag -a v$RELEASE_VERSION -m "Release $RELEASE_VERSION: $RELEASE_MESSAGE"
|
||||||
|
git push origin v$RELEASE_VERSION
|
||||||
|
|
||||||
|
echo -e "\033[35mRelease process completed for version: $RELEASE_VERSION \033[0m"
|
||||||
|
echo -e "\033[35mFinished at $(date +"%d/%m/%Y %H:%M:%S")\033[0m"
|
||||||
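The script above tags every `josakola/*:latest` image defined in `compose_dev.yaml` with the release version, pushes the new tags, and then creates and pushes a matching annotated Git tag. A minimal invocation sketch follows; the version and message are made-up examples, and running from the `docker/` directory is an assumption based on the relative compose file path:

```python
# Normally run directly from a shell; shown via subprocess only for completeness.
# The release version and message below are placeholders.
import subprocess

subprocess.run(
    ["./release_and_tag_eveai.sh", "-r", "1.0.15", "-m", "Example release notes"],
    cwd="docker",   # assumption: compose_dev.yaml is resolved relative to this directory
    check=True,
)
```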
@@ -1,4 +1,114 @@
|
|||||||
# from flask import Blueprint, request
|
from flask import Flask, jsonify, request
|
||||||
#
|
from flask_jwt_extended import get_jwt_identity, verify_jwt_in_request
|
||||||
# public_api_bp = Blueprint("public", __name__, url_prefix="/api/v1")
|
from common.extensions import db, api_rest, jwt, minio_client, simple_encryption
|
||||||
# tenant_api_bp = Blueprint("tenant", __name__, url_prefix="/api/v1/tenant")
|
import os
|
||||||
|
import logging.config
|
||||||
|
|
||||||
|
from common.utils.database import Database
|
||||||
|
from config.logging_config import LOGGING
|
||||||
|
from .api.document_api import document_ns
|
||||||
|
from .api.auth import auth_ns
|
||||||
|
from config.config import get_config
|
||||||
|
from common.utils.celery_utils import make_celery, init_celery
|
||||||
|
from common.utils.eveai_exceptions import EveAIException
|
||||||
|
|
||||||
|
|
||||||
|
def create_app(config_file=None):
|
||||||
|
app = Flask(__name__)
|
||||||
|
|
||||||
|
environment = os.getenv('FLASK_ENV', 'development')
|
||||||
|
|
||||||
|
match environment:
|
||||||
|
case 'development':
|
||||||
|
app.config.from_object(get_config('dev'))
|
||||||
|
case 'production':
|
||||||
|
app.config.from_object(get_config('prod'))
|
||||||
|
case _:
|
||||||
|
app.config.from_object(get_config('dev'))
|
||||||
|
|
||||||
|
app.config['SESSION_KEY_PREFIX'] = 'eveai_api_'
|
||||||
|
|
||||||
|
app.celery = make_celery(app.name, app.config)
|
||||||
|
init_celery(app.celery, app)
|
||||||
|
|
||||||
|
logging.config.dictConfig(LOGGING)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
logger.info("eveai_api starting up")
|
||||||
|
|
||||||
|
# Register Necessary Extensions
|
||||||
|
register_extensions(app)
|
||||||
|
|
||||||
|
# register Namespaces
|
||||||
|
register_namespaces(api_rest)
|
||||||
|
|
||||||
|
# Register Blueprints
|
||||||
|
register_blueprints(app)
|
||||||
|
|
||||||
|
# Error handler for the API
|
||||||
|
@app.errorhandler(EveAIException)
|
||||||
|
def handle_eveai_exception(error):
|
||||||
|
return {'message': str(error)}, error.status_code
|
||||||
|
|
||||||
|
@app.before_request
|
||||||
|
def before_request():
|
||||||
|
app.logger.debug(f'Before request: {request.method} {request.path}')
|
||||||
|
app.logger.debug(f'Request URL: {request.url}')
|
||||||
|
app.logger.debug(f'Request headers: {dict(request.headers)}')
|
||||||
|
|
||||||
|
# Log JSON data if the content type is application/json
|
||||||
|
if request.is_json:
|
||||||
|
app.logger.debug(f'JSON data: {request.json}')
|
||||||
|
|
||||||
|
# Log raw data for other content types
|
||||||
|
if request.data:
|
||||||
|
app.logger.debug(f'Raw data: {request.data}')
|
||||||
|
|
||||||
|
# Check if this is a request to the token endpoint
|
||||||
|
if request.path == '/api/v1/auth/token' and request.method == 'POST':
|
||||||
|
app.logger.debug('Token request detected, skipping JWT verification')
|
||||||
|
return
|
||||||
|
|
||||||
|
# Check if this is a health check request
|
||||||
|
if request.path.startswith('/_healthz') or request.path.startswith('/healthz'):
|
||||||
|
app.logger.debug('Health check request detected, skipping JWT verification')
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
verify_jwt_in_request(optional=True)
|
||||||
|
tenant_id = get_jwt_identity()
|
||||||
|
app.logger.debug(f'Tenant ID from JWT: {tenant_id}')
|
||||||
|
|
||||||
|
if tenant_id:
|
||||||
|
Database(tenant_id).switch_schema()
|
||||||
|
app.logger.debug(f'Switched to schema for tenant {tenant_id}')
|
||||||
|
else:
|
||||||
|
app.logger.debug('No tenant ID found in JWT')
|
||||||
|
except Exception as e:
|
||||||
|
app.logger.error(f'Error in before_request: {str(e)}')
|
||||||
|
# Don't raise the exception here, let the request continue
|
||||||
|
# The appropriate error handling will be done in the specific endpoints
|
||||||
|
|
||||||
|
@app.route('/api/v1')
|
||||||
|
def swagger():
|
||||||
|
return api_rest.render_doc()
|
||||||
|
|
||||||
|
return app
|
||||||
|
|
||||||
|
|
||||||
|
def register_extensions(app):
|
||||||
|
db.init_app(app)
|
||||||
|
api_rest.init_app(app, title='EveAI API', version='1.0', description='EveAI API')
|
||||||
|
jwt.init_app(app)
|
||||||
|
minio_client.init_app(app)
|
||||||
|
simple_encryption.init_app(app)
|
||||||
|
|
||||||
|
|
||||||
|
def register_namespaces(app):
|
||||||
|
api_rest.add_namespace(document_ns, path='/api/v1/documents')
|
||||||
|
api_rest.add_namespace(auth_ns, path='/api/v1/auth')
|
||||||
|
|
||||||
|
|
||||||
|
def register_blueprints(app):
|
||||||
|
from .views.healthz_views import healthz_bp
|
||||||
|
app.register_blueprint(healthz_bp)
|
||||||
|
|
||||||
|
|||||||
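The factory above selects its config from FLASK_ENV, wires up Celery, logging, the REST namespaces, and a before_request hook that optionally verifies a JWT and switches the database schema to the tenant identified by the token. A minimal sketch of serving the API with this factory is shown below; the port and the development-server usage are assumptions, not part of the diff:

```python
# Development-only sketch; in production the app would normally sit behind a WSGI server.
from eveai_api import create_app  # the package introduced in this diff

app = create_app()

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000, debug=True)  # port is a placeholder
```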
75
eveai_api/api/auth.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from flask_restx import Namespace, Resource, fields
|
||||||
|
from flask_jwt_extended import create_access_token
|
||||||
|
from common.models.user import Tenant
|
||||||
|
from common.extensions import simple_encryption
|
||||||
|
from flask import current_app, request
|
||||||
|
|
||||||
|
auth_ns = Namespace('auth', description='Authentication related operations')
|
||||||
|
|
||||||
|
token_model = auth_ns.model('Token', {
|
||||||
|
'tenant_id': fields.Integer(required=True, description='Tenant ID'),
|
||||||
|
'api_key': fields.String(required=True, description='API Key')
|
||||||
|
})
|
||||||
|
|
||||||
|
token_response = auth_ns.model('TokenResponse', {
|
||||||
|
'access_token': fields.String(description='JWT access token'),
|
||||||
|
'expires_in': fields.Integer(description='Token expiration time in seconds')
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@auth_ns.route('/token')
|
||||||
|
class Token(Resource):
|
||||||
|
@auth_ns.expect(token_model)
|
||||||
|
@auth_ns.response(200, 'Success', token_response)
|
||||||
|
@auth_ns.response(400, 'Validation Error')
|
||||||
|
@auth_ns.response(401, 'Unauthorized')
|
||||||
|
@auth_ns.response(404, 'Tenant Not Found')
|
||||||
|
def post(self):
|
||||||
|
"""
|
||||||
|
Get JWT token
|
||||||
|
"""
|
||||||
|
current_app.logger.debug(f"Token endpoint called with data: {request.json}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
tenant_id = auth_ns.payload['tenant_id']
|
||||||
|
api_key = auth_ns.payload['api_key']
|
||||||
|
except KeyError as e:
|
||||||
|
current_app.logger.error(f"Missing required field: {e}")
|
||||||
|
return {'message': f"Missing required field: {e}"}, 400
|
||||||
|
|
||||||
|
current_app.logger.debug(f"Querying database for tenant: {tenant_id}")
|
||||||
|
tenant = Tenant.query.get(tenant_id)
|
||||||
|
|
||||||
|
if not tenant:
|
||||||
|
current_app.logger.error(f"Tenant not found: {tenant_id}")
|
||||||
|
return {'message': "Tenant not found"}, 404
|
||||||
|
|
||||||
|
current_app.logger.debug(f"Tenant found: {tenant.id}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
current_app.logger.debug("Attempting to decrypt API key")
|
||||||
|
decrypted_api_key = simple_encryption.decrypt_api_key(tenant.encrypted_api_key)
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f"Error decrypting API key: {e}")
|
||||||
|
return {'message': "Internal server error"}, 500
|
||||||
|
|
||||||
|
if api_key != decrypted_api_key:
|
||||||
|
current_app.logger.error(f"Invalid API key for tenant: {tenant_id}")
|
||||||
|
return {'message': "Invalid API key"}, 401
|
||||||
|
|
||||||
|
# Get the JWT_ACCESS_TOKEN_EXPIRES setting from the app config
|
||||||
|
expires_delta = current_app.config.get('JWT_ACCESS_TOKEN_EXPIRES', timedelta(minutes=15))
|
||||||
|
|
||||||
|
try:
|
||||||
|
current_app.logger.debug(f"Creating access token for tenant: {tenant_id}")
|
||||||
|
access_token = create_access_token(identity=tenant_id, expires_delta=expires_delta)
|
||||||
|
current_app.logger.debug("Access token created successfully")
|
||||||
|
return {
|
||||||
|
'access_token': access_token,
|
||||||
|
'expires_in': expires_delta.total_seconds()
|
||||||
|
}, 200
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f"Error creating access token: {e}")
|
||||||
|
return {'message': "Internal server error"}, 500
|
||||||
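The /auth/token endpoint above exchanges a tenant ID plus API key for a short-lived JWT. A hedged client-side sketch using `requests` follows; the base URL and credentials are placeholders, while the request and response fields match the models defined in the diff:

```python
# Placeholder base URL and credentials; field names follow the Token/TokenResponse models above.
import requests

BASE_URL = "http://localhost:5000"  # assumption: wherever eveai_api is served

resp = requests.post(
    f"{BASE_URL}/api/v1/auth/token",
    json={"tenant_id": 1, "api_key": "your-api-key"},
    timeout=10,
)
resp.raise_for_status()
token = resp.json()["access_token"]

# Subsequent API calls send the token as a Bearer header.
headers = {"Authorization": f"Bearer {token}"}
```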
329
eveai_api/api/document_api.py
Normal file
@@ -0,0 +1,329 @@
|
|||||||
|
import json
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import pytz
|
||||||
|
from flask import current_app, request
|
||||||
|
from flask_restx import Namespace, Resource, fields, reqparse
|
||||||
|
from flask_jwt_extended import jwt_required, get_jwt_identity
|
||||||
|
from werkzeug.datastructures import FileStorage
|
||||||
|
from werkzeug.utils import secure_filename
|
||||||
|
from common.utils.document_utils import (
|
||||||
|
create_document_stack, process_url, start_embedding_task,
|
||||||
|
validate_file_type, EveAIInvalidLanguageException, EveAIDoubleURLException, EveAIUnsupportedFileType,
|
||||||
|
process_multiple_urls, get_documents_list, edit_document, refresh_document, edit_document_version,
|
||||||
|
refresh_document_with_info
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_date(date_str):
|
||||||
|
try:
|
||||||
|
return datetime.fromisoformat(date_str).replace(tzinfo=pytz.UTC)
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError("Invalid date format. Use ISO format (YYYY-MM-DDTHH:MM:SS).")
|
||||||
|
|
||||||
|
|
||||||
|
def validate_json(json_str):
|
||||||
|
try:
|
||||||
|
return json.loads(json_str)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
raise ValueError("Invalid JSON format for user_metadata.")
|
||||||
|
|
||||||
|
|
||||||
|
document_ns = Namespace('documents', description='Document related operations')
|
||||||
|
|
||||||
|
# Define models for request parsing and response serialization
|
||||||
|
upload_parser = reqparse.RequestParser()
|
||||||
|
upload_parser.add_argument('catalog_id', location='form', type=int, required=True, help='The catalog to add the file to')
|
||||||
|
upload_parser.add_argument('file', location='files', type=FileStorage, required=True, help='The file to upload')
|
||||||
|
upload_parser.add_argument('name', location='form', type=str, required=False, help='Name of the document')
|
||||||
|
upload_parser.add_argument('language', location='form', type=str, required=True, help='Language of the document')
|
||||||
|
upload_parser.add_argument('user_context', location='form', type=str, required=False,
|
||||||
|
help='User context for the document')
|
||||||
|
upload_parser.add_argument('valid_from', location='form', type=validate_date, required=False,
|
||||||
|
help='Valid from date for the document (ISO format)')
|
||||||
|
upload_parser.add_argument('user_metadata', location='form', type=validate_json, required=False,
|
||||||
|
help='User metadata for the document (JSON format)')
|
||||||
|
upload_parser.add_argument('catalog_properties', location='form', type=validate_json, required=False,
|
||||||
|
help='The catalog configuration to be passed along (JSON format). Validity against catalog requirements '
|
||||||
|
'is not checked, and is the responsibility of the calling client.')
|
||||||
|
|
||||||
|
add_document_response = document_ns.model('AddDocumentResponse', {
|
||||||
|
'message': fields.String(description='Status message'),
|
||||||
|
'document_id': fields.Integer(description='ID of the created document'),
|
||||||
|
'document_version_id': fields.Integer(description='ID of the created document version'),
|
||||||
|
'task_id': fields.String(description='ID of the embedding task')
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/add_document')
|
||||||
|
class AddDocument(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.expect(upload_parser)
|
||||||
|
@document_ns.response(201, 'Document added successfully', add_document_response)
|
||||||
|
@document_ns.response(400, 'Validation Error')
|
||||||
|
@document_ns.response(500, 'Internal Server Error')
|
||||||
|
def post(self):
|
||||||
|
"""
|
||||||
|
Add a new document
|
||||||
|
"""
|
||||||
|
tenant_id = get_jwt_identity()
|
||||||
|
current_app.logger.info(f'Adding document for tenant {tenant_id}')
|
||||||
|
|
||||||
|
try:
|
||||||
|
args = upload_parser.parse_args()
|
||||||
|
|
||||||
|
file = args['file']
|
||||||
|
filename = secure_filename(file.filename)
|
||||||
|
extension = filename.rsplit('.', 1)[1].lower()
|
||||||
|
|
||||||
|
validate_file_type(extension)
|
||||||
|
|
||||||
|
api_input = {
|
||||||
|
'catalog_id': args.get('catalog_id'),
|
||||||
|
'name': args.get('name') or filename,
|
||||||
|
'language': args.get('language'),
|
||||||
|
'user_context': args.get('user_context'),
|
||||||
|
'valid_from': args.get('valid_from'),
|
||||||
|
'user_metadata': args.get('user_metadata'),
|
||||||
|
'catalog_properties': args.get('catalog_properties'),
|
||||||
|
}
|
||||||
|
|
||||||
|
new_doc, new_doc_vers = create_document_stack(api_input, file, filename, extension, tenant_id)
|
||||||
|
task_id = start_embedding_task(tenant_id, new_doc_vers.id)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'message': f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task_id}.',
|
||||||
|
'document_id': new_doc.id,
|
||||||
|
'document_version_id': new_doc_vers.id,
|
||||||
|
'task_id': task_id
|
||||||
|
}, 201
|
||||||
|
|
||||||
|
except (EveAIInvalidLanguageException, EveAIUnsupportedFileType) as e:
|
||||||
|
current_app.logger.error(f'Error adding document: {str(e)}')
|
||||||
|
document_ns.abort(400, str(e))
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f'Error adding document: {str(e)}')
|
||||||
|
document_ns.abort(500, 'Error adding document')
|
||||||
|
|
||||||
|
|
||||||
|
# Models for AddURL
|
||||||
|
add_url_model = document_ns.model('AddURL', {
|
||||||
|
'catalog_id': fields.Integer(required=True, description='ID of the catalog the URL needs to be added to'),
|
||||||
|
'url': fields.String(required=True, description='URL of the document to add'),
|
||||||
|
'name': fields.String(required=False, description='Name of the document'),
|
||||||
|
'language': fields.String(required=True, description='Language of the document'),
|
||||||
|
'user_context': fields.String(required=False, description='User context for the document'),
|
||||||
|
'valid_from': fields.String(required=False, description='Valid from date for the document'),
|
||||||
|
'user_metadata': fields.String(required=False, description='User metadata for the document'),
|
||||||
|
'system_metadata': fields.String(required=False, description='System metadata for the document'),
|
||||||
|
'catalog_properties': fields.String(required=False, description='The catalog configuration to be passed along (JSON '
|
||||||
|
'format). Validity against catalog requirements '
|
||||||
|
'is not checked, and is the responsibility of the '
|
||||||
|
'calling client.'),
|
||||||
|
})
|
||||||
|
|
||||||
|
add_url_response = document_ns.model('AddURLResponse', {
|
||||||
|
'message': fields.String(description='Status message'),
|
||||||
|
'document_id': fields.Integer(description='ID of the created document'),
|
||||||
|
'document_version_id': fields.Integer(description='ID of the created document version'),
|
||||||
|
'task_id': fields.String(description='ID of the embedding task')
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/add_url')
|
||||||
|
class AddURL(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.expect(add_url_model)
|
||||||
|
@document_ns.response(201, 'Document added successfully', add_url_response)
|
||||||
|
@document_ns.response(400, 'Validation Error')
|
||||||
|
@document_ns.response(500, 'Internal Server Error')
|
||||||
|
def post(self):
|
||||||
|
"""
|
||||||
|
Add a new document from URL
|
||||||
|
"""
|
||||||
|
tenant_id = get_jwt_identity()
|
||||||
|
current_app.logger.info(f'Adding document from URL for tenant {tenant_id}')
|
||||||
|
|
||||||
|
try:
|
||||||
|
args = document_ns.payload
|
||||||
|
file_content, filename, extension = process_url(args['url'], tenant_id)
|
||||||
|
|
||||||
|
api_input = {
|
||||||
|
'catalog_id': args['catalog_id'],
|
||||||
|
'url': args['url'],
|
||||||
|
'name': args.get('name') or filename,
|
||||||
|
'language': args['language'],
|
||||||
|
'user_context': args.get('user_context'),
|
||||||
|
'valid_from': args.get('valid_from'),
|
||||||
|
'user_metadata': args.get('user_metadata'),
|
||||||
|
'catalog_properties': args.get('catalog_properties'),
|
||||||
|
}
|
||||||
|
|
||||||
|
new_doc, new_doc_vers = create_document_stack(api_input, file_content, filename, extension, tenant_id)
|
||||||
|
task_id = start_embedding_task(tenant_id, new_doc_vers.id)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'message': f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task_id}.',
|
||||||
|
'document_id': new_doc.id,
|
||||||
|
'document_version_id': new_doc_vers.id,
|
||||||
|
'task_id': task_id
|
||||||
|
}, 201
|
||||||
|
|
||||||
|
except EveAIDoubleURLException:
|
||||||
|
document_ns.abort(400, f'A document with URL {args["url"]} already exists.')
|
||||||
|
except (EveAIInvalidLanguageException, EveAIUnsupportedFileType) as e:
|
||||||
|
document_ns.abort(400, str(e))
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f'Error adding document from URL: {str(e)}')
|
||||||
|
document_ns.abort(500, 'Error adding document from URL')
|
||||||
|
|
||||||
|
|
||||||
|
document_list_model = document_ns.model('DocumentList', {
|
||||||
|
'id': fields.Integer(description='Document ID'),
|
||||||
|
'name': fields.String(description='Document name'),
|
||||||
|
'valid_from': fields.DateTime(description='Valid from date'),
|
||||||
|
'valid_to': fields.DateTime(description='Valid to date'),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/list')
|
||||||
|
class DocumentList(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.doc('list_documents')
|
||||||
|
@document_ns.marshal_list_with(document_list_model, envelope='documents')
|
||||||
|
def get(self):
|
||||||
|
"""List all documents"""
|
||||||
|
page = request.args.get('page', 1, type=int)
|
||||||
|
per_page = request.args.get('per_page', 10, type=int)
|
||||||
|
pagination = get_documents_list(page, per_page)
|
||||||
|
return pagination.items, 200
|
||||||
|
|
||||||
|
|
||||||
|
edit_document_model = document_ns.model('EditDocument', {
|
||||||
|
'name': fields.String(required=True, description='New name for the document'),
|
||||||
|
'valid_from': fields.DateTime(required=False, description='New valid from date'),
|
||||||
|
'valid_to': fields.DateTime(required=False, description='New valid to date'),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/<int:document_id>')
|
||||||
|
class DocumentResource(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.doc('edit_document')
|
||||||
|
@document_ns.expect(edit_document_model)
|
||||||
|
@document_ns.response(200, 'Document updated successfully')
|
||||||
|
def put(self, document_id):
|
||||||
|
"""Edit a document"""
|
||||||
|
data = request.json
|
||||||
|
updated_doc, error = edit_document(document_id, data['name'], data.get('valid_from'), data.get('valid_to'))
|
||||||
|
if updated_doc:
|
||||||
|
return {'message': f'Document {updated_doc.id} updated successfully'}, 200
|
||||||
|
else:
|
||||||
|
return {'message': f'Error updating document: {error}'}, 400
|
||||||
|
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.doc('refresh_document')
|
||||||
|
@document_ns.response(200, 'Document refreshed successfully')
|
||||||
|
def post(self, document_id):
|
||||||
|
"""Refresh a document"""
|
||||||
|
tenant_id = get_jwt_identity()
|
||||||
|
new_version, result = refresh_document(document_id, tenant_id)
|
||||||
|
if new_version:
|
||||||
|
return {'message': f'Document refreshed. New version: {new_version.id}. Task ID: {result}'}, 200
|
||||||
|
else:
|
||||||
|
return {'message': f'Error refreshing document: {result}'}, 400
|
||||||
|
|
||||||
|
|
||||||
|
edit_document_version_model = document_ns.model('EditDocumentVersion', {
|
||||||
|
'user_context': fields.String(required=True, description='New user context for the document version'),
|
||||||
|
'catalog_properties': fields.String(required=True, description='New catalog properties for the document version'),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/version/<int:version_id>')
|
||||||
|
class DocumentVersionResource(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.doc('edit_document_version')
|
||||||
|
@document_ns.expect(edit_document_version_model)
|
||||||
|
@document_ns.response(200, 'Document version updated successfully')
|
||||||
|
def put(self, version_id):
|
||||||
|
"""Edit a document version"""
|
||||||
|
data = request.json
|
||||||
|
updated_version, error = edit_document_version(version_id, data['user_context'], data.get('catalog_properties'))
|
||||||
|
if updated_version:
|
||||||
|
return {'message': f'Document Version {updated_version.id} updated successfully'}, 200
|
||||||
|
else:
|
||||||
|
return {'message': f'Error updating document version: {error}'}, 400
|
||||||
|
|
||||||
|
|
||||||
|
# Define the model for the request body of refresh_with_info
|
||||||
|
refresh_document_model = document_ns.model('RefreshDocument', {
|
||||||
|
'name': fields.String(required=False, description='New name for the document'),
|
||||||
|
'language': fields.String(required=False, description='Language of the document'),
|
||||||
|
'user_context': fields.String(required=False, description='User context for the document'),
|
||||||
|
'user_metadata': fields.Raw(required=False, description='User metadata for the document'),
|
||||||
|
'catalog_properties': fields.Raw(required=False, description='Catalog properties for the document'),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/<int:document_id>/refresh')
|
||||||
|
class RefreshDocument(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.response(200, 'Document refreshed successfully')
|
||||||
|
@document_ns.response(404, 'Document not found')
|
||||||
|
def post(self, document_id):
|
||||||
|
"""
|
||||||
|
Refresh a document without additional information
|
||||||
|
"""
|
||||||
|
tenant_id = get_jwt_identity()
|
||||||
|
current_app.logger.info(f'Refreshing document {document_id} for tenant {tenant_id}')
|
||||||
|
|
||||||
|
try:
|
||||||
|
new_version, result = refresh_document(document_id, tenant_id)
|
||||||
|
|
||||||
|
if new_version:
|
||||||
|
return {
|
||||||
|
'message': f'Document refreshed successfully. New version: {new_version.id}. Task ID: {result}',
|
||||||
|
'document_id': document_id,
|
||||||
|
'document_version_id': new_version.id,
|
||||||
|
'task_id': result
|
||||||
|
}, 200
|
||||||
|
else:
|
||||||
|
return {'message': f'Error refreshing document: {result}'}, 400
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f'Error refreshing document: {str(e)}')
|
||||||
|
return {'message': 'Internal server error'}, 500
|
||||||
|
|
||||||
|
|
||||||
|
@document_ns.route('/<int:document_id>/refresh_with_info')
|
||||||
|
class RefreshDocumentWithInfo(Resource):
|
||||||
|
@jwt_required()
|
||||||
|
@document_ns.expect(refresh_document_model)
|
||||||
|
@document_ns.response(200, 'Document refreshed successfully')
|
||||||
|
@document_ns.response(400, 'Validation Error')
|
||||||
|
@document_ns.response(404, 'Document not found')
|
||||||
|
def post(self, document_id):
|
||||||
|
"""
|
||||||
|
Refresh a document with new information
|
||||||
|
"""
|
||||||
|
tenant_id = get_jwt_identity()
|
||||||
|
current_app.logger.info(f'Refreshing document {document_id} with info for tenant {tenant_id}')
|
||||||
|
|
||||||
|
try:
|
||||||
|
api_input = request.json
|
||||||
|
new_version, result = refresh_document_with_info(document_id, tenant_id, api_input)
|
||||||
|
|
||||||
|
if new_version:
|
||||||
|
return {
|
||||||
|
'message': f'Document refreshed successfully with new info. New version: {new_version.id}. Task ID: {result}',
|
||||||
|
'document_id': document_id,
|
||||||
|
'document_version_id': new_version.id,
|
||||||
|
'task_id': result
|
||||||
|
}, 200
|
||||||
|
else:
|
||||||
|
return {'message': f'Error refreshing document with info: {result}'}, 400
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f'Error refreshing document with info: {str(e)}')
|
||||||
|
return {'message': 'Internal server error'}, 500
|
||||||
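Taken together, the namespace above exposes JWT-protected endpoints for multipart upload, URL ingestion, listing, editing, and refreshing documents. A rough client sketch for the two ingestion endpoints follows; the base URL, bearer token, file path, and catalog ID are placeholders, while field names follow the parser and models defined in the diff:

```python
# Placeholders throughout; `headers` carries the Bearer token from the auth example earlier.
import requests

BASE_URL = "http://localhost:5000"  # assumption
headers = {"Authorization": "Bearer <access_token>"}

# Multipart upload to /documents/add_document (catalog_id, file, language are required).
with open("manual.pdf", "rb") as fh:
    resp = requests.post(
        f"{BASE_URL}/api/v1/documents/add_document",
        headers=headers,
        data={"catalog_id": "1", "language": "en", "name": "Product manual"},
        files={"file": fh},
        timeout=60,
    )
print(resp.status_code, resp.json())  # expects document_id, document_version_id, task_id

# JSON ingestion via /documents/add_url (catalog_id, url, language are required).
resp = requests.post(
    f"{BASE_URL}/api/v1/documents/add_url",
    headers=headers,
    json={"catalog_id": 1, "url": "https://example.com/page", "language": "en"},
    timeout=60,
)
print(resp.status_code, resp.json())
```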
@@ -1,7 +0,0 @@
|
|||||||
from flask import request
|
|
||||||
from flask.views import MethodView
|
|
||||||
|
|
||||||
class RegisterAPI(MethodView):
|
|
||||||
def post(self):
|
|
||||||
username = request.json['username']
|
|
||||||
|
|
||||||
82
eveai_api/views/healthz_views.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
from flask import Blueprint, current_app, request
|
||||||
|
from flask_healthz import HealthError
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
from celery.exceptions import TimeoutError as CeleryTimeoutError
|
||||||
|
from prometheus_client import Counter, Histogram, generate_latest, CONTENT_TYPE_LATEST
|
||||||
|
from common.extensions import db, metrics, minio_client
|
||||||
|
from common.utils.celery_utils import current_celery
|
||||||
|
|
||||||
|
healthz_bp = Blueprint('healthz', __name__, url_prefix='/_healthz')
|
||||||
|
|
||||||
|
# Define Prometheus metrics
|
||||||
|
api_request_counter = Counter('api_request_count', 'API Request Count', ['method', 'endpoint'])
|
||||||
|
api_request_latency = Histogram('api_request_latency_seconds', 'API Request latency')
|
||||||
|
|
||||||
|
|
||||||
|
def liveness():
|
||||||
|
try:
|
||||||
|
# Basic check to see if the app is running
|
||||||
|
return True
|
||||||
|
except Exception:
|
||||||
|
raise HealthError("Liveness check failed")
|
||||||
|
|
||||||
|
|
||||||
|
def readiness():
|
||||||
|
checks = {
|
||||||
|
"database": check_database(),
|
||||||
|
# "celery": check_celery(),
|
||||||
|
"minio": check_minio(),
|
||||||
|
# Add more checks as needed
|
||||||
|
}
|
||||||
|
|
||||||
|
if not all(checks.values()):
|
||||||
|
raise HealthError("Readiness check failed")
|
||||||
|
|
||||||
|
|
||||||
|
def check_database():
|
||||||
|
try:
|
||||||
|
# Perform a simple database query
|
||||||
|
db.session.execute("SELECT 1")
|
||||||
|
return True
|
||||||
|
except SQLAlchemyError:
|
||||||
|
current_app.logger.error("Database check failed", exc_info=True)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def check_celery():
|
||||||
|
try:
|
||||||
|
# Send a simple task to Celery
|
||||||
|
result = current_celery.send_task('ping', queue='eveai_workers.ping')
|
||||||
|
response = result.get(timeout=10) # Wait for up to 10 seconds for a response
|
||||||
|
return response == 'pong'
|
||||||
|
except CeleryTimeoutError:
|
||||||
|
current_app.logger.error("Celery check timed out", exc_info=True)
|
||||||
|
return False
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f"Celery check failed: {str(e)}", exc_info=True)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def check_minio():
|
||||||
|
try:
|
||||||
|
# List buckets to check if MinIO is accessible
|
||||||
|
minio_client.list_buckets()
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f"MinIO check failed: {str(e)}", exc_info=True)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
@healthz_bp.route('/metrics')
|
||||||
|
@metrics.do_not_track()
|
||||||
|
def prometheus_metrics():
|
||||||
|
return generate_latest(), 200, {'Content-Type': CONTENT_TYPE_LATEST}
|
||||||
|
|
||||||
|
|
||||||
|
def init_healtz(app):
|
||||||
|
app.config.update(
|
||||||
|
HEALTHZ={
|
||||||
|
"live": "healthz_views.liveness",
|
||||||
|
"ready": "healthz_views.readiness",
|
||||||
|
}
|
||||||
|
)
|
||||||
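Besides the liveness/readiness helpers wired into flask-healthz, the blueprint above serves Prometheus metrics under /_healthz/metrics. A small scraping sketch follows; the base URL is a placeholder, and the counter name comes from the metric definitions in the diff:

```python
# Placeholder base URL; the /metrics route is registered on healthz_bp above.
import requests

resp = requests.get("http://localhost:5000/_healthz/metrics", timeout=5)
resp.raise_for_status()
for line in resp.text.splitlines():
    if line.startswith("api_request_count"):
        print(line)  # per-method/endpoint request counters, if any have been recorded
```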
@@ -7,9 +7,11 @@ from werkzeug.middleware.proxy_fix import ProxyFix
|
|||||||
import logging.config
|
import logging.config
|
||||||
|
|
||||||
from common.extensions import (db, migrate, bootstrap, security, mail, login_manager, cors, csrf, session,
|
from common.extensions import (db, migrate, bootstrap, security, mail, login_manager, cors, csrf, session,
|
||||||
minio_client, simple_encryption)
|
minio_client, simple_encryption, metrics)
|
||||||
from common.models.user import User, Role, Tenant, TenantDomain
|
from common.models.user import User, Role, Tenant, TenantDomain
|
||||||
import common.models.interaction
|
import common.models.interaction
|
||||||
|
import common.models.entitlements
|
||||||
|
import common.models.document
|
||||||
from common.utils.nginx_utils import prefixed_url_for
|
from common.utils.nginx_utils import prefixed_url_for
|
||||||
from config.logging_config import LOGGING
|
from config.logging_config import LOGGING
|
||||||
from common.utils.security import set_tenant_session_data
|
from common.utils.security import set_tenant_session_data
|
||||||
@@ -17,6 +19,7 @@ from .errors import register_error_handlers
|
|||||||
from common.utils.celery_utils import make_celery, init_celery
|
from common.utils.celery_utils import make_celery, init_celery
|
||||||
from common.utils.template_filters import register_filters
|
from common.utils.template_filters import register_filters
|
||||||
from config.config import get_config
|
from config.config import get_config
|
||||||
|
from eveai_app.views.security_forms import ResetPasswordForm
|
||||||
|
|
||||||
|
|
||||||
def create_app(config_file=None):
|
def create_app(config_file=None):
|
||||||
@@ -26,7 +29,6 @@ def create_app(config_file=None):
|
|||||||
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1)
|
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1)
|
||||||
|
|
||||||
environment = os.getenv('FLASK_ENV', 'development')
|
environment = os.getenv('FLASK_ENV', 'development')
|
||||||
print(environment)
|
|
||||||
|
|
||||||
match environment:
|
match environment:
|
||||||
case 'development':
|
case 'development':
|
||||||
@@ -37,6 +39,7 @@ def create_app(config_file=None):
|
|||||||
app.config.from_object(get_config('dev'))
|
app.config.from_object(get_config('dev'))
|
||||||
|
|
||||||
app.config['SESSION_KEY_PREFIX'] = 'eveai_app_'
|
app.config['SESSION_KEY_PREFIX'] = 'eveai_app_'
|
||||||
|
app.config['SECURITY_RESET_PASSWORD_FORM'] = ResetPasswordForm
|
||||||
|
|
||||||
try:
|
try:
|
||||||
os.makedirs(app.instance_path)
|
os.makedirs(app.instance_path)
|
||||||
@@ -47,8 +50,6 @@ def create_app(config_file=None):
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
logger.info("eveai_app starting up")
|
logger.info("eveai_app starting up")
|
||||||
logger.debug("start config")
|
|
||||||
logger.debug(app.config)
|
|
||||||
|
|
||||||
# Register extensions
|
# Register extensions
|
||||||
|
|
||||||
@@ -93,14 +94,11 @@ def create_app(config_file=None):
|
|||||||
}
|
}
|
||||||
return jsonify(response), 500
|
return jsonify(response), 500
|
||||||
|
|
||||||
@app.before_request
|
# @app.before_request
|
||||||
def before_request():
|
# def before_request():
|
||||||
# app.logger.debug(f"Before request - Session ID: {session.sid}")
|
# # app.logger.debug(f"Before request - Session ID: {session.sid}")
|
||||||
app.logger.debug(f"Before request - Session data: {session}")
|
# app.logger.debug(f"Before request - Session data: {session}")
|
||||||
app.logger.debug(f"Before request - Request headers: {request.headers}")
|
# app.logger.debug(f"Before request - Request headers: {request.headers}")
|
||||||
|
|
||||||
# Register API
|
|
||||||
register_api(app)
|
|
||||||
|
|
||||||
# Register template filters
|
# Register template filters
|
||||||
register_filters(app)
|
register_filters(app)
|
||||||
@@ -118,10 +116,10 @@ def register_extensions(app):
|
|||||||
csrf.init_app(app)
|
csrf.init_app(app)
|
||||||
login_manager.init_app(app)
|
login_manager.init_app(app)
|
||||||
cors.init_app(app)
|
cors.init_app(app)
|
||||||
# kms_client.init_app(app)
|
|
||||||
simple_encryption.init_app(app)
|
simple_encryption.init_app(app)
|
||||||
session.init_app(app)
|
session.init_app(app)
|
||||||
minio_client.init_app(app)
|
minio_client.init_app(app)
|
||||||
|
metrics.init_app(app)
|
||||||
|
|
||||||
|
|
||||||
# Register Blueprints
|
# Register Blueprints
|
||||||
@@ -136,9 +134,11 @@ def register_blueprints(app):
|
|||||||
app.register_blueprint(security_bp)
|
app.register_blueprint(security_bp)
|
||||||
from .views.interaction_views import interaction_bp
|
from .views.interaction_views import interaction_bp
|
||||||
app.register_blueprint(interaction_bp)
|
app.register_blueprint(interaction_bp)
|
||||||
|
from .views.entitlements_views import entitlements_bp
|
||||||
|
app.register_blueprint(entitlements_bp)
|
||||||
|
from .views.administration_views import administration_bp
|
||||||
|
app.register_blueprint(administration_bp)
|
||||||
|
from .views.healthz_views import healthz_bp, init_healtz
|
||||||
|
app.register_blueprint(healthz_bp)
|
||||||
|
init_healtz(app)
|
||||||
|
|
||||||
|
|
||||||
def register_api(app):
|
|
||||||
pass
|
|
||||||
# from . import api
|
|
||||||
# app.register_blueprint(api.bp, url_prefix='/api')
|
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
22
eveai_app/templates/administration/trigger_actions.html
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from "macros.html" import render_selectable_table, render_pagination, render_field %}
|
||||||
|
{% block title %}Trigger Actions{% endblock %}
|
||||||
|
{% block content_title %}Trigger Actions{% endblock %}
|
||||||
|
{% block content_description %}Manually trigger batch actions{% endblock %}
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<!-- Trigger action Form -->
|
||||||
|
<form method="POST" action="{{ url_for('administration_bp.handle_trigger_action') }}">
|
||||||
|
<div class="form-group mt-3">
|
||||||
|
<button type="submit" name="action" value="update_usages" class="btn btn-secondary">Update Usages</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
@@ -11,9 +11,17 @@
|
|||||||
{{ form.hidden_tag() }}
|
{{ form.hidden_tag() }}
|
||||||
{% set disabled_fields = [] %}
|
{% set disabled_fields = [] %}
|
||||||
{% set exclude_fields = [] %}
|
{% set exclude_fields = [] %}
|
||||||
{% for field in form %}
|
{% for field in form.get_static_fields() %}
|
||||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
{% for collection_name, fields in form.get_dynamic_fields().items() %}
|
||||||
|
{% if fields|length > 0 %}
|
||||||
|
<h4 class="mt-4">{{ collection_name }}</h4>
|
||||||
|
{% endif %}
|
||||||
|
{% for field in fields %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
<button type="submit" class="btn btn-primary">Add Document</button>
|
<button type="submit" class="btn btn-primary">Add Document</button>
|
||||||
</form>
|
</form>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -1,24 +0,0 @@
|
|||||||
{% extends 'base.html' %}
|
|
||||||
{% from "macros.html" import render_field %}
|
|
||||||
|
|
||||||
{% block title %}Add Youtube Document{% endblock %}
|
|
||||||
|
|
||||||
{% block content_title %}Add Youtube Document{% endblock %}
|
|
||||||
{% block content_description %}Add a youtube url and the corresponding document to EveAI. In some cases, url's cannot be loaded directly. Download the html and add it as a document in that case.{% endblock %}
|
|
||||||
|
|
||||||
{% block content %}
|
|
||||||
<form method="post">
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
{% set disabled_fields = [] %}
|
|
||||||
{% set exclude_fields = [] %}
|
|
||||||
{% for field in form %}
|
|
||||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
|
||||||
{% endfor %}
|
|
||||||
<button type="submit" class="btn btn-primary">Add Youtube Document</button>
|
|
||||||
</form>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
|
|
||||||
{% block content_footer %}
|
|
||||||
|
|
||||||
{% endblock %}
|
|
||||||
23
eveai_app/templates/document/catalog.html
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from "macros.html" import render_field %}
|
||||||
|
|
||||||
|
{% block title %}Catalog Registration{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Register Catalog{% endblock %}
|
||||||
|
{% block content_description %}Define a new catalog of documents in Evie's Library{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<form method="post">
|
||||||
|
{{ form.hidden_tag() }}
|
||||||
|
{% set disabled_fields = [] %}
|
||||||
|
{% set exclude_fields = [] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
<button type="submit" class="btn btn-primary">Register Catalog</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
24
eveai_app/templates/document/catalogs.html
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from 'macros.html' import render_selectable_table, render_pagination %}
|
||||||
|
|
||||||
|
{% block title %}Catalogs{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Catalogs{% endblock %}
|
||||||
|
{% block content_description %}View Catalogs for Tenant{% endblock %}
|
||||||
|
{% block content_class %}<div class="col-xl-12 col-lg-5 col-md-7 mx-auto"></div>{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container">
|
||||||
|
<form method="POST" action="{{ url_for('document_bp.handle_catalog_selection') }}">
|
||||||
|
{{ render_selectable_table(headers=["Catalog ID", "Name"], rows=rows, selectable=True, id="catalogsTable") }}
|
||||||
|
<div class="form-group mt-3">
|
||||||
|
<button type="submit" name="action" value="set_session_catalog" class="btn btn-primary">Set Session Catalog</button>
|
||||||
|
<button type="submit" name="action" value="edit_catalog" class="btn btn-primary">Edit Catalog</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
{{ render_pagination(pagination, 'document_bp.catalogs') }}
|
||||||
|
{% endblock %}
|
||||||
@@ -10,7 +10,7 @@
|
|||||||
{% block content %}
|
{% block content %}
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<form method="POST" action="{{ url_for('document_bp.handle_document_version_selection') }}">
|
<form method="POST" action="{{ url_for('document_bp.handle_document_version_selection') }}">
|
||||||
{{ render_selectable_table(headers=["ID", "URL", "File Loc.", "File Name", "File Type", "Process.", "Proces. Start", "Proces. Finish", "Proces. Error"], rows=rows, selectable=True, id="versionsTable") }}
|
{{ render_selectable_table(headers=["ID", "URL", "Object Name", "File Type", "Process.", "Proces. Start", "Proces. Finish", "Proces. Error"], rows=rows, selectable=True, id="versionsTable") }}
|
||||||
<div class="form-group mt-3">
|
<div class="form-group mt-3">
|
||||||
<button type="submit" name="action" value="edit_document_version" class="btn btn-primary">Edit Document Version</button>
|
<button type="submit" name="action" value="edit_document_version" class="btn btn-primary">Edit Document Version</button>
|
||||||
<button type="submit" name="action" value="process_document_version" class="btn btn-danger">Process Document Version</button>
|
<button type="submit" name="action" value="process_document_version" class="btn btn-danger">Process Document Version</button>
|
||||||
|
|||||||
@@ -23,15 +23,23 @@
|
|||||||
|
|
||||||
{{ render_collapsible_section('Filter', 'Filter Options', filter_form) }}
|
{{ render_collapsible_section('Filter', 'Filter Options', filter_form) }}
|
||||||
|
|
||||||
<!-- Document Versions Table -->
|
<div class="form-group mt-3">
|
||||||
{{ render_selectable_sortable_table(
|
<form method="POST" action="{{ url_for('document_bp.handle_document_version_selection') }}">
|
||||||
headers=["ID", "File Type", "Processing", "Processing Start", "Processing Finish", "Processing Error"],
|
<!-- Document Versions Table -->
|
||||||
rows=rows,
|
{{ render_selectable_sortable_table(
|
||||||
selectable=True,
|
headers=["ID", "File Type", "Processing", "Processing Start", "Processing Finish", "Processing Error"],
|
||||||
id="documentVersionsTable",
|
rows=rows,
|
||||||
sort_by=sort_by,
|
selectable=True,
|
||||||
sort_order=sort_order
|
id="documentVersionsTable",
|
||||||
) }}
|
sort_by=sort_by,
|
||||||
|
sort_order=sort_order
|
||||||
|
) }}
|
||||||
|
<div class="form-group mt-4">
|
||||||
|
<button type="submit" name="action" value="edit_document_version" class="btn btn-primary">Edit Document Version</button>
|
||||||
|
<button type="submit" name="action" value="process_document_version" class="btn btn-danger">Process Document Version</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block content_footer %}
|
{% block content_footer %}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{% extends 'base.html' %}
|
{% extends 'base.html' %}
|
||||||
{% from 'macros.html' import render_selectable_table, render_pagination %}
|
{% from 'macros.html' import render_selectable_table, render_pagination, render_filter_field, render_date_filter_field, render_collapsible_section, render_selectable_sortable_table_with_dict_headers %}
|
||||||
|
|
||||||
{% block title %}Documents{% endblock %}
|
{% block title %}Documents{% endblock %}
|
||||||
|
|
||||||
@@ -8,18 +8,88 @@
|
|||||||
{% block content_class %}<div class="col-xl-12 col-lg-5 col-md-7 mx-auto"></div>{% endblock %}
|
{% block content_class %}<div class="col-xl-12 col-lg-5 col-md-7 mx-auto"></div>{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div class="container">
|
<!-- Filter Form -->
|
||||||
<form method="POST" action="{{ url_for('document_bp.handle_document_selection') }}">
|
{% set filter_form %}
|
||||||
{{ render_selectable_table(headers=["Document ID", "Name", "Valid From", "Valid To"], rows=rows, selectable=True, id="documentsTable") }}
|
<form method="GET" action="{{ url_for('document_bp.documents') }}">
|
||||||
<div class="form-group mt-3">
|
{{ render_filter_field('catalog_id', 'Catalog', filter_options['catalog_id'], filters.get('catalog_id', [])) }}
|
||||||
<button type="submit" name="action" value="edit_document" class="btn btn-primary">Edit Document</button>
|
{{ render_filter_field('validity', 'Validity', filter_options['validity'], filters.get('validity', [])) }}
|
||||||
<button type="submit" name="action" value="document_versions" class="btn btn-secondary">Show Document Versions</button>
|
|
||||||
<button type="submit" name="action" value="refresh_document" class="btn btn-secondary">Refresh Document (new version)</button>
|
<button type="submit" class="btn btn-primary">Apply Filters</button>
|
||||||
</div>
|
</form>
|
||||||
</form>
|
{% endset %}
|
||||||
</div>
|
|
||||||
|
{{ render_collapsible_section('Filter', 'Filter Options', filter_form) }}
|
||||||
|
|
||||||
|
<div class="form-group mt-3">
|
||||||
|
<form method="POST" action="{{ url_for('document_bp.handle_document_selection') }}">
|
||||||
|
<!-- Documents Table -->
|
||||||
|
{{ render_selectable_sortable_table_with_dict_headers(
|
||||||
|
headers=[
|
||||||
|
{"text": "ID", "sort": "id"},
|
||||||
|
{"text": "Name", "sort": "name"},
|
||||||
|
{"text": "Catalog", "sort": "catalog_name"},
|
||||||
|
{"text": "Valid From", "sort": "valid_from"},
|
||||||
|
{"text": "Valid To", "sort": "valid_to"}
|
||||||
|
],
|
||||||
|
rows=rows,
|
||||||
|
selectable=True,
|
||||||
|
id="documentsTable",
|
||||||
|
sort_by=sort_by,
|
||||||
|
sort_order=sort_order
|
||||||
|
) }}
|
||||||
|
<div class="form-group mt-4">
|
||||||
|
<button type="submit" name="action" value="edit_document" class="btn btn-primary">Edit Document</button>
|
||||||
|
<button type="submit" name="action" value="document_versions" class="btn btn-secondary">Show Document Versions</button>
|
||||||
|
<button type="submit" name="action" value="refresh_document" class="btn btn-secondary">Refresh Document (new version)</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block content_footer %}
|
{% block content_footer %}
|
||||||
{{ render_pagination(pagination, 'document_bp.documents') }}
|
{{ render_pagination(pagination, 'document_bp.documents') }}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
<script>
|
||||||
|
document.addEventListener('DOMContentLoaded', function() {
|
||||||
|
const table = document.getElementById('documentsTable');
|
||||||
|
const headers = table.querySelectorAll('th.sortable');
|
||||||
|
|
||||||
|
headers.forEach(header => {
|
||||||
|
header.addEventListener('click', function() {
|
||||||
|
const sortBy = this.dataset.sort;
|
||||||
|
let sortOrder = 'asc';
|
||||||
|
|
||||||
|
if (this.querySelector('.fa-sort-up')) {
|
||||||
|
sortOrder = 'desc';
|
||||||
|
} else if (this.querySelector('.fa-sort-down')) {
|
||||||
|
sortOrder = 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
window.location.href = updateQueryStringParameter(window.location.href, 'sort_by', sortBy);
|
||||||
|
window.location.href = updateQueryStringParameter(window.location.href, 'sort_order', sortOrder);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
function updateQueryStringParameter(uri, key, value) {
|
||||||
|
var re = new RegExp("([?&])" + key + "=.*?(&|$)", "i");
|
||||||
|
var separator = uri.indexOf('?') !== -1 ? "&" : "?";
|
||||||
|
if (uri.match(re)) {
|
||||||
|
return uri.replace(re, '$1' + key + "=" + value + '$2');
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return uri + separator + key + "=" + value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table.addEventListener('change', function(event) {
|
||||||
|
if (event.target.type === 'radio') {
|
||||||
|
var selectedRow = event.target.closest('tr');
|
||||||
|
var documentId = selectedRow.cells[1].textContent;
|
||||||
|
console.log('Selected Document ID:', documentId);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endblock %}
|
||||||
35
eveai_app/templates/document/edit_catalog.html
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from "macros.html" import render_field %}
|
||||||
|
|
||||||
|
{% block title %}Edit Catalog{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Edit Catalog{% endblock %}
|
||||||
|
{% block content_description %}Edit a catalog of documents in Evie's Library.
|
||||||
|
When you change chunking or embedding settings, you'll need to manually refresh the library for the changes to take effect immediately.
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<form method="post">
|
||||||
|
{{ form.hidden_tag() }}
|
||||||
|
{% set disabled_fields = ['type'] %}
|
||||||
|
{% set exclude_fields = [] %}
|
||||||
|
<!-- Render Static Fields -->
|
||||||
|
{% for field in form.get_static_fields() %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
<!-- Render Dynamic Fields -->
|
||||||
|
{% for collection_name, fields in form.get_dynamic_fields().items() %}
|
||||||
|
{% if fields|length > 0 %}
|
||||||
|
<h4 class="mt-4">{{ collection_name }}</h4>
|
||||||
|
{% endif %}
|
||||||
|
{% for field in fields %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
|
<button type="submit" class="btn btn-primary">Save Catalog</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
@@ -8,11 +8,17 @@
|
|||||||
{% block content %}
|
{% block content %}
|
||||||
<form method="post">
|
<form method="post">
|
||||||
{{ form.hidden_tag() }}
|
{{ form.hidden_tag() }}
|
||||||
{% set disabled_fields = [] %}
|
{% set disabled_fields = [] %}
|
||||||
{% set exclude_fields = [] %}
|
{% set exclude_fields = [] %}
|
||||||
{% for field in form %}
|
|
||||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
{{ render_field(form.name, disabled_fields, exclude_fields) }}
|
||||||
{% endfor %}
|
{{ render_field(form.valid_from, disabled_fields, exclude_fields) }}
|
||||||
|
{{ render_field(form.valid_to, disabled_fields, exclude_fields) }}
|
||||||
|
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="catalog_name">Catalog</label>
|
||||||
|
<input type="text" class="form-control" id="catalog_name" value="{{ catalog_name }}" readonly>
|
||||||
|
</div>
|
||||||
<button type="submit" class="btn btn-primary">Update Document</button>
|
<button type="submit" class="btn btn-primary">Update Document</button>
|
||||||
</form>
|
</form>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
@@ -8,7 +8,7 @@
|
|||||||
{% block content %}
|
{% block content %}
|
||||||
<form method="post">
|
<form method="post">
|
||||||
{{ form.hidden_tag() }}
|
{{ form.hidden_tag() }}
|
||||||
{% set disabled_fields = ['language', 'system_context'] %}
|
{% set disabled_fields = ['language', 'system_context', 'system_metadata'] %}
|
||||||
{% set exclude_fields = [] %}
|
{% set exclude_fields = [] %}
|
||||||
{% for field in form %}
|
{% for field in form %}
|
||||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
|||||||
33
eveai_app/templates/document/edit_retriever.html
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from "macros.html" import render_field %}
|
||||||
|
|
||||||
|
{% block title %}Edit Retriever{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Edit Retriever{% endblock %}
|
||||||
|
{% block content_description %}Edit a Retriever (for a Catalog){% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<form method="post">
|
||||||
|
{{ form.hidden_tag() }}
|
||||||
|
{% set disabled_fields = ['type'] %}
|
||||||
|
{% set exclude_fields = [] %}
|
||||||
|
<!-- Render Static Fields -->
|
||||||
|
{% for field in form.get_static_fields() %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
<!-- Render Dynamic Fields -->
|
||||||
|
{% for collection_name, fields in form.get_dynamic_fields().items() %}
|
||||||
|
{% if fields|length > 0 %}
|
||||||
|
<h4 class="mt-4">{{ collection_name }}</h4>
|
||||||
|
{% endif %}
|
||||||
|
{% for field in fields %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
|
<button type="submit" class="btn btn-primary">Save Retriever</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
23
eveai_app/templates/document/retriever.html
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from "macros.html" import render_field %}
|
||||||
|
|
||||||
|
{% block title %}Retriever Registration{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Register Retriever{% endblock %}
|
||||||
|
{% block content_description %}Define a new retriever (for a catalog){% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<form method="post">
|
||||||
|
{{ form.hidden_tag() }}
|
||||||
|
{% set disabled_fields = [] %}
|
||||||
|
{% set exclude_fields = [] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_field(field, disabled_fields, exclude_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
<button type="submit" class="btn btn-primary">Register Retriever</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
23
eveai_app/templates/document/retrievers.html
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from 'macros.html' import render_selectable_table, render_pagination %}
|
||||||
|
|
||||||
|
{% block title %}Retrievers{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Retrievers{% endblock %}
|
||||||
|
{% block content_description %}View Retrievers for Tenant{% endblock %}
|
||||||
|
{% block content_class %}<div class="col-xl-12 col-lg-5 col-md-7 mx-auto"></div>{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container">
|
||||||
|
<form method="POST" action="{{ url_for('document_bp.handle_retriever_selection') }}">
|
||||||
|
{{ render_selectable_table(headers=["Retriever ID", "Name", "Type", "Catalog ID"], rows=rows, selectable=True, id="retrieverssTable") }}
|
||||||
|
<div class="form-group mt-3">
|
||||||
|
<button type="submit" name="action" value="edit_retriever" class="btn btn-primary">Edit Retriever</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
{{ render_pagination(pagination, 'document_bp.retrievers') }}
|
||||||
|
{% endblock %}
|
||||||
71
eveai_app/templates/entitlements/edit_license.html
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
{% extends 'base.html' %}
|
||||||
|
{% from "macros.html" import render_field, render_included_field %}
|
||||||
|
|
||||||
|
{% block title %}Edit License for Current Tenant{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Edit License for Current Tenant{% endblock %}
|
||||||
|
{% block content_description %}Edit a License based on the selected License Tier for the current Tenant{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<form method="post">
|
||||||
|
{{ form.hidden_tag() }}
|
||||||
|
{% set main_fields = ['start_date', 'end_date', 'currency', 'yearly_payment', 'basic_fee'] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_included_field(field, disabled_fields=['currency'], include_fields=main_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
<!-- Nav Tabs -->
|
||||||
|
<div class="row mt-5">
|
||||||
|
<div class="col-lg-12">
|
||||||
|
<div class="nav-wrapper position-relative end-0">
|
||||||
|
<ul class="nav nav-pills nav-fill p-1" role="tablist">
|
||||||
|
<li class="nav-item" role="presentation">
|
||||||
|
<a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#storage-tab" role="tab" aria-controls="model-info" aria-selected="true">
|
||||||
|
Storage
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
<li class="nav-item">
|
||||||
|
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-tab" role="tab" aria-controls="license-info" aria-selected="false">
|
||||||
|
Embedding
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
<li class="nav-item">
|
||||||
|
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#interaction-tab" role="tab" aria-controls="chunking" aria-selected="false">
|
||||||
|
Interaction
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
<div class="tab-content tab-space">
|
||||||
|
<!-- Storage Tab -->
|
||||||
|
<div class="tab-pane fade show active" id="storage-tab" role="tabpanel">
|
||||||
|
{% set storage_fields = ['max_storage_tokens', 'additional_storage_token_price', 'additional_storage_bucket'] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_included_field(field, disabled_fields=[], include_fields=storage_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
<!-- Embedding Tab -->
|
||||||
|
<div class="tab-pane fade" id="embedding-tab" role="tabpanel">
|
||||||
|
{% set embedding_fields = ['included_embedding_tokens', 'additional_embedding_token_price', 'additional_embedding_bucket'] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_included_field(field, disabled_fields=[], include_fields=embedding_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
<!-- Interaction Tab -->
|
||||||
|
<div class="tab-pane fade" id="interaction-tab" role="tabpanel">
|
||||||
|
{% set interaction_fields = ['included_interaction_tokens', 'additional_interaction_token_price', 'additional_interaction_bucket'] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_included_field(field, disabled_fields=[], include_fields=interaction_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<button type="submit" class="btn btn-primary">Save License</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
|
||||||
|
{% block content_footer %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
71
eveai_app/templates/entitlements/license.html
Normal file
@@ -0,0 +1,71 @@
{% extends 'base.html' %}
{% from "macros.html" import render_field, render_included_field %}

{% block title %}Create or Edit License for Current Tenant{% endblock %}

{% block content_title %}Create or Edit License for Current Tenant{% endblock %}
{% block content_description %}Create or Edit a new License based on the selected License Tier for the current Tenant{% endblock %}

{% block content %}
<form method="post">
    {{ form.hidden_tag() }}
    {% set main_fields = ['start_date', 'end_date', 'currency', 'yearly_payment', 'basic_fee'] %}
    {% for field in form %}
        {{ render_included_field(field, disabled_fields=ext_disabled_fields + ['currency'], include_fields=main_fields) }}
    {% endfor %}
    <!-- Nav Tabs -->
    <div class="row mt-5">
        <div class="col-lg-12">
            <div class="nav-wrapper position-relative end-0">
                <ul class="nav nav-pills nav-fill p-1" role="tablist">
                    <li class="nav-item" role="presentation">
                        <a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#storage-tab" role="tab" aria-controls="model-info" aria-selected="true">
                            Storage
                        </a>
                    </li>
                    <li class="nav-item">
                        <a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-tab" role="tab" aria-controls="license-info" aria-selected="false">
                            Embedding
                        </a>
                    </li>
                    <li class="nav-item">
                        <a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#interaction-tab" role="tab" aria-controls="chunking" aria-selected="false">
                            Interaction
                        </a>
                    </li>
                </ul>
            </div>
            <div class="tab-content tab-space">
                <!-- Storage Tab -->
                <div class="tab-pane fade show active" id="storage-tab" role="tabpanel">
                    {% set storage_fields = ['max_storage_mb', 'additional_storage_price', 'additional_storage_bucket'] %}
                    {% for field in form %}
                        {{ render_included_field(field, disabled_fields=ext_disabled_fields, include_fields=storage_fields) }}
                    {% endfor %}
                </div>
                <!-- Embedding Tab -->
                <div class="tab-pane fade" id="embedding-tab" role="tabpanel">
                    {% set embedding_fields = ['included_embedding_mb', 'additional_embedding_price', 'additional_embedding_bucket', 'overage_embedding'] %}
                    {% for field in form %}
                        {{ render_included_field(field, disabled_fields=ext_disabled_fields, include_fields=embedding_fields) }}
                    {% endfor %}
                </div>
                <!-- Interaction Tab -->
                <div class="tab-pane fade" id="interaction-tab" role="tabpanel">
                    {% set interaction_fields = ['included_interaction_tokens', 'additional_interaction_token_price', 'additional_interaction_bucket', 'overage_interaction'] %}
                    {% for field in form %}
                        {{ render_included_field(field, disabled_fields=ext_disabled_fields, include_fields=interaction_fields) }}
                    {% endfor %}
                </div>
            </div>
        </div>
    </div>

    <button type="submit" class="btn btn-primary">Save License</button>
</form>
{% endblock %}


{% block content_footer %}

{% endblock %}
71
eveai_app/templates/entitlements/license_tier.html
Normal file
@@ -0,0 +1,71 @@
{% extends 'base.html' %}
{% from "macros.html" import render_field, render_included_field %}

{% block title %}Register or Edit License Tier{% endblock %}

{% block content_title %}Register or Edit License Tier{% endblock %}
{% block content_description %}Register or Edit License Tier{% endblock %}

{% block content %}
<form method="post">
    {{ form.hidden_tag() }}
    {% set main_fields = ['name', 'version', 'start_date', 'end_date', 'basic_fee_d', 'basic_fee_e'] %}
    {% for field in form %}
        {{ render_included_field(field, disabled_fields=[], include_fields=main_fields) }}
    {% endfor %}
    <!-- Nav Tabs -->
    <div class="row mt-5">
        <div class="col-lg-12">
            <div class="nav-wrapper position-relative end-0">
                <ul class="nav nav-pills nav-fill p-1" role="tablist">
                    <li class="nav-item" role="presentation">
                        <a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#storage-tab" role="tab" aria-controls="model-info" aria-selected="true">
                            Storage
                        </a>
                    </li>
                    <li class="nav-item">
                        <a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-tab" role="tab" aria-controls="license-info" aria-selected="false">
                            Embedding
                        </a>
                    </li>
                    <li class="nav-item">
                        <a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#interaction-tab" role="tab" aria-controls="chunking" aria-selected="false">
                            Interaction
                        </a>
                    </li>
                </ul>
            </div>
            <div class="tab-content tab-space">
                <!-- Storage Tab -->
                <div class="tab-pane fade show active" id="storage-tab" role="tabpanel">
                    {% set storage_fields = ['max_storage_mb', 'additional_storage_price_d', 'additional_storage_price_e', 'additional_storage_bucket'] %}
                    {% for field in form %}
                        {{ render_included_field(field, disabled_fields=[], include_fields=storage_fields) }}
                    {% endfor %}
                </div>
                <!-- Embedding Tab -->
                <div class="tab-pane fade" id="embedding-tab" role="tabpanel">
                    {% set embedding_fields = ['included_embedding_mb', 'additional_embedding_price_d', 'additional_embedding_price_e', 'additional_embedding_bucket', 'standard_overage_embedding'] %}
                    {% for field in form %}
                        {{ render_included_field(field, disabled_fields=[], include_fields=embedding_fields) }}
                    {% endfor %}
                </div>
                <!-- Interaction Tab -->
                <div class="tab-pane fade" id="interaction-tab" role="tabpanel">
                    {% set interaction_fields = ['included_interaction_tokens', 'additional_interaction_token_price_d', 'additional_interaction_token_price_e', 'additional_interaction_bucket', 'standard_overage_interaction'] %}
                    {% for field in form %}
                        {{ render_included_field(field, disabled_fields=[], include_fields=interaction_fields) }}
                    {% endfor %}
                </div>
            </div>
        </div>
    </div>

    <button type="submit" class="btn btn-primary">Save License Tier</button>
</form>
{% endblock %}


{% block content_footer %}

{% endblock %}
24
eveai_app/templates/entitlements/view_license_tiers.html
Normal file
@@ -0,0 +1,24 @@
{% extends 'base.html' %}
{% from "macros.html" import render_selectable_table, render_pagination, render_field %}
{% block title %}License Tier Selection{% endblock %}
{% block content_title %}Select a License Tier{% endblock %}
{% block content_description %}Select a License Tier to continue{% endblock %}
{% block content %}

<!-- License Tier Selection Form -->
<form method="POST" action="{{ url_for('entitlements_bp.handle_license_tier_selection') }}">
    {{ render_selectable_table(headers=["ID", "Name", "Version", "Start Date", "End Date"], rows=rows, selectable=True, id="licenseTierTable") }}
    <div class="form-group mt-3">
        <button type="submit" name="action" value="edit_license_tier" class="btn btn-primary">Edit License Tier</button>
        <button type="submit" name="action" value="create_license_for_tenant" class="btn btn-secondary">Create License for Current Tenant</button>
    </div>
</form>

{% endblock %}

{% block content_footer %}
{{ render_pagination(pagination, 'user_bp.select_tenant') }}
{% endblock %}
28
eveai_app/templates/entitlements/view_usages.html
Normal file
@@ -0,0 +1,28 @@
{% extends 'base.html' %}
{% from "macros.html" import render_selectable_table, render_pagination %}

{% block title %}View License Usage{% endblock %}

{% block content_title %}View License Usage{% endblock %}
{% block content_description %}View License Usage{% endblock %}

{% block content %}
<form action="{{ url_for('user_bp.handle_user_action') }}" method="POST">
    {{ render_selectable_table(headers=["Usage ID", "Start Date", "End Date", "Storage (MiB)", "Embedding (MiB)", "Interaction (tokens)"], rows=rows, selectable=False, id="usagesTable") }}
<!--    <div class="form-group mt-3">-->
<!--        <button type="submit" name="action" value="edit_user" class="btn btn-primary">Edit Selected User</button>-->
<!--        <button type="submit" name="action" value="resend_confirmation_email" class="btn btn-secondary">Resend Confirmation Email</button>-->
<!--        <button type="submit" name="action" value="send_password_reset_email" class="btn btn-secondary">Send Password Reset Email</button>-->
<!--        <button type="submit" name="action" value="reset_uniquifier" class="btn btn-secondary">Reset Uniquifier</button>-->
<!--        <!– Additional buttons can be added here for other actions –>-->
<!--    </div>-->
</form>
{% endblock %}

{% block content_footer %}
{{ render_pagination(pagination, 'user_bp.select_tenant') }}
{% endblock %}

{% block scripts %}

{% endblock %}
@@ -1,5 +1,5 @@
 <header class="header-2">
-    <div class="page-header min-vh-25" style="background-image: url({{url_for('static', filename='/assets/img/EveAI_bg.jpg')}})" loading="lazy">
+    <div class="page-header min-vh-25" style="background-image: url({{url_for('static', filename='/assets/img/EveAI_bg.jpg')}}); background-position: top left; background-repeat: no-repeat; background-size: cover;" loading="lazy">
     <span class="mask bg-gradient-primary opacity-4"></span>
     <div class="container">
         <div class="row">
@@ -1,126 +1,80 @@
|
|||||||
{% extends "base.html" %}
|
{% extends "base.html" %}
|
||||||
|
{% from "macros.html" import render_field %}
|
||||||
|
|
||||||
|
{% block title %}Session Overview{% endblock %}
|
||||||
|
|
||||||
|
{% block content_title %}Session Overview{% endblock %}
|
||||||
|
{% block content_description %}An overview of the chat session.{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div class="container mt-5">
|
<div class="container mt-5">
|
||||||
<h2>Chat Session Details</h2>
|
<h2>Chat Session Details</h2>
|
||||||
<!-- Session Information -->
|
|
||||||
<div class="card mb-4">
|
<div class="card mb-4">
|
||||||
<div class="card-header">
|
<div class="card-header">
|
||||||
<h5>Session Information</h5>
|
<h5>Session Information</h5>
|
||||||
<!-- Timezone Toggle Buttons -->
|
|
||||||
<div class="btn-group" role="group">
|
|
||||||
<button type="button" class="btn btn-primary" id="toggle-interaction-timezone">Interaction Timezone</button>
|
|
||||||
<button type="button" class="btn btn-secondary" id="toggle-admin-timezone">Admin Timezone</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
<div class="card-body">
|
<div class="card-body">
|
||||||
<dl class="row">
|
<p><strong>Session ID:</strong> {{ chat_session.session_id }}</p>
|
||||||
<dt class="col-sm-3">Session ID:</dt>
|
<p><strong>User:</strong> {{ chat_session.user.user_name if chat_session.user else 'Anonymous' }}</p>
|
||||||
<dd class="col-sm-9">{{ chat_session.session_id }}</dd>
|
<p><strong>Start:</strong> {{ chat_session.session_start | to_local_time(chat_session.timezone) }}</p>
|
||||||
|
<p><strong>End:</strong> {{ chat_session.session_end | to_local_time(chat_session.timezone) if chat_session.session_end else 'Ongoing' }}</p>
|
||||||
<dt class="col-sm-3">Session Start:</dt>
|
|
||||||
<dd class="col-sm-9">
|
|
||||||
<span class="timezone interaction-timezone">{{ chat_session.session_start | to_local_time(chat_session.timezone) }}</span>
|
|
||||||
<span class="timezone admin-timezone d-none">{{ chat_session.session_start | to_local_time(session['admin_user_timezone']) }}</span>
|
|
||||||
</dd>
|
|
||||||
|
|
||||||
<dt class="col-sm-3">Session End:</dt>
|
|
||||||
<dd class="col-sm-9">
|
|
||||||
{% if chat_session.session_end %}
|
|
||||||
<span class="timezone interaction-timezone">{{ chat_session.session_end | to_local_time(chat_session.timezone) }}</span>
|
|
||||||
<span class="timezone admin-timezone d-none">{{ chat_session.session_end | to_local_time(session['admin_user_timezone']) }}</span>
|
|
||||||
{% else %}
|
|
||||||
Ongoing
|
|
||||||
{% endif %}
|
|
||||||
</dd>
|
|
||||||
</dl>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Interactions List -->
|
<h3>Interactions</h3>
|
||||||
<div class="card mb-4">
|
<div class="accordion" id="interactionsAccordion">
|
||||||
<div class="card-header">
|
{% for interaction in interactions %}
|
||||||
<h5>Interactions</h5>
|
<div class="accordion-item">
|
||||||
</div>
|
<h2 class="accordion-header" id="heading{{ loop.index }}">
|
||||||
<div class="card-body">
|
<button class="accordion-button collapsed" type="button" data-bs-toggle="collapse"
|
||||||
{% for interaction in interactions %}
|
data-bs-target="#collapse{{ loop.index }}" aria-expanded="false"
|
||||||
<div class="interaction mb-3">
|
aria-controls="collapse{{ loop.index }}">
|
||||||
<div class="card">
|
<div class="d-flex justify-content-between align-items-center w-100">
|
||||||
<div class="card-header d-flex justify-content-between">
|
<span class="interaction-question">{{ interaction.question | truncate(50) }}</span>
|
||||||
<span>Question:</span>
|
<span class="interaction-icons">
|
||||||
<span class="text-muted">
|
<i class="material-icons algorithm-icon {{ interaction.algorithm_used | lower }}">fingerprint</i>
|
||||||
<span class="timezone interaction-timezone">{{ interaction.question_at | to_local_time(interaction.timezone) }}</span>
|
<i class="material-icons thumb-icon {% if interaction.appreciation == 100 %}filled{% else %}outlined{% endif %}">thumb_up</i>
|
||||||
<span class="timezone admin-timezone d-none">{{ interaction.question_at | to_local_time(session['admin_user_timezone']) }}</span>
|
<i class="material-icons thumb-icon {% if interaction.appreciation == 0 %}filled{% else %}outlined{% endif %}">thumb_down</i>
|
||||||
-
|
</span>
|
||||||
<span class="timezone interaction-timezone">{{ interaction.answer_at | to_local_time(interaction.timezone) }}</span>
|
|
||||||
<span class="timezone admin-timezone d-none">{{ interaction.answer_at | to_local_time(session['admin_user_timezone']) }}</span>
|
|
||||||
({{ interaction.question_at | time_difference(interaction.answer_at) }})
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<div class="card-body">
|
|
||||||
<p><strong>Question:</strong> {{ interaction.question }}</p>
|
|
||||||
<p><strong>Answer:</strong> {{ interaction.answer }}</p>
|
|
||||||
<p>
|
|
||||||
<strong>Algorithm Used:</strong>
|
|
||||||
<i class="material-icons {{ 'fingerprint-rag-' ~ interaction.algorithm_used.lower() }}">
|
|
||||||
fingerprint
|
|
||||||
</i> {{ interaction.algorithm_used }}
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
<strong>Appreciation:</strong>
|
|
||||||
<i class="material-icons thumb-icon {{ 'thumb_up' if interaction.appreciation == 1 else 'thumb_down' }}">
|
|
||||||
{{ 'thumb_up' if interaction.appreciation == 1 else 'thumb_down' }}
|
|
||||||
</i>
|
|
||||||
</p>
|
|
||||||
<p><strong>Embeddings:</strong>
|
|
||||||
{% if interaction.embeddings %}
|
|
||||||
{% for embedding in interaction.embeddings %}
|
|
||||||
<a href="{{ url_for('interaction_bp.view_embedding', embedding_id=embedding.embedding_id) }}" class="badge badge-info">
|
|
||||||
{{ embedding.embedding_id }}
|
|
||||||
</a>
|
|
||||||
{% endfor %}
|
|
||||||
{% else %}
|
|
||||||
None
|
|
||||||
{% endif %}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
|
</button>
|
||||||
|
</h2>
|
||||||
|
<div id="collapse{{ loop.index }}" class="accordion-collapse collapse" aria-labelledby="heading{{ loop.index }}"
|
||||||
|
data-bs-parent="#interactionsAccordion">
|
||||||
|
<div class="accordion-body">
|
||||||
|
<h6>Detailed Question:</h6>
|
||||||
|
<p>{{ interaction.detailed_question }}</p>
|
||||||
|
<h6>Answer:</h6>
|
||||||
|
<div class="markdown-content">{{ interaction.answer | safe }}</div>
|
||||||
|
{% if embeddings_dict.get(interaction.id) %}
|
||||||
|
<h6>Related Documents:</h6>
|
||||||
|
<ul>
|
||||||
|
{% for embedding in embeddings_dict[interaction.id] %}
|
||||||
|
<li>
|
||||||
|
{% if embedding.url %}
|
||||||
|
<a href="{{ embedding.url }}" target="_blank">{{ embedding.url }}</a>
|
||||||
|
{% else %}
|
||||||
|
{{ embedding.object_name }}
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
{% endfor %}
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block scripts %}
|
{% block scripts %}
|
||||||
|
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
|
||||||
<script>
|
<script>
|
||||||
document.addEventListener('DOMContentLoaded', function() {
|
document.addEventListener('DOMContentLoaded', function() {
|
||||||
// Elements to toggle
|
var markdownElements = document.querySelectorAll('.markdown-content');
|
||||||
const interactionTimes = document.querySelectorAll('.interaction-timezone');
|
markdownElements.forEach(function(el) {
|
||||||
const adminTimes = document.querySelectorAll('.admin-timezone');
|
el.innerHTML = marked.parse(el.textContent);
|
||||||
|
|
||||||
// Buttons
|
|
||||||
const interactionButton = document.getElementById('toggle-interaction-timezone');
|
|
||||||
const adminButton = document.getElementById('toggle-admin-timezone');
|
|
||||||
|
|
||||||
// Toggle to Interaction Timezone
|
|
||||||
interactionButton.addEventListener('click', function() {
|
|
||||||
interactionTimes.forEach(el => el.classList.remove('d-none'));
|
|
||||||
adminTimes.forEach(el => el.classList.add('d-none'));
|
|
||||||
interactionButton.classList.add('btn-primary');
|
|
||||||
interactionButton.classList.remove('btn-secondary');
|
|
||||||
adminButton.classList.add('btn-secondary');
|
|
||||||
adminButton.classList.remove('btn-primary');
|
|
||||||
});
|
|
||||||
|
|
||||||
// Toggle to Admin Timezone
|
|
||||||
adminButton.addEventListener('click', function() {
|
|
||||||
interactionTimes.forEach(el => el.classList.add('d-none'));
|
|
||||||
adminTimes.forEach(el => el.classList.remove('d-none'));
|
|
||||||
interactionButton.classList.add('btn-secondary');
|
|
||||||
interactionButton.classList.remove('btn-primary');
|
|
||||||
adminButton.classList.add('btn-primary');
|
|
||||||
adminButton.classList.remove('btn-secondary');
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|||||||
@@ -1,18 +1,136 @@
|
|||||||
{% macro render_field(field, disabled_fields=[], exclude_fields=[]) %}
|
<!--{% macro render_field(field, disabled_fields=[], exclude_fields=[], class='') %}-->
|
||||||
|
<!-- {% set disabled = field.name in disabled_fields %}-->
|
||||||
|
<!-- {% set exclude_fields = exclude_fields + ['csrf_token', 'submit'] %}-->
|
||||||
|
<!-- {% if field.name not in exclude_fields %}-->
|
||||||
|
<!-- {% if field.type == 'BooleanField' %}-->
|
||||||
|
<!-- <div class="form-check">-->
|
||||||
|
<!-- {{ field(class="form-check-input " + class, type="checkbox", id="flexSwitchCheckDefault") }}-->
|
||||||
|
<!-- {{ field.label(class="form-check-label", for="flexSwitchCheckDefault", disabled=disabled) }}-->
|
||||||
|
<!-- </div>-->
|
||||||
|
<!-- {% else %}-->
|
||||||
|
<!-- <div class="form-group">-->
|
||||||
|
<!-- {{ field.label(class="form-label") }}-->
|
||||||
|
<!-- {{ field(class="form-control " + class, disabled=disabled) }}-->
|
||||||
|
<!-- {% if field.errors %}-->
|
||||||
|
<!-- <div class="invalid-feedback">-->
|
||||||
|
<!-- {% for error in field.errors %}-->
|
||||||
|
<!-- {{ error }}-->
|
||||||
|
<!-- {% endfor %}-->
|
||||||
|
<!-- </div>-->
|
||||||
|
<!-- {% endif %}-->
|
||||||
|
<!-- </div>-->
|
||||||
|
<!-- {% endif %}-->
|
||||||
|
<!-- {% endif %}-->
|
||||||
|
<!--{% endmacro %}-->
|
||||||
|
|
||||||
|
{% macro render_field_old(field, disabled_fields=[], exclude_fields=[], class='') %}
|
||||||
|
<!-- Debug info -->
|
||||||
|
<!-- Field name: {{ field.name }}, Field type: {{ field.__class__.__name__ }} -->
|
||||||
|
|
||||||
{% set disabled = field.name in disabled_fields %}
|
{% set disabled = field.name in disabled_fields %}
|
||||||
{% set exclude_fields = exclude_fields + ['csrf_token', 'submit'] %}
|
{% set exclude_fields = exclude_fields + ['csrf_token', 'submit'] %}
|
||||||
{% if field.name not in exclude_fields %}
|
{% if field.name not in exclude_fields %}
|
||||||
{% if field.type == 'BooleanField' %}
|
{% if field.type == 'BooleanField' %}
|
||||||
<div class="form-check">
|
<div class="form-group">
|
||||||
{{ field(class="form-check-input", type="checkbox", id="flexSwitchCheckDefault") }}
|
<div class="form-check form-switch">
|
||||||
{{ field.label(class="form-check-label", for="flexSwitchCheckDefault", disabled=disabled) }}
|
{{ field(class="form-check-input " + class, disabled=disabled) }}
|
||||||
|
{% if field.description %}
|
||||||
|
{{ field.label(class="form-check-label",
|
||||||
|
**{'data-bs-toggle': 'tooltip',
|
||||||
|
'data-bs-placement': 'right',
|
||||||
|
'title': field.description}) }}
|
||||||
|
{% else %}
|
||||||
|
{{ field.label(class="form-check-label") }}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% if field.errors %}
|
||||||
|
<div class="invalid-feedback d-block">
|
||||||
|
{% for error in field.errors %}
|
||||||
|
{{ error }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
{% else %}
|
{% else %}
|
||||||
<div class="form-group">
|
<div class="form-group">
|
||||||
{{ field.label(class="form-label") }}
|
{% if field.description %}
|
||||||
{{ field(class="form-control", disabled=disabled) }}
|
{{ field.label(class="form-label",
|
||||||
|
**{'data-bs-toggle': 'tooltip',
|
||||||
|
'data-bs-placement': 'right',
|
||||||
|
'title': field.description}) }}
|
||||||
|
{% else %}
|
||||||
|
{{ field.label(class="form-label") }}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if field.type == 'TextAreaField' and 'json-editor' in class %}
|
||||||
|
<div id="{{ field.id }}-editor" class="json-editor-container"></div>
|
||||||
|
{{ field(class="form-control d-none " + class, disabled=disabled) }}
|
||||||
|
{% else %}
|
||||||
|
{{ field(class="form-control " + class, disabled=disabled) }}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
{% if field.errors %}
|
{% if field.errors %}
|
||||||
<div class="invalid-feedback">
|
<div class="invalid-feedback d-block">
|
||||||
|
{% for error in field.errors %}
|
||||||
|
{{ error }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro render_field(field, disabled_fields=[], exclude_fields=[], class='') %}
|
||||||
|
<!-- Debug info -->
|
||||||
|
<!-- Field name: {{ field.name }}, Field type: {{ field.__class__.__name__ }} -->
|
||||||
|
|
||||||
|
{% set disabled = field.name in disabled_fields %}
|
||||||
|
{% set exclude_fields = exclude_fields + ['csrf_token', 'submit'] %}
|
||||||
|
{% if field.name not in exclude_fields %}
|
||||||
|
{% if field.type == 'BooleanField' %}
|
||||||
|
<div class="form-group">
|
||||||
|
<div class="form-check form-switch">
|
||||||
|
{{ field(class="form-check-input " + class, disabled=disabled) }}
|
||||||
|
{% if field.description %}
|
||||||
|
{{ field.label(class="form-check-label",
|
||||||
|
**{'data-bs-toggle': 'tooltip',
|
||||||
|
'data-bs-placement': 'right',
|
||||||
|
'title': field.description}) }}
|
||||||
|
{% else %}
|
||||||
|
{{ field.label(class="form-check-label") }}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% if field.errors %}
|
||||||
|
<div class="invalid-feedback d-block">
|
||||||
|
{% for error in field.errors %}
|
||||||
|
{{ error }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="form-group">
|
||||||
|
{% if field.description %}
|
||||||
|
{{ field.label(class="form-label",
|
||||||
|
**{'data-bs-toggle': 'tooltip',
|
||||||
|
'data-bs-placement': 'right',
|
||||||
|
'title': field.description}) }}
|
||||||
|
{% else %}
|
||||||
|
{{ field.label(class="form-label") }}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if field.type == 'TextAreaField' and 'json-editor' in class %}
|
||||||
|
<div id="{{ field.id }}-editor" class="json-editor-container"></div>
|
||||||
|
{{ field(class="form-control d-none " + class, disabled=disabled) }}
|
||||||
|
{% elif field.type == 'SelectField' %}
|
||||||
|
{{ field(class="form-control form-select " + class, disabled=disabled) }}
|
||||||
|
{% else %}
|
||||||
|
{{ field(class="form-control " + class, disabled=disabled) }}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if field.errors %}
|
||||||
|
<div class="invalid-feedback d-block">
|
||||||
{% for error in field.errors %}
|
{% for error in field.errors %}
|
||||||
{{ error }}
|
{{ error }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
@@ -177,6 +295,48 @@
|
|||||||
</div>
|
</div>
|
||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro render_selectable_sortable_table_with_dict_headers(headers, rows, selectable, id, sort_by, sort_order) %}
|
||||||
|
<div class="card">
|
||||||
|
<div class="table-responsive">
|
||||||
|
<table class="table align-items-center mb-0" id="{{ id }}">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
{% if selectable %}
|
||||||
|
<th class="text-uppercase text-secondary text-xxs font-weight-bolder opacity-7">Select</th>
|
||||||
|
{% endif %}
|
||||||
|
{% for header in headers %}
|
||||||
|
<th class="text-uppercase text-secondary text-xxs font-weight-bolder opacity-7 sortable" data-sort="{{ header['sort'] }}">
|
||||||
|
{{ header['text'] }}
|
||||||
|
{% if sort_by == header['sort'] %}
|
||||||
|
{% if sort_order == 'asc' %}
|
||||||
|
<i class="fas fa-sort-up"></i>
|
||||||
|
{% elif sort_order == 'desc' %}
|
||||||
|
<i class="fas fa-sort-down"></i>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
<i class="fas fa-sort"></i>
|
||||||
|
{% endif %}
|
||||||
|
</th>
|
||||||
|
{% endfor %}
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for row in rows %}
|
||||||
|
<tr>
|
||||||
|
{% if selectable %}
|
||||||
|
<td><input type="radio" name="selected_row" value="{{ row[0].value }}"></td>
|
||||||
|
{% endif %}
|
||||||
|
{% for cell in row %}
|
||||||
|
<td>{{ cell.value }}</td>
|
||||||
|
{% endfor %}
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro render_accordion(accordion_id, accordion_items, header_title, header_description) %}
|
{% macro render_accordion(accordion_id, accordion_items, header_title, header_description) %}
|
||||||
<div class="accordion-1">
|
<div class="accordion-1">
|
||||||
<div class="container">
|
<div class="container">
|
||||||
|
|||||||
@@ -81,10 +81,13 @@
|
|||||||
{% endif %}
|
{% endif %}
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
{{ dropdown('Document Mgmt', 'note_stack', [
|
{{ dropdown('Document Mgmt', 'note_stack', [
|
||||||
|
{'name': 'Add Catalog', 'url': '/document/catalog', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
|
{'name': 'All Catalogs', 'url': '/document/catalogs', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
|
{'name': 'Add Retriever', 'url': '/document/retriever', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
|
{'name': 'All Retrievers', 'url': '/document/retrievers', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
{'name': 'Add Document', 'url': '/document/add_document', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'Add Document', 'url': '/document/add_document', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
{'name': 'Add URL', 'url': '/document/add_url', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'Add URL', 'url': '/document/add_url', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
{'name': 'Add a list of URLs', 'url': '/document/add_urls', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'Add a list of URLs', 'url': '/document/add_urls', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
{'name': 'Add Youtube Document' , 'url': '/document/add_youtube', 'roles': ['Super User', 'Tenant Admin']},
|
|
||||||
{'name': 'All Documents', 'url': '/document/documents', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'All Documents', 'url': '/document/documents', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
{'name': 'All Document Versions', 'url': '/document/document_versions_list', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'All Document Versions', 'url': '/document/document_versions_list', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
{'name': 'Library Operations', 'url': '/document/library_operations', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'Library Operations', 'url': '/document/library_operations', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
@@ -95,6 +98,14 @@
|
|||||||
{'name': 'Chat Sessions', 'url': '/interaction/chat_sessions', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'Chat Sessions', 'url': '/interaction/chat_sessions', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
]) }}
|
]) }}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
{% if current_user.is_authenticated %}
|
||||||
|
{{ dropdown('Administration', 'settings', [
|
||||||
|
{'name': 'License Tier Registration', 'url': '/entitlements/license_tier', 'roles': ['Super User']},
|
||||||
|
{'name': 'All License Tiers', 'url': '/entitlements/view_license_tiers', 'roles': ['Super User']},
|
||||||
|
{'name': 'Trigger Actions', 'url': '/administration/trigger_actions', 'roles': ['Super User']},
|
||||||
|
{'name': 'Usage', 'url': '/entitlements/view_usages', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
|
]) }}
|
||||||
|
{% endif %}
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
{{ dropdown(current_user.user_name, 'person', [
|
{{ dropdown(current_user.user_name, 'person', [
|
||||||
{'name': 'Session Defaults', 'url': '/session_defaults', 'roles': ['Super User', 'Tenant Admin']},
|
{'name': 'Session Defaults', 'url': '/session_defaults', 'roles': ['Super User', 'Tenant Admin']},
|
||||||
@@ -107,6 +118,17 @@
|
|||||||
{% endif %}
|
{% endif %}
|
||||||
</ul>
|
</ul>
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
|
<ul class="navbar-nav d-lg-block d-none">
|
||||||
|
<li class="nav-item">
|
||||||
|
<a href="/document/catalogs" class="btn btn-sm bg-gradient-primary mb-0 me-2">
|
||||||
|
{% if 'catalog_name' in session %}
|
||||||
|
CATALOG: {{ session['catalog_name'] }}
|
||||||
|
{% else %}
|
||||||
|
CHOOSE CATALOG
|
||||||
|
{% endif %}
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
<ul class="navbar-nav d-lg-block d-none">
|
<ul class="navbar-nav d-lg-block d-none">
|
||||||
<li class="nav-item">
|
<li class="nav-item">
|
||||||
<a href="/session_defaults" class="btn btn-sm bg-gradient-primary mb-0">
|
<a href="/session_defaults" class="btn btn-sm bg-gradient-primary mb-0">
|
||||||
|
|||||||
@@ -13,3 +13,59 @@
 <script src="{{url_for('static', filename='assets/js/plugins/anime.min.js')}}"></script>
 <script src="{{url_for('static', filename='assets/js/material-kit-pro.min.js')}}?v=3.0.4" type="text/javascript"></script>
 <script src="https://cdnjs.cloudflare.com/ajax/libs/bootstrap/5.3.3/js/bootstrap.bundle.min.js"></script>
+<script src="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.13/js/select2.min.js"></script>
+<link href="https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/10.1.0/jsoneditor.min.css" rel="stylesheet" type="text/css">
+<script src="https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/10.1.0/jsoneditor.min.js"></script>
+
+<script>
+    document.addEventListener('DOMContentLoaded', function() {
+        // Initialize tooltips
+        var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]'))
+        var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) {
+            return new bootstrap.Tooltip(tooltipTriggerEl)
+        });
+
+        // Initialize JSON editors
+        document.querySelectorAll('.json-editor').forEach(function(textarea) {
+            // Create container for editor
+            var container = document.getElementById(textarea.id + '-editor');
+
+            // Initialize the editor
+            var editor = new JSONEditor(container, {
+                mode: 'code',
+                modes: ['code', 'tree'],
+                onChangeText: function(jsonString) {
+                    textarea.value = jsonString;
+                }
+            });
+
+            // Set initial value
+            try {
+                const initialValue = textarea.value ? JSON.parse(textarea.value) : {};
+                editor.set(initialValue);
+            } catch (e) {
+                console.error('Error parsing initial JSON:', e);
+                editor.set({});
+            }
+
+            // Add validation indicator
+            editor.validate().then(function(errors) {
+                if (errors.length) {
+                    container.style.border = '2px solid red';
+                } else {
+                    container.style.border = '1px solid #ccc';
+                }
+            });
+        });
+    });
+</script>
+
+<style>
+    .json-editor-container {
+        height: 400px;
+        margin-bottom: 1rem;
+    }
+    .tooltip {
+        position: fixed;
+    }
+</style>
|
|||||||
@@ -1,22 +1,52 @@
|
|||||||
{% extends 'base.html' %}
|
{% extends 'base.html' %}
|
||||||
{% from "macros.html" import render_selectable_table, render_pagination %}
|
{% from "macros.html" import render_selectable_table, render_pagination, render_field %}
|
||||||
|
|
||||||
{% block title %}Tenant Selection{% endblock %}
|
{% block title %}Tenant Selection{% endblock %}
|
||||||
|
|
||||||
{% block content_title %}Select a Tenant{% endblock %}
|
{% block content_title %}Select a Tenant{% endblock %}
|
||||||
{% block content_description %}Select the active tenant for the current session{% endblock %}
|
{% block content_description %}Select the active tenant for the current session{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
|
|
||||||
|
<!-- Filter Form -->
|
||||||
|
<form method="POST" action="{{ url_for('user_bp.select_tenant') }}" class="mb-4">
|
||||||
|
{{ filter_form.hidden_tag() }}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-md-4">
|
||||||
|
{{ render_field(filter_form.types, class="select2") }}
|
||||||
|
</div>
|
||||||
|
<div class="col-md-4">
|
||||||
|
{{ render_field(filter_form.search) }}
|
||||||
|
</div>
|
||||||
|
<div class="col-md-4">
|
||||||
|
{{ filter_form.submit(class="btn btn-primary") }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
<!-- Tenant Selection Form -->
|
||||||
<form method="POST" action="{{ url_for('user_bp.handle_tenant_selection') }}">
|
<form method="POST" action="{{ url_for('user_bp.handle_tenant_selection') }}">
|
||||||
{{ render_selectable_table(headers=["Tenant ID", "Tenant Name", "Website"], rows=rows, selectable=True, id="tenantsTable") }}
|
{{ render_selectable_table(headers=["Tenant ID", "Tenant Name", "Website", "Type"], rows=rows, selectable=True, id="tenantsTable") }}
|
||||||
<div class="form-group mt-3">
|
<div class="form-group mt-3">
|
||||||
<button type="submit" name="action" value="select_tenant" class="btn btn-primary">Set Session Tenant</button>
|
<button type="submit" name="action" value="select_tenant" class="btn btn-primary">Set Session Tenant</button>
|
||||||
<button type="submit" name="action" value="edit_tenant" class="btn btn-secondary">Edit Tenant</button>
|
<button type="submit" name="action" value="edit_tenant" class="btn btn-secondary">Edit Tenant</button>
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block content_footer %}
|
{% block content_footer %}
|
||||||
{{ render_pagination(pagination, 'user_bp.select_tenant') }}
|
{{ render_pagination(pagination, 'user_bp.select_tenant') }}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
<script>
|
||||||
|
$(document).ready(function() {
|
||||||
|
$('.select2').select2({
|
||||||
|
placeholder: "Select tenant types",
|
||||||
|
allowClear: true,
|
||||||
|
minimumResultsForSearch: Infinity, // Hides the search box
|
||||||
|
dropdownCssClass: 'select2-dropdown-hidden', // Custom class for dropdown
|
||||||
|
containerCssClass: 'select2-container-hidden' // Custom class for container
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|||||||
@@ -1,21 +1,185 @@
|
|||||||
{% extends 'base.html' %}
|
{% extends 'base.html' %}
|
||||||
{% from "macros.html" import render_field %}
|
{% from "macros.html" import render_field, render_included_field %}
|
||||||
|
|
||||||
{% block title %}Tenant Registration{% endblock %}
|
{% block title %}Create or Edit Tenant{% endblock %}
|
||||||
|
|
||||||
{% block content_title %}Register Tenant{% endblock %}
|
{% block content_title %}Create or Edit Tenant{% endblock %}
|
||||||
{% block content_description %}Add a new tenant to EveAI{% endblock %}
|
{% block content_description %}Create or Edit Tenant{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<form method="post">
|
<form method="post">
|
||||||
{{ form.hidden_tag() }}
|
{{ form.hidden_tag() }}
|
||||||
{% set disabled_fields = [] %}
|
<!-- Main Tenant Information -->
|
||||||
{% set exclude_fields = [] %}
|
{% set main_fields = ['name', 'website', 'default_language', 'allowed_languages', 'timezone','rag_context', 'type'] %}
|
||||||
{% for field in form %}
|
{% for field in form %}
|
||||||
{{ render_field(field, disabled_fields, exclude_fields) }}
|
{{ render_included_field(field, disabled_fields=[], include_fields=main_fields) }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
<button type="submit" class="btn btn-primary">Register Tenant</button>
|
|
||||||
|
<!-- Nav Tabs -->
|
||||||
|
<div class="row mt-5">
|
||||||
|
<div class="col-lg-12">
|
||||||
|
<div class="nav-wrapper position-relative end-0">
|
||||||
|
<ul class="nav nav-pills nav-fill p-1" role="tablist">
|
||||||
|
<li class="nav-item" role="presentation">
|
||||||
|
<a class="nav-link mb-0 px-0 py-1 active" data-toggle="tab" href="#model-info-tab" role="tab" aria-controls="model-info" aria-selected="true">
|
||||||
|
Model Information
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
<li class="nav-item">
|
||||||
|
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#license-info-tab" role="tab" aria-controls="license-info" aria-selected="false">
|
||||||
|
License Information
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
<div class="tab-content tab-space">
|
||||||
|
<!-- Model Information Tab -->
|
||||||
|
<div class="tab-pane fade show active" id="model-info-tab" role="tabpanel">
|
||||||
|
{% set model_fields = ['embedding_model', 'llm_model'] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_included_field(field, disabled_fields=[], include_fields=model_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
<!-- License Information Tab -->
|
||||||
|
<div class="tab-pane fade" id="license-info-tab" role="tabpanel">
|
||||||
|
{% set license_fields = ['currency', 'usage_email', ] %}
|
||||||
|
{% for field in form %}
|
||||||
|
{{ render_included_field(field, disabled_fields=[], include_fields=license_fields) }}
|
||||||
|
{% endfor %}
|
||||||
|
<!-- Register API Key Button -->
|
||||||
|
<div class="form-group">
|
||||||
|
<button type="button" class="btn btn-primary" onclick="generateNewChatApiKey()">Register Chat API Key</button>
|
||||||
|
<button type="button" class="btn btn-primary" onclick="generateNewApiKey()">Register API Key</button>
|
||||||
|
</div>
|
||||||
|
<!-- API Key Display Field -->
|
||||||
|
<div id="chat-api-key-field" style="display:none;">
|
||||||
|
<label for="chat-api-key">Chat API Key:</label>
|
||||||
|
<input type="text" id="chat-api-key" class="form-control" readonly>
|
||||||
|
<button type="button" id="copy-chat-button" class="btn btn-primary">Copy to Clipboard</button>
|
||||||
|
<p id="copy-chat-message" style="display:none;color:green;">Chat API key copied to clipboard</p>
|
||||||
|
</div>
|
||||||
|
<div id="api-key-field" style="display:none;">
|
||||||
|
<label for="api-key">API Key:</label>
|
||||||
|
<input type="text" id="api-key" class="form-control" readonly>
|
||||||
|
<button type="button" id="copy-api-button" class="btn btn-primary">Copy to Clipboard</button>
|
||||||
|
<p id="copy-message" style="display:none;color:green;">API key copied to clipboard</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<button type="submit" class="btn btn-primary">Save Tenant</button>
|
||||||
</form>
|
</form>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block content_footer %} {% endblock %}
|
|
||||||
|
{% block content_footer %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
<script>
|
||||||
|
// Function to generate a new Chat API Key
|
||||||
|
function generateNewChatApiKey() {
|
||||||
|
generateApiKey('/admin/user/generate_chat_api_key', '#chat-api-key', '#chat-api-key-field');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to generate a new general API Key
|
||||||
|
function generateNewApiKey() {
|
||||||
|
generateApiKey('/admin/user/generate_api_api_key', '#api-key', '#api-key-field');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reusable function to handle API key generation
|
||||||
|
function generateApiKey(url, inputSelector, fieldSelector) {
|
||||||
|
$.ajax({
|
||||||
|
url: url,
|
||||||
|
type: 'POST',
|
||||||
|
contentType: 'application/json',
|
||||||
|
success: function(response) {
|
||||||
|
$(inputSelector).val(response.api_key);
|
||||||
|
$(fieldSelector).show();
|
||||||
|
},
|
||||||
|
error: function(error) {
|
||||||
|
alert('Error generating new API key: ' + error.responseText);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to copy text to clipboard
|
||||||
|
function copyToClipboard(selector, messageSelector) {
|
||||||
|
const element = document.querySelector(selector);
|
||||||
|
if (element) {
|
||||||
|
const text = element.value;
|
||||||
|
if (navigator.clipboard && navigator.clipboard.writeText) {
|
||||||
|
navigator.clipboard.writeText(text).then(function() {
|
||||||
|
showCopyMessage(messageSelector);
|
||||||
|
}).catch(function(error) {
|
||||||
|
alert('Failed to copy text: ' + error);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
fallbackCopyToClipboard(text, messageSelector);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
console.error('Element not found for selector:', selector);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback method for copying text to clipboard
|
||||||
|
function fallbackCopyToClipboard(text, messageSelector) {
|
||||||
|
const textArea = document.createElement('textarea');
|
||||||
|
textArea.value = text;
|
||||||
|
document.body.appendChild(textArea);
|
||||||
|
textArea.focus();
|
||||||
|
textArea.select();
|
||||||
|
try {
|
||||||
|
document.execCommand('copy');
|
||||||
|
showCopyMessage(messageSelector);
|
||||||
|
} catch (err) {
|
||||||
|
alert('Fallback: Oops, unable to copy: ' + err);
|
||||||
|
}
|
||||||
|
document.body.removeChild(textArea);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to show copy confirmation message
|
||||||
|
function showCopyMessage(messageSelector) {
|
||||||
|
const message = document.querySelector(messageSelector);
|
||||||
|
if (message) {
|
||||||
|
message.style.display = 'block';
|
||||||
|
setTimeout(function() {
|
||||||
|
message.style.display = 'none';
|
||||||
|
}, 2000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Event listeners for copy buttons
|
||||||
|
document.getElementById('copy-chat-button').addEventListener('click', function() {
|
||||||
|
copyToClipboard('#chat-api-key', '#copy-chat-message');
|
||||||
|
});
|
||||||
|
|
||||||
|
document.getElementById('copy-api-button').addEventListener('click', function() {
|
||||||
|
copyToClipboard('#api-key', '#copy-message');
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
<script>
|
||||||
|
// JavaScript to detect user's timezone
|
||||||
|
document.addEventListener('DOMContentLoaded', (event) => {
|
||||||
|
// Detect timezone
|
||||||
|
const userTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
|
||||||
|
|
||||||
|
// Send timezone to the server via a POST request
|
||||||
|
fetch('/set_user_timezone', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ timezone: userTimezone })
|
||||||
|
}).then(response => {
|
||||||
|
if (response.ok) {
|
||||||
|
console.log('Timezone sent to server successfully');
|
||||||
|
} else {
|
||||||
|
console.error('Failed to send timezone to server');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endblock %}
|
||||||
@@ -10,13 +10,13 @@
|
|||||||
<form method="post">
|
<form method="post">
|
||||||
{{ form.hidden_tag() }}
|
{{ form.hidden_tag() }}
|
||||||
<!-- Main Tenant Information -->
|
<!-- Main Tenant Information -->
|
||||||
{% set main_fields = ['name', 'website', 'default_language', 'allowed_languages'] %}
|
{% set main_fields = ['name', 'website', 'default_language', 'allowed_languages', 'rag_context', 'type'] %}
|
||||||
{% for field in form %}
|
{% for field in form %}
|
||||||
{{ render_included_field(field, disabled_fields=main_fields, include_fields=main_fields) }}
|
{{ render_included_field(field, disabled_fields=main_fields, include_fields=main_fields) }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|
||||||
<!-- Nav Tabs -->
|
<!-- Nav Tabs -->
|
||||||
<div class="row">
|
<div class="row mt-5">
|
||||||
<div class="col-lg-12">
|
<div class="col-lg-12">
|
||||||
<div class="nav-wrapper position-relative end-0">
|
<div class="nav-wrapper position-relative end-0">
|
||||||
<ul class="nav nav-pills nav-fill p-1" role="tablist">
|
<ul class="nav nav-pills nav-fill p-1" role="tablist">
|
||||||
@@ -30,21 +30,6 @@
|
|||||||
License Information
|
License Information
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
<li class="nav-item">
|
|
||||||
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#chunking-tab" role="tab" aria-controls="chunking" aria-selected="false">
|
|
||||||
Chunking
|
|
||||||
</a>
|
|
||||||
</li>
|
|
||||||
<li class="nav-item">
|
|
||||||
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#embedding-search-tab" role="tab" aria-controls="html-chunking" aria-selected="false">
|
|
||||||
Embedding Search
|
|
||||||
</a>
|
|
||||||
</li>
|
|
||||||
<li class="nav-item">
|
|
||||||
<a class="nav-link mb-0 px-0 py-1" data-toggle="tab" href="#tuning-tab" role="tab" aria-controls="html-chunking" aria-selected="false">
|
|
||||||
Tuning
|
|
||||||
</a>
|
|
||||||
</li>
|
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
<div class="tab-content tab-space">
|
<div class="tab-content tab-space">
|
||||||
@@ -57,41 +42,27 @@
|
|||||||
</div>
|
</div>
|
||||||
<!-- License Information Tab -->
|
<!-- License Information Tab -->
|
||||||
<div class="tab-pane fade" id="license-info-tab" role="tabpanel">
|
<div class="tab-pane fade" id="license-info-tab" role="tabpanel">
|
||||||
{% set license_fields = ['license_start_date', 'license_end_date', 'allowed_monthly_interactions', ] %}
|
{% set license_fields = ['currency', 'usage_email', ] %}
|
||||||
{% for field in form %}
|
{% for field in form %}
|
||||||
{{ render_included_field(field, disabled_fields=license_fields, include_fields=license_fields) }}
|
{{ render_included_field(field, disabled_fields=license_fields, include_fields=license_fields) }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
<!-- Register API Key Button -->
|
<!-- Register API Key Button -->
|
||||||
|
<button type="button" class="btn btn-primary" onclick="generateNewChatApiKey()">Register Chat API Key</button>
|
||||||
<button type="button" class="btn btn-primary" onclick="generateNewApiKey()">Register API Key</button>
|
<button type="button" class="btn btn-primary" onclick="generateNewApiKey()">Register API Key</button>
|
||||||
<!-- API Key Display Field -->
|
<!-- API Key Display Field -->
|
||||||
|
<div id="chat-api-key-field" style="display:none;">
|
||||||
|
<label for="chat-api-key">Chat API Key:</label>
|
||||||
|
<input type="text" id="chat-api-key" class="form-control" readonly>
|
||||||
|
<button type="button" id="copy-chat-button" class="btn btn-primary">Copy to Clipboard</button>
|
||||||
|
<p id="copy-chat-message" style="display:none;color:green;">Chat API key copied to clipboard</p>
|
||||||
|
</div>
|
||||||
<div id="api-key-field" style="display:none;">
|
<div id="api-key-field" style="display:none;">
|
||||||
<label for="api-key">API Key:</label>
|
<label for="api-key">API Key:</label>
|
||||||
<input type="text" id="api-key" class="form-control" readonly>
|
<input type="text" id="api-key" class="form-control" readonly>
|
||||||
<button type="button" id="copy-button" class="btn btn-primary">Copy to Clipboard</button>
|
<button type="button" id="copy-api-button" class="btn btn-primary">Copy to Clipboard</button>
|
||||||
<p id="copy-message" style="display:none;color:green;">API key copied to clipboard</p>
|
<p id="copy-message" style="display:none;color:green;">API key copied to clipboard</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<!-- Chunking Settings Tab -->
|
|
||||||
<div class="tab-pane fade" id="chunking-tab" role="tabpanel">
|
|
||||||
{% set html_fields = ['html_tags', 'html_end_tags', 'html_included_elements', 'html_excluded_elements', 'min_chunk_size', 'max_chunk_size'] %}
|
|
||||||
{% for field in form %}
|
|
||||||
{{ render_included_field(field, disabled_fields=html_fields, include_fields=html_fields) }}
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
|
||||||
<!-- Embedding Search Settings Tab -->
|
|
||||||
<div class="tab-pane fade" id="embedding-search-tab" role="tabpanel">
|
|
||||||
{% set es_fields = ['es_k', 'es_similarity_threshold', ] %}
|
|
||||||
{% for field in form %}
|
|
||||||
{{ render_included_field(field, disabled_fields=es_fields, include_fields=es_fields) }}
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
|
||||||
<!-- Tuning Settings Tab -->
|
|
||||||
<div class="tab-pane fade" id="tuning-tab" role="tabpanel">
|
|
||||||
{% set tuning_fields = ['embed_tuning', 'rag_tuning', ] %}
|
|
||||||
{% for field in form %}
|
|
||||||
{{ render_included_field(field, disabled_fields=tuning_fields, include_fields=tuning_fields) }}
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -105,14 +76,25 @@
 {% block scripts %}
 <script>
+// Function to generate a new Chat API Key
+function generateNewChatApiKey() {
+    generateApiKey('/admin/user/generate_chat_api_key', '#chat-api-key', '#chat-api-key-field');
+}
+
+// Function to generate a new general API Key
 function generateNewApiKey() {
+    generateApiKey('/admin/user/generate_api_api_key', '#api-key', '#api-key-field');
+}
+
+// Reusable function to handle API key generation
+function generateApiKey(url, inputSelector, fieldSelector) {
     $.ajax({
-        url: '/user/generate_chat_api_key',
+        url: url,
         type: 'POST',
         contentType: 'application/json',
         success: function(response) {
-            $('#api-key').val(response.api_key);
+            $(inputSelector).val(response.api_key);
-            $('#api-key-field').show();
+            $(fieldSelector).show();
         },
         error: function(error) {
             alert('Error generating new API key: ' + error.responseText);
@@ -120,25 +102,27 @@
     });
 }

-function copyToClipboard(selector) {
+// Function to copy text to clipboard
+function copyToClipboard(selector, messageSelector) {
     const element = document.querySelector(selector);
     if (element) {
         const text = element.value;
         if (navigator.clipboard && navigator.clipboard.writeText) {
             navigator.clipboard.writeText(text).then(function() {
-                showCopyMessage();
+                showCopyMessage(messageSelector);
             }).catch(function(error) {
                 alert('Failed to copy text: ' + error);
             });
         } else {
-            fallbackCopyToClipboard(text);
+            fallbackCopyToClipboard(text, messageSelector);
         }
     } else {
         console.error('Element not found for selector:', selector);
     }
 }

-function fallbackCopyToClipboard(text) {
+// Fallback method for copying text to clipboard
+function fallbackCopyToClipboard(text, messageSelector) {
     const textArea = document.createElement('textarea');
     textArea.value = text;
     document.body.appendChild(textArea);
@@ -146,15 +130,16 @@
     textArea.select();
     try {
         document.execCommand('copy');
-        showCopyMessage();
+        showCopyMessage(messageSelector);
     } catch (err) {
         alert('Fallback: Oops, unable to copy', err);
     }
     document.body.removeChild(textArea);
 }

-function showCopyMessage() {
-    const message = document.getElementById('copy-message');
+// Function to show copy confirmation message
+function showCopyMessage(messageSelector) {
+    const message = document.querySelector(messageSelector);
     if (message) {
         message.style.display = 'block';
         setTimeout(function() {
@@ -163,8 +148,13 @@
     }
 }

-document.getElementById('copy-button').addEventListener('click', function() {
-    copyToClipboard('#api-key');
+// Event listeners for copy buttons
+document.getElementById('copy-chat-button').addEventListener('click', function() {
+    copyToClipboard('#chat-api-key', '#copy-chat-message');
+});
+
+document.getElementById('copy-api-button').addEventListener('click', function() {
+    copyToClipboard('#api-key', '#copy-message');
 });
 </script>
 <script>
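Note: the script above only assumes that each key-generation endpoint answers a POST with a JSON body carrying an api_key field (the success handler reads response.api_key). A minimal sketch of such an endpoint follows; the route path comes from the script, while the blueprint name and key generation are hypothetical and not part of this comparison.

# Hypothetical sketch -- only the route path is taken from the script above.
import secrets
from flask import Blueprint, jsonify

user_bp = Blueprint('user_bp', __name__, url_prefix='/user')  # assumed blueprint


@user_bp.route('/generate_chat_api_key', methods=['POST'])
def generate_chat_api_key():
    # The front-end only needs {"api_key": "..."} back; key persistence is not shown here.
    return jsonify(api_key=secrets.token_urlsafe(32))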
7  eveai_app/views/administration_forms.py  Normal file
@@ -0,0 +1,7 @@
from flask import current_app
from flask_wtf import FlaskForm
from wtforms.fields.simple import SubmitField


class TriggerActionForm(FlaskForm):
    submit = SubmitField('Submit')
39  eveai_app/views/administration_views.py  Normal file
@@ -0,0 +1,39 @@
import uuid
from datetime import datetime as dt, timezone as tz
from flask import request, redirect, flash, render_template, Blueprint, session, current_app, jsonify
from flask_security import hash_password, roles_required, roles_accepted, current_user
from itsdangerous import URLSafeTimedSerializer
from sqlalchemy.exc import SQLAlchemyError

from common.utils.celery_utils import current_celery
from common.utils.view_assistants import prepare_table_for_macro, form_validation_failed
from common.utils.nginx_utils import prefixed_url_for
from .administration_forms import TriggerActionForm

administration_bp = Blueprint('administration_bp', __name__, url_prefix='/administration')


@administration_bp.route('/trigger_actions', methods=['GET'])
@roles_accepted('Super User')
def trigger_actions():
    form = TriggerActionForm()
    return render_template('administration/trigger_actions.html', form=form)


@administration_bp.route('/handle_trigger_action', methods=['POST'])
@roles_accepted('Super User')
def handle_trigger_action():
    action = request.form['action']
    match action:
        case 'update_usages':
            try:
                # Use send_task to trigger the task since it's part of another component (eveai_entitlements)
                task = current_celery.send_task('update_usages', queue='entitlements')

                current_app.logger.info(f"Usage update task triggered: {task.id}")
                flash('Usage update task has been triggered successfully!', 'success')
            except Exception as e:
                current_app.logger.error(f"Failed to trigger usage update task: {str(e)}")
                flash(f'Failed to trigger usage update: {str(e)}', 'danger')

    return redirect(prefixed_url_for('administration_bp.trigger_actions'))
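The view above only sends the task by name; the task implementation lives in the eveai_entitlements component and is not part of this comparison. A rough consumer-side sketch follows, where only the task name and queue come from the code above and the body is a placeholder.

# Hypothetical sketch -- 'update_usages' and the 'entitlements' queue are taken from the view above.
from celery import shared_task


@shared_task(name='update_usages')
def update_usages():
    # Recalculate tenant usage figures; the real logic is in eveai_entitlements.
    ...

A worker started with -Q entitlements would pick this task up.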
@@ -1,40 +1,186 @@
|
|||||||
from flask import session
|
from flask import session, current_app, request
|
||||||
from flask_wtf import FlaskForm
|
from flask_wtf import FlaskForm
|
||||||
from wtforms import (StringField, BooleanField, SubmitField, DateField,
|
from wtforms import (StringField, BooleanField, SubmitField, DateField, IntegerField, FloatField, SelectMultipleField,
|
||||||
SelectField, FieldList, FormField, TextAreaField, URLField)
|
SelectField, FieldList, FormField, TextAreaField, URLField)
|
||||||
from wtforms.validators import DataRequired, Length, Optional, URL
|
from wtforms.validators import DataRequired, Length, Optional, URL, ValidationError, NumberRange
|
||||||
from flask_wtf.file import FileField, FileAllowed, FileRequired
|
from flask_wtf.file import FileField, FileAllowed, FileRequired
|
||||||
|
import json
|
||||||
|
|
||||||
|
from wtforms_sqlalchemy.fields import QuerySelectField
|
||||||
|
|
||||||
|
from common.extensions import db
|
||||||
|
from common.models.document import Catalog
|
||||||
|
|
||||||
|
from config.catalog_types import CATALOG_TYPES
|
||||||
|
from config.retriever_types import RETRIEVER_TYPES
|
||||||
|
from .dynamic_form_base import DynamicFormBase
|
||||||
|
|
||||||
|
|
||||||
class AddDocumentForm(FlaskForm):
|
def allowed_file(form, field):
|
||||||
file = FileField('File', validators=[FileAllowed(['pdf', 'txt', 'html']),
|
if field.data:
|
||||||
FileRequired()])
|
filename = field.data.filename
|
||||||
|
allowed_extensions = current_app.config.get('SUPPORTED_FILE_TYPES', [])
|
||||||
|
if not ('.' in filename and filename.rsplit('.', 1)[1].lower() in allowed_extensions):
|
||||||
|
raise ValidationError('Unsupported file type.')
|
||||||
|
|
||||||
|
|
||||||
|
def validate_json(form, field):
|
||||||
|
if field.data:
|
||||||
|
try:
|
||||||
|
json.loads(field.data)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
raise ValidationError('Invalid JSON format')
|
||||||
|
|
||||||
|
|
||||||
|
class CatalogForm(FlaskForm):
|
||||||
|
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
|
||||||
|
description = TextAreaField('Description', validators=[Optional()])
|
||||||
|
|
||||||
|
# Select Field for Catalog Type (Uses the CATALOG_TYPES defined in config)
|
||||||
|
type = SelectField('Catalog Type', validators=[DataRequired()])
|
||||||
|
|
||||||
|
# Metadata fields
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
system_metadata = TextAreaField('System Metadata', validators=[Optional(), validate_json])
|
||||||
|
configuration = TextAreaField('Configuration', validators=[Optional(), validate_json])
|
||||||
|
|
||||||
|
# HTML Embedding Variables
|
||||||
|
html_tags = StringField('HTML Tags', validators=[DataRequired()],
|
||||||
|
default='p, h1, h2, h3, h4, h5, h6, li, tbody, tr, td')
|
||||||
|
html_end_tags = StringField('HTML End Tags', validators=[DataRequired()],
|
||||||
|
default='p, li')
|
||||||
|
html_included_elements = StringField('HTML Included Elements', validators=[Optional()], default='article, main')
|
||||||
|
html_excluded_elements = StringField('HTML Excluded Elements', validators=[Optional()],
|
||||||
|
default='header, footer, nav, script')
|
||||||
|
html_excluded_classes = StringField('HTML Excluded Classes', validators=[Optional()])
|
||||||
|
min_chunk_size = IntegerField('Minimum Chunk Size (2000)', validators=[NumberRange(min=0), Optional()],
|
||||||
|
default=2000)
|
||||||
|
max_chunk_size = IntegerField('Maximum Chunk Size (3000)', validators=[NumberRange(min=0), Optional()],
|
||||||
|
default=3000)
|
||||||
|
# Chat Variables
|
||||||
|
chat_RAG_temperature = FloatField('RAG Temperature', default=0.3, validators=[NumberRange(min=0, max=1)])
|
||||||
|
chat_no_RAG_temperature = FloatField('No RAG Temperature', default=0.5, validators=[NumberRange(min=0, max=1)])
|
||||||
|
# Tuning variables
|
||||||
|
embed_tuning = BooleanField('Enable Embedding Tuning', default=False)
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
# Dynamically populate the 'type' field using the constructor
|
||||||
|
self.type.choices = [(key, value['name']) for key, value in CATALOG_TYPES.items()]
|
||||||
|
|
||||||
|
|
||||||
|
class EditCatalogForm(DynamicFormBase):
|
||||||
|
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
|
||||||
|
description = TextAreaField('Description', validators=[Optional()])
|
||||||
|
|
||||||
|
# Select Field for Catalog Type (Uses the CATALOG_TYPES defined in config)
|
||||||
|
type = StringField('Catalog Type', validators=[DataRequired()], render_kw={'readonly': True})
|
||||||
|
|
||||||
|
# Metadata fields
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
system_metadata = TextAreaField('System Metadata', validators=[Optional(), validate_json],)
|
||||||
|
|
||||||
|
# HTML Embedding Variables
|
||||||
|
html_tags = StringField('HTML Tags', validators=[DataRequired()],
|
||||||
|
default='p, h1, h2, h3, h4, h5, h6, li, tbody, tr, td')
|
||||||
|
html_end_tags = StringField('HTML End Tags', validators=[DataRequired()],
|
||||||
|
default='p, li')
|
||||||
|
html_included_elements = StringField('HTML Included Elements', validators=[Optional()], default='article, main')
|
||||||
|
html_excluded_elements = StringField('HTML Excluded Elements', validators=[Optional()],
|
||||||
|
default='header, footer, nav, script')
|
||||||
|
html_excluded_classes = StringField('HTML Excluded Classes', validators=[Optional()])
|
||||||
|
min_chunk_size = IntegerField('Minimum Chunk Size (2000)', validators=[NumberRange(min=0), Optional()],
|
||||||
|
default=2000)
|
||||||
|
max_chunk_size = IntegerField('Maximum Chunk Size (3000)', validators=[NumberRange(min=0), Optional()],
|
||||||
|
default=3000)
|
||||||
|
# Chat Variables
|
||||||
|
chat_RAG_temperature = FloatField('RAG Temperature', default=0.3, validators=[NumberRange(min=0, max=1)])
|
||||||
|
chat_no_RAG_temperature = FloatField('No RAG Temperature', default=0.5, validators=[NumberRange(min=0, max=1)])
|
||||||
|
# Tuning variables
|
||||||
|
embed_tuning = BooleanField('Enable Embedding Tuning', default=False)
|
||||||
|
|
||||||
|
|
||||||
|
class RetrieverForm(FlaskForm):
|
||||||
|
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
|
||||||
|
description = TextAreaField('Description', validators=[Optional()])
|
||||||
|
# Catalog for the Retriever
|
||||||
|
catalog = QuerySelectField(
|
||||||
|
'Catalog ID',
|
||||||
|
query_factory=lambda: Catalog.query.all(),
|
||||||
|
allow_blank=True,
|
||||||
|
get_label='name',
|
||||||
|
validators=[Optional()],
|
||||||
|
)
|
||||||
|
# Select Field for Retriever Type (Uses the RETRIEVER_TYPES defined in config)
|
||||||
|
type = SelectField('Retriever Type', validators=[DataRequired()])
|
||||||
|
tuning = BooleanField('Enable Tuning', default=False)
|
||||||
|
|
||||||
|
# Metadata fields
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
system_metadata = TextAreaField('System Metadata', validators=[Optional(), validate_json])
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
# Dynamically populate the 'type' field using the constructor
|
||||||
|
self.type.choices = [(key, value['name']) for key, value in RETRIEVER_TYPES.items()]
|
||||||
|
|
||||||
|
|
||||||
|
class EditRetrieverForm(DynamicFormBase):
|
||||||
|
name = StringField('Name', validators=[DataRequired(), Length(max=50)])
|
||||||
|
description = TextAreaField('Description', validators=[Optional()])
|
||||||
|
# Catalog for the Retriever
|
||||||
|
catalog = QuerySelectField(
|
||||||
|
'Catalog ID',
|
||||||
|
query_factory=lambda: Catalog.query.all(),
|
||||||
|
allow_blank=True,
|
||||||
|
get_label='name',
|
||||||
|
validators=[Optional()],
|
||||||
|
)
|
||||||
|
# Select Field for Retriever Type (Uses the RETRIEVER_TYPES defined in config)
|
||||||
|
type = SelectField('Retriever Type', validators=[DataRequired()], render_kw={'readonly': True})
|
||||||
|
tuning = BooleanField('Enable Tuning', default=False)
|
||||||
|
|
||||||
|
# Metadata fields
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
system_metadata = TextAreaField('System Metadata', validators=[Optional(), validate_json])
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
# Set the retriever type choices (loaded from config)
|
||||||
|
self.type.choices = [(key, value['name']) for key, value in RETRIEVER_TYPES.items()]
|
||||||
|
|
||||||
|
|
||||||
|
class AddDocumentForm(DynamicFormBase):
|
||||||
|
file = FileField('File', validators=[FileRequired(), allowed_file])
|
||||||
name = StringField('Name', validators=[Length(max=100)])
|
name = StringField('Name', validators=[Length(max=100)])
|
||||||
language = SelectField('Language', choices=[], validators=[Optional()])
|
language = SelectField('Language', choices=[], validators=[Optional()])
|
||||||
user_context = TextAreaField('User Context', validators=[Optional()])
|
user_context = TextAreaField('User Context', validators=[Optional()])
|
||||||
valid_from = DateField('Valid from', id='form-control datepicker', validators=[Optional()])
|
valid_from = DateField('Valid from', id='form-control datepicker', validators=[Optional()])
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
|
||||||
submit = SubmitField('Submit')
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
def __init__(self):
|
|
||||||
super().__init__()
|
|
||||||
self.language.choices = [(language, language) for language in
|
self.language.choices = [(language, language) for language in
|
||||||
session.get('tenant').get('allowed_languages')]
|
session.get('tenant').get('allowed_languages')]
|
||||||
|
if not self.language.data:
|
||||||
|
self.language.data = session.get('tenant').get('default_language')
|
||||||
|
|
||||||
|
|
||||||
class AddURLForm(FlaskForm):
|
class AddURLForm(DynamicFormBase):
|
||||||
url = URLField('URL', validators=[DataRequired(), URL()])
|
url = URLField('URL', validators=[DataRequired(), URL()])
|
||||||
name = StringField('Name', validators=[Length(max=100)])
|
name = StringField('Name', validators=[Length(max=100)])
|
||||||
language = SelectField('Language', choices=[], validators=[Optional()])
|
language = SelectField('Language', choices=[], validators=[Optional()])
|
||||||
user_context = TextAreaField('User Context', validators=[Optional()])
|
user_context = TextAreaField('User Context', validators=[Optional()])
|
||||||
valid_from = DateField('Valid from', id='form-control datepicker', validators=[Optional()])
|
valid_from = DateField('Valid from', id='form-control datepicker', validators=[Optional()])
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
|
||||||
submit = SubmitField('Submit')
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
def __init__(self):
|
|
||||||
super().__init__()
|
|
||||||
self.language.choices = [(language, language) for language in
|
self.language.choices = [(language, language) for language in
|
||||||
session.get('tenant').get('allowed_languages')]
|
session.get('tenant').get('allowed_languages')]
|
||||||
|
if not self.language.data:
|
||||||
|
self.language.data = session.get('tenant').get('default_language')
|
||||||
|
|
||||||
|
|
||||||
class AddURLsForm(FlaskForm):
|
class AddURLsForm(FlaskForm):
|
||||||
@@ -50,21 +196,8 @@ class AddURLsForm(FlaskForm):
|
|||||||
super().__init__()
|
super().__init__()
|
||||||
self.language.choices = [(language, language) for language in
|
self.language.choices = [(language, language) for language in
|
||||||
session.get('tenant').get('allowed_languages')]
|
session.get('tenant').get('allowed_languages')]
|
||||||
|
if not self.language.data:
|
||||||
|
self.language.data = session.get('tenant').get('default_language')
|
||||||
class AddYoutubeForm(FlaskForm):
|
|
||||||
url = URLField('Youtube URL', validators=[DataRequired(), URL()])
|
|
||||||
name = StringField('Name', validators=[Length(max=100)])
|
|
||||||
language = SelectField('Language', choices=[], validators=[Optional()])
|
|
||||||
user_context = TextAreaField('User Context', validators=[Optional()])
|
|
||||||
valid_from = DateField('Valid from', id='form-control datepicker', validators=[Optional()])
|
|
||||||
|
|
||||||
submit = SubmitField('Submit')
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super().__init__()
|
|
||||||
self.language.choices = [(language, language) for language in
|
|
||||||
session.get('tenant').get('allowed_languages')]
|
|
||||||
|
|
||||||
|
|
||||||
class EditDocumentForm(FlaskForm):
|
class EditDocumentForm(FlaskForm):
|
||||||
@@ -75,12 +208,11 @@ class EditDocumentForm(FlaskForm):
|
|||||||
submit = SubmitField('Submit')
|
submit = SubmitField('Submit')
|
||||||
|
|
||||||
|
|
||||||
class EditDocumentVersionForm(FlaskForm):
|
class EditDocumentVersionForm(DynamicFormBase):
|
||||||
language = StringField('Language')
|
language = StringField('Language')
|
||||||
user_context = TextAreaField('User Context', validators=[Optional()])
|
user_context = TextAreaField('User Context', validators=[Optional()])
|
||||||
system_context = TextAreaField('System Context', validators=[Optional()])
|
system_context = TextAreaField('System Context', validators=[Optional()])
|
||||||
|
user_metadata = TextAreaField('User Metadata', validators=[Optional(), validate_json])
|
||||||
|
system_metadata = TextAreaField('System Metadata', validators=[Optional(), validate_json])
|
||||||
|
|
||||||
submit = SubmitField('Submit')
|
submit = SubmitField('Submit')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
102  eveai_app/views/document_list_view.py  Normal file
@@ -0,0 +1,102 @@
from datetime import datetime
from flask import request, render_template, session
from sqlalchemy import desc, asc, or_, and_, cast, Integer
from common.models.document import Document, Catalog
from common.utils.filtered_list_view import FilteredListView
from common.utils.view_assistants import prepare_table_for_macro


class DocumentListView(FilteredListView):
    allowed_filters = ['catalog_id', 'validity']
    allowed_sorts = ['id', 'name', 'catalog_name', 'valid_from', 'valid_to']

    def get_query(self):
        return Document.query.join(Catalog).add_columns(
            Document.id,
            Document.name,
            Catalog.name.label('catalog_name'),
            Document.valid_from,
            Document.valid_to
        )

    def apply_filters(self, query):
        filters = request.args.to_dict(flat=False)

        if 'catalog_id' in filters:
            catalog_ids = filters['catalog_id']
            if catalog_ids:
                # Convert catalog_ids to a list of integers
                catalog_ids = [int(cid) for cid in catalog_ids if cid.isdigit()]
                if catalog_ids:
                    query = query.filter(Document.catalog_id.in_(catalog_ids))

        if 'validity' in filters:
            now = datetime.utcnow().date()
            if 'valid' in filters['validity']:
                query = query.filter(
                    and_(
                        or_(Document.valid_from.is_(None), Document.valid_from <= now),
                        or_(Document.valid_to.is_(None), Document.valid_to >= now)
                    )
                )

        return query

    def apply_sorting(self, query):
        sort_by = request.args.get('sort_by', 'id')
        sort_order = request.args.get('sort_order', 'asc')

        if sort_by in self.allowed_sorts:
            if sort_by == 'catalog_name':
                column = Catalog.name
            else:
                column = getattr(Document, sort_by)

            if sort_order == 'asc':
                query = query.order_by(asc(column))
            elif sort_order == 'desc':
                query = query.order_by(desc(column))

        return query

    def get(self):
        query = self.get_query()
        query = self.apply_filters(query)
        query = self.apply_sorting(query)
        pagination = self.paginate(query)

        def format_date(date):
            if isinstance(date, datetime):
                return date.strftime('%Y-%m-%d')
            elif isinstance(date, str):
                return date
            else:
                return ''

        rows = [
            [
                {'value': item.id, 'class': '', 'type': 'text'},
                {'value': item.name, 'class': '', 'type': 'text'},
                {'value': item.catalog_name, 'class': '', 'type': 'text'},
                {'value': format_date(item.valid_from), 'class': '', 'type': 'text'},
                {'value': format_date(item.valid_to), 'class': '', 'type': 'text'}
            ] for item in pagination.items
        ]

        catalogs = Catalog.query.all()

        context = {
            'rows': rows,
            'pagination': pagination,
            'filters': request.args.to_dict(flat=False),
            'sort_by': request.args.get('sort_by', 'id'),
            'sort_order': request.args.get('sort_order', 'asc'),
            'filter_options': self.get_filter_options(catalogs)
        }
        return render_template(self.template, **context)

    def get_filter_options(self, catalogs):
        return {
            'catalog_id': [(str(cat.id), cat.name) for cat in catalogs],
            'validity': [('valid', 'Valid'), ('all', 'All')]
        }
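DocumentListView builds on FilteredListView, which is imported from common/utils/filtered_list_view.py and not shown in this comparison. Based on how the view is constructed later in this change set (DocumentListView(Document, 'document/documents.html', per_page=10)) and on the self.template and self.paginate(query) calls above, the base class is assumed to look roughly like the sketch below.

# Hypothetical sketch of the assumed FilteredListView surface; the real base class is not in this diff.
from flask import request


class FilteredListView:
    allowed_filters = []
    allowed_sorts = []

    def __init__(self, model, template, per_page=10):
        self.model = model
        self.template = template
        self.per_page = per_page

    def paginate(self, query):
        # Flask-SQLAlchemy pagination driven by the page/per_page query parameters
        page = request.args.get('page', 1, type=int)
        per_page = request.args.get('per_page', self.per_page, type=int)
        return query.paginate(page=page, per_page=per_page, error_out=False)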
@@ -12,7 +12,7 @@ class DocumentVersionListView(FilteredListView):
     allowed_sorts = ['id', 'processing_started_at', 'processing_finished_at', 'processing_error']

     def get_query(self):
-        return DocumentVersion.query.join(Document).filter(Document.tenant_id == session.get('tenant', {}).get('id'))
+        return DocumentVersion.query.join(Document)

     def apply_filters(self, query):
         filters = request.args.to_dict()
@@ -1,30 +1,36 @@
 import ast
-import os
 from datetime import datetime as dt, timezone as tz

-import chardet
+from babel.messages.setuptools_frontend import update_catalog
 from flask import request, redirect, flash, render_template, Blueprint, session, current_app
 from flask_security import roles_accepted, current_user
 from sqlalchemy import desc
-from sqlalchemy.orm import joinedload
+from sqlalchemy.orm import aliased
-from werkzeug.datastructures import FileStorage
 from werkzeug.utils import secure_filename
 from sqlalchemy.exc import SQLAlchemyError
 import requests
 from requests.exceptions import SSLError
-from urllib.parse import urlparse
+from urllib.parse import urlparse, unquote
 import io
-from minio.error import S3Error
+import json

-from common.models.document import Document, DocumentVersion
+from common.models.document import Document, DocumentVersion, Catalog, Retriever
 from common.extensions import db, minio_client
-from .document_forms import AddDocumentForm, AddURLForm, EditDocumentForm, EditDocumentVersionForm, AddYoutubeForm, \
-    AddURLsForm
+from common.utils.document_utils import validate_file_type, create_document_stack, start_embedding_task, process_url, \
+    process_multiple_urls, get_documents_list, edit_document, \
+    edit_document_version, refresh_document
+from common.utils.eveai_exceptions import EveAIInvalidLanguageException, EveAIUnsupportedFileType, \
+    EveAIDoubleURLException
+from .document_forms import AddDocumentForm, AddURLForm, EditDocumentForm, EditDocumentVersionForm, AddURLsForm, \
+    CatalogForm, EditCatalogForm, RetrieverForm, EditRetrieverForm
 from common.utils.middleware import mw_before_request
 from common.utils.celery_utils import current_celery
 from common.utils.nginx_utils import prefixed_url_for
 from common.utils.view_assistants import form_validation_failed, prepare_table_for_macro, form_to_dict
+from .document_list_view import DocumentListView
 from .document_version_list_view import DocumentVersionListView
+from config.catalog_types import CATALOG_TYPES
+from config.retriever_types import RETRIEVER_TYPES

 document_bp = Blueprint('document_bp', __name__, url_prefix='/document')
@@ -52,34 +58,295 @@ def before_request():
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/catalog', methods=['GET', 'POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def catalog():
|
||||||
|
form = CatalogForm()
|
||||||
|
|
||||||
|
if form.validate_on_submit():
|
||||||
|
tenant_id = session.get('tenant').get('id')
|
||||||
|
new_catalog = Catalog()
|
||||||
|
form.populate_obj(new_catalog)
|
||||||
|
# Handle Embedding Variables
|
||||||
|
new_catalog.html_tags = [tag.strip() for tag in form.html_tags.data.split(',')] if form.html_tags.data else []
|
||||||
|
new_catalog.html_end_tags = [tag.strip() for tag in form.html_end_tags.data.split(',')] \
|
||||||
|
if form.html_end_tags.data else []
|
||||||
|
new_catalog.html_included_elements = [tag.strip() for tag in form.html_included_elements.data.split(',')] \
|
||||||
|
if form.html_included_elements.data else []
|
||||||
|
new_catalog.html_excluded_elements = [tag.strip() for tag in form.html_excluded_elements.data.split(',')] \
|
||||||
|
if form.html_excluded_elements.data else []
|
||||||
|
new_catalog.html_excluded_classes = [cls.strip() for cls in form.html_excluded_classes.data.split(',')] \
|
||||||
|
if form.html_excluded_classes.data else []
|
||||||
|
set_logging_information(new_catalog, dt.now(tz.utc))
|
||||||
|
|
||||||
|
try:
|
||||||
|
db.session.add(new_catalog)
|
||||||
|
db.session.commit()
|
||||||
|
flash('Catalog successfully added!', 'success')
|
||||||
|
current_app.logger.info(f'Catalog {new_catalog.name} successfully added for tenant {tenant_id}!')
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
flash(f'Failed to add catalog. Error: {e}', 'danger')
|
||||||
|
current_app.logger.error(f'Failed to add catalog {new_catalog.name} '
|
||||||
|
f'for tenant {tenant_id}. Error: {str(e)}')
|
||||||
|
|
||||||
|
return render_template('document/catalog.html', form=form)
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/catalogs', methods=['GET', 'POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def catalogs():
|
||||||
|
page = request.args.get('page', 1, type=int)
|
||||||
|
per_page = request.args.get('per_page', 10, type=int)
|
||||||
|
|
||||||
|
query = Catalog.query.order_by(Catalog.id)
|
||||||
|
|
||||||
|
pagination = query.paginate(page=page, per_page=per_page)
|
||||||
|
the_catalogs = pagination.items
|
||||||
|
|
||||||
|
# prepare table data
|
||||||
|
rows = prepare_table_for_macro(the_catalogs, [('id', ''), ('name', ''), ('type', '')])
|
||||||
|
|
||||||
|
# Render the catalogs in a template
|
||||||
|
return render_template('document/catalogs.html', rows=rows, pagination=pagination)
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/handle_catalog_selection', methods=['POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def handle_catalog_selection():
|
||||||
|
catalog_identification = request.form.get('selected_row')
|
||||||
|
catalog_id = ast.literal_eval(catalog_identification).get('value')
|
||||||
|
action = request.form['action']
|
||||||
|
catalog = Catalog.query.get_or_404(catalog_id)
|
||||||
|
|
||||||
|
if action == 'set_session_catalog':
|
||||||
|
current_app.logger.info(f'Setting session catalog to {catalog.name}')
|
||||||
|
session['catalog_id'] = catalog_id
|
||||||
|
session['catalog_name'] = catalog.name
|
||||||
|
current_app.logger.info(f'Finished setting session catalog to {catalog.name}')
|
||||||
|
elif action == 'edit_catalog':
|
||||||
|
return redirect(prefixed_url_for('document_bp.edit_catalog', catalog_id=catalog_id))
|
||||||
|
|
||||||
|
return redirect(prefixed_url_for('document_bp.catalogs'))
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/catalog/<int:catalog_id>', methods=['GET', 'POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def edit_catalog(catalog_id):
|
||||||
|
catalog = Catalog.query.get_or_404(catalog_id)
|
||||||
|
tenant_id = session.get('tenant').get('id')
|
||||||
|
|
||||||
|
form = EditCatalogForm(request.form, obj=catalog)
|
||||||
|
configuration_config = CATALOG_TYPES[catalog.type]["configuration"]
|
||||||
|
form.add_dynamic_fields("configuration", configuration_config, catalog.configuration)
|
||||||
|
|
||||||
|
# Convert arrays to comma-separated strings for display
|
||||||
|
if request.method == 'GET':
|
||||||
|
form.html_tags.data = ', '.join(catalog.html_tags or '')
|
||||||
|
form.html_end_tags.data = ', '.join(catalog.html_end_tags or '')
|
||||||
|
form.html_included_elements.data = ', '.join(catalog.html_included_elements or '')
|
||||||
|
form.html_excluded_elements.data = ', '.join(catalog.html_excluded_elements or '')
|
||||||
|
form.html_excluded_classes.data = ', '.join(catalog.html_excluded_classes or '')
|
||||||
|
|
||||||
|
if request.method == 'POST' and form.validate_on_submit():
|
||||||
|
form.populate_obj(catalog)
|
||||||
|
# Handle Embedding Variables
|
||||||
|
catalog.html_tags = [tag.strip() for tag in form.html_tags.data.split(',')] if form.html_tags.data else []
|
||||||
|
catalog.html_end_tags = [tag.strip() for tag in form.html_end_tags.data.split(',')] \
|
||||||
|
if form.html_end_tags.data else []
|
||||||
|
catalog.html_included_elements = [tag.strip() for tag in form.html_included_elements.data.split(',')] \
|
||||||
|
if form.html_included_elements.data else []
|
||||||
|
catalog.html_excluded_elements = [tag.strip() for tag in form.html_excluded_elements.data.split(',')] \
|
||||||
|
if form.html_excluded_elements.data else []
|
||||||
|
catalog.html_excluded_classes = [cls.strip() for cls in form.html_excluded_classes.data.split(',')] \
|
||||||
|
if form.html_excluded_classes.data else []
|
||||||
|
|
||||||
|
catalog.configuration = form.get_dynamic_data('configuration')
|
||||||
|
update_logging_information(catalog, dt.now(tz.utc))
|
||||||
|
try:
|
||||||
|
db.session.add(catalog)
|
||||||
|
db.session.commit()
|
||||||
|
flash('Catalog updated successfully!', 'success')
|
||||||
|
current_app.logger.info(f'Catalog {catalog.name} successfully updated for tenant {tenant_id}')
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
flash(f'Failed to update catalog. Error: {e}', 'danger')
|
||||||
|
current_app.logger.error(f'Failed to update catalog {catalog_id} for tenant {tenant_id}. Error: {str(e)}')
|
||||||
|
|
||||||
|
return redirect(prefixed_url_for('document_bp.catalogs'))
|
||||||
|
else:
|
||||||
|
form_validation_failed(request, form)
|
||||||
|
|
||||||
|
return render_template('document/edit_catalog.html', form=form, catalog_id=catalog_id)
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/retriever', methods=['GET', 'POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def retriever():
|
||||||
|
form = RetrieverForm()
|
||||||
|
|
||||||
|
if form.validate_on_submit():
|
||||||
|
tenant_id = session.get('tenant').get('id')
|
||||||
|
new_retriever = Retriever()
|
||||||
|
form.populate_obj(new_retriever)
|
||||||
|
new_retriever.catalog_id = form.catalog.data.id
|
||||||
|
|
||||||
|
set_logging_information(new_retriever, dt.now(tz.utc))
|
||||||
|
|
||||||
|
try:
|
||||||
|
db.session.add(new_retriever)
|
||||||
|
db.session.commit()
|
||||||
|
flash('Retriever successfully added!', 'success')
|
||||||
|
current_app.logger.info(f'Retriever {new_retriever.name} successfully added for tenant {tenant_id}!')
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
flash(f'Failed to add retriever. Error: {e}', 'danger')
|
||||||
|
current_app.logger.error(f'Failed to add retriever {new_retriever.name} '
|
||||||
|
f'for tenant {tenant_id}. Error: {str(e)}')
|
||||||
|
|
||||||
|
# Enable step 2 of creation of retriever - add configuration of the retriever (dependent on type)
|
||||||
|
return redirect(prefixed_url_for('document_bp.retriever', retriever_id=new_retriever.id))
|
||||||
|
|
||||||
|
return render_template('document/retriever.html', form=form)
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/retriever/<int:retriever_id>', methods=['GET', 'POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def edit_retriever(retriever_id):
|
||||||
|
"""Edit an existing retriever configuration."""
|
||||||
|
# Get the retriever or return 404
|
||||||
|
retriever = Retriever.query.get_or_404(retriever_id)
|
||||||
|
|
||||||
|
if retriever.catalog_id:
|
||||||
|
# If catalog_id is just an ID, fetch the Catalog object
|
||||||
|
retriever.catalog = Catalog.query.get(retriever.catalog_id)
|
||||||
|
else:
|
||||||
|
retriever.catalog = None
|
||||||
|
|
||||||
|
# Create form instance with the retriever
|
||||||
|
form = EditRetrieverForm(request.form, obj=retriever)
|
||||||
|
|
||||||
|
configuration_config = RETRIEVER_TYPES[retriever.type]["configuration"]
|
||||||
|
form.add_dynamic_fields("configuration", configuration_config, retriever.configuration)
|
||||||
|
if request.method == 'POST':
|
||||||
|
current_app.logger.debug(f'Received POST request with {request.form}')
|
||||||
|
|
||||||
|
if form.validate_on_submit():
|
||||||
|
# Update basic fields
|
||||||
|
form.populate_obj(retriever)
|
||||||
|
retriever.configuration = form.get_dynamic_data('configuration')
|
||||||
|
|
||||||
|
# Update catalog relationship
|
||||||
|
retriever.catalog_id = form.catalog.data.id if form.catalog.data else None
|
||||||
|
|
||||||
|
# Update logging information
|
||||||
|
update_logging_information(retriever, dt.now(tz.utc))
|
||||||
|
|
||||||
|
# Save changes to database
|
||||||
|
try:
|
||||||
|
db.session.add(retriever)
|
||||||
|
db.session.commit()
|
||||||
|
flash('Retriever updated successfully!', 'success')
|
||||||
|
current_app.logger.info(f'Retriever {retriever.id} updated successfully')
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
db.session.rollback()
|
||||||
|
flash(f'Failed to update retriever. Error: {str(e)}', 'danger')
|
||||||
|
current_app.logger.error(f'Failed to update retriever {retriever_id}. Error: {str(e)}')
|
||||||
|
return render_template('document/edit_retriever.html', form=form, retriever_id=retriever_id)
|
||||||
|
|
||||||
|
return redirect(prefixed_url_for('document_bp.retrievers'))
|
||||||
|
else:
|
||||||
|
form_validation_failed(request, form)
|
||||||
|
|
||||||
|
current_app.logger.debug(f"Rendering Template for {retriever_id}")
|
||||||
|
return render_template('document/edit_retriever.html', form=form, retriever_id=retriever_id)
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/retrievers', methods=['GET', 'POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def retrievers():
|
||||||
|
page = request.args.get('page', 1, type=int)
|
||||||
|
per_page = request.args.get('per_page', 10, type=int)
|
||||||
|
|
||||||
|
query = Retriever.query.order_by(Retriever.id)
|
||||||
|
|
||||||
|
pagination = query.paginate(page=page, per_page=per_page)
|
||||||
|
the_retrievers = pagination.items
|
||||||
|
|
||||||
|
# prepare table data
|
||||||
|
rows = prepare_table_for_macro(the_retrievers,
|
||||||
|
[('id', ''), ('name', ''), ('type', ''), ('catalog_id', '')])
|
||||||
|
|
||||||
|
# Render the catalogs in a template
|
||||||
|
return render_template('document/retrievers.html', rows=rows, pagination=pagination)
|
||||||
|
|
||||||
|
|
||||||
|
@document_bp.route('/handle_retriever_selection', methods=['POST'])
|
||||||
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
|
def handle_retriever_selection():
|
||||||
|
retriever_identification = request.form.get('selected_row')
|
||||||
|
retriever_id = ast.literal_eval(retriever_identification).get('value')
|
||||||
|
action = request.form['action']
|
||||||
|
|
||||||
|
if action == 'edit_retriever':
|
||||||
|
return redirect(prefixed_url_for('document_bp.edit_retriever', retriever_id=retriever_id))
|
||||||
|
|
||||||
|
return redirect(prefixed_url_for('document_bp.retrievers'))
|
||||||
|
|
||||||
|
|
||||||
@document_bp.route('/add_document', methods=['GET', 'POST'])
|
@document_bp.route('/add_document', methods=['GET', 'POST'])
|
||||||
@roles_accepted('Super User', 'Tenant Admin')
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
def add_document():
|
def add_document():
|
||||||
form = AddDocumentForm()
|
form = AddDocumentForm(request.form)
|
||||||
|
catalog_id = session.get('catalog_id', None)
|
||||||
|
if catalog_id is None:
|
||||||
|
flash('You need to set a Session Catalog before adding Documents or URLs')
|
||||||
|
return redirect(prefixed_url_for('document_bp.catalogs'))
|
||||||
|
|
||||||
|
catalog = Catalog.query.get_or_404(catalog_id)
|
||||||
|
if catalog.configuration and len(catalog.configuration) > 0:
|
||||||
|
document_version_configurations = CATALOG_TYPES[catalog.type]['document_version_configurations']
|
||||||
|
for config in document_version_configurations:
|
||||||
|
form.add_dynamic_fields(config, catalog.configuration[config])
|
||||||
|
|
||||||
# If the form is submitted
|
|
||||||
if form.validate_on_submit():
|
if form.validate_on_submit():
|
||||||
current_app.logger.info(f'Adding document for tenant {session["tenant"]["id"]}')
|
try:
|
||||||
file = form.file.data
|
current_app.logger.info(f'Adding Document for {catalog_id}')
|
||||||
filename = secure_filename(file.filename)
|
tenant_id = session['tenant']['id']
|
||||||
extension = filename.rsplit('.', 1)[1].lower()
|
file = form.file.data
|
||||||
form_dict = form_to_dict(form)
|
filename = secure_filename(file.filename)
|
||||||
|
extension = filename.rsplit('.', 1)[1].lower()
|
||||||
|
|
||||||
new_doc, new_doc_vers = create_document_stack(form_dict, file, filename, extension)
|
validate_file_type(extension)
|
||||||
|
|
||||||
task = current_celery.send_task('create_embeddings', queue='embeddings', args=[
|
catalog_properties = {}
|
||||||
session['tenant']['id'],
|
document_version_configurations = CATALOG_TYPES[catalog.type]['document_version_configurations']
|
||||||
new_doc_vers.id,
|
for config in document_version_configurations:
|
||||||
])
|
catalog_properties[config] = form.get_dynamic_data(config)
|
||||||
current_app.logger.info(f'Embedding creation started for tenant {session["tenant"]["id"]}, '
|
|
||||||
f'Document Version {new_doc_vers.id}. '
|
|
||||||
f'Embedding creation task: {task.id}')
|
|
||||||
flash(f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task.id}.',
|
|
||||||
'success')
|
|
||||||
|
|
||||||
return redirect(prefixed_url_for('document_bp.documents'))
|
api_input = {
|
||||||
else:
|
'catalog_id': catalog_id,
|
||||||
form_validation_failed(request, form)
|
'name': form.name.data,
|
||||||
|
'language': form.language.data,
|
||||||
|
'user_context': form.user_context.data,
|
||||||
|
'valid_from': form.valid_from.data,
|
||||||
|
'user_metadata': json.loads(form.user_metadata.data) if form.user_metadata.data else None,
|
||||||
|
'catalog_properties': catalog_properties,
|
||||||
|
}
|
||||||
|
current_app.logger.debug(f'Creating document stack with input {api_input}')
|
||||||
|
|
||||||
|
new_doc, new_doc_vers = create_document_stack(api_input, file, filename, extension, tenant_id)
|
||||||
|
task_id = start_embedding_task(tenant_id, new_doc_vers.id)
|
||||||
|
|
||||||
|
flash(f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task_id}.',
|
||||||
|
'success')
|
||||||
|
return redirect(prefixed_url_for('document_bp.documents'))
|
||||||
|
|
||||||
|
except (EveAIInvalidLanguageException, EveAIUnsupportedFileType) as e:
|
||||||
|
flash(str(e), 'error')
|
||||||
|
except Exception as e:
|
||||||
|
current_app.logger.error(f'Error adding document: {str(e)}')
|
||||||
|
flash('An error occurred while adding the document.', 'error')
|
||||||
|
|
||||||
return render_template('document/add_document.html', form=form)
|
return render_template('document/add_document.html', form=form)
|
||||||
|
|
||||||
@@ -87,183 +354,89 @@ def add_document():
|
|||||||
@document_bp.route('/add_url', methods=['GET', 'POST'])
|
@document_bp.route('/add_url', methods=['GET', 'POST'])
|
||||||
@roles_accepted('Super User', 'Tenant Admin')
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
def add_url():
|
def add_url():
|
||||||
form = AddURLForm()
|
form = AddURLForm(request.form)
|
||||||
|
catalog_id = session.get('catalog_id', None)
|
||||||
|
if catalog_id is None:
|
||||||
|
flash('You need to set a Session Catalog before adding Documents or URLs')
|
||||||
|
return redirect(prefixed_url_for('document_bp.catalogs'))
|
||||||
|
|
||||||
|
catalog = Catalog.query.get_or_404(catalog_id)
|
||||||
|
if catalog.configuration and len(catalog.configuration) > 0:
|
||||||
|
document_version_configurations = CATALOG_TYPES[catalog.type]['document_version_configurations']
|
||||||
|
for config in document_version_configurations:
|
||||||
|
form.add_dynamic_fields(config, catalog.configuration[config])
|
||||||
|
|
||||||
# If the form is submitted
|
|
||||||
if form.validate_on_submit():
|
if form.validate_on_submit():
|
||||||
current_app.logger.info(f'Adding url for tenant {session["tenant"]["id"]}')
|
try:
|
||||||
url = form.url.data
|
tenant_id = session['tenant']['id']
|
||||||
|
url = form.url.data
|
||||||
|
|
||||||
doc_vers = DocumentVersion.query.filter_by(url=url).all()
|
file_content, filename, extension = process_url(url, tenant_id)
|
||||||
if doc_vers:
|
|
||||||
current_app.logger.info(f'A document with url {url} already exists. No new document created.')
|
catalog_properties = {}
|
||||||
flash(f'A document with url {url} already exists. No new document created.', 'info')
|
document_version_configurations = CATALOG_TYPES[catalog.type]['document_version_configurations']
|
||||||
|
for config in document_version_configurations:
|
||||||
|
catalog_properties[config] = form.get_dynamic_data(config)
|
||||||
|
|
||||||
|
api_input = {
|
||||||
|
'catalog_id': catalog_id,
|
||||||
|
'name': form.name.data or filename,
|
||||||
|
'url': url,
|
||||||
|
'language': form.language.data,
|
||||||
|
'user_context': form.user_context.data,
|
||||||
|
'valid_from': form.valid_from.data,
|
||||||
|
'user_metadata': json.loads(form.user_metadata.data) if form.user_metadata.data else None,
|
||||||
|
'catalog_properties': catalog_properties,
|
||||||
|
}
|
||||||
|
|
||||||
|
new_doc, new_doc_vers = create_document_stack(api_input, file_content, filename, extension, tenant_id)
|
||||||
|
task_id = start_embedding_task(tenant_id, new_doc_vers.id)
|
||||||
|
|
||||||
|
flash(f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task_id}.',
|
||||||
|
'success')
|
||||||
return redirect(prefixed_url_for('document_bp.documents'))
|
return redirect(prefixed_url_for('document_bp.documents'))
|
||||||
# Only when no document with URL exists
|
|
||||||
html = fetch_html(url)
|
|
||||||
file = io.BytesIO(html)
|
|
||||||
|
|
||||||
parsed_url = urlparse(url)
|
except EveAIDoubleURLException:
|
||||||
path_parts = parsed_url.path.split('/')
|
flash(f'A document with url {url} already exists. No new document created.', 'info')
|
||||||
filename = path_parts[-1]
|
except (EveAIInvalidLanguageException, EveAIUnsupportedFileType) as e:
|
||||||
if filename == '':
|
flash(str(e), 'error')
|
||||||
filename = 'index'
|
except Exception as e:
|
||||||
if not filename.endswith('.html'):
|
current_app.logger.error(f'Error adding document: {str(e)}')
|
||||||
filename += '.html'
|
flash('An error occurred while adding the document.', 'error')
|
||||||
extension = 'html'
|
|
||||||
form_dict = form_to_dict(form)
|
|
||||||
|
|
||||||
new_doc, new_doc_vers = create_document_stack(form_dict, file, filename, extension)
|
|
||||||
|
|
||||||
task = current_celery.send_task('create_embeddings', queue='embeddings', args=[
|
|
||||||
session['tenant']['id'],
|
|
||||||
new_doc_vers.id,
|
|
||||||
])
|
|
||||||
current_app.logger.info(f'Embedding creation started for tenant {session["tenant"]["id"]}, '
|
|
||||||
f'Document Version {new_doc_vers.id}. '
|
|
||||||
f'Embedding creation task: {task.id}')
|
|
||||||
flash(f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task.id}.',
|
|
||||||
'success')
|
|
||||||
|
|
||||||
return redirect(prefixed_url_for('document_bp.documents'))
|
|
||||||
else:
|
|
||||||
form_validation_failed(request, form)
|
|
||||||
|
|
||||||
return render_template('document/add_url.html', form=form)
|
return render_template('document/add_url.html', form=form)
|
||||||
|
|
||||||
|
|
||||||
@document_bp.route('/add_urls', methods=['GET', 'POST'])
|
|
||||||
@roles_accepted('Super User', 'Tenant Admin')
|
|
||||||
def add_urls():
|
|
||||||
form = AddURLsForm()
|
|
||||||
|
|
||||||
if form.validate_on_submit():
|
|
||||||
urls = form.urls.data.split('\n')
|
|
||||||
urls = [url.strip() for url in urls if url.strip()]
|
|
||||||
|
|
||||||
for i, url in enumerate(urls):
|
|
||||||
try:
|
|
||||||
doc_vers = DocumentVersion.query.filter_by(url=url).all()
|
|
||||||
if doc_vers:
|
|
||||||
current_app.logger.info(f'A document with url {url} already exists. No new document created.')
|
|
||||||
flash(f'A document with url {url} already exists. No new document created.', 'info')
|
|
||||||
continue
|
|
||||||
|
|
||||||
html = fetch_html(url)
|
|
||||||
file = io.BytesIO(html)
|
|
||||||
|
|
||||||
parsed_url = urlparse(url)
|
|
||||||
path_parts = parsed_url.path.split('/')
|
|
||||||
filename = path_parts[-1] if path_parts[-1] else 'index'
|
|
||||||
if not filename.endswith('.html'):
|
|
||||||
filename += '.html'
|
|
||||||
|
|
||||||
# Use the name prefix if provided, otherwise use the filename
|
|
||||||
doc_name = f"{form.name.data}-{filename}" if form.name.data else filename
|
|
||||||
|
|
||||||
new_doc, new_doc_vers = create_document_stack({
|
|
||||||
'name': doc_name,
|
|
||||||
'url': url,
|
|
||||||
'language': form.language.data,
|
|
||||||
'user_context': form.user_context.data,
|
|
||||||
'valid_from': form.valid_from.data
|
|
||||||
}, file, filename, 'html')
|
|
||||||
|
|
||||||
task = current_celery.send_task('create_embeddings', queue='embeddings', args=[
|
|
||||||
session['tenant']['id'],
|
|
||||||
new_doc_vers.id,
|
|
||||||
])
|
|
||||||
current_app.logger.info(f'Embedding creation started for tenant {session["tenant"]["id"]}, '
|
|
||||||
f'Document Version {new_doc_vers.id}. '
|
|
||||||
f'Embedding creation task: {task.id}')
|
|
||||||
flash(f'Processing on document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task.id}.',
|
|
||||||
'success')
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
current_app.logger.error(f"Error processing URL {url}: {str(e)}")
|
|
||||||
flash(f'Error processing URL {url}: {str(e)}', 'danger')
|
|
||||||
|
|
||||||
return redirect(prefixed_url_for('document_bp.documents'))
|
|
||||||
else:
|
|
||||||
form_validation_failed(request, form)
|
|
||||||
|
|
||||||
return render_template('document/add_urls.html', form=form)
|
|
||||||
|
|
||||||
@document_bp.route('/add_youtube', methods=['GET', 'POST'])
|
|
||||||
@roles_accepted('Super User', 'Tenant Admin')
|
|
||||||
def add_youtube():
|
|
||||||
form = AddYoutubeForm()
|
|
||||||
|
|
||||||
if form.validate_on_submit():
|
|
||||||
current_app.logger.info(f'Adding Youtube document for tenant {session["tenant"]["id"]}')
|
|
||||||
url = form.url.data
|
|
||||||
current_app.logger.debug(f'Value of language field: {form.language.data}')
|
|
||||||
|
|
||||||
doc_vers = DocumentVersion.query.filter_by(url=url).all()
|
|
||||||
if doc_vers:
|
|
||||||
current_app.logger.info(f'A document with url {url} already exists. No new document created.')
|
|
||||||
flash(f'A document with url {url} already exists. No new document created.', 'info')
|
|
||||||
return redirect(prefixed_url_for('document_bp.documents'))
|
|
||||||
# As downloading a Youtube document can take quite some time, we offload this downloading to the worker
|
|
||||||
# We just pass a simple file to get things conform
|
|
||||||
file = "Youtube placeholder file"
|
|
||||||
|
|
||||||
filename = 'placeholder.youtube'
|
|
||||||
extension = 'youtube'
|
|
||||||
form_dict = form_to_dict(form)
|
|
||||||
current_app.logger.debug(f'Form data: {form_dict}')
|
|
||||||
|
|
||||||
new_doc, new_doc_vers = create_document_stack(form_dict, file, filename, extension)
|
|
||||||
|
|
||||||
task = current_celery.send_task('create_embeddings', queue='embeddings', args=[
|
|
||||||
session['tenant']['id'],
|
|
||||||
new_doc_vers.id,
|
|
||||||
])
|
|
||||||
current_app.logger.info(f'Processing and Embedding on Youtube document started for tenant '
|
|
||||||
f'{session["tenant"]["id"]}, '
|
|
||||||
f'Document Version {new_doc_vers.id}. '
|
|
||||||
f'Processing and Embedding Youtube task: {task.id}')
|
|
||||||
flash(f'Processing on Youtube document {new_doc.name}, version {new_doc_vers.id} started. Task ID: {task.id}.',
|
|
||||||
'success')
|
|
||||||
|
|
||||||
return redirect(prefixed_url_for('document_bp.documents'))
|
|
||||||
else:
|
|
||||||
form_validation_failed(request, form)
|
|
||||||
|
|
||||||
return render_template('document/add_youtube.html', form=form)
|
|
||||||
|
|
||||||
|
|
||||||
@document_bp.route('/documents', methods=['GET', 'POST'])
|
@document_bp.route('/documents', methods=['GET', 'POST'])
|
||||||
@roles_accepted('Super User', 'Tenant Admin')
|
@roles_accepted('Super User', 'Tenant Admin')
|
||||||
def documents():
|
def documents():
|
||||||
page = request.args.get('page', 1, type=int)
|
view = DocumentListView(Document, 'document/documents.html', per_page=10)
|
||||||
per_page = request.args.get('per_page', 10, type=int)
|
return view.get()
|
||||||
|
|
||||||
query = Document.query.order_by(desc(Document.created_at))
|
|
||||||
|
|
||||||
pagination = query.paginate(page=page, per_page=per_page, error_out=False)
|
|
||||||
docs = pagination.items
|
|
||||||
|
|
||||||
rows = prepare_table_for_macro(docs, [('id', ''), ('name', ''), ('valid_from', ''), ('valid_to', '')])
|
|
||||||
|
|
||||||
return render_template('document/documents.html', rows=rows, pagination=pagination)
|
|
||||||
|
|
||||||
|
|
||||||
@document_bp.route('/handle_document_selection', methods=['POST'])
@roles_accepted('Super User', 'Tenant Admin')
def handle_document_selection():
    document_identification = request.form['selected_row']
-    doc_id = ast.literal_eval(document_identification).get('value')
+    if isinstance(document_identification, int) or document_identification.isdigit():
+        doc_id = int(document_identification)
+    else:
+        # If it's not an integer, assume it's a string representation of a dictionary
+        try:
+            doc_id = ast.literal_eval(document_identification).get('value')
+        except (ValueError, AttributeError):
+            flash('Invalid document selection.', 'error')
+            return redirect(prefixed_url_for('document_bp.documents'))

    action = request.form['action']

    match action:
        case 'edit_document':
-            return redirect(prefixed_url_for('document_bp.edit_document', document_id=doc_id))
+            return redirect(prefixed_url_for('document_bp.edit_document_view', document_id=doc_id))
        case 'document_versions':
            return redirect(prefixed_url_for('document_bp.document_versions', document_id=doc_id))
        case 'refresh_document':
-            refresh_document(doc_id)
+            refresh_document_view(doc_id)
            return redirect(prefixed_url_for('document_bp.document_versions', document_id=doc_id))
        case 're_embed_latest_versions':
            re_embed_latest_versions()

@@ -274,51 +447,77 @@ def handle_document_selection():

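For reference, the hardened selected_row handling above accepts either a bare id or the stringified dict the table posts back; an illustrative stand-alone equivalent (the helper name is invented here, only the parsing rules come from the diff):

import ast

def parse_selected_row(selected_row):
    # Illustrative helper mirroring the branch above (not part of the diff).
    if isinstance(selected_row, int) or selected_row.isdigit():
        return int(selected_row)
    return ast.literal_eval(selected_row).get('value')

assert parse_selected_row("5") == 5
assert parse_selected_row("{'value': 5}") == 5
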
@document_bp.route('/edit_document/<int:document_id>', methods=['GET', 'POST'])
@roles_accepted('Super User', 'Tenant Admin')
-def edit_document(document_id):
-    doc = Document.query.get_or_404(document_id)
+def edit_document_view(document_id):
+    # Use an alias for the Catalog to avoid column name conflicts
+    CatalogAlias = aliased(Catalog)
+
+    # Query for the document and its catalog
+    result = db.session.query(Document, CatalogAlias.name.label('catalog_name')) \
+        .join(CatalogAlias, Document.catalog_id == CatalogAlias.id) \
+        .filter(Document.id == document_id) \
+        .first_or_404()
+
+    doc, catalog_name = result
+
    form = EditDocumentForm(obj=doc)

+    if request.method == 'GET':
+        # Populate form with current values
+        form.name.data = doc.name
+        form.valid_from.data = doc.valid_from
+        form.valid_to.data = doc.valid_to
+
    if form.validate_on_submit():
-        doc.name = form.name.data
-        doc.valid_from = form.valid_from.data
-        doc.valid_to = form.valid_to.data
-
-        update_logging_information(doc, dt.now(tz.utc))
-
-        try:
-            db.session.add(doc)
-            db.session.commit()
-            flash(f'Document {doc.id} updated successfully', 'success')
-        except SQLAlchemyError as e:
-            db.session.rollback()
-            flash(f'Error updating document: {e}', 'danger')
-            current_app.logger.error(f'Error updating document: {e}')
+        updated_doc, error = edit_document(
+            document_id,
+            form.name.data,
+            form.valid_from.data,
+            form.valid_to.data
+        )
+        if updated_doc:
+            flash(f'Document {updated_doc.id} updated successfully', 'success')
+            return redirect(prefixed_url_for('document_bp.documents'))
+        else:
+            flash(f'Error updating document: {error}', 'danger')
    else:
        form_validation_failed(request, form)

-    return render_template('document/edit_document.html', form=form, document_id=document_id)
+    return render_template('document/edit_document.html', form=form, document_id=document_id, catalog_name=catalog_name)

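The rewritten view hands persistence off to an edit_document() service helper whose implementation is outside this hunk. Judging only from the call site above (it returns an (updated_doc, error) pair), a sketch of such a helper might look like the following; the body mirrors the removed inline logic and is an assumption, not the committed code:

def edit_document(document_id, name, valid_from, valid_to):
    """Hypothetical service-layer update; returns (document, None) on success or (None, error)."""
    doc = db.session.get(Document, document_id)
    if doc is None:
        return None, f'Document {document_id} not found'
    doc.name = name
    doc.valid_from = valid_from
    doc.valid_to = valid_to
    update_logging_information(doc, dt.now(tz.utc))
    try:
        db.session.commit()
        return doc, None
    except SQLAlchemyError as e:
        db.session.rollback()
        current_app.logger.error(f'Error updating document {document_id}: {e}')
        return None, str(e)
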
@document_bp.route('/edit_document_version/<int:document_version_id>', methods=['GET', 'POST'])
@roles_accepted('Super User', 'Tenant Admin')
-def edit_document_version(document_version_id):
+def edit_document_version_view(document_version_id):
    doc_vers = DocumentVersion.query.get_or_404(document_version_id)
-    form = EditDocumentVersionForm(obj=doc_vers)
+    form = EditDocumentVersionForm(request.form, obj=doc_vers)
+
+    catalog_id = session.get('catalog_id', None)
+    if catalog_id is None:
+        flash('You need to set a Session Catalog before adding Documents or URLs')
+        return redirect(prefixed_url_for('document_bp.catalogs'))
+
+    catalog = Catalog.query.get_or_404(catalog_id)
+    if catalog.configuration and len(catalog.configuration) > 0:
+        document_version_configurations = CATALOG_TYPES[catalog.type]['document_version_configurations']
+        for config in document_version_configurations:
+            form.add_dynamic_fields(config, catalog.configuration[config], doc_vers.catalog_properties[config])

    if form.validate_on_submit():
-        doc_vers.user_context = form.user_context.data
-
-        update_logging_information(doc_vers, dt.now(tz.utc))
-
-        try:
-            db.session.add(doc_vers)
-            db.session.commit()
-            flash(f'Document Version {doc_vers.id} updated successfully', 'success')
-        except SQLAlchemyError as e:
-            db.session.rollback()
-            flash(f'Error updating document version: {e}', 'danger')
-            current_app.logger.error(f'Error updating document version {doc_vers.id} '
-                                     f'for tenant {session['tenant']['id']}: {e}')
+        catalog_properties = {}
+        document_version_configurations = CATALOG_TYPES[catalog.type]['document_version_configurations']
+        for config in document_version_configurations:
+            catalog_properties[config] = form.get_dynamic_data(config)
+
+        updated_version, error = edit_document_version(
+            document_version_id,
+            form.user_context.data,
+            catalog_properties,
+        )
+        if updated_version:
+            flash(f'Document Version {updated_version.id} updated successfully', 'success')
+            return redirect(prefixed_url_for('document_bp.document_versions', document_id=updated_version.doc_id))
+        else:
+            flash(f'Error updating document version: {error}', 'danger')
    else:
        form_validation_failed(request, form)

@@ -329,8 +528,8 @@ def edit_document_version(document_version_id):

@document_bp.route('/document_versions/<int:document_id>', methods=['GET', 'POST'])
@roles_accepted('Super User', 'Tenant Admin')
def document_versions(document_id):
-    doc_vers = DocumentVersion.query.get_or_404(document_id)
-    doc_desc = f'Document {doc_vers.document.name}, Language {doc_vers.language}'
+    doc = Document.query.get_or_404(document_id)
+    doc_desc = f'Document {doc.name}'

    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 10, type=int)
@@ -342,8 +541,8 @@ def document_versions(document_id):
    pagination = query.paginate(page=page, per_page=per_page, error_out=False)
    doc_langs = pagination.items

-    rows = prepare_table_for_macro(doc_langs, [('id', ''), ('url', ''), ('file_location', ''),
-                                               ('file_name', ''), ('file_type', ''),
+    rows = prepare_table_for_macro(doc_langs, [('id', ''), ('url', ''),
+                                               ('object_name', ''), ('file_type', ''),
                                                ('processing', ''), ('processing_started_at', ''),
                                                ('processing_finished_at', ''), ('processing_error', '')])

@@ -354,13 +553,23 @@ def document_versions(document_id):

@roles_accepted('Super User', 'Tenant Admin')
def handle_document_version_selection():
    document_version_identification = request.form['selected_row']
-    doc_vers_id = ast.literal_eval(document_version_identification).get('value')
+    if isinstance(document_version_identification, int) or document_version_identification.isdigit():
+        doc_vers_id = int(document_version_identification)
+    else:
+        # If it's not an integer, assume it's a string representation of a dictionary
+        try:
+            doc_vers_id = ast.literal_eval(document_version_identification).get('value')
+        except (ValueError, AttributeError):
+            flash('Invalid document version selection.', 'error')
+            return redirect(prefixed_url_for('document_bp.document_versions_list'))

    action = request.form['action']

+    current_app.logger.debug(f'Triggered Document Version Action: {action}')
+
    match action:
        case 'edit_document_version':
-            return redirect(prefixed_url_for('document_bp.edit_document_version', document_version_id=doc_vers_id))
+            return redirect(prefixed_url_for('document_bp.edit_document_version_view', document_version_id=doc_vers_id))
        case 'process_document_version':
            process_version(doc_vers_id)
            # Add more conditions for other actions

@@ -403,55 +612,13 @@ def refresh_all_documents():

        refresh_document(doc.id)


-def refresh_document(doc_id):
-    doc = Document.query.get_or_404(doc_id)
-    doc_vers = DocumentVersion.query.filter_by(doc_id=doc_id).order_by(desc(DocumentVersion.id)).first()
-    if not doc_vers.url:
-        current_app.logger.info(f'Document {doc_id} has no URL, skipping refresh')
-        flash(f'This document has no URL. I can only refresh documents with a URL. skipping refresh', 'alert')
-        return
-
-    new_doc_vers = create_version_for_document(doc, doc_vers.url, doc_vers.language, doc_vers.user_context)
-
-    try:
-        db.session.add(new_doc_vers)
-        db.session.commit()
-    except SQLAlchemyError as e:
-        current_app.logger.error(f'Error refreshing document {doc_id} for tenant {session["tenant"]["id"]}: {e}')
-        flash('Error refreshing document.', 'alert')
-        db.session.rollback()
-        error = e.args
-        raise
-    except Exception as e:
-        current_app.logger.error('Unknown error')
-        raise
-
-    html = fetch_html(new_doc_vers.url)
-    file = io.BytesIO(html)
-
-    parsed_url = urlparse(new_doc_vers.url)
-    path_parts = parsed_url.path.split('/')
-    filename = path_parts[-1]
-    if filename == '':
-        filename = 'index'
-    if not filename.endswith('.html'):
-        filename += '.html'
-    extension = 'html'
-
-    current_app.logger.info(f'Document added successfully for tenant {session["tenant"]["id"]}, '
-                            f'Document Version {new_doc_vers.id}')
-
-    upload_file_for_version(new_doc_vers, file, extension)
-
-    task = current_celery.send_task('create_embeddings', queue='embeddings', args=[
-        session['tenant']['id'],
-        new_doc_vers.id,
-    ])
-    current_app.logger.info(f'Embedding creation started for tenant {session["tenant"]["id"]}, '
-                            f'Document Version {new_doc_vers.id}. '
-                            f'Embedding creation task: {task.id}')
-    flash(f'Processing on document {doc.name}, version {new_doc_vers.id} started. Task ID: {task.id}.',
-          'success')
+def refresh_document_view(document_id):
+    new_version, result = refresh_document(document_id, session['tenant']['id'])
+    if new_version:
+        flash(f'Document refreshed. New version: {new_version.id}. Task ID: {result}', 'success')
+    else:
+        flash(f'Error refreshing document: {result}', 'danger')
+    return redirect(prefixed_url_for('document_bp.documents'))


def re_embed_latest_versions():
@@ -463,10 +630,9 @@ def re_embed_latest_versions():

def process_version(version_id):
-    task = current_celery.send_task('create_embeddings', queue='embeddings', args=[
-        session['tenant']['id'],
-        version_id,
-    ])
+    task = current_celery.send_task('create_embeddings',
+                                    args=[session['tenant']['id'], version_id,],
+                                    queue='embeddings')
    current_app.logger.info(f'Embedding creation retriggered by user {current_user.id}, {current_user.email} '
                            f'for tenant {session["tenant"]["id"]}, '
                            f'Document Version {version_id}. '

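Since send_task() dispatches by task name only, the worker side just needs a task registered as 'create_embeddings' that consumes the 'embeddings' queue. A minimal worker-side sketch, where the decorator choice and module placement are assumptions; only the task name and the (tenant_id, document_version_id) argument order come from this diff:

from celery import shared_task

@shared_task(name='create_embeddings')
def create_embeddings(tenant_id, document_version_id):
    """Illustrative counterpart of the send_task calls above; the real task lives in the embeddings worker."""
    # Routed to the 'embeddings' queue by the caller via send_task(..., queue='embeddings').
    return {'tenant_id': tenant_id, 'document_version_id': document_version_id}
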
@@ -489,116 +655,11 @@ def update_logging_information(obj, timestamp):
    obj.updated_by = current_user.id


-def create_document_stack(form, file, filename, extension):
-    # Create the Document
-    new_doc = create_document(form, filename)
-
-    # Create the DocumentVersion
-    new_doc_vers = create_version_for_document(new_doc,
-                                               form.get('url', ''),
-                                               form.get('language', 'en'),
-                                               form.get('user_context', '')
-                                               )
-
-    try:
-        db.session.add(new_doc)
-        db.session.add(new_doc_vers)
-        db.session.commit()
-    except SQLAlchemyError as e:
-        current_app.logger.error(f'Error adding document for tenant {session["tenant"]["id"]}: {e}')
-        flash('Error adding document.', 'alert')
-        db.session.rollback()
-        error = e.args
-        raise
-    except Exception as e:
-        current_app.logger.error('Unknown error')
-        raise
-
-    current_app.logger.info(f'Document added successfully for tenant {session["tenant"]["id"]}, '
-                            f'Document Version {new_doc.id}')
-
-    upload_file_for_version(new_doc_vers, file, extension)
-
-    return new_doc, new_doc_vers
-
-
def log_session_state(session, msg=""):
    current_app.logger.debug(f"{msg} - Session dirty: {session.dirty}")
    current_app.logger.debug(f"{msg} - Session new: {session.new}")


-def create_document(form, filename):
-    new_doc = Document()
-    if form['name'] == '':
-        new_doc.name = filename.rsplit('.', 1)[0]
-    else:
-        new_doc.name = form['name']
-
-    if form['valid_from'] and form['valid_from'] != '':
-        new_doc.valid_from = form['valid_from']
-    else:
-        new_doc.valid_from = dt.now(tz.utc)
-    new_doc.tenant_id = session['tenant']['id']
-    set_logging_information(new_doc, dt.now(tz.utc))
-
-    return new_doc
-
-
-def create_version_for_document(document, url, language, user_context):
-    new_doc_vers = DocumentVersion()
-    if url != '':
-        new_doc_vers.url = url
-
-    if language == '':
-        new_doc_vers.language = session['default_language']
-    else:
-        new_doc_vers.language = language
-
-    if user_context != '':
-        new_doc_vers.user_context = user_context
-
-    new_doc_vers.document = document
-
-    set_logging_information(new_doc_vers, dt.now(tz.utc))
-
-    return new_doc_vers
-
-
-def upload_file_for_version(doc_vers, file, extension):
-    doc_vers.file_type = extension
-    doc_vers.file_name = doc_vers.calc_file_name()
-    doc_vers.file_location = doc_vers.calc_file_location()
-
-    # Normally, the tenant bucket should exist. But let's be on the safe side if a migration took place.
-    tenant_id = session['tenant']['id']
-    minio_client.create_tenant_bucket(tenant_id)
-
-    try:
-        minio_client.upload_document_file(
-            tenant_id,
-            doc_vers.doc_id,
-            doc_vers.language,
-            doc_vers.id,
-            doc_vers.file_name,
-            file
-        )
-        db.session.commit()
-        current_app.logger.info(f'Successfully saved document to MinIO for tenant {tenant_id} for '
-                                f'document version {doc_vers.id} while uploading file.')
-    except S3Error as e:
-        db.session.rollback()
-        flash('Error saving document to MinIO.', 'error')
-        current_app.logger.error(
-            f'Error saving document to MinIO for tenant {tenant_id}: {e}')
-        raise
-    except SQLAlchemyError as e:
-        db.session.rollback()
-        flash('Error saving document metadata.', 'error')
-        current_app.logger.error(
-            f'Error saving document metadata for tenant {tenant_id}: {e}')
-        raise
-
-
def fetch_html(url):
    # Fetches HTML content from a URL
    try:
@@ -615,49 +676,3 @@ def fetch_html(url):
        response.raise_for_status()  # Will raise an exception for bad requests
        return response.content


-def prepare_document_data(docs):
-    rows = []
-    for doc in docs:
-        doc_row = [{'value': doc.name, 'class': '', 'type': 'text'},
-                   {'value': doc.created_at.strftime("%Y-%m-%d %H:%M:%S"), 'class': '', 'type': 'text'}]
-        # Document basic details
-        if doc.valid_from:
-            doc_row.append({'value': doc.valid_from.strftime("%Y-%m-%d"), 'class': '', 'type': 'text'})
-        else:
-            doc_row.append({'value': '', 'class': '', 'type': 'text'})
-
-        # Nested languages and versions
-        languages_rows = []
-        for lang in doc.languages:
-            lang_row = [{'value': lang.language, 'class': '', 'type': 'text'}]
-
-            # Latest version details if available (should be available ;-) )
-            if lang.latest_version:
-                lang_row.append({'value': lang.latest_version.created_at.strftime("%Y-%m-%d %H:%M:%S"),
-                                 'class': '', 'type': 'text'})
-                if lang.latest_version.url:
-                    lang_row.append({'value': lang.latest_version.url,
-                                     'class': '', 'type': 'link', 'href': lang.latest_version.url})
-                else:
-                    lang_row.append({'value': '', 'class': '', 'type': 'text'})
-
-                if lang.latest_version.file_name:
-                    lang_row.append({'value': lang.latest_version.file_name, 'class': '', 'type': 'text'})
-                else:
-                    lang_row.append({'value': '', 'class': '', 'type': 'text'})
-
-                if lang.latest_version.file_type:
-                    lang_row.append({'value': lang.latest_version.file_type, 'class': '', 'type': 'text'})
-                else:
-                    lang_row.append({'value': '', 'class': '', 'type': 'text'})
-                # Include other details as necessary
-
-            languages_rows.append(lang_row)
-
-        doc_row.append({'is_group': True, 'colspan': '5',
-                        'headers': ['Language', 'Latest Version', 'URL', 'File Name', 'Type'],
-                        'sub_rows': languages_rows})
-        rows.append(doc_row)
-    return rows

216 eveai_app/views/dynamic_form_base.py Normal file
@@ -0,0 +1,216 @@
from flask_wtf import FlaskForm
from wtforms import IntegerField, FloatField, BooleanField, StringField, TextAreaField, validators, ValidationError
from flask import current_app
import json

from wtforms.fields.choices import SelectField
from wtforms.fields.datetime import DateField


class DynamicFormBase(FlaskForm):
    def __init__(self, formdata=None, *args, **kwargs):
        super(DynamicFormBase, self).__init__(*args, **kwargs)
        # Maps collection names to lists of field names
        self.dynamic_fields = {}
        # Store formdata for later use
        self.formdata = formdata

    def _create_field_validators(self, field_def):
        """Create validators based on field definition"""
        validators_list = []

        # Required validator
        if field_def.get('required', False):
            validators_list.append(validators.InputRequired())
        else:
            validators_list.append(validators.Optional())

        # Type-specific validators
        field_type = field_def.get('type')
        if field_type in ['integer', 'float']:
            min_value = field_def.get('min_value')
            max_value = field_def.get('max_value')
            if min_value is not None or max_value is not None:
                validators_list.append(
                    validators.NumberRange(
                        min=min_value if min_value is not None else -float('inf'),
                        max=max_value if max_value is not None else float('inf'),
                        message=f"Value must be between {min_value or '-∞'} and {max_value or '∞'}"
                    )
                )

        return validators_list

    def add_dynamic_fields(self, collection_name, config, initial_data=None):
        """Add dynamic fields to the form based on the configuration."""
        self.dynamic_fields[collection_name] = []
        for field_name, field_def in config.items():
            current_app.logger.debug(f"{field_name}: {field_def}")
            # Prefix the field name with the collection name
            full_field_name = f"{collection_name}_{field_name}"
            label = field_def.get('name', field_name)
            field_type = field_def.get('type')
            description = field_def.get('description', '')
            default = field_def.get('default')

            # Determine standard validators
            field_validators = self._create_field_validators(field_def)

            # Handle special case for tagging_fields
            if field_type == 'tagging_fields':
                field_class = TextAreaField
                field_validators.append(validate_tagging_fields)
                extra_classes = 'json-editor'
                field_kwargs = {}
            elif field_type == 'enum':
                field_class = SelectField
                allowed_values = field_def.get('allowed_values', [])
                choices = [(str(val), str(val)) for val in allowed_values]
                extra_classes = ''
                field_kwargs = {'choices': choices}
            else:
                extra_classes = ''
                field_class = {
                    'integer': IntegerField,
                    'float': FloatField,
                    'boolean': BooleanField,
                    'string': StringField,
                    'date': DateField,
                }.get(field_type, StringField)
                field_kwargs = {}

            # Prepare field data
            field_data = None
            if initial_data and field_name in initial_data:
                field_data = initial_data[field_name]
                if field_type == 'tagging_fields' and isinstance(field_data, dict):
                    try:
                        field_data = json.dumps(field_data, indent=2)
                    except (TypeError, ValueError) as e:
                        current_app.logger.error(f"Error converting initial data to JSON: {e}")
                        field_data = "{}"
            elif default is not None:
                field_data = default

            # Create render_kw with classes and any other HTML attributes
            render_kw = {'class': extra_classes} if extra_classes else {}
            if description:
                render_kw['title'] = description  # For tooltip
                render_kw['data-bs-toggle'] = 'tooltip'
                render_kw['data-bs-placement'] = 'right'

            # Create the field
            field_kwargs.update({
                'label': label,
                'description': description,
                'validators': field_validators,
                'default': field_data,
                'render_kw': render_kw
            })

            unbound_field = field_class(**field_kwargs)

            # Bind the field to the form
            bound_field = unbound_field.bind(form=self, name=full_field_name)

            # Process the field with formdata
            if self.formdata and full_field_name in self.formdata:
                bound_field.process(self.formdata)
            else:
                bound_field.process(formdata=None, data=field_data)  # Use prepared field_data

            # Add the field to the form
            setattr(self, full_field_name, bound_field)
            self._fields[full_field_name] = bound_field
            self.dynamic_fields[collection_name].append(full_field_name)

    def get_static_fields(self):
        """Return a list of static field instances."""
        # Get names of dynamic fields
        dynamic_field_names = set()
        for field_list in self.dynamic_fields.values():
            dynamic_field_names.update(field_list)

        # Return all fields that are not dynamic
        return [field for name, field in self._fields.items() if name not in dynamic_field_names]

    def get_dynamic_fields(self):
        """Return a dictionary of dynamic fields per collection."""
        result = {}
        for collection_name, field_names in self.dynamic_fields.items():
            result[collection_name] = [getattr(self, name) for name in field_names]
        return result

    def get_dynamic_data(self, collection_name):
        """Retrieve the data from dynamic fields of a specific collection."""
        data = {}
        current_app.logger.debug(f"{collection_name} in {self.dynamic_fields}?")
        if collection_name not in self.dynamic_fields:
            return data
        prefix_length = len(collection_name) + 1  # +1 for the underscore
        for full_field_name in self.dynamic_fields[collection_name]:
            current_app.logger.debug(f"{full_field_name}: {full_field_name}")
            original_field_name = full_field_name[prefix_length:]
            current_app.logger.debug(f"{original_field_name}: {original_field_name}")
            field = getattr(self, full_field_name)
            current_app.logger.debug(f"{field}: {field}")
            # Parse JSON for tagging_fields type
            if isinstance(field, TextAreaField) and field.data:
                try:
                    data[original_field_name] = json.loads(field.data)
                except json.JSONDecodeError:
                    # Validation should catch this, but just in case
                    data[original_field_name] = field.data
            else:
                data[original_field_name] = field.data
        return data


def validate_tagging_fields(form, field):
    """Validate the tagging fields structure"""
    if not field.data:
        return

    try:
        # Parse JSON data
        fields_data = json.loads(field.data)

        # Validate it's a dictionary
        if not isinstance(fields_data, dict):
            raise ValidationError("Tagging fields must be a dictionary")

        # Validate each field definition
        for field_name, field_def in fields_data.items():
            if not isinstance(field_def, dict):
                raise ValidationError(f"Field definition for {field_name} must be a dictionary")

            # Check required properties
            if 'type' not in field_def:
                raise ValidationError(f"Field {field_name} missing required 'type' property")

            # Validate type
            if field_def['type'] not in ['string', 'integer', 'float', 'date', 'enum']:
                raise ValidationError(f"Field {field_name} has invalid type: {field_def['type']}")

            # Validate enum fields have allowed_values
            if field_def['type'] == 'enum':
                if 'allowed_values' not in field_def:
                    raise ValidationError(f"Enum field {field_name} missing required 'allowed_values' list")
                if not isinstance(field_def['allowed_values'], list):
                    raise ValidationError(f"Field {field_name} allowed_values must be a list")

            # Validate numeric fields
            if field_def['type'] in ['integer', 'float']:
                if 'min_value' in field_def and 'max_value' in field_def:
                    min_val = float(field_def['min_value'])
                    max_val = float(field_def['max_value'])
                    if min_val >= max_val:
                        raise ValidationError(f"Field {field_name} min_value must be less than max_value")

    except json.JSONDecodeError:
        raise ValidationError("Invalid JSON format")
    except (TypeError, ValueError) as e:
        raise ValidationError(f"Invalid field definition: {str(e)}")

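To make the intended round trip concrete: add_dynamic_fields() turns a catalog configuration into prefixed WTForms fields, and get_dynamic_data() collects them back into a plain dict, which is how edit_document_version_view builds catalog_properties above. A small illustrative sketch, to be run inside a request context; the subclass name and the 'metadata' configuration are invented for the example, only the DynamicFormBase API comes from this file:

class ExampleVersionForm(DynamicFormBase):
    user_context = TextAreaField('User Context', validators=[validators.Optional()])


# Invented configuration for one collection called 'metadata'
metadata_config = {
    'priority': {'name': 'Priority', 'type': 'enum', 'allowed_values': ['low', 'high'], 'required': True},
    'page_count': {'name': 'Page Count', 'type': 'integer', 'min_value': 1, 'max_value': 10000},
}

form = ExampleVersionForm()
form.add_dynamic_fields('metadata', metadata_config, initial_data={'priority': 'low', 'page_count': 12})
catalog_properties = {'metadata': form.get_dynamic_data('metadata')}
# -> {'metadata': {'priority': 'low', 'page_count': 12}}

# A 'tagging_fields' value, by contrast, is entered as JSON and must satisfy validate_tagging_fields, e.g.:
# {"author": {"type": "string"}, "year": {"type": "integer", "min_value": 1900, "max_value": 2100}}
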
76 eveai_app/views/entitlements_forms.py Normal file
@@ -0,0 +1,76 @@
from flask import current_app
from flask_wtf import FlaskForm
from wtforms import (StringField, PasswordField, BooleanField, SubmitField, EmailField, IntegerField, DateField,
                     SelectField, SelectMultipleField, FieldList, FormField, FloatField, TextAreaField)
from wtforms.validators import DataRequired, Length, Email, NumberRange, Optional, ValidationError, InputRequired
import pytz


class LicenseTierForm(FlaskForm):
    name = StringField('Name', validators=[DataRequired(), Length(max=50)])
    version = StringField('Version', validators=[DataRequired(), Length(max=50)])
    start_date = DateField('Start Date', id='form-control datepicker', validators=[DataRequired()])
    end_date = DateField('End Date', id='form-control datepicker', validators=[Optional()])
    basic_fee_d = FloatField('Basic Fee ($)', validators=[InputRequired(), NumberRange(min=0)])
    basic_fee_e = FloatField('Basic Fee (€)', validators=[InputRequired(), NumberRange(min=0)])
    max_storage_mb = IntegerField('Max Storage (MiB)', validators=[DataRequired(), NumberRange(min=1)])
    additional_storage_price_d = FloatField('Additional Storage Fee ($)',
                                            validators=[InputRequired(), NumberRange(min=0)])
    additional_storage_price_e = FloatField('Additional Storage Fee (€)',
                                            validators=[InputRequired(), NumberRange(min=0)])
    additional_storage_bucket = IntegerField('Additional Storage Bucket Size (MiB)',
                                             validators=[DataRequired(), NumberRange(min=1)])
    included_embedding_mb = IntegerField('Included Embeddings (MiB)',
                                         validators=[DataRequired(), NumberRange(min=1)])
    additional_embedding_price_d = FloatField('Additional Embedding Fee ($)',
                                              validators=[InputRequired(), NumberRange(min=0)])
    additional_embedding_price_e = FloatField('Additional Embedding Fee (€)',
                                              validators=[InputRequired(), NumberRange(min=0)])
    additional_embedding_bucket = IntegerField('Additional Embedding Bucket Size (MiB)',
                                               validators=[DataRequired(), NumberRange(min=1)])
    included_interaction_tokens = IntegerField('Included Embedding Tokens',
                                               validators=[DataRequired(), NumberRange(min=1)])
    additional_interaction_token_price_d = FloatField('Additional Interaction Token Fee ($)',
                                                      validators=[InputRequired(), NumberRange(min=0)])
    additional_interaction_token_price_e = FloatField('Additional Interaction Token Fee (€)',
                                                      validators=[InputRequired(), NumberRange(min=0)])
    additional_interaction_bucket = IntegerField('Additional Interaction Bucket Size',
                                                 validators=[DataRequired(), NumberRange(min=1)])
    standard_overage_embedding = FloatField('Standard Overage Embedding (%)',
                                            validators=[DataRequired(), NumberRange(min=0)],
                                            default=0)
    standard_overage_interaction = FloatField('Standard Overage Interaction (%)',
                                              validators=[DataRequired(), NumberRange(min=0)],
                                              default=0)


class LicenseForm(FlaskForm):
    start_date = DateField('Start Date', id='form-control datepicker', validators=[DataRequired()])
    end_date = DateField('End Date', id='form-control datepicker', validators=[DataRequired()])
    currency = StringField('Currency', validators=[Optional(), Length(max=20)])
    yearly_payment = BooleanField('Yearly Payment', validators=[DataRequired()], default=False)
    basic_fee = FloatField('Basic Fee', validators=[InputRequired(), NumberRange(min=0)])
    max_storage_mb = IntegerField('Max Storage (MiB)', validators=[DataRequired(), NumberRange(min=1)])
    additional_storage_price = FloatField('Additional Storage Token Fee',
                                          validators=[InputRequired(), NumberRange(min=0)])
    additional_storage_bucket = IntegerField('Additional Storage Bucket Size (MiB)',
                                             validators=[DataRequired(), NumberRange(min=1)])
    included_embedding_mb = IntegerField('Included Embedding Tokens (MiB)',
                                         validators=[DataRequired(), NumberRange(min=1)])
    additional_embedding_price = FloatField('Additional Embedding Token Fee',
                                            validators=[InputRequired(), NumberRange(min=0)])
    additional_embedding_bucket = IntegerField('Additional Embedding Bucket Size (MiB)',
                                               validators=[DataRequired(), NumberRange(min=1)])
    included_interaction_tokens = IntegerField('Included Interaction Tokens',
                                               validators=[DataRequired(), NumberRange(min=1)])
    additional_interaction_token_price = FloatField('Additional Interaction Token Fee',
                                                    validators=[InputRequired(), NumberRange(min=0)])
    additional_interaction_bucket = IntegerField('Additional Interaction Bucket Size',
                                                 validators=[DataRequired(), NumberRange(min=1)])
    overage_embedding = FloatField('Overage Embedding (%)',
                                   validators=[DataRequired(), NumberRange(min=0)],
                                   default=0)
    overage_interaction = FloatField('Overage Interaction (%)',
                                     validators=[DataRequired(), NumberRange(min=0)],
                                     default=0)

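These forms follow the same Flask-WTF pattern as the document views earlier in this diff. An illustrative (not committed) usage in an entitlements view, where the blueprint, model and template names are assumptions:

@entitlements_bp.route('/license_tiers/add', methods=['GET', 'POST'])  # hypothetical blueprint/route
@roles_accepted('Super User')
def add_license_tier():
    form = LicenseTierForm()
    if form.validate_on_submit():
        tier = LicenseTier()  # hypothetical model matching the form's field names
        form.populate_obj(tier)
        db.session.add(tier)
        db.session.commit()
        flash(f'License tier {tier.name} created', 'success')
        return redirect(prefixed_url_for('entitlements_bp.license_tiers'))
    return render_template('entitlements/license_tier.html', form=form)
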
Some files were not shown because too many files have changed in this diff.