Refactoring finished :-)

eveai_workers are now working (some errors remain ;-) )
Remote debugging now available
This commit is contained in:
Josako
2024-05-07 22:51:48 +02:00
parent 494d5508ae
commit cd5afa0408
14 changed files with 147 additions and 31 deletions

View File

@@ -1,5 +0,0 @@
# (Deleted in this commit.) Bootstrap module that exposed the Flask app and its
# Celery extension; 'celery_app' was the entry point loaded by the celery CLI
# via '-A app.celery_app' (see the old worker script removed in this commit).
from eveai_app import create_app
flask_app = create_app()
# The Celery instance is registered as a Flask extension by create_app().
celery_app = flask_app.extensions['celery']
print(flask_app.extensions)  # debug leftover: dumps all registered extensions

View File

@@ -5,7 +5,7 @@ from gevent.pywsgi import WSGIServer
app = create_app()
if __name__ == '__main__':
# (removed in this commit) server previously bound to port 5000
print("Server starting on port 5000")
http_server = WSGIServer(('0.0.0.0', 5000), app) # Wrap up the Flask App using Gevent
# (added in this commit) server now bound to port 5001
print("Server starting on port 5001")
http_server = WSGIServer(('0.0.0.0', 5001), app) # Wrap up the Flask App using Gevent
http_server.serve_forever() # Continuously listens for incoming requests

View File

@@ -0,0 +1,4 @@
from eveai_workers import celery


def main() -> None:
    """Run the Celery application exported by the eveai_workers package."""
    celery.start()


if __name__ == '__main__':
    main()

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
# (Deleted in this commit; superseded by scripts/start_eveai_workers.sh.)
# Activate the project virtualenv relative to the current directory.
source .venv/bin/activate
# Single worker consuming the 'embeddings' queue via the old app.celery_app entry point.
celery -A app.celery_app worker --loglevel=info -Q embeddings

15
scripts/start_eveai_app.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Start the EveAI Flask app (development mode) inside the project virtualenv.
#
# The project root is derived from this script's own location
# (script lives in <root>/scripts/) instead of hard-coding a
# machine-specific absolute path three times.
PROJECT_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
cd "$PROJECT_ROOT" || exit 1
source "$PROJECT_ROOT/.venv/bin/activate"
# Append the project root to PYTHONPATH; the ${VAR:+...} form avoids a
# leading ':' when PYTHONPATH is empty or unset.
export PYTHONPATH="${PYTHONPATH:+$PYTHONPATH:}$PROJECT_ROOT"
# Set flask environment variables
export FLASK_ENV=development  # Use 'production' as appropriate
export FLASK_DEBUG=1          # Use 0 for production
# Start Flask app
python scripts/run_eveai_app.py
deactivate

15
scripts/start_eveai_workers.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Launch the EveAI Celery workers (one per queue) inside the project virtualenv.
#
# Project root is derived from the script's own location
# (script lives in <root>/scripts/) rather than hard-coding a
# machine-specific absolute path.
PROJECT_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
cd "$PROJECT_ROOT" || exit 1
source "$PROJECT_ROOT/.venv/bin/activate"
# Start a worker for the 'embeddings' queue with higher concurrency
# NOTE(review): celery's --autoscale takes max,min — '1,4' and '2,8' look
# reversed (min > max); confirm the intended bounds.
celery -A eveai_workers.celery worker --loglevel=info -Q embeddings --autoscale=1,4 --hostname=embeddings_worker@%h &
# Start a worker for the 'llm_interactions' queue with auto-scaling
# BUGFIX: original had '- Q llm_interactions' (stray space); celery rejects
# '- Q' as an unknown argument, so this worker never started. Fixed to '-Q'.
celery -A eveai_workers.celery worker --loglevel=info -Q llm_interactions --autoscale=2,8 --hostname=interactions_worker@%h &
# Wait for all background processes to finish
wait
deactivate

View File

@@ -1,3 +1,9 @@
#!/usr/bin/env bash
# (removed in this commit) old launcher: Flower against app.celery_app
source .venv/bin/activate
celery -A app.celery_app flower
# (added in this commit) new launcher: Flower against eveai_workers.celery
cd "/Volumes/OWC4M2_1/Dropbox/Josako's Dev/Josako/EveAI/Development/eveAI/" || exit 1
source "/Volumes/OWC4M2_1/Dropbox/Josako's Dev/Josako/EveAI/Development/eveAI/.venv/bin/activate"
# On the development machine no authentication is required for Flower's API.
export FLOWER_UNAUTHENTICATED_API=True
# Start the Flower monitoring dashboard for the Celery app
# (the original comment said "Start a worker" — this line starts Flower, not a worker).
celery -A eveai_workers.celery flower

6
scripts/start_logdy.sh Executable file
View File

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Tail the EveAI log files with logdy (--full-read replays existing content
# before following new lines).
#
# Resolve the logs directory relative to this script's location
# (script lives in <root>/scripts/, logs in <root>/logs/) instead of a
# machine-specific absolute path.
cd "$(dirname "$0")/../logs/" || exit 1
logdy follow --full-read eveai_app.log eveai_workers.log