- New build and startup procedures for all services, compatible with Docker, Podman, and Kubernetes
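The two new files below centralize startup: scripts/start.sh selects the service role via the ROLE and COMPONENT_NAME environment variables, and scripts/run.py exposes the matching app or celery object. A minimal invocation sketch under Docker and Podman follows; the image name, port mapping, and tuning values are illustrative assumptions and not part of this commit, and under Kubernetes the same variables would be set in the Deployment's env block.

# Illustrative only: "eveai:latest" and the published port are assumptions.
docker run --rm -p 8080:8080 \
  -e ROLE=web -e COMPONENT_NAME=eveai_app -e PORT=8080 \
  eveai:latest

# The same image under Podman, started as a Celery worker instead.
podman run --rm \
  -e ROLE=worker -e COMPONENT_NAME=eveai_workers -e CELERY_CONCURRENCY=4 \
  eveai:latest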
scripts/run.py (new file, 67 lines)
@@ -0,0 +1,67 @@
#!/usr/bin/env python3
import os
import sys
import importlib

def main():
    component = os.getenv('COMPONENT_NAME', 'eveai_app')
    role = os.getenv('ROLE', 'web')

    if role == 'web':
        # Web services
        from gevent import monkey
        monkey.patch_all()

        try:
            module = importlib.import_module(component)
            app = module.create_app()

        except ImportError as e:
            print(f"Error importing {component}: {e}", file=sys.stderr)
            sys.exit(1)
        except AttributeError as e:
            print(f"Error: {component} module does not have create_app function: {e}", file=sys.stderr)
            sys.exit(1)

    elif role in ['worker', 'beat']:
        # Worker services
        try:
            module = importlib.import_module(component)
            celery = module.celery

        except ImportError as e:
            print(f"Error importing {component}: {e}", file=sys.stderr)
            sys.exit(1)
        except AttributeError as e:
            print(f"Error: {component} module does not have celery object: {e}", file=sys.stderr)
            sys.exit(1)

    else:
        print(f"Unknown role: {role}", file=sys.stderr)
        sys.exit(1)

# For web services - expose app object for gunicorn
if os.getenv('ROLE', 'web') == 'web':
    component = os.getenv('COMPONENT_NAME', 'eveai_app')
    try:
        from gevent import monkey
        monkey.patch_all()

        module = importlib.import_module(component)
        app = module.create_app()
    except (ImportError, AttributeError) as e:
        print(f"Error setting up app for {component}: {e}", file=sys.stderr)
        app = None

# For worker/beat services - expose celery object
elif os.getenv('ROLE') in ['worker', 'beat']:
    component = os.getenv('COMPONENT_NAME', 'eveai_workers')
    try:
        module = importlib.import_module(component)
        celery = module.celery
    except (ImportError, AttributeError) as e:
        print(f"Error setting up celery for {component}: {e}", file=sys.stderr)
        celery = None

if __name__ == '__main__':
    main()
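scripts/run.py resolves COMPONENT_NAME at import time, which is what lets gunicorn load scripts.run:app and Celery load -A scripts.run from the same file. A quick smoke test of that contract might look like this (a sketch, assuming the eveai_app and eveai_workers packages are importable and expose create_app() and celery respectively):

# Web role: importing scripts.run should yield a usable `app`.
COMPONENT_NAME=eveai_app ROLE=web \
  python -c "from scripts.run import app; print(app)"

# Worker/beat role: the same entry module should expose `celery`.
COMPONENT_NAME=eveai_workers ROLE=worker \
  python -c "from scripts.run import celery; print(celery)"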
scripts/start.sh (new executable file, 41 lines)
@@ -0,0 +1,41 @@
#!/usr/bin/env bash
set -euo pipefail

ROLE="${ROLE:-web}"                             # web | worker | beat
PORT="${PORT:-8080}"                            # web port (often 8080 in k8s)
WORKERS="${WORKERS:-1}"                         # gunicorn workers (web)
WORKER_CLASS="${WORKER_CLASS:-gevent}"          # web: gevent|sync
WORKER_CONN="${WORKER_CONN:-100}"               # web: --worker-connections
LOGLEVEL="${LOGLEVEL:-info}"
MAX_REQ="${MAX_REQUESTS:-1000}"
MAX_JITTER="${MAX_REQUESTS_JITTER:-100}"
COMPONENT_NAME="${COMPONENT_NAME:-eveai_app}"   # component name for dynamic import

case "$ROLE" in
  web)
    echo "[start] role=web component=$COMPONENT_NAME port=$PORT workers=$WORKERS class=$WORKER_CLASS"
    exec gunicorn -w "$WORKERS" -k "$WORKER_CLASS" \
      -b "0.0.0.0:${PORT}" --worker-connections "$WORKER_CONN" \
      --access-logfile - --error-logfile - --log-level "$LOGLEVEL" \
      --graceful-timeout 25 --timeout 30 --keep-alive 5 \
      --max-requests "$MAX_REQ" --max-requests-jitter "$MAX_JITTER" \
      scripts.run:app
    ;;
  worker)
    echo "[start] role=worker component=$COMPONENT_NAME"
    CONCURRENCY="${CELERY_CONCURRENCY:-2}"
    exec celery -A scripts.run worker \
      --loglevel="${CELERY_LOGLEVEL:-INFO}" \
      --concurrency="${CONCURRENCY}" \
      --max-tasks-per-child="${CELERY_MAX_TASKS_PER_CHILD:-1000}" \
      --prefetch-multiplier="${CELERY_PREFETCH:-1}" -O fair
    ;;
  beat)
    echo "[start] role=beat component=$COMPONENT_NAME"
    exec celery -A scripts.run beat \
      --loglevel="${CELERY_LOGLEVEL:-INFO}"
    ;;
  *)
    echo "Unknown ROLE=$ROLE" >&2; exit 1
    ;;
esac
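Because every knob in scripts/start.sh has an environment-variable default, the same script can be tuned per deployment without edits. A couple of illustrative local invocations (the values are examples, not recommendations):

# Web role with more gunicorn workers and verbose logging.
ROLE=web WORKERS=4 LOGLEVEL=debug PORT=8080 ./scripts/start.sh

# Worker role with higher concurrency and earlier task recycling.
ROLE=worker COMPONENT_NAME=eveai_workers CELERY_CONCURRENCY=8 CELERY_MAX_TASKS_PER_CHILD=200 ./scripts/start.sh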
@@ -1,15 +0,0 @@
#!/bin/bash

cd "/app" || exit 1
export PROJECT_DIR="/app"
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR" # Include the app directory in the Python path & patched packages

# Set FLASK_APP environment variables
export FLASK_APP=${PROJECT_DIR}/scripts/run_eveai_app.py # Adjust the path to your Flask app entry point


# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Start Flask app
gunicorn -w 1 -k gevent -b 0.0.0.0:5003 --worker-connections 100 scripts.run_eveai_api:app
@@ -1,52 +0,0 @@
#!/bin/bash

cd "/app" || exit 1
export PYTHONPATH="$PYTHONPATH:/app/"

# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Wait for the database to be ready
echo "Waiting for database to be ready"
until pg_isready -h $DB_HOST -p $DB_PORT; do
  echo "Postgres is unavailable - sleeping"
  sleep 2
done
echo "Postgres is up - executing commands"

export PGPASSWORD=$DB_PASS
# Check if the database exists and initialize if not
if ! psql -U $DB_USER -h $DB_HOST -p $DB_PORT -d $DB_NAME -c '\dt' | grep -q 'No relations found'; then
  echo "Database eveai does not exist or is empty. Initializing..."
  psql -U $DB_USER -h $DB_HOST -p $DB_PORT -d postgres -c "CREATE DATABASE $DB_NAME;"
  psql -U $DB_USER -h $DB_HOST -p $DB_PORT -d $DB_NAME -c "CREATE EXTENSION IF NOT EXISTS vector;"
fi

echo "Applying migrations to the public and tenant schema..."

# Set FLASK_APP environment variables
PROJECT_DIR="/app"
export FLASK_APP=${PROJECT_DIR}/scripts/run_eveai_app.py # Adjust the path to your Flask app entry point
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR" # Include the app directory in the Python path & patched packages

# Run Alembic upgrade for the public schema
echo "Applying migrations to the public schema..."
flask db upgrade -d "${PROJECT_DIR}/migrations/public"
echo "Finished applying migrations to the public schema..."

# Run Alembic upgrade for the tenant schema
echo "Applying migrations to the tenant schema..."
flask db upgrade -d "${PROJECT_DIR}/migrations/tenant"
echo "Finished applying migrations to the tenant schema..."

# Set flask environment variables
#export FLASK_ENV=development # Use 'production' as appropriate
#export FLASK_DEBUG=1 # Use 0 for production

# Initialize initial data (tenant and user)
echo "Initializing initial tenant and user..."
python ${PROJECT_DIR}/scripts/initialize_data.py # Adjust the path to your initialization script

# Start Flask app
# gunicorn -w 1 -k gevent -b 0.0.0.0:5001 --worker-connections 100 scripts.run_eveai_app:app
gunicorn -w 1 -k gevent -b 0.0.0.0:5001 --worker-connections 100 scripts.run_eveai_app:app
@@ -1,17 +0,0 @@
#!/bin/bash

cd "/app/" || exit 1
export PROJECT_DIR="/app"
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR" # Include the app directory in the Python path & patched packages

# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Start Celery Beat
celery -A eveai_beat.celery beat --scheduler=redbeat.RedBeatScheduler --loglevel=debug &

# Start a worker for the 'llm_interactions' queue with auto-scaling - not necessary, in eveai_chat_workers
# celery -A eveai_workers.celery worker --loglevel=info - Q llm_interactions --autoscale=2,8 --hostname=interactions_worker@%h &

# Wait for all background processes to finish
wait
@@ -1,23 +0,0 @@
#!/bin/bash

cd "/app" || exit 1
export PYTHONPATH="$PYTHONPATH:/app/"

# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Wait for the database to be ready
echo "Waiting for database to be ready"
until pg_isready -h $DB_HOST -p $DB_PORT; do
  echo "Postgres is unavailable - sleeping"
  sleep 2
done
echo "Postgres is up - executing commands"

# Set FLASK_APP environment variables
PROJECT_DIR="/app"
export FLASK_APP=${PROJECT_DIR}/scripts/run_eveai_chat_client.py
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR"

# Start Flask app with Gunicorn
gunicorn -w 1 -k gevent -b 0.0.0.0:5004 --worker-connections 100 scripts.run_eveai_chat_client:app
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

cd "/app/" || exit 1
export PROJECT_DIR="/app"
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR" # Include the app directory in the Python path & patched packages

# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Start a worker for the 'llm_interactions' queue with auto-scaling
celery -A eveai_chat_workers.celery worker --loglevel=info -Q llm_interactions --autoscale=2,8 --hostname=interactions_worker@%h &

# Wait for all background processes to finish
wait
@@ -1,17 +0,0 @@
#!/bin/bash

cd "/app/" || exit 1
export PROJECT_DIR="/app"
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR" # Include the app directory in the Python path & patched packages

# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Start a worker for the 'embeddings' queue with higher concurrency
celery -A eveai_entitlements.celery worker --loglevel=debug -Q entitlements --autoscale=2,8 --hostname=entitlements_worker@%h &

# Start a worker for the 'llm_interactions' queue with auto-scaling - not necessary, in eveai_chat_workers
# celery -A eveai_workers.celery worker --loglevel=info - Q llm_interactions --autoscale=2,8 --hostname=interactions_worker@%h &

# Wait for all background processes to finish
wait
@@ -1,17 +0,0 @@
#!/bin/bash

cd "/app/" || exit 1
export PROJECT_DIR="/app"
export PYTHONPATH="$PROJECT_DIR/patched_packages:$PYTHONPATH:$PROJECT_DIR" # Include the app directory in the Python path & patched packages

# Ensure we can write the logs
chown -R appuser:appuser /app/logs

# Start a worker for the 'embeddings' queue with higher concurrency
celery -A eveai_workers.celery worker --loglevel=debug -Q embeddings --autoscale=2,8 --hostname=embeddings_worker@%h &

# Start a worker for the 'llm_interactions' queue with auto-scaling - not necessary, in eveai_chat_workers
# celery -A eveai_workers.celery worker --loglevel=info - Q llm_interactions --autoscale=2,8 --hostname=interactions_worker@%h &

# Wait for all background processes to finish
wait