Merge branch 'feature/refinement_selection_specialist' into develop

Author: Josako
Date:   2025-08-21 06:40:29 +02:00
4 changed files with 98 additions and 75 deletions


@@ -36,13 +36,12 @@ x-common-variables: &common-variables
 services:
   nginx:
-    container_name: nginx
-    image: josakola/nginx:latest
+    image: ${REGISTRY_PREFIX:-}josakola/nginx:latest
     build:
       context: ..
       dockerfile: ./docker/nginx/Dockerfile
     ports:
-      - 3002:80 # Dev nginx proxy according to port schema
+      - 3080:80 # Dev nginx proxy according to port schema
     environment:
       <<: *common-variables
     volumes:
@@ -63,13 +62,12 @@ services:
       - eveai-dev-network
   eveai_app:
-    container_name: eveai_app
-    image: josakola/eveai_app:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_app:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_app/Dockerfile
     ports:
-      - 3000:5001 # Dev app according to port schema
+      - 3001:5001 # Dev app according to port schema
     expose:
       - 8000
     environment:
@@ -101,8 +99,7 @@ services:
       - eveai-dev-network
   eveai_workers:
-    container_name: eveai_workers
-    image: josakola/eveai_workers:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_workers:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_workers/Dockerfile
@@ -129,8 +126,7 @@ services:
       - eveai-dev-network
   eveai_chat_client:
-    container_name: eveai_chat_client
-    image: josakola/eveai_chat_client:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_client:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_chat_client/Dockerfile
@@ -165,8 +161,7 @@ services:
       - eveai-dev-network
   eveai_chat_workers:
-    container_name: eveai_chat_workers
-    image: josakola/eveai_chat_workers:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_workers:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_chat_workers/Dockerfile
@@ -191,13 +186,12 @@ services:
       - eveai-dev-network
   eveai_api:
-    container_name: eveai_api
-    image: josakola/eveai_api:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_api:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_api/Dockerfile
     ports:
-      - 3001:5003 # Dev API according to port schema
+      - 3003:5003 # Dev API according to port schema
     expose:
       - 8000
     environment:
@@ -227,8 +221,7 @@ services:
       - eveai-dev-network
   eveai_beat:
-    container_name: eveai_beat
-    image: josakola/eveai_beat:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_beat:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_beat/Dockerfile
@@ -249,8 +242,7 @@ services:
       - eveai-dev-network
   eveai_entitlements:
-    container_name: eveai_entitlements
-    image: josakola/eveai_entitlements:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_entitlements:latest
     build:
       context: ..
       dockerfile: ./docker/eveai_entitlements/Dockerfile
@@ -277,7 +269,6 @@ services:
       - eveai-dev-network
   db:
-    container_name: db
     hostname: db
     image: ankane/pgvector
     ports:
@@ -299,7 +290,6 @@ services:
       - eveai-dev-network
   redis:
-    container_name: redis
     image: redis:7.2.5
     restart: always
     ports:
@@ -315,8 +305,7 @@ services:
       - eveai-dev-network
   flower:
-    container_name: flower
-    image: josakola/flower:latest
+    image: ${REGISTRY_PREFIX:-}josakola/flower:latest
     build:
       context: ..
       dockerfile: ./docker/flower/Dockerfile
@@ -332,7 +321,6 @@ services:
       - eveai-dev-network
   minio:
-    container_name: minio
     image: minio/minio
     ports:
       - "3008:9000" # Dev MinIO according to port schema
@@ -356,8 +344,7 @@ services:
       - eveai-dev-network
   prometheus:
-    container_name: prometheus
-    image: josakola/prometheus:latest
+    image: ${REGISTRY_PREFIX:-}josakola/prometheus:latest
     build:
       context: ./prometheus
       dockerfile: Dockerfile
@@ -377,7 +364,6 @@ services:
       - eveai-dev-network
   pushgateway:
-    container_name: pushgateway
    image: prom/pushgateway:latest
     restart: unless-stopped
     ports:
@@ -386,8 +372,7 @@ services:
       - eveai-dev-network
   grafana:
-    container_name: grafana
-    image: josakola/grafana:latest
+    image: ${REGISTRY_PREFIX:-}josakola/grafana:latest
     build:
       context: ./grafana
       dockerfile: Dockerfile
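
The image entries now use Compose variable substitution with an empty default: when REGISTRY_PREFIX is unset or empty (the dev case), names resolve to the plain josakola/... form on the default registry, while a value ending in a slash routes the same pulls through that registry. A minimal sanity check of the rendered names, assuming a Compose-compatible CLI such as docker compose or podman-compose is available:

    # Dev default: empty prefix, images resolve to josakola/<service>:latest
    REGISTRY_PREFIX="" docker compose -f compose_dev.yaml config | grep 'image:'

    # Same file rendered as if the images were pulled through the local registry
    REGISTRY_PREFIX="registry.ask-eve-ai-local.com/" docker compose -f compose_dev.yaml config | grep 'image:'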


@@ -12,7 +12,7 @@ x-common-variables: &common-variables
   DB_HOST: minty.ask-eve-ai-local.com
   DB_USER: luke
   DB_PASS: 'Skywalker!'
-  DB_NAME: eveai
+  DB_NAME: eveai_test
   DB_PORT: '5432'
   FLASK_ENV: test
   FLASK_DEBUG: true
@@ -47,7 +47,7 @@ name: eveai_test
 services:
   nginx:
-    image: josakola/nginx:latest
+    image: ${REGISTRY_PREFIX:-}josakola/nginx:latest
     ports:
       - 4080:80
     environment:
@@ -60,10 +60,10 @@ services:
       - eveai_chat_client
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_app:
-    image: josakola/eveai_app:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_app:latest
     ports:
       - 4001:5001
     expose:
@@ -86,10 +86,10 @@ services:
       start_period: 30s
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_workers:
-    image: josakola/eveai_workers:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_workers:latest
     expose:
       - 8000
     environment:
@@ -104,10 +104,10 @@ services:
         condition: service_healthy
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_chat_client:
-    image: josakola/eveai_chat_client:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_client:latest
     ports:
       - 4004:5004
     expose:
@@ -130,10 +130,10 @@ services:
       start_period: 30s
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_chat_workers:
-    image: josakola/eveai_chat_workers:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_workers:latest
     expose:
       - 8000
     environment:
@@ -146,10 +146,10 @@ services:
         condition: service_healthy
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_api:
-    image: josakola/eveai_api:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_api:latest
     ports:
       - 4003:5003
     expose:
@@ -172,10 +172,10 @@ services:
       start_period: 30s
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_beat:
-    image: josakola/eveai_beat:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_beat:latest
     environment:
       <<: *common-variables
       COMPONENT_NAME: eveai_beat
@@ -186,10 +186,10 @@ services:
         condition: service_healthy
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   eveai_entitlements:
-    image: josakola/eveai_entitlements:latest
+    image: ${REGISTRY_PREFIX:-}josakola/eveai_entitlements:latest
     expose:
       - 8000
     environment:
@@ -204,11 +204,11 @@ services:
        condition: service_healthy
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   redis:
     image: redis:7.2.5
-    restart: no
+    restart: unless-stopped
     ports:
       - "4006:6379"
     volumes:
@@ -222,7 +222,7 @@ services:
       - eveai-test-network
   flower:
-    image: josakola/flower:latest
+    image: ${REGISTRY_PREFIX:-}josakola/flower:latest
     environment:
       <<: *common-variables
     ports:
@@ -231,7 +231,7 @@ services:
       - redis
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   minio:
     image: minio/minio
@@ -255,11 +255,10 @@ services:
       start_period: 30s
     networks:
       - eveai-test-network
-    restart: "no"
+    restart: unless-stopped
   prometheus:
-    image: josakola/prometheus:${EVEAI_VERSION:-latest}
-    container_name: prometheus
+    image: ${REGISTRY_PREFIX:-}josakola/prometheus:${EVEAI_VERSION:-latest}
     ports:
       - "4010:9090"
     volumes:
@@ -270,7 +269,7 @@ services:
       - '--web.console.libraries=/etc/prometheus/console_libraries'
       - '--web.console.templates=/etc/prometheus/consoles'
       - '--web.enable-lifecycle'
-    restart: no
+    restart: unless-stopped
     networks:
       - eveai-test-network
@@ -283,8 +282,7 @@ services:
       - eveai-test-network
   grafana:
-    image: josakola/grafana:${EVEAI_VERSION:-latest}
-    container_name: grafana
+    image: ${REGISTRY_PREFIX:-}josakola/grafana:${EVEAI_VERSION:-latest}
     ports:
       - "4012:3000"
     volumes:
@@ -293,7 +291,7 @@ services:
       - GF_SECURITY_ADMIN_USER=admin
       - GF_SECURITY_ADMIN_PASSWORD=admin
       - GF_USERS_ALLOW_SIGN_UP=false
-    restart: no
+    restart: unless-stopped
     depends_on:
       - prometheus
     networks:
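
For the test stack the expectation is now that every image comes from the local registry, the services restart automatically instead of staying stopped, and the common variables point at the eveai_test database. A quick check of what the rendered configuration would actually use, under the same assumption of a Compose-compatible CLI:

    # Show resolved image names, restart policies and DB name for the test project
    REGISTRY_PREFIX="registry.ask-eve-ai-local.com/" EVEAI_VERSION=latest \
      docker compose -p eveai_test -f compose_test.yaml config | grep -E 'image:|restart:|DB_NAME'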


@@ -67,23 +67,28 @@ case $ENVIRONMENT in
   dev)
     PODMAN_CONNECTION="default"
     COMPOSE_FILE="compose_dev.yaml"
+    REGISTRY_PREFIX=""
+    COMPOSE_PROJECT_NAME="eveai_dev"
     VERSION="latest" # Always use latest for dev
     ;;
   prod)
+    # TO BE DEFINED
     PODMAN_CONNECTION="mxz536.stackhero-network.com"
     COMPOSE_FILE="compose_stackhero.yaml"
+    REGISTRY_PREFIX=""
+    COMPOSE_PROJECT_NAME="eveai_prod"
     ;;
   test)
     PODMAN_CONNECTION="test-environment"
     COMPOSE_FILE="compose_test.yaml"
-    ;;
-  integration)
-    PODMAN_CONNECTION="integration-environment"
-    COMPOSE_FILE="compose_integration.yaml"
+    REGISTRY_PREFIX="registry.ask-eve-ai-local.com/"
+    COMPOSE_PROJECT_NAME="eveai_test"
     ;;
   bugfix)
+    # TO BE DEFINED
     PODMAN_CONNECTION="bugfix-environment"
     COMPOSE_FILE="compose_bugfix.yaml"
+    COMPOSE_PROJECT_NAME="eveai_bugfix"
     ;;
   *)
     echo "Invalid environment: $ENVIRONMENT"
@@ -171,59 +176,61 @@ export EVEAI_VERSION=$VERSION
 export CONTAINER_ACCOUNT=$CONTAINER_ACCOUNT
 export CONTAINER_CMD=$CONTAINER_CMD
 export COMPOSE_CMD_PATH=$COMPOSE_CMD_PATH
+export REGISTRY_PREFIX=$REGISTRY_PREFIX
+export COMPOSE_PROJECT_NAME=$COMPOSE_PROJECT_NAME
 echo "Set COMPOSE_FILE to $COMPOSE_FILE"
 echo "Set EVEAI_VERSION to $VERSION"
 echo "Set CONTAINER_ACCOUNT to $CONTAINER_ACCOUNT"
 echo "Set platform to AMD64 (linux/amd64)"
+echo "Set registry prefix to $REGISTRY_PREFIX"
+echo "Set project name to $COMPOSE_PROJECT_NAME"
 # Define compose wrapper functions using the full path to avoid recursion
 pc() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE "$@"
 }
 pcup() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE up -d --remove-orphans "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE up -d --remove-orphans "$@"
 }
 pcdown() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE down "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE down "$@"
 }
 pcps() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE ps "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE ps "$@"
 }
 pclogs() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE logs "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE logs "$@"
 }
-# Simplified pull - no platform tricks needed
 pcpull() {
   echo "Pulling AMD64 images..."
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE pull "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE pull "$@"
 }
 pcrefresh() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE pull && $COMPOSE_CMD_PATH -f $COMPOSE_FILE up -d --remove-orphans "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE pull && $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE up -d --remove-orphans "$@"
 }
 pcbuild() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE build "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE build "$@"
 }
 pcrestart() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE restart "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE restart "$@"
 }
 pcstop() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE stop "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE stop "$@"
 }
 pcstart() {
-  $COMPOSE_CMD_PATH -f $COMPOSE_FILE start "$@"
+  $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE start "$@"
 }
 # Export functions - handle both bash and zsh
 if [[ -n "$ZSH_VERSION" ]]; then
   # In zsh, functions are automatically available in subshells
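
Because the wrapper functions now pass -p ${COMPOSE_PROJECT_NAME}, each environment gets its own compose project, so dev and test containers, networks and volumes no longer collide even without fixed container_name entries. A short usage sketch, assuming the script is sourced with the environment name as its first argument (the filename below is illustrative, not the real one):

    # Hypothetical filename; source it so the pc* functions land in the current shell
    source ./set_compose_env.sh test

    pcpull    # pull ${REGISTRY_PREFIX}josakola/* images for the eveai_test project
    pcup      # compose -p eveai_test -f compose_test.yaml up -d --remove-orphans
    pcps      # list only the containers that belong to the eveai_test project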


@@ -50,6 +50,29 @@ START_SELECTION_QUESTIONS = [
     "Would you be open to answering a few questions to learn more about the role and your fit?",
     "Would you like to continue and start the first part of the application journey?"
 ]
+START_SELECTION_QUESTIONS_CTD = [
+    "Do you have any other questions, or shall we start the interview to see if there's a match with the job?",
+    "Do you have any questions, or shall we begin to explore if this job suits you?",
+    "Would you like to know anything else first, or shall we start the interview to see if the role feels like a good fit?",
+    "Are there any remaining questions, or shall we find out whether this job is something for you?",
+    "Is there anything still on your mind, or shall we begin the conversation to explore the match?",
+    "Feel free to ask if you still have any questions; otherwise, shall we begin the interview?",
+    "Is everything clear for you, or shall we now take a look together to see if this role is made for you?",
+    "Are you ready, or is there anything else you'd like clarified before we get started?",
+    "No more questions, or shall we start discovering if there's a connection with this role?",
+    "Are you ready to begin and see if you and this job are a good match?",
+    # Extra variants
+    "Before we move on, is there anything you'd like to ask? Or shall we begin?",
+    "Shall we get started, or is there something else you'd like to clarify first?",
+    "Would you like to go over anything else before we start the interview?",
+    "Is now a good time to begin, or do you have more questions before we proceed?",
+    "Do you feel ready to start, or should we address any remaining questions first?",
+    "Would you prefer to discuss any final points, or shall we begin the selection process?",
+    "Is there anything we should clear up before starting, or shall we dive right in?",
+    "Would you like to ask anything else before we begin exploring the role together?",
+    "Do you want to go over anything else first, or shall we kick off the interview?",
+    "Are we good to start, or is there anything else you'd like to cover?"
+]
 TRY_TO_START_SELECTION_QUESTIONS = [
     "That's a pity — we can only move forward if we start the selection process. Would you like to begin now?",
     "We understand, though it's worth mentioning that the only way to continue is to start the procedure. Shall we get started after all?",
@@ -83,7 +106,17 @@ KO_CRITERIA_NOT_MET_MESSAGES = [
     "Thanks so much for answering our questions. This role may not be the right fit, but we'd love for you to consider applying again when new positions become available.",
     "We value your interest in this position. While we won't be moving forward in this case, we warmly invite you to explore other roles with us in the future.",
     "Your input has been very helpful. Although we're not proceeding at this time, we thank you for your interest and hope to see you again for other opportunities.",
-    "Thank you for taking part in the process. We won't continue with your application for this role, but we invite you to stay informed about future openings through our website."
+    "Thank you for taking part in the process. We won't continue with your application for this role, but we invite you to stay informed about future openings through our website.",
+    # New variants
+    "Thank you for providing some additional clarification. For this role, the previously mentioned requirement remains essential. We hope you'll continue to follow our other vacancies!",
+    "We appreciate your engagement. For this specific role, we do have to end the process here. Wishing you the best of luck with your next step!",
+    "Thank you for your additional information. Unfortunately, this does not change the selection process for this position. We look forward to possibly meeting you again in the future!",
+    "Thank you for taking the time to respond. For this vacancy, we can only proceed with candidates who meet all the requirements. We wish you all the very best!",
+    "We've reviewed your answers carefully. Unfortunately, we can't continue with your application for this role, but we encourage you to check our site for future openings.",
+    "We're grateful for your time and interest. Sadly, this position requires criteria that weren't met, but we'd love to see your application for other roles.",
+    "Thank you for sharing more details. For this specific position, the original requirements still apply. Please keep an eye out for roles that might suit you better.",
+    "We value the effort you've put into this process. While we can't move forward this time, we'd be happy to see your application for future opportunities.",
+    "Your answers gave us a good understanding of your background. Unfortunately, we can't proceed with this position, but we hope to connect again in the future."
 ]
 KO_CRITERIA_MET_MESSAGES = [
     "Thank you for your answers. They correspond to some key elements of the role. Would you be open to sharing your contact details so we can continue the selection process?",
@@ -608,7 +641,7 @@ class SpecialistExecutor(CrewAIBaseSpecialistExecutor):
         question = None
         match self.previous_interview_phase:
             case "start_selection_procedure":
-                question = random.choice(START_SELECTION_QUESTIONS)
+                question = random.choice(START_SELECTION_QUESTIONS_CTD)
             case "personal_contact_data_preparation":
                 question = random.choice(CONTACT_DATA_QUESTIONS)
             case "candidate_selected":