Merge branch 'feature/refinement_selection_specialist' into develop

This commit is contained in:
Josako
2025-08-21 06:40:29 +02:00
4 changed files with 98 additions and 75 deletions

View File

@@ -36,13 +36,12 @@ x-common-variables: &common-variables
services:
nginx:
container_name: nginx
image: josakola/nginx:latest
image: ${REGISTRY_PREFIX:-}josakola/nginx:latest
build:
context: ..
dockerfile: ./docker/nginx/Dockerfile
ports:
- 3002:80 # Dev nginx proxy volgens port schema
- 3080:80 # Dev nginx proxy volgens port schema
environment:
<<: *common-variables
volumes:
@@ -63,13 +62,12 @@ services:
- eveai-dev-network
eveai_app:
container_name: eveai_app
image: josakola/eveai_app:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_app:latest
build:
context: ..
dockerfile: ./docker/eveai_app/Dockerfile
ports:
- 3000:5001 # Dev app volgens port schema
- 3001:5001 # Dev app volgens port schema
expose:
- 8000
environment:
@@ -101,8 +99,7 @@ services:
- eveai-dev-network
eveai_workers:
container_name: eveai_workers
image: josakola/eveai_workers:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_workers:latest
build:
context: ..
dockerfile: ./docker/eveai_workers/Dockerfile
@@ -129,8 +126,7 @@ services:
- eveai-dev-network
eveai_chat_client:
container_name: eveai_chat_client
image: josakola/eveai_chat_client:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_client:latest
build:
context: ..
dockerfile: ./docker/eveai_chat_client/Dockerfile
@@ -165,8 +161,7 @@ services:
- eveai-dev-network
eveai_chat_workers:
container_name: eveai_chat_workers
image: josakola/eveai_chat_workers:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_workers:latest
build:
context: ..
dockerfile: ./docker/eveai_chat_workers/Dockerfile
@@ -191,13 +186,12 @@ services:
- eveai-dev-network
eveai_api:
container_name: eveai_api
image: josakola/eveai_api:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_api:latest
build:
context: ..
dockerfile: ./docker/eveai_api/Dockerfile
ports:
- 3001:5003 # Dev API volgens port schema
- 3003:5003 # Dev API volgens port schema
expose:
- 8000
environment:
@@ -227,8 +221,7 @@ services:
- eveai-dev-network
eveai_beat:
container_name: eveai_beat
image: josakola/eveai_beat:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_beat:latest
build:
context: ..
dockerfile: ./docker/eveai_beat/Dockerfile
@@ -249,8 +242,7 @@ services:
- eveai-dev-network
eveai_entitlements:
container_name: eveai_entitlements
image: josakola/eveai_entitlements:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_entitlements:latest
build:
context: ..
dockerfile: ./docker/eveai_entitlements/Dockerfile
@@ -277,7 +269,6 @@ services:
- eveai-dev-network
db:
container_name: db
hostname: db
image: ankane/pgvector
ports:
@@ -299,7 +290,6 @@ services:
- eveai-dev-network
redis:
container_name: redis
image: redis:7.2.5
restart: always
ports:
@@ -315,8 +305,7 @@ services:
- eveai-dev-network
flower:
container_name: flower
image: josakola/flower:latest
image: ${REGISTRY_PREFIX:-}josakola/flower:latest
build:
context: ..
dockerfile: ./docker/flower/Dockerfile
@@ -332,7 +321,6 @@ services:
- eveai-dev-network
minio:
container_name: minio
image: minio/minio
ports:
- "3008:9000" # Dev MinIO volgens port schema
@@ -356,8 +344,7 @@ services:
- eveai-dev-network
prometheus:
container_name: prometheus
image: josakola/prometheus:latest
image: ${REGISTRY_PREFIX:-}josakola/prometheus:latest
build:
context: ./prometheus
dockerfile: Dockerfile
@@ -377,7 +364,6 @@ services:
- eveai-dev-network
pushgateway:
container_name: pushgateway
image: prom/pushgateway:latest
restart: unless-stopped
ports:
@@ -386,8 +372,7 @@ services:
- eveai-dev-network
grafana:
container_name: grafana
image: josakola/grafana:latest
image: ${REGISTRY_PREFIX:-}josakola/grafana:latest
build:
context: ./grafana
dockerfile: Dockerfile

View File

@@ -12,7 +12,7 @@ x-common-variables: &common-variables
DB_HOST: minty.ask-eve-ai-local.com
DB_USER: luke
DB_PASS: 'Skywalker!'
DB_NAME: eveai
DB_NAME: eveai_test
DB_PORT: '5432'
FLASK_ENV: test
FLASK_DEBUG: true
@@ -47,7 +47,7 @@ name: eveai_test
services:
nginx:
image: josakola/nginx:latest
image: ${REGISTRY_PREFIX:-}josakola/nginx:latest
ports:
- 4080:80
environment:
@@ -60,10 +60,10 @@ services:
- eveai_chat_client
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_app:
image: josakola/eveai_app:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_app:latest
ports:
- 4001:5001
expose:
@@ -86,10 +86,10 @@ services:
start_period: 30s
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_workers:
image: josakola/eveai_workers:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_workers:latest
expose:
- 8000
environment:
@@ -104,10 +104,10 @@ services:
condition: service_healthy
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_chat_client:
image: josakola/eveai_chat_client:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_client:latest
ports:
- 4004:5004
expose:
@@ -130,10 +130,10 @@ services:
start_period: 30s
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_chat_workers:
image: josakola/eveai_chat_workers:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_chat_workers:latest
expose:
- 8000
environment:
@@ -146,10 +146,10 @@ services:
condition: service_healthy
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_api:
image: josakola/eveai_api:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_api:latest
ports:
- 4003:5003
expose:
@@ -172,10 +172,10 @@ services:
start_period: 30s
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_beat:
image: josakola/eveai_beat:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_beat:latest
environment:
<<: *common-variables
COMPONENT_NAME: eveai_beat
@@ -186,10 +186,10 @@ services:
condition: service_healthy
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
eveai_entitlements:
image: josakola/eveai_entitlements:latest
image: ${REGISTRY_PREFIX:-}josakola/eveai_entitlements:latest
expose:
- 8000
environment:
@@ -204,11 +204,11 @@ services:
condition: service_healthy
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
redis:
image: redis:7.2.5
restart: no
restart: unless-stopped
ports:
- "4006:6379"
volumes:
@@ -222,7 +222,7 @@ services:
- eveai-test-network
flower:
image: josakola/flower:latest
image: ${REGISTRY_PREFIX:-}josakola/flower:latest
environment:
<<: *common-variables
ports:
@@ -231,7 +231,7 @@ services:
- redis
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
minio:
image: minio/minio
@@ -255,11 +255,10 @@ services:
start_period: 30s
networks:
- eveai-test-network
restart: "no"
restart: unless-stopped
prometheus:
image: josakola/prometheus:${EVEAI_VERSION:-latest}
container_name: prometheus
image: ${REGISTRY_PREFIX:-}josakola/prometheus:${EVEAI_VERSION:-latest}
ports:
- "4010:9090"
volumes:
@@ -270,7 +269,7 @@ services:
- '--web.console.libraries=/etc/prometheus/console_libraries'
- '--web.console.templates=/etc/prometheus/consoles'
- '--web.enable-lifecycle'
restart: no
restart: unless-stopped
networks:
- eveai-test-network
@@ -283,8 +282,7 @@ services:
- eveai-test-network
grafana:
image: josakola/grafana:${EVEAI_VERSION:-latest}
container_name: grafana
image: ${REGISTRY_PREFIX:-}josakola/grafana:${EVEAI_VERSION:-latest}
ports:
- "4012:3000"
volumes:
@@ -293,7 +291,7 @@ services:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=admin
- GF_USERS_ALLOW_SIGN_UP=false
restart: no
restart: unless-stopped
depends_on:
- prometheus
networks:

View File

@@ -67,23 +67,28 @@ case $ENVIRONMENT in
dev)
PODMAN_CONNECTION="default"
COMPOSE_FILE="compose_dev.yaml"
REGISTRY_PREFIX=""
COMPOSE_PROJECT_NAME="eveai_dev"
VERSION="latest" # Always use latest for dev
;;
prod)
# TO BE DEFINED
PODMAN_CONNECTION="mxz536.stackhero-network.com"
COMPOSE_FILE="compose_stackhero.yaml"
REGISTRY_PREFIX=""
COMPOSE_PROJECT_NAME="eveai_prod"
;;
test)
PODMAN_CONNECTION="test-environment"
COMPOSE_FILE="compose_test.yaml"
;;
integration)
PODMAN_CONNECTION="integration-environment"
COMPOSE_FILE="compose_integration.yaml"
REGISTRY_PREFIX="registry.ask-eve-ai-local.com/"
COMPOSE_PROJECT_NAME="eveai_test"
;;
bugfix)
# TO BE DEFINED
PODMAN_CONNECTION="bugfix-environment"
COMPOSE_FILE="compose_bugfix.yaml"
COMPOSE_PROJECT_NAME="eveai_bugfix"
;;
*)
echo "Invalid environment: $ENVIRONMENT"
@@ -171,59 +176,61 @@ export EVEAI_VERSION=$VERSION
export CONTAINER_ACCOUNT=$CONTAINER_ACCOUNT
export CONTAINER_CMD=$CONTAINER_CMD
export COMPOSE_CMD_PATH=$COMPOSE_CMD_PATH
export REGISTRY_PREFIX=$REGISTRY_PREFIX
export COMPOSE_PROJECT_NAME=$COMPOSE_PROJECT_NAME
echo "Set COMPOSE_FILE to $COMPOSE_FILE"
echo "Set EVEAI_VERSION to $VERSION"
echo "Set CONTAINER_ACCOUNT to $CONTAINER_ACCOUNT"
echo "Set platform to AMD64 (linux/amd64)"
echo "Set registry prefix to $REGISTRY_PREFIX"
echo "Set project name to $COMPOSE_PROJECT_NAME"
# Define compose wrapper functions using the full path to avoid recursion
pc() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE "$@"
}
pcup() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE up -d --remove-orphans "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE up -d --remove-orphans "$@"
}
pcdown() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE down "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE down "$@"
}
pcps() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE ps "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE ps "$@"
}
pclogs() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE logs "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE logs "$@"
}
# Simplified pull - no platform tricks needed
pcpull() {
echo "Pulling AMD64 images..."
$COMPOSE_CMD_PATH -f $COMPOSE_FILE pull "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE pull "$@"
}
pcrefresh() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE pull && $COMPOSE_CMD_PATH -f $COMPOSE_FILE up -d --remove-orphans "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE pull && $COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE up -d --remove-orphans "$@"
}
pcbuild() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE build "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE build "$@"
}
pcrestart() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE restart "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE restart "$@"
}
pcstop() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE stop "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE stop "$@"
}
pcstart() {
$COMPOSE_CMD_PATH -f $COMPOSE_FILE start "$@"
$COMPOSE_CMD_PATH -p ${COMPOSE_PROJECT_NAME} -f $COMPOSE_FILE start "$@"
}
# Export functions - handle both bash and zsh
if [[ -n "$ZSH_VERSION" ]]; then
# In zsh, functions are automatically available in subshells

View File

@@ -50,6 +50,29 @@ START_SELECTION_QUESTIONS = [
"Would you be open to answering a few questions to learn more about the role and your fit?",
"Would you like to continue and start the first part of the application journey?"
]
START_SELECTION_QUESTIONS_CTD = [
"Do you have any other questions, or shall we start the interview to see if there's a match with the job?",
"Do you have any questions, or shall we begin to explore if this job suits you?",
"Would you like to know anything else first, or shall we start the interview to see if the role feels like a good fit?",
"Are there any remaining questions, or shall we find out whether this job is something for you?",
"Is there anything still on your mind, or shall we begin the conversation to explore the match?",
"Feel free to ask if you still have any questions; otherwise, shall we begin the interview?",
"Is everything clear for you, or shall we now take a look together to see if this role is made for you?",
"Are you ready, or is there anything else you'd like clarified before we get started?",
"No more questions, or shall we start discovering if there's a connection with this role?",
"Are you ready to begin and see if you and this job are a good match?",
# Extra variants
"Before we move on, is there anything you'd like to ask? Or shall we begin?",
"Shall we get started, or is there something else you'd like to clarify first?",
"Would you like to go over anything else before we start the interview?",
"Is now a good time to begin, or do you have more questions before we proceed?",
"Do you feel ready to start, or should we address any remaining questions first?",
"Would you prefer to discuss any final points, or shall we begin the selection process?",
"Is there anything we should clear up before starting, or shall we dive right in?",
"Would you like to ask anything else before we begin exploring the role together?",
"Do you want to go over anything else first, or shall we kick off the interview?",
"Are we good to start, or is there anything else you'd like to cover?"
]
TRY_TO_START_SELECTION_QUESTIONS = [
"That's a pity — we can only move forward if we start the selection process. Would you like to begin now?",
"We understand, though it's worth mentioning that the only way to continue is to start the procedure. Shall we get started after all?",
@@ -83,7 +106,17 @@ KO_CRITERIA_NOT_MET_MESSAGES = [
"Thanks so much for answering our questions. This role may not be the right fit, but we'd love for you to consider applying again when new positions become available.",
"We value your interest in this position. While we won't be moving forward in this case, we warmly invite you to explore other roles with us in the future.",
"Your input has been very helpful. Although we're not proceeding at this time, we thank you for your interest and hope to see you again for other opportunities.",
"Thank you for taking part in the process. We won't continue with your application for this role, but we invite you to stay informed about future openings through our website."
"Thank you for taking part in the process. We won't continue with your application for this role, but we invite you to stay informed about future openings through our website.",
# New variants
"Thank you for providing some additional clarification. For this role, the previously mentioned requirement remains essential. We hope you'll continue to follow our other vacancies!",
"We appreciate your engagement. For this specific role, we do have to end the process here. Wishing you the best of luck with your next step!",
"Thank you for your additional information. Unfortunately, this does not change the selection process for this position. We look forward to possibly meeting you again in the future!",
"Thank you for taking the time to respond. For this vacancy, we can only proceed with candidates who meet all the requirements. We wish you all the very best!",
"We've reviewed your answers carefully. Unfortunately, we can't continue with your application for this role, but we encourage you to check our site for future openings.",
"We're grateful for your time and interest. Sadly, this position requires criteria that weren't met, but we'd love to see your application for other roles.",
"Thank you for sharing more details. For this specific position, the original requirements still apply. Please keep an eye out for roles that might suit you better.",
"We value the effort you've put into this process. While we can't move forward this time, we'd be happy to see your application for future opportunities.",
"Your answers gave us a good understanding of your background. Unfortunately, we can't proceed with this position, but we hope to connect again in the future."
]
KO_CRITERIA_MET_MESSAGES = [
"Thank you for your answers. They correspond to some key elements of the role. Would you be open to sharing your contact details so we can continue the selection process?",
@@ -608,7 +641,7 @@ class SpecialistExecutor(CrewAIBaseSpecialistExecutor):
question = None
match self.previous_interview_phase:
case "start_selection_procedure":
question = random.choice(START_SELECTION_QUESTIONS)
question = random.choice(START_SELECTION_QUESTIONS_CTD)
case "personal_contact_data_preparation":
question = random.choice(CONTACT_DATA_QUESTIONS)
case "candidate_selected":