- Portkey log retrieval started

- flower container added (dev and prod)
Author: Josako
Date: 2024-10-01 08:01:59 +02:00
Parent: ae697df4c9
Commit: 883175b8f5
9 changed files with 211 additions and 16 deletions

View File

@@ -97,7 +97,12 @@ class ModelVariables(MutableMapping):
portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
provider=self._variables['embedding_provider'],
-metadata=portkey_metadata)
+metadata=portkey_metadata,
+trace_id=current_event.trace_id,
+span_id=current_event.span_id,
+span_name=current_event.span_name,
+parent_span_id=current_event.parent_span_id
+)
api_key = os.getenv('OPENAI_API_KEY')
model = self._variables['embedding_model']
self._embedding_model = OpenAIEmbeddings(api_key=api_key,
@@ -136,7 +141,12 @@ class ModelVariables(MutableMapping):
portkey_metadata = self.get_portkey_metadata()
portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
metadata=portkey_metadata,
-provider=self._variables['llm_provider'])
+provider=self._variables['llm_provider'],
+trace_id=current_event.trace_id,
+span_id=current_event.span_id,
+span_name=current_event.span_name,
+parent_span_id=current_event.parent_span_id
+)
return portkey_headers
def get_portkey_metadata(self):
@@ -196,7 +206,12 @@ class ModelVariables(MutableMapping):
portkey_metadata = self.get_portkey_metadata()
portkey_headers = createHeaders(api_key=os.getenv('PORTKEY_API_KEY'),
metadata=portkey_metadata,
-provider='openai')
+provider='openai',
+trace_id=current_event.trace_id,
+span_id=current_event.span_id,
+span_name=current_event.span_name,
+parent_span_id=current_event.parent_span_id
+)
api_key = os.getenv('OPENAI_API_KEY')
self._transcription_client = OpenAI(api_key=api_key,
base_url=PORTKEY_GATEWAY_URL,
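
All three hunks above make the same change: the Portkey headers now carry the current event's trace and span identifiers, so embedding, LLM and transcription calls can later be matched against the log retrieval added in this commit. A minimal sketch of the resulting call, assuming the portkey_ai and langchain_openai packages already used here; the _Event stand-in and all values are placeholders for the project's current_event context, which this diff references but does not define:

import os

from langchain_openai import OpenAIEmbeddings
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders


class _Event:
    """Stand-in for the project's current_event; attribute names mirror the diff."""
    trace_id = "trace-123"
    span_id = "span-456"
    span_name = "embed_documents"
    parent_span_id = "span-000"


current_event = _Event()

portkey_headers = createHeaders(
    api_key=os.getenv("PORTKEY_API_KEY"),
    provider="openai",                   # e.g. self._variables['embedding_provider']
    metadata={"_user": "example-user"},  # placeholder metadata payload
    trace_id=current_event.trace_id,
    span_id=current_event.span_id,
    span_name=current_event.span_name,
    parent_span_id=current_event.parent_span_id,
)

embedding_model = OpenAIEmbeddings(
    api_key=os.getenv("OPENAI_API_KEY"),
    model="text-embedding-3-small",      # assumed model name, for illustration only
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=portkey_headers,
)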

View File

@@ -0,0 +1,99 @@
import requests
import json
from typing import Optional


# Define a function to make the GET request
def get_metadata_grouped_data(
    api_key: str,
    metadata_key: str,
    time_of_generation_min: Optional[str] = None,
    time_of_generation_max: Optional[str] = None,
    total_units_min: Optional[int] = None,
    total_units_max: Optional[int] = None,
    cost_min: Optional[float] = None,
    cost_max: Optional[float] = None,
    prompt_token_min: Optional[int] = None,
    prompt_token_max: Optional[int] = None,
    completion_token_min: Optional[int] = None,
    completion_token_max: Optional[int] = None,
    status_code: Optional[str] = None,
    weighted_feedback_min: Optional[float] = None,
    weighted_feedback_max: Optional[float] = None,
    virtual_keys: Optional[str] = None,
    configs: Optional[str] = None,
    workspace_slug: Optional[str] = None,
    api_key_ids: Optional[str] = None,
    current_page: Optional[int] = 1,
    page_size: Optional[int] = 20,
    metadata: Optional[str] = None,
    ai_org_model: Optional[str] = None,
    trace_id: Optional[str] = None,
    span_id: Optional[str] = None,
):
    url = f"https://api.portkey.ai/v1/analytics/groups/metadata/{metadata_key}"

    # Set up query parameters
    params = {
        "time_of_generation_min": time_of_generation_min,
        "time_of_generation_max": time_of_generation_max,
        "total_units_min": total_units_min,
        "total_units_max": total_units_max,
        "cost_min": cost_min,
        "cost_max": cost_max,
        "prompt_token_min": prompt_token_min,
        "prompt_token_max": prompt_token_max,
        "completion_token_min": completion_token_min,
        "completion_token_max": completion_token_max,
        "status_code": status_code,
        "weighted_feedback_min": weighted_feedback_min,
        "weighted_feedback_max": weighted_feedback_max,
        "virtual_keys": virtual_keys,
        "configs": configs,
        "workspace_slug": workspace_slug,
        "api_key_ids": api_key_ids,
        "current_page": current_page,
        "page_size": page_size,
        "metadata": metadata,
        "ai_org_model": ai_org_model,
        "trace_id": trace_id,
        "span_id": span_id,
    }

    # Remove any keys with None values
    params = {k: v for k, v in params.items() if v is not None}

    # Set up the headers
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }

    # Make the GET request
    response = requests.get(url, headers=headers, params=params)

    # Check for successful response
    if response.status_code == 200:
        return response.json()  # Return JSON data
    else:
        response.raise_for_status()  # Raise an exception for errors


# Example usage
# Replace 'your_api_key' and 'your_metadata_key' with actual values
api_key = 'your_api_key'
metadata_key = 'your_metadata_key'

try:
    data = get_metadata_grouped_data(
        api_key=api_key,
        metadata_key=metadata_key,
        time_of_generation_min="2024-08-23T15:50:23+05:30",
        time_of_generation_max="2024-09-23T15:50:23+05:30",
        total_units_min=100,
        total_units_max=1000,
        cost_min=10,
        cost_max=100,
        status_code="200,201"
    )
    print(json.dumps(data, indent=4))
except Exception as e:
    print(f"Error occurred: {str(e)}")

View File

@@ -141,7 +141,7 @@ if [ $# -eq 0 ]; then
SERVICES=()
while IFS= read -r line; do
SERVICES+=("$line")
-done < <(yq e '.services | keys | .[]' compose_dev.yaml | grep -E '^(nginx|eveai_)')
+done < <(yq e '.services | keys | .[]' compose_dev.yaml | grep -E '^(nginx|eveai_|flower)')
else
SERVICES=("$@")
fi
@@ -158,7 +158,7 @@ docker buildx use eveai_builder
# Loop through services
for SERVICE in "${SERVICES[@]}"; do
if [[ "$SERVICE" == "nginx" || "$SERVICE" == eveai_* ]]; then
if [[ "$SERVICE" == "nginx" || "$SERVICE" == eveai_* || "$SERVICE" == "flower" ]]; then
if process_service "$SERVICE"; then
echo "Successfully processed $SERVICE"
else

View File

@@ -22,6 +22,8 @@ x-common-variables: &common-variables
MAIL_PASSWORD: '$$6xsWGbNtx$$CFMQZqc*'
MAIL_SERVER: mail.flow-it.net
MAIL_PORT: 465
+REDIS_URL: redis
+REDIS_PORT: '6379'
OPENAI_API_KEY: 'sk-proj-8R0jWzwjL7PeoPyMhJTZT3BlbkFJLb6HfRB2Hr9cEVFWEhU7'
GROQ_API_KEY: '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'
ANTHROPIC_API_KEY: 'sk-ant-api03-c2TmkzbReeGhXBO5JxNH6BJNylRDonc9GmZd0eRbrvyekec2'
@@ -265,6 +267,22 @@ services:
networks:
- eveai-network
+  flower:
+    image: josakola/flower:latest
+    build:
+      context: ..
+      dockerfile: ./docker/flower/Dockerfile
+    environment:
+      <<: *common-variables
+    volumes:
+      - ../scripts:/app/scripts
+    ports:
+      - "5555:5555"
+    depends_on:
+      - redis
+    networks:
+      - eveai-network
minio:
image: minio/minio
ports:
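
With port 5555 published above, a quick way to confirm the dev container is serving is Flower's HTTP API. A sketch, assuming the /flower URL prefix and the unauthenticated API that start_flower.sh (later in this commit) configures for development:

import requests

# Dev-only smoke test against the port published in compose_dev.yaml.
# The /flower prefix matches --url-prefix in start_flower.sh; drop it if that changes.
resp = requests.get("http://localhost:5555/flower/api/workers", timeout=5)
resp.raise_for_status()
for worker, info in resp.json().items():
    print(worker, "->", "responding" if info else "no data")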

View File

@@ -21,11 +21,13 @@ x-common-variables: &common-variables
MAIL_USERNAME: 'evie_admin@askeveai.com'
MAIL_PASSWORD: 's5D%R#y^v!s&6Z^i0k&'
MAIL_SERVER: mail.askeveai.com
-MAIL_PORT: 465
+MAIL_PORT: '465'
REDIS_USER: eveai
REDIS_PASS: 'jHliZwGD36sONgbm0fc6SOpzLbknqq4RNF8K'
REDIS_URL: 8bciqc.stackhero-network.com
REDIS_PORT: '9961'
+FLOWER_USER: 'Felucia'
+FLOWER_PASSWORD: 'Jungles'
OPENAI_API_KEY: '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'
GROQ_API_KEY: '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'
ANTHROPIC_API_KEY: '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'
@@ -143,6 +145,15 @@ services:
networks:
- eveai-network
+  flower:
+    image: josakola/flower:latest
+    environment:
+      <<: *common-variables
+    ports:
+      - "5555:5555"
+    networks:
+      - eveai-network
volumes:
eveai_logs:

docker/flower/Dockerfile (new file, +34 lines)
View File

@@ -0,0 +1,34 @@
ARG PYTHON_VERSION=3.12.3
FROM python:${PYTHON_VERSION}-slim as base
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
WORKDIR /app
ARG UID=10001
RUN adduser \
--disabled-password \
--gecos "" \
--home "/nonexistent" \
--shell "/bin/bash" \
--no-create-home \
--uid "${UID}" \
appuser
RUN apt-get update && apt-get install -y \
build-essential \
gcc \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
COPY requirements.txt /app/
RUN pip install --no-cache-dir -r requirements.txt
COPY . /app
COPY scripts/start_flower.sh /app/start_flower.sh
RUN chmod a+x /app/start_flower.sh
USER appuser
CMD ["/app/start_flower.sh"]

View File

@@ -159,13 +159,12 @@ http {
}
location /flower/ {
-proxy_pass http://127.0.0.1:5555/;
+proxy_pass http://flower:5555/flower/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
}
include sites-enabled/*;
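
In production the dashboard sits behind this proxy location plus Flower's basic auth (FLOWER_USER / FLOWER_PASSWORD from the prod compose file). A sketch of an end-to-end check through nginx; the hostname is a placeholder:

import os
import requests

# Placeholder hostname; use whatever host this nginx config actually serves.
base_url = "https://your-production-host"
auth = (os.environ["FLOWER_USER"], os.environ["FLOWER_PASSWORD"])

# A 200 means nginx resolved the flower service and the basic-auth credentials matched.
resp = requests.get(f"{base_url}/flower/", auth=auth, timeout=10)
print(resp.status_code)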

View File

@@ -79,4 +79,4 @@ flask-healthz~=1.0.1
langsmith~=0.1.121
anthropic~=0.34.2
prometheus-client~=0.20.0
+flower~=2.0.1

scripts/start_flower.sh (Executable file → Normal file, 33 changed lines)
View File

@@ -1,9 +1,28 @@
-#!/usr/bin/env bash
+#!/bin/bash
set -e
-cd "/Volumes/OWC4M2_1/Dropbox/Josako's Dev/Josako/EveAI/Development/eveAI/" || exit 1
-source "/Volumes/OWC4M2_1/Dropbox/Josako's Dev/Josako/EveAI/Development/eveAI/.venv/bin/activate"
# scripts/start_flower.sh
# on development machine, no authentication required
export FLOWER_UNAUTHENTICATED_API=True
-# Start a worker for the 'embeddings' queue with higher concurrency
-celery -A eveai_workers.celery flower
+# Set default values
+REDIS_HOST=${REDIS_URL:-redis}
+REDIS_PORT=${REDIS_PORT:-6379}
+# Set environment-specific variables
+if [ "$FLASK_ENV" = "production" ]; then
+    # Production settings
+    export FLOWER_BASIC_AUTH="${FLOWER_USER}:${FLOWER_PASSWORD}"
+    export FLOWER_BROKER_URL="redis://${REDIS_USER}:${REDIS_PASS}@${REDIS_URL}:${REDIS_PORT}/0"
+    export CELERY_BROKER_URL="redis://${REDIS_USER}:${REDIS_PASS}@${REDIS_URL}:${REDIS_PORT}/0"
+else
+    # Development settings
+    export FLOWER_BROKER_URL="redis://${REDIS_HOST}:${REDIS_PORT}/0"
+    export CELERY_BROKER_URL="redis://${REDIS_HOST}:${REDIS_PORT}/0"
+fi
+echo "$FLOWER_BROKER_URL"
+echo "----------"
+# Start Flower
+exec celery flower \
+    --url-prefix=/flower \
+    --port=5555
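
The rewritten script derives the broker URL from environment variables instead of a hard-coded developer path. When the REDIS_* values are in doubt, the same URL can be rebuilt and pinged from Python before the container starts Flower; a sketch using the redis-py client (a debugging aid, not a dependency added by this commit), mirroring the script's fallbacks:

import os
import redis  # redis-py client; assumed available locally for debugging

# Mirror start_flower.sh: authenticated broker in production, plain host:port otherwise.
host = os.getenv("REDIS_URL", "redis")
port = os.getenv("REDIS_PORT", "6379")

if os.getenv("FLASK_ENV") == "production":
    broker_url = f"redis://{os.environ['REDIS_USER']}:{os.environ['REDIS_PASS']}@{host}:{port}/0"
else:
    broker_url = f"redis://{host}:{port}/0"

# Raises if the broker is unreachable or the credentials are wrong.
redis.Redis.from_url(broker_url).ping()
print("broker reachable:", broker_url)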