Portkey Integration

Josako committed 2024-07-18 07:08:42 +02:00
parent 908a2eaf7e
commit 88ca04136d
6 changed files with 39 additions and 8 deletions

.DS_Store (binary file not shown)


@@ -8,6 +8,7 @@ import ast
 from typing import List
 from openai import OpenAI
 # from groq import Groq
+from portkey_ai import createHeaders, PORTKEY_GATEWAY_URL
 from common.models.document import EmbeddingSmallOpenAI, EmbeddingLargeOpenAI
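For context, createHeaders is a small helper in the portkey_ai package that turns the Portkey key, the provider name and optional metadata into the x-portkey-* HTTP headers the gateway expects, and PORTKEY_GATEWAY_URL is the hosted gateway endpoint. A rough sketch of what the rest of this diff relies on (the exact header keys and values shown in comments are illustrative, not copied from the SDK):

    from portkey_ai import createHeaders, PORTKEY_GATEWAY_URL

    # Build the extra headers once; the metadata ends up attached to every
    # request that Portkey logs for this client.
    headers = createHeaders(api_key='<portkey-api-key>',
                            provider='openai',
                            metadata={'tenant_id': '42'})

    # 'headers' is a plain dict of x-portkey-* entries (API key, provider,
    # JSON-serialised metadata). PORTKEY_GATEWAY_URL is the gateway base URL,
    # so any OpenAI-compatible client can be pointed at it via
    # base_url / default_headers, which is exactly what the hunks below do.
    print(PORTKEY_GATEWAY_URL, headers)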
@@ -91,16 +92,26 @@ def select_model_variables(tenant):
     # Set Embedding variables
     match embedding_provider:
         case 'openai':
+            portkey_metadata = {'tenant_id': str(tenant.id)}
+            portkey_headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
+                                            provider='openai',
+                                            metadata=portkey_metadata)
             match embedding_model:
                 case 'text-embedding-3-small':
                     api_key = current_app.config.get('OPENAI_API_KEY')
                     model_variables['embedding_model'] = OpenAIEmbeddings(api_key=api_key,
-                                                                          model='text-embedding-3-small')
+                                                                          model='text-embedding-3-small',
+                                                                          base_url=PORTKEY_GATEWAY_URL,
+                                                                          default_headers=portkey_headers
+                                                                          )
                     model_variables['embedding_db_model'] = EmbeddingSmallOpenAI
                 case 'text-embedding-3-large':
                     api_key = current_app.config.get('OPENAI_API_KEY')
                     model_variables['embedding_model'] = OpenAIEmbeddings(api_key=api_key,
-                                                                          model='text-embedding-3-large')
+                                                                          model='text-embedding-3-large',
+                                                                          base_url=PORTKEY_GATEWAY_URL,
+                                                                          default_headers=portkey_headers
+                                                                          )
                     model_variables['embedding_db_model'] = EmbeddingLargeOpenAI
                 case _:
                     raise Exception(f'Error setting model variables for tenant {tenant.id} '
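The same three-line header setup is repeated in every provider branch of this function. As a sketch only (not part of this commit, and assuming OpenAIEmbeddings is imported from langchain_openai elsewhere in the file), the per-tenant Portkey routing could be built by a single helper:

    from flask import current_app
    from langchain_openai import OpenAIEmbeddings  # assumed import, not visible in this diff
    from portkey_ai import createHeaders, PORTKEY_GATEWAY_URL

    def portkey_openai_embeddings(tenant, model_name):
        """Hypothetical helper: an OpenAIEmbeddings client routed through the Portkey gateway."""
        headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
                                provider='openai',
                                metadata={'tenant_id': str(tenant.id)})
        return OpenAIEmbeddings(api_key=current_app.config.get('OPENAI_API_KEY'),
                                model=model_name,
                                base_url=PORTKEY_GATEWAY_URL,
                                default_headers=headers)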
@@ -112,13 +123,22 @@ def select_model_variables(tenant):
     # Set Chat model variables
     match llm_provider:
         case 'openai':
+            portkey_metadata = {'tenant_id': str(tenant.id)}
+            portkey_headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
+                                            metadata=portkey_metadata,
+                                            provider='openai')
+            tool_calling_supported = False
             api_key = current_app.config.get('OPENAI_API_KEY')
             model_variables['llm'] = ChatOpenAI(api_key=api_key,
                                                 model=llm_model,
-                                                temperature=model_variables['RAG_temperature'])
+                                                temperature=model_variables['RAG_temperature'],
+                                                base_url=PORTKEY_GATEWAY_URL,
+                                                default_headers=portkey_headers)
             model_variables['llm_no_rag'] = ChatOpenAI(api_key=api_key,
                                                        model=llm_model,
-                                                       temperature=model_variables['no_RAG_temperature'])
+                                                       temperature=model_variables['no_RAG_temperature'],
+                                                       base_url=PORTKEY_GATEWAY_URL,
+                                                       default_headers=portkey_headers)
             tool_calling_supported = False
             match llm_model:
                 case 'gpt-4-turbo' | 'gpt-4o':
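Nothing changes for callers: both ChatOpenAI instances still speak the OpenAI API, they just send their requests to the gateway, which forwards them to OpenAI and records the tenant_id metadata. A hypothetical usage example (the tenant object and prompt are assumed, not taken from this repository):

    # Assumes select_model_variables() has run for some tenant, as in the diff above.
    model_variables = select_model_variables(tenant)

    # The call signature is unchanged; only the transport now goes through Portkey.
    reply = model_variables['llm'].invoke('Summarise the onboarding document in one sentence.')
    print(reply.content)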
@@ -161,9 +181,16 @@ def select_model_variables(tenant):
# model_variables['transcription_client'] = Groq(api_key=api_key) # model_variables['transcription_client'] = Groq(api_key=api_key)
# model_variables['transcription_model'] = 'whisper-large-v3' # model_variables['transcription_model'] = 'whisper-large-v3'
# Using OpenAI # Using OpenAI for transcriptions
portkey_metadata = {'tenant_id': str(tenant.id)}
portkey_headers = createHeaders(api_key=current_app.config.get('PORTKEY_API_KEY'),
metadata=portkey_metadata,
provider='openai'
)
api_key = current_app.config.get('OPENAI_API_KEY') api_key = current_app.config.get('OPENAI_API_KEY')
model_variables['transcription_client'] = OpenAI(api_key=api_key) model_variables['transcription_client'] = OpenAI(api_key=api_key,
base_url=PORTKEY_GATEWAY_URL,
default_headers=portkey_headers)
model_variables['transcription_model'] = 'whisper-1' model_variables['transcription_model'] = 'whisper-1'
return model_variables return model_variables
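The transcription client remains a plain openai.OpenAI instance; only its base_url points at the Portkey gateway, so Whisper calls keep their standard shape. A rough usage sketch (the file name is made up):

    # Hypothetical call through the Portkey-routed client created above.
    with open('meeting_recording.mp3', 'rb') as audio_file:
        transcript = model_variables['transcription_client'].audio.transcriptions.create(
            model=model_variables['transcription_model'],  # 'whisper-1'
            file=audio_file,
        )
    print(transcript.text)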


@@ -148,9 +148,12 @@ class DevConfig(Config):
     # Groq API Keys
     GROQ_API_KEY = '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'
-    # Antrhopic API Keys
+    # Anthropic API Keys
     ANTHROPIC_API_KEY = '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'
+    # Portkey API Keys
+    PORTKEY_API_KEY = 'T2Dt4QTpgCvWxa1OftYCJtj7NcDZ'
     # Unstructured settings
     UNSTRUCTURED_API_KEY = 'pDgCrXumYhM3CNvjvwV8msMldXC3uw'
     UNSTRUCTURED_BASE_URL = 'https://flowitbv-16c4us0m.api.unstructuredapp.io'
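PORTKEY_API_KEY is read back at runtime through current_app.config.get('PORTKEY_API_KEY') in the hunks above. A minimal sketch, assuming the dev key were instead supplied through the environment rather than hardcoded in DevConfig:

    import os

    # Hypothetical alternative for DevConfig: read the key from the environment
    # so the real value does not have to live in source control.
    PORTKEY_API_KEY = os.environ.get('PORTKEY_API_KEY', '')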

docker/.DS_Store (binary file not shown)

docker/db/.DS_Store (binary file not shown)


@@ -75,3 +75,4 @@ PyPDF2~=3.0.1
 groq~=0.9.0
 pydub~=0.25.1
 argparse~=1.4.0
+portkey_ai~=1.7.0