Improve chat functionality: restrict retrieval to the latest valid document versions, add embed/rag tuning flags to Tenant, and support tool-calling cited answers.
This commit is contained in:
@@ -1,11 +1,13 @@
|
||||
from langchain_core.retrievers import BaseRetriever
|
||||
from sqlalchemy import func, and_, or_
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Any, Dict
|
||||
from flask import current_app
|
||||
from datetime import date
|
||||
|
||||
from common.extensions import db
|
||||
from flask import current_app
|
||||
from config.logging_config import LOGGING
|
||||
from common.models.document import Document, DocumentVersion, Embedding
|
||||
|
||||
|
||||
class EveAIRetriever(BaseRetriever):
|
||||
@@ -23,26 +25,53 @@ class EveAIRetriever(BaseRetriever):
|
||||
db_class = self.model_variables['embedding_db_model']
|
||||
similarity_threshold = self.model_variables['similarity_threshold']
|
||||
k = self.model_variables['k']
|
||||
|
||||
try:
|
||||
res = (
|
||||
current_date = date.today()
|
||||
# Subquery to find the latest version of each document
|
||||
subquery = (
|
||||
db.session.query(
|
||||
DocumentVersion.doc_id,
|
||||
func.max(DocumentVersion.id).label('latest_version_id')
|
||||
)
|
||||
.group_by(DocumentVersion.doc_id)
|
||||
.subquery()
|
||||
)
|
||||
# Main query to filter embeddings
|
||||
query_obj = (
|
||||
db.session.query(db_class,
|
||||
db_class.embedding.cosine_distance(query_embedding)
|
||||
.label('distance'))
|
||||
.filter(db_class.embedding.cosine_distance(query_embedding) < similarity_threshold)
|
||||
db_class.embedding.cosine_distance(query_embedding).label('distance'))
|
||||
.join(DocumentVersion, db_class.doc_vers_id == DocumentVersion.id)
|
||||
.join(Document, DocumentVersion.doc_id == Document.id)
|
||||
.join(subquery, DocumentVersion.id == subquery.c.latest_version_id)
|
||||
.filter(
|
||||
or_(Document.valid_from.is_(None), Document.valid_from <= current_date),
|
||||
or_(Document.valid_to.is_(None), Document.valid_to >= current_date),
|
||||
db_class.embedding.cosine_distance(query_embedding) < similarity_threshold
|
||||
)
|
||||
.order_by('distance')
|
||||
.limit(k)
|
||||
.all()
|
||||
)
|
||||
current_app.rag_tuning_logger.debug(f'Retrieved {len(res)} relevant documents')
|
||||
current_app.rag_tuning_logger.debug(f'---------------------------------------')
|
||||
|
||||
# Print the generated SQL statement for debugging
|
||||
current_app.logger.debug("SQL Statement:\n")
|
||||
current_app.logger.debug(query_obj.statement.compile(compile_kwargs={"literal_binds": True}))
|
||||
|
||||
res = query_obj.all()
|
||||
|
||||
# current_app.rag_tuning_logger.debug(f'Retrieved {len(res)} relevant documents')
|
||||
# current_app.rag_tuning_logger.debug(f'---------------------------------------')
|
||||
result = []
|
||||
for doc in res:
|
||||
current_app.rag_tuning_logger.debug(f'Document ID: {doc[0].id} - Distance: {doc[1]}\n')
|
||||
current_app.rag_tuning_logger.debug(f'Chunk: \n {doc[0].chunk}\n\n')
|
||||
# current_app.rag_tuning_logger.debug(f'Document ID: {doc[0].id} - Distance: {doc[1]}\n')
|
||||
# current_app.rag_tuning_logger.debug(f'Chunk: \n {doc[0].chunk}\n\n')
|
||||
result.append(f'SOURCE: {doc[0].id}\n\n{doc[0].chunk}\n\n')
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
current_app.logger.error(f'Error retrieving relevant documents: {e}')
|
||||
db.session.rollback()
|
||||
return []
|
||||
return res
|
||||
return result
|
||||
|
||||
def _get_query_embedding(self, query: str):
|
||||
embedding_model = self.model_variables['embedding_model']
|
||||
|
||||
@@ -6,6 +6,7 @@ from .document import Embedding
|
||||
class ChatSession(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=True)
|
||||
session_id = db.Column(db.String(36), nullable=True)
|
||||
session_start = db.Column(db.DateTime, nullable=False)
|
||||
session_end = db.Column(db.DateTime, nullable=True)
|
||||
|
||||
@@ -21,6 +22,7 @@ class Interaction(db.Model):
|
||||
chat_session_id = db.Column(db.Integer, db.ForeignKey(ChatSession.id), nullable=False)
|
||||
question = db.Column(db.Text, nullable=False)
|
||||
answer = db.Column(db.Text, nullable=True)
|
||||
algorithm_used = db.Column(db.String(20), nullable=True)
|
||||
language = db.Column(db.String(2), nullable=False)
|
||||
appreciation = db.Column(db.Integer, nullable=True, default=100)
|
||||
|
||||
@@ -28,6 +30,9 @@ class Interaction(db.Model):
|
||||
question_at = db.Column(db.DateTime, nullable=False)
|
||||
answer_at = db.Column(db.DateTime, nullable=True)
|
||||
|
||||
# Relations
|
||||
embeddings = db.relationship('InteractionEmbedding', backref='interaction', lazy=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Interaction {self.id}>"
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ class Tenant(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
name = db.Column(db.String(80), unique=True, nullable=False)
|
||||
website = db.Column(db.String(255), nullable=True)
|
||||
timezone = db.Column(db.String(50), nullable=True, default='UTC')
|
||||
|
||||
# language information
|
||||
default_language = db.Column(db.String(2), nullable=True)
|
||||
@@ -70,7 +71,9 @@ class Tenant(db.Model):
|
||||
'llm_model': self.llm_model,
|
||||
'license_start_date': self.license_start_date,
|
||||
'license_end_date': self.license_end_date,
|
||||
'allowed_monthly_interactions': self.allowed_monthly_interactions
|
||||
'allowed_monthly_interactions': self.allowed_monthly_interactions,
|
||||
'embed_tuning': self.embed_tuning,
|
||||
'rag_tuning': self.rag_tuning,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,12 +1,32 @@
|
||||
import langcodes
|
||||
from flask import current_app
|
||||
from langchain.embeddings import OpenAIEmbeddings
|
||||
from langchain.chat_models import ChatOpenAI
|
||||
from langchain_community.embeddings import OpenAIEmbeddings
|
||||
from langchain_openai import ChatOpenAI
|
||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||
from langchain.prompts import ChatPromptTemplate
|
||||
import ast
|
||||
from typing import List
|
||||
|
||||
from common.models.document import EmbeddingSmallOpenAI
|
||||
|
||||
|
||||
class CitedAnswer(BaseModel):
    """Default docstring - to be replaced with actual prompt"""
    # NOTE(review): the docstring above is intentionally a placeholder. It is
    # overwritten at runtime by set_language_prompt_template(), because with
    # tool-calling LLMs the class docstring doubles as the tool description
    # sent to the model. Do not edit it here.

    # The model's natural-language answer, grounded in the retrieved sources.
    answer: str = Field(
        ...,
        description="The answer to the user question, based on the given sources",
    )
    # IDs of the specific source chunks used; these presumably correspond to
    # the `SOURCE: <id>` labels the retriever prepends to each chunk — confirm.
    citations: List[int] = Field(
        ...,
        description="The integer IDs of the SPECIFIC sources that were used to generate the answer"
    )
|
||||
|
||||
|
||||
def set_language_prompt_template(cls, language_prompt):
    """Install *language_prompt* as the docstring of *cls*.

    The CitedAnswer class docstring doubles as the tool/function prompt that
    tool-calling LLMs see, so swapping ``__doc__`` localises the prompt
    without touching the model schema.
    """
    setattr(cls, '__doc__', language_prompt)
|
||||
|
||||
|
||||
def select_model_variables(tenant):
|
||||
embedding_provider = tenant.embedding_model.rsplit('.', 1)[0]
|
||||
embedding_model = tenant.embedding_model.rsplit('.', 1)[1]
|
||||
@@ -60,7 +80,7 @@ def select_model_variables(tenant):
|
||||
case 'text-embedding-3-small':
|
||||
api_key = current_app.config.get('OPENAI_API_KEY')
|
||||
model_variables['embedding_model'] = OpenAIEmbeddings(api_key=api_key,
|
||||
model='text-embedding-3-small')
|
||||
model='text-embedding-3-small')
|
||||
model_variables['embedding_db_model'] = EmbeddingSmallOpenAI
|
||||
model_variables['min_chunk_size'] = current_app.config.get('OAI_TE3S_MIN_CHUNK_SIZE')
|
||||
model_variables['max_chunk_size'] = current_app.config.get('OAI_TE3S_MAX_CHUNK_SIZE')
|
||||
@@ -78,20 +98,34 @@ def select_model_variables(tenant):
|
||||
model_variables['llm'] = ChatOpenAI(api_key=api_key,
|
||||
model=llm_model,
|
||||
temperature=model_variables['RAG_temperature'])
|
||||
tool_calling_supported = False
|
||||
match llm_model:
|
||||
case 'gpt-4-turbo' | 'gpt-4o':
|
||||
summary_template = current_app.config.get('GPT4_SUMMARY_TEMPLATE')
|
||||
rag_template = current_app.config.get('GPT4_RAG_TEMPLATE')
|
||||
tool_calling_supported = True
|
||||
case 'gpt-3-5-turbo':
|
||||
summary_template = current_app.config.get('GPT3_5_SUMMARY_TEMPLATE')
|
||||
rag_template = current_app.config.get('GPT3_5_RAG_TEMPLATE')
|
||||
case _:
|
||||
raise Exception(f'Error setting model variables for tenant {tenant.id} '
|
||||
f'error: Invalid chat model')
|
||||
model_variables['summary_prompt'] = ChatPromptTemplate.from_template(summary_template)
|
||||
model_variables['rag_prompt'] = ChatPromptTemplate.from_template(rag_template)
|
||||
model_variables['summary_template'] = summary_template
|
||||
model_variables['rag_template'] = rag_template
|
||||
if tool_calling_supported:
|
||||
model_variables['cited_answer_cls'] = CitedAnswer
|
||||
case _:
|
||||
raise Exception(f'Error setting model variables for tenant {tenant.id} '
|
||||
f'error: Invalid chat provider')
|
||||
|
||||
return model_variables
|
||||
|
||||
|
||||
def create_language_template(template, language):
    """Fill the ``{language}`` placeholder in *template*.

    Prefers the human-readable display name resolved by :mod:`langcodes`
    (e.g. ``'fr'`` -> ``'French'``); if the code cannot be resolved, the raw
    *language* string is substituted as-is.
    """
    try:
        display = langcodes.Language.make(language=language).display_name()
        return template.replace('{language}', display)
    except ValueError:
        # Unknown/invalid code: fall back to the literal code.
        return template.replace('{language}', language)
|
||||
|
||||
Reference in New Issue
Block a user