Started work on the interaction views. However, a quick check-in is needed because a system-wide Python upgrade breaks the code.

This commit is contained in:
Josako
2024-06-21 09:52:06 +02:00
parent c5370c8026
commit cc9f6c95aa
19 changed files with 553 additions and 112 deletions

.idea/eveAI.iml generated
View File

@@ -6,8 +6,9 @@
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" />
<excludeFolder url="file://$MODULE_DIR$/.venv2" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="jdk" jdkName="Python 3.12 (eveAI)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">

View File

@@ -9,6 +9,7 @@ class ChatSession(db.Model):
session_id = db.Column(db.String(36), nullable=True)
session_start = db.Column(db.DateTime, nullable=False)
session_end = db.Column(db.DateTime, nullable=True)
timezone = db.Column(db.String(30), nullable=True)
# Relations
interactions = db.relationship('Interaction', backref='chat_session', lazy=True)
@@ -25,6 +26,7 @@ class Interaction(db.Model):
answer = db.Column(db.Text, nullable=True)
algorithm_used = db.Column(db.String(20), nullable=True)
language = db.Column(db.String(2), nullable=False)
timezone = db.Column(db.String(30), nullable=True)
appreciation = db.Column(db.Integer, nullable=True)
# Timing information

View File

@@ -21,6 +21,10 @@ class CitedAnswer(BaseModel):
...,
description="The integer IDs of the SPECIFIC sources that were used to generate the answer"
)
insufficient_info: bool = Field(
False, # Default value is set to False
description="A boolean indicating wether given sources were sufficient or not to generate the answer"
)
def set_language_prompt_template(cls, language_prompt):
@@ -112,17 +116,20 @@ def select_model_variables(tenant):
summary_template = current_app.config.get('GPT4_SUMMARY_TEMPLATE')
rag_template = current_app.config.get('GPT4_RAG_TEMPLATE')
history_template = current_app.config.get('GPT4_HISTORY_TEMPLATE')
encyclopedia_template = current_app.config.get('GPT4_ENCYCLOPEDIA_TEMPLATE')
tool_calling_supported = True
case 'gpt-3-5-turbo':
summary_template = current_app.config.get('GPT3_5_SUMMARY_TEMPLATE')
rag_template = current_app.config.get('GPT3_5_RAG_TEMPLATE')
history_template = current_app.config.get('GPT3_5_HISTORY_TEMPLATE')
encyclopedia_template = current_app.config.get('GPT3_5_ENCYCLOPEDIA_TEMPLATE')
case _:
raise Exception(f'Error setting model variables for tenant {tenant.id} '
f'error: Invalid chat model')
model_variables['summary_template'] = summary_template
model_variables['rag_template'] = rag_template
model_variables['history_template'] = history_template
model_variables['encyclopedia_template'] = encyclopedia_template
if tool_calling_supported:
model_variables['cited_answer_cls'] = CitedAnswer
case _:
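The hunk above only adds the insufficient_info field and threads the new encyclopedia template through select_model_variables; the chain that actually produces a CitedAnswer is not part of this diff. A minimal sketch of how the flag behaves once the structured answer is parsed (the answer field is an assumption here, and .dict() follows the Pydantic v1 style used elsewhere in this repo):

from pydantic import BaseModel, Field

class CitedAnswer(BaseModel):
    # 'answer' is assumed; only 'citations' and 'insufficient_info' appear in the diff.
    answer: str = Field(..., description="The answer to the user's question")
    citations: list[int] = Field(..., description="The integer IDs of the SPECIFIC sources that were used")
    insufficient_info: bool = Field(False, description="Whether the given sources were insufficient to generate the answer")

# When the model omits the flag, the default keeps the normal (non-fallback) path.
parsed = CitedAnswer(answer="42 is the answer.", citations=[3, 7]).dict()
assert parsed["insufficient_info"] is False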

View File

@@ -0,0 +1,37 @@
# common/utils/filters.py
import pytz
from datetime import datetime
def to_local_time(utc_dt, timezone_str):
"""
Converts a UTC datetime to a local datetime based on the provided timezone string.
"""
if not utc_dt:
return "N/A"
local_tz = pytz.timezone(timezone_str or 'UTC')  # fall back to UTC when no timezone was stored for the session
local_dt = utc_dt.replace(tzinfo=pytz.utc).astimezone(local_tz)
return local_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z')
def time_difference(start_dt, end_dt):
"""
Returns the time difference between two datetimes as a string.
If end_dt is None, returns "Ongoing".
"""
if not start_dt:
return "N/A"
if end_dt:
delta = end_dt - start_dt
# Customize the formatting as needed
return str(delta)
return "Ongoing"
def register_filters(app):
"""
Registers custom filters with the Flask app.
"""
app.jinja_env.filters['to_local_time'] = to_local_time
app.jinja_env.filters['time_difference'] = time_difference
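For reference, a quick sketch of what these filters return when called directly (the datetime values and the Europe/Brussels zone are only illustrative; assumes the two functions above are importable from the new filters module):

from datetime import datetime, timedelta

start = datetime(2024, 6, 21, 7, 52, 6)                        # naive UTC, as stored in the DB
print(to_local_time(start, 'Europe/Brussels'))                 # '2024-06-21 09:52:06 CEST+0200'
print(time_difference(start, start + timedelta(seconds=42)))   # '0:00:42'
print(time_difference(start, None))                            # 'Ongoing'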

View File

@@ -100,19 +100,28 @@ class Config(object):
{tenant_context}
The context is a conversation history, with the HUMAN asking questions, the AI answering questions.
The history is delimited between triple backquotes.
Your answer by stating the question in {language}.
You answer by stating the question in {language}.
History:
```{history}```
Question to be detailed:
{question}"""
# Fallback Algorithms
FALLBACK_ALGORITHMS = [
"RAG_TENANT",
"RAG_WIKIPEDIA",
"RAG_GOOGLE",
"LLM"
]
GPT3_5_HISTORY_TEMPLATE = """You are a helpful assistant that details a question based on a previous context,
in such a way that the question is understandable without the previous context.
{tenant_context}
The context is a conversation history, with the HUMAN asking questions, the AI answering questions.
The history is delimited between triple backquotes.
You answer by stating the question in {language}.
History:
```{history}```
Question to be detailed:
{question}"""
GPT4_ENCYCLOPEDIA_TEMPLATE = """You have a lot of background knowledge, and as such you are some kind of
'encyclopedia' to explain general terminology. Only answer if you have a clear understanding of the question.
If not, say you do not have sufficient information to answer the question. Use {language} in your communication.
Question:
{question}"""
# SocketIO settings
# SOCKETIO_ASYNC_MODE = 'threading'
@@ -125,6 +134,14 @@ class Config(object):
PERMANENT_SESSION_LIFETIME = timedelta(minutes=60)
SESSION_REFRESH_EACH_REQUEST = True
# Fallback Algorithms
FALLBACK_ALGORITHMS = [
"RAG_TENANT",
"RAG_WIKIPEDIA",
"RAG_GOOGLE",
"LLM"
]
# Interaction algorithms
INTERACTION_ALGORITHMS = {
"RAG_TENANT": {"name": "RAG_TENANT", "description": "Algorithm using only information provided by the tenant"},

View File

@@ -13,8 +13,7 @@ from config.logging_config import LOGGING
from common.utils.security import set_tenant_session_data
from .errors import register_error_handlers
from common.utils.celery_utils import make_celery, init_celery
from common.utils.debug_utils import log_request_middleware
from common.utils.nginx_utils import prefixed_url_for
from common.utils.template_filters import register_filters
def create_app(config_file=None):
@@ -85,6 +84,9 @@ def create_app(config_file=None):
# Register API
register_api(app)
# Register template filters
register_filters(app)
app.logger.info("EveAI App Server Started Successfully")
app.logger.info("-------------------------------------------------------------------------------------------------")
return app
@@ -112,6 +114,8 @@ def register_blueprints(app):
app.register_blueprint(document_bp)
from .views.security_views import security_bp
app.register_blueprint(security_bp)
from .views.interaction_views import interaction_bp
app.register_blueprint(interaction_bp)
def register_api(app):

View File

@@ -0,0 +1,23 @@
{% extends 'base.html' %}
{% from 'macros.html' import render_selectable_table, render_pagination %}
{% block title %}Chat Sessions{% endblock %}
{% block content_title %}Chat Sessions{% endblock %}
{% block content_description %}View Chat Sessions for Tenant{% endblock %}
{% block content_class %}<div class="col-xl-12 col-lg-5 col-md-7 mx-auto"></div>{% endblock %}
{% block content %}
<div class="container">
<form method="POST" action="{{ url_for('interaction_bp.handle_chat_session_selection') }}">
{{ render_selectable_table(headers=["ID", "Session ID", "Session Start", "Session End"], rows=rows, selectable=True, id="documentsTable") }}
<div class="form-group mt-3">
<button type="submit" name="action" value="view_chat_session" class="btn btn-primary">View Chat Session</button>
</div>
</form>
</div>
{% endblock %}
{% block content_footer %}
{{ render_pagination(pagination, 'interaction_bp.chat_sessions') }}
{% endblock %}

View File

@@ -0,0 +1,127 @@
{% extends "base.html" %}
{% block content %}
<div class="container mt-5">
<h2>Chat Session Details</h2>
<!-- Session Information -->
<div class="card mb-4">
<div class="card-header">
<h5>Session Information</h5>
<!-- Timezone Toggle Buttons -->
<div class="btn-group" role="group">
<button type="button" class="btn btn-primary" id="toggle-interaction-timezone">Interaction Timezone</button>
<button type="button" class="btn btn-secondary" id="toggle-admin-timezone">Admin Timezone</button>
</div>
</div>
<div class="card-body">
<dl class="row">
<dt class="col-sm-3">Session ID:</dt>
<dd class="col-sm-9">{{ chat_session.session_id }}</dd>
<dt class="col-sm-3">Session Start:</dt>
<dd class="col-sm-9">
<span class="timezone interaction-timezone">{{ chat_session.session_start | to_local_time(chat_session.timezone) }}</span>
<span class="timezone admin-timezone d-none">{{ chat_session.session_start | to_local_time(session['admin_user_timezone']) }}</span>
</dd>
<dt class="col-sm-3">Session End:</dt>
<dd class="col-sm-9">
{% if chat_session.session_end %}
<span class="timezone interaction-timezone">{{ chat_session.session_end | to_local_time(chat_session.timezone) }}</span>
<span class="timezone admin-timezone d-none">{{ chat_session.session_end | to_local_time(session['admin_user_timezone']) }}</span>
{% else %}
Ongoing
{% endif %}
</dd>
</dl>
</div>
</div>
<!-- Interactions List -->
<div class="card mb-4">
<div class="card-header">
<h5>Interactions</h5>
</div>
<div class="card-body">
{% for interaction in interactions %}
<div class="interaction mb-3">
<div class="card">
<div class="card-header d-flex justify-content-between">
<span>Question:</span>
<span class="text-muted">
<span class="timezone interaction-timezone">{{ interaction.question_at | to_local_time(interaction.timezone) }}</span>
<span class="timezone admin-timezone d-none">{{ interaction.question_at | to_local_time(session['admin_user_timezone']) }}</span>
-
<span class="timezone interaction-timezone">{{ interaction.answer_at | to_local_time(interaction.timezone) }}</span>
<span class="timezone admin-timezone d-none">{{ interaction.answer_at | to_local_time(session['admin_user_timezone']) }}</span>
({{ interaction.question_at | time_difference(interaction.answer_at) }})
</span>
</div>
<div class="card-body">
<p><strong>Question:</strong> {{ interaction.question }}</p>
<p><strong>Answer:</strong> {{ interaction.answer }}</p>
<p>
<strong>Algorithm Used:</strong>
<i class="material-icons {{ 'fingerprint-rag-' ~ interaction.algorithm_used.lower() }}">
fingerprint
</i> {{ interaction.algorithm_used }}
</p>
<p>
<strong>Appreciation:</strong>
<i class="material-icons thumb-icon {{ 'thumb_up' if interaction.appreciation == 1 else 'thumb_down' }}">
{{ 'thumb_up' if interaction.appreciation == 1 else 'thumb_down' }}
</i>
</p>
<p><strong>Embeddings:</strong>
{% if interaction.embeddings %}
{% for embedding in interaction.embeddings %}
<a href="{{ url_for('interaction_bp.view_embedding', embedding_id=embedding.embedding_id) }}" class="badge badge-info">
{{ embedding.embedding_id }}
</a>
{% endfor %}
{% else %}
None
{% endif %}
</p>
</div>
</div>
</div>
{% endfor %}
</div>
</div>
</div>
{% endblock %}
{% block scripts %}
<script>
document.addEventListener('DOMContentLoaded', function() {
// Elements to toggle
const interactionTimes = document.querySelectorAll('.interaction-timezone');
const adminTimes = document.querySelectorAll('.admin-timezone');
// Buttons
const interactionButton = document.getElementById('toggle-interaction-timezone');
const adminButton = document.getElementById('toggle-admin-timezone');
// Toggle to Interaction Timezone
interactionButton.addEventListener('click', function() {
interactionTimes.forEach(el => el.classList.remove('d-none'));
adminTimes.forEach(el => el.classList.add('d-none'));
interactionButton.classList.add('btn-primary');
interactionButton.classList.remove('btn-secondary');
adminButton.classList.add('btn-secondary');
adminButton.classList.remove('btn-primary');
});
// Toggle to Admin Timezone
adminButton.addEventListener('click', function() {
interactionTimes.forEach(el => el.classList.add('d-none'));
adminTimes.forEach(el => el.classList.remove('d-none'));
interactionButton.classList.add('btn-secondary');
interactionButton.classList.remove('btn-primary');
adminButton.classList.add('btn-primary');
adminButton.classList.remove('btn-secondary');
});
});
</script>
{% endblock %}

View File

@@ -87,6 +87,11 @@
{'name': 'Library Operations', 'url': '/document/library_operations', 'roles': ['Super User', 'Tenant Admin']},
]) }}
{% endif %}
{% if current_user.is_authenticated %}
{{ dropdown('Interactions', 'contacts', [
{'name': 'Chat Sessions', 'url': '/interaction/chat_sessions', 'roles': ['Super User', 'Tenant Admin']},
]) }}
{% endif %}
{% if current_user.is_authenticated %}
{{ dropdown(current_user.user_name, 'contacts', [
{'name': 'Session Defaults', 'url': '/session_defaults', 'roles': ['Super User', 'Tenant Admin']},

View File

@@ -170,4 +170,26 @@
});
}
</script>
<script>
// JavaScript to detect user's timezone
document.addEventListener('DOMContentLoaded', (event) => {
// Detect timezone
const userTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
// Send timezone to the server via a POST request
fetch('/set_user_timezone', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ timezone: userTimezone })
}).then(response => {
if (response.ok) {
console.log('Timezone sent to server successfully');
} else {
console.error('Failed to send timezone to server');
}
});
});
</script>
{% endblock %}

View File

@@ -1,4 +1,4 @@
from flask import request, redirect, url_for, flash, render_template, Blueprint, session, current_app
from flask import request, render_template, Blueprint, session, current_app, jsonify
from flask_security import roles_required, roles_accepted
from .basic_forms import SessionDefaultsForm
@@ -41,3 +41,16 @@ def session_defaults():
session['default_language'] = form.default_language.data
return render_template('basic/session_defaults.html', form=form)
@basic_bp.route('/set_user_timezone', methods=['POST'])
def set_user_timezone():
data = request.get_json()
timezone = data.get('timezone')
if timezone:
session['admin_user_timezone'] = timezone
return jsonify({'status': 'success', 'timezone': timezone}), 200
else:
return jsonify({'status': 'error', 'message': 'Timezone not provided'}), 400
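A minimal sketch of the endpoint's JSON contract using Flask's test client; the import path for create_app and the absence of tenant middleware in a bare test context are assumptions:

from app_server import create_app   # hypothetical import path; adjust to the actual package

app = create_app()
with app.test_client() as client:
    ok = client.post('/set_user_timezone', json={'timezone': 'Europe/Brussels'})
    assert ok.status_code == 200 and ok.get_json()['status'] == 'success'

    bad = client.post('/set_user_timezone', json={})
    assert bad.status_code == 400 and bad.get_json()['status'] == 'error'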

View File

@@ -0,0 +1,100 @@
import ast
import os
from datetime import datetime as dt, timezone as tz
import chardet
from flask import request, redirect, flash, render_template, Blueprint, session, current_app
from flask_security import roles_accepted, current_user
from sqlalchemy import desc
from sqlalchemy.orm import joinedload
from werkzeug.datastructures import FileStorage
from werkzeug.utils import secure_filename
from sqlalchemy.exc import SQLAlchemyError
import requests
from requests.exceptions import SSLError
from urllib.parse import urlparse
import io
from common.models.interaction import ChatSession, Interaction
from common.extensions import db
from .document_forms import AddDocumentForm, AddURLForm, EditDocumentForm, EditDocumentVersionForm
from common.utils.middleware import mw_before_request
from common.utils.celery_utils import current_celery
from common.utils.nginx_utils import prefixed_url_for
from common.utils.view_assistants import form_validation_failed, prepare_table_for_macro
interaction_bp = Blueprint('interaction_bp', __name__, url_prefix='/interaction')
@interaction_bp.before_request
def log_before_request():
current_app.logger.debug(f"Before request (interaction_bp): {request.method} {request.url}")
@interaction_bp.after_request
def log_after_request(response):
current_app.logger.debug(
f"After request (interaction_bp): {request.method} {request.url} - Status: {response.status}")
return response
@interaction_bp.before_request
def before_request():
try:
mw_before_request()
except Exception as e:
current_app.logger.error(f'Error switching schema in Interaction Blueprint: {e}')
for role in current_user.roles:
current_app.logger.debug(f'User {current_user.email} has role {role.name}')
raise
@interaction_bp.route('/chat_sessions', methods=['GET', 'POST'])
@roles_accepted('Super User', 'Tenant Admin')
def chat_sessions():
page = request.args.get('page', 1, type=int)
per_page = request.args.get('per_page', 10, type=int)
query = ChatSession.query.order_by(desc(ChatSession.session_start))
pagination = query.paginate(page=page, per_page=per_page, error_out=False)
docs = pagination.items
rows = prepare_table_for_macro(docs, [('id', ''), ('session_id', ''), ('session_start', ''), ('session_end', '')])
return render_template('interaction/chat_sessions.html', rows=rows, pagination=pagination)
@interaction_bp.route('/handle_chat_session_selection', methods=['POST'])
@roles_accepted('Super User', 'Tenant Admin')
def handle_chat_session_selection():
chat_session_identification = request.form['selected_row']
cs_id = ast.literal_eval(chat_session_identification).get('value')
action = request.form['action']
match action:
case 'view_chat_session':
return redirect(prefixed_url_for('interaction_bp.view_chat_session', chat_session_id=cs_id))
# Add more conditions for other actions
return redirect(prefixed_url_for('interaction_bp.chat_sessions'))
@interaction_bp.route('/view_chat_session/<chat_session_id>', methods=['GET'])
@roles_accepted('Super User', 'Tenant Admin')
def view_chat_session(chat_session_id):
chat_session = ChatSession.query.get_or_404(chat_session_id)
return show_chat_session(chat_session)
@interaction_bp.route('/view_chat_session_by_session_id/<session_id>', methods=['GET'])
@roles_accepted('Super User', 'Tenant Admin')
def view_chat_session_by_session_id(session_id):
chat_session = ChatSession.query.filter_by(session_id=session_id).first_or_404()
return show_chat_session(chat_session)
def show_chat_session(chat_session):
interactions = Interaction.query.filter_by(chat_session_id=chat_session.id).all()
return render_template('interaction/view_chat_session.html', chat_session=chat_session, interactions=interactions)

View File

@@ -60,7 +60,8 @@ class TenantForm(FlaskForm):
self.embedding_model.choices = [(model, model) for model in current_app.config['SUPPORTED_EMBEDDINGS']]
self.llm_model.choices = [(model, model) for model in current_app.config['SUPPORTED_LLMS']]
# Initialize fallback algorithms
self.fallback_algorithms.choices = [(algorithm, algorithm.lower()) for algorithm in current_app.config['FALLBACK_ALGORITHMS']]
self.fallback_algorithms.choices = \
[(algorithm, algorithm.lower()) for algorithm in current_app.config['FALLBACK_ALGORITHMS']]
class BaseUserForm(FlaskForm):

View File

@@ -67,6 +67,7 @@ def handle_message(data):
data['message'],
data['language'],
session['session_id'],
data['timezone']
])
current_app.logger.debug(f'SocketIO: Message offloading for tenant {current_tenant_id}, '
f'Question: {task.id}')

View File

@@ -38,7 +38,6 @@ def detail_question(question, language, model_variables, session_id):
chain = setup_and_retrieval | history_prompt | llm | output_parser
try:
answer = chain.invoke(question)
return answer
@@ -48,7 +47,7 @@ def detail_question(question, language, model_variables, session_id):
@current_celery.task(name='ask_question', queue='llm_interactions')
def ask_question(tenant_id, question, language, session_id):
def ask_question(tenant_id, question, language, session_id, user_timezone):
"""returns result structured as follows:
result = {
'answer': 'Your answer here',
@@ -75,15 +74,34 @@ def ask_question(tenant_id, question, language, session_id):
chat_session = ChatSession()
chat_session.session_id = session_id
chat_session.session_start = dt.now(tz.utc)
chat_session.timezone = user_timezone
db.session.add(chat_session)
db.session.commit()
except SQLAlchemyError as e:
current_app.logger.error(f'ask_question: Error initializing chat session in database: {e}')
raise
result, interaction = answer_using_tenant_rag(question, language, tenant, chat_session)
result['algorithm'] = current_app.config['INTERACTION_ALGORITHMS']['RAG_TENANT']['name']
result['interaction_id'] = interaction.id
if result['insufficient_info']:
if 'LLM' in tenant.fallback_algorithms:
result, interaction = answer_using_llm(question, language, tenant, chat_session)
result['algorithm'] = current_app.config['INTERACTION_ALGORITHMS']['LLM']['name']
result['interaction_id'] = interaction.id
return result
except Exception as e:
current_app.logger.error(f'ask_question: Error processing question: {e}')
raise
def answer_using_tenant_rag(question, language, tenant, chat_session):
new_interaction = Interaction()
new_interaction.question = question
new_interaction.language = language
new_interaction.timezone = chat_session.timezone
new_interaction.appreciation = None
new_interaction.chat_session_id = chat_session.id
new_interaction.question_at = dt.now(tz.utc)
@@ -96,7 +114,7 @@ def ask_question(tenant_id, question, language, session_id):
# Langchain debugging if required
# set_debug(True)
detailed_question = detail_question(question, language, model_variables, session_id)
detailed_question = detail_question(question, language, model_variables, chat_session.session_id)
current_app.logger.debug(f'Original question:\n {question}\n\nDetailed question: {detailed_question}')
new_interaction.detailed_question = detailed_question
new_interaction.detailed_question_at = dt.now(tz.utc)
@@ -120,7 +138,8 @@ def ask_question(tenant_id, question, language, session_id):
new_interaction.answer = answer
result = {
'answer': answer,
'citations': []
'citations': [],
'insufficient_info': False
}
else: # The model supports structured feedback
@@ -131,6 +150,7 @@ def ask_question(tenant_id, question, language, session_id):
result = chain.invoke(detailed_question).dict()
current_app.logger.debug(f'ask_question: result answer: {result["answer"]}')
current_app.logger.debug(f'ask_question: result citations: {result["citations"]}')
current_app.logger.debug(f'ask_question: insufficient information: {result["insufficient_info"]}')
new_interaction.answer = result['answer']
# Filter out the existing Embedding IDs
@@ -150,6 +170,9 @@ def ask_question(tenant_id, question, language, session_id):
result['citations'] = urls
# Disable langchain debugging if set above.
# set_debug(False)
new_interaction.answer_at = dt.now(tz.utc)
chat_session.session_end = dt.now(tz.utc)
@@ -158,18 +181,69 @@ def ask_question(tenant_id, question, language, session_id):
db.session.add(new_interaction)
db.session.add_all(new_interaction_embeddings)
db.session.commit()
return result, new_interaction
except SQLAlchemyError as e:
current_app.logger.error(f'ask_question: Error saving interaction to database: {e}')
raise
def answer_using_llm(question, language, tenant, chat_session):
new_interaction = Interaction()
new_interaction.question = question
new_interaction.language = language
new_interaction.timezone = chat_session.timezone
new_interaction.appreciation = None
new_interaction.chat_session_id = chat_session.id
new_interaction.question_at = dt.now(tz.utc)
new_interaction.algorithm_used = current_app.config['INTERACTION_ALGORITHMS']['LLM']['name']
# Select variables to work with depending on tenant model
model_variables = select_model_variables(tenant)
tenant_info = tenant.to_dict()
# Langchain debugging if required
# set_debug(True)
detailed_question = detail_question(question, language, model_variables, chat_session.session_id)
current_app.logger.debug(f'Original question:\n {question}\n\nDetailed question: {detailed_question}')
new_interaction.detailed_question = detailed_question
new_interaction.detailed_question_at = dt.now(tz.utc)
retriever = EveAIRetriever(model_variables, tenant_info)
llm = model_variables['llm_no_rag']
template = model_variables['encyclopedia_template']
language_template = create_language_template(template, language)
rag_prompt = ChatPromptTemplate.from_template(language_template)
setup = RunnablePassthrough()
output_parser = StrOutputParser()
new_interaction_embeddings = []
chain = setup | rag_prompt | llm | output_parser
input_question = {"question": detailed_question}
# Invoke the chain with the actual question
answer = chain.invoke(input_question)
new_interaction.answer = answer
result = {
'answer': answer,
'citations': [],
'insufficient_info': False
}
# Disable langchain debugging if set above.
# set_debug(False)
result['algorithm'] = current_app.config['INTERACTION_ALGORITHMS']['RAG_TENANT']['name']
result['interaction_id'] = new_interaction.id
return result
except Exception as e:
current_app.logger.error(f'ask_question: Error processing question: {e}')
new_interaction.answer_at = dt.now(tz.utc)
chat_session.session_end = dt.now(tz.utc)
try:
db.session.add(chat_session)
db.session.add(new_interaction)
db.session.commit()
return result, new_interaction
except SQLAlchemyError as e:
current_app.logger.error(f'ask_question: Error saving interaction to database: {e}')
raise
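Condensed, for readability: the control flow this hunk introduces around the two answer paths, as a sketch rather than the actual task code (error handling and DB persistence omitted):

def answer_with_fallback(question, language, tenant, chat_session):
    # Primary path: RAG over the tenant's own documents.
    result, interaction = answer_using_tenant_rag(question, language, tenant, chat_session)
    result['algorithm'] = current_app.config['INTERACTION_ALGORITHMS']['RAG_TENANT']['name']
    result['interaction_id'] = interaction.id

    # Fallback: only when the structured answer flags insufficient sources
    # and the tenant has opted in to the plain-LLM 'encyclopedia' algorithm.
    if result['insufficient_info'] and 'LLM' in tenant.fallback_algorithms:
        result, interaction = answer_using_llm(question, language, tenant, chat_session)
        result['algorithm'] = current_app.config['INTERACTION_ALGORITHMS']['LLM']['name']
        result['interaction_id'] = interaction.id

    return result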

View File

@@ -19,7 +19,7 @@
'39',
'EveAI-CHAT-6919-1265-9848-6655-9870',
'http://macstudio.ask-eve-ai-local.com',
'en'
'nl'
);
eveAI.initializeChat('chat-container');
});

View File

@@ -10,7 +10,7 @@
--algorithm-color-rag-tenant: #0f0; /* Green for RAG_TENANT */
--algorithm-color-rag-wikipedia: #00f; /* Blue for RAG_WIKIPEDIA */
--algorithm-color-rag-google: #ff0; /* Yellow for RAG_GOOGLE */
--algorithm-color-rag-llm: #800080; /* Purple for RAG_LLM */
--algorithm-color-llm: #800080; /* Purple for RAG_LLM */
/*--font-family: 'Arial, sans-serif'; !* Default font family *!*/
--font-family: 'ui-sans-serif, -apple-system, system-ui, Segoe UI, Roboto, Ubuntu, Cantarell, Noto Sans, sans-serif, Helvetica, Apple Color Emoji, Arial, Segoe UI Emoji, Segoe UI Symbol';
@@ -193,8 +193,8 @@
color: var(--algorithm-color-rag-google);
}
.fingerprint-rag-llm {
color: var(--algorithm-color-rag-llm);
.fingerprint-llm {
color: var(--algorithm-color-llm);
}
/* Styling for citation links */

View File

@@ -8,6 +8,7 @@ class EveAIChatWidget extends HTMLElement {
this.socket = null; // Initialize socket to null
this.attributesSet = false; // Flag to check if all attributes are set
this.jwtToken = null; // Initialize jwtToken to null
this.userTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone; // Detect user's timezone
console.log('EveAIChatWidget constructor called');
}
@@ -229,8 +230,8 @@ class EveAIChatWidget extends HTMLElement {
case 'RAG_GOOGLE':
algorithmClass = 'fingerprint-rag-google';
break;
case 'RAG_LLM':
algorithmClass = 'fingerprint-rag-llm';
case 'LLM':
algorithmClass = 'fingerprint-llm';
break;
default:
algorithmClass = '';
@@ -299,7 +300,13 @@ toggleFeedback(thumbsUp, thumbsDown, feedback, interactionId) {
return;
}
console.log('Sending message to backend');
this.socket.emit('user_message', { tenantId: this.tenantId, token: this.jwtToken, message, language: this.language });
this.socket.emit('user_message', {
tenantId: this.tenantId,
token: this.jwtToken,
message,
language: this.language,
timezone: this.userTimezone
});
this.setStatusMessage('Processing started ...')
}