12 changes: 12 additions & 0 deletions backend/database/auth.py
@@ -1,3 +1,5 @@
import os

from firebase_admin import auth

from database.redis_db import cache_user_name, get_cached_user_name
@@ -10,6 +12,16 @@ def get_user_from_uid(uid: str):
print(e)
user = None
if not user:
if os.getenv('LOCAL_DEVELOPMENT') == 'true':
return {
'uid': uid,
'email': 'email',
'email_verified': True,
'phone_number': '',
'display_name': 'Debug',
'photo_url': None,
'disabled': False,
}
return None

return {
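The added branch lets the backend run without a real Firebase account: when LOCAL_DEVELOPMENT=true and the uid cannot be resolved, get_user_from_uid returns a stubbed profile instead of None. A minimal sketch of exercising it, assuming the failed Firebase lookup is swallowed by the existing except block shown above; the uid is hypothetical:

import os

# Sketch only: enable the local-development bypass before calling into database.auth.
os.environ['LOCAL_DEVELOPMENT'] = 'true'

from database.auth import get_user_from_uid

# 'debug-uid' is a hypothetical uid that Firebase will not resolve.
user = get_user_from_uid('debug-uid')
assert user is not None and user['display_name'] == 'Debug'
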
26 changes: 26 additions & 0 deletions backend/models/chat.py
@@ -136,6 +136,32 @@ def get_sender_name(message: Message) -> str:

return '\n'.join(formatted_messages)

@staticmethod
def get_messages_as_dict(
messages: List['Message'], use_user_name_if_available: bool = False, use_plugin_name_if_available: bool = False
) -> List[dict]:
sorted_messages = sorted(messages, key=lambda m: m.created_at)

def get_sender_name(message: Message) -> str:
if message.sender == 'human':
return 'user'
# elif use_plugin_name_if_available and message.app_id is not None:
# plugin = next((p for p in plugins if p.id == message.app_id), None)
# if plugin:
# return plugin.name RESTORE ME
return message.sender # TODO: use app id

formatted_messages = [
{
'role': get_sender_name(message),
'content': message.text,
}
for message in sorted_messages
]

return formatted_messages



class ResponseMessage(Message):
ask_for_nps: Optional[bool] = False
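The new get_messages_as_dict helper converts a chat history into the role/content list shape most chat-completion APIs accept, mapping the 'human' sender to 'user' and otherwise passing the sender through (the app-name mapping is still commented out). A hedged usage sketch; the Message constructor fields beyond sender, text and created_at are assumptions about the model, not taken from this diff:

from datetime import datetime, timezone

from models.chat import Message

# Assumed constructor fields; only sender, text and created_at are used by the helper.
messages = [
    Message(id='1', text='What did I agree to in the standup?',
            created_at=datetime(2025, 1, 1, 9, 0, tzinfo=timezone.utc), sender='human', type='text'),
    Message(id='2', text='You agreed to review the auth PR.',
            created_at=datetime(2025, 1, 1, 9, 1, tzinfo=timezone.utc), sender='ai', type='text'),
]

print(Message.get_messages_as_dict(messages))
# [{'role': 'user', 'content': 'What did I agree to in the standup?'},
#  {'role': 'ai', 'content': 'You agreed to review the auth PR.'}]
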
65 changes: 60 additions & 5 deletions backend/models/conversation.py
@@ -112,11 +112,12 @@ class ActionItem(BaseModel):

@staticmethod
def actions_to_string(action_items: List['ActionItem']) -> str:
if not action_items:
return 'None'
return '\n'.join(
[f"- {item.description} ({'completed' if item.completed else 'pending'})" for item in action_items]
)
result = []
for item in action_items:
if isinstance(item, dict):
item = ActionItem(**item)
result.append(f"- {item.description} ({'completed' if item.completed else 'pending'})")
return '\n'.join(result)


class Event(BaseModel):
@@ -278,6 +279,60 @@ def __init__(self, **data):
# Update plugins_results based on apps_results
self.plugins_results = [PluginResult(plugin_id=app.app_id, content=app.content) for app in self.apps_results]
self.processing_memory_id = self.processing_conversation_id
#
# def model_dump_for_llm(self) -> str:
# d = self.model_dump(include={'category', 'title', 'overview'})
# return d

@staticmethod
def conversations_for_llm(
conversations: List['Conversation'],
use_transcript: bool = False,
include_timestamps: bool = False,
people: List[Person] = None,
) -> List[dict]:
result = []
people_map = {p.id: p for p in people} if people else {}
for i, conversation in enumerate(conversations):
if isinstance(conversation, dict):
conversation = Conversation(**conversation)
item = {
'index': i + 1,
'category': str(conversation.structured.category.value),
'title': str(conversation.structured.title),
'overview': str(conversation.structured.overview),
'created_at': conversation.created_at.astimezone(timezone.utc).strftime("%d %b %Y at %H:%M") + " UTC",
}

# attendees
if people_map:
conv_person_ids = set(conversation.get_person_ids())
if conv_person_ids:
attendees_names = [people_map[pid].name for pid in conv_person_ids if pid in people_map]
if attendees_names:
item['attendees'] = attendees_names

if conversation.structured.action_items:
item['actions'] = [item.description for item in conversation.structured.action_items]

if conversation.structured.events:
item['events'] = [{'title': event.title, 'start': event.start,
'duration_minutes': event.duration} for event in conversation.structured.events]

if conversation.apps_results and len(conversation.apps_results) > 0:
item['summarization'] = conversation.apps_results[0].content

if use_transcript:
item['transcript'] = conversation.get_transcript(include_timestamps=include_timestamps, people=people)
# photos
photo_descriptions = conversation.get_photos_descriptions(include_timestamps=include_timestamps)
if photo_descriptions != 'None':
item['photos'] = photo_descriptions
# "type": "text"
# "text": "Your message here"
result.append(item)

return result

@staticmethod
def conversations_to_string(
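conversations_for_llm flattens each conversation into a compact dict (index, category, title, overview, created_at) and only adds attendees, actions, events, summarization, transcript and photos when they exist, which keeps the prompt payload small. An illustrative example of the resulting shape; all values below are invented:

# Illustrative output of Conversation.conversations_for_llm (values are made up).
example = [
    {
        'index': 1,
        'category': 'work',
        'title': 'Sprint planning',
        'overview': 'Planned the next sprint and assigned owners.',
        'created_at': '05 Mar 2025 at 14:30 UTC',
        'attendees': ['Alice'],                               # only when matching people are passed in
        'actions': ['Send the updated roadmap to the team'],  # from structured.action_items
        'events': [{'title': 'Sprint review', 'start': '2025-03-07 10:00',  # Event.start passed through unchanged
                    'duration_minutes': 30}],
        'summarization': 'Key decisions and owners for the sprint.',  # first apps_results entry, if any
    },
]
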
6 changes: 3 additions & 3 deletions backend/requirements.txt
@@ -92,9 +92,9 @@ jsonschema==4.23.0
jsonschema-specifications==2023.12.1
julius==0.2.7
kiwisolver==1.4.5
langchain==0.3.4
langchain-community==0.3.3
langchain-core==0.3.12
langchain==1.0.0a14
langchain-community==0.3.31
langchain-core==1.0.0a8
langchain-groq==0.2.0
langchain-openai==0.2.3
langchain-pinecone==0.2.0
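The langchain and langchain-core pins move to 1.0.0 alpha pre-releases while langchain-community stays on the 0.3 line; exact == pins install pre-releases directly, so a quick sanity check of what actually landed in the environment can be useful (a convenience snippet, not part of the change):

# Convenience check (not part of the diff): print the installed versions of the bumped packages.
from importlib.metadata import version

for dist in ('langchain', 'langchain-core', 'langchain-community'):
    print(dist, version(dist))
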
4 changes: 4 additions & 0 deletions backend/routers/chat.py
@@ -1,3 +1,4 @@
import os
import uuid
import re
import base64
@@ -35,6 +36,9 @@
from utils.other.chat_file import FileChatTool
from utils.retrieval.graph import execute_graph_chat, execute_graph_chat_stream, execute_persona_chat_stream

if os.getenv('CHAT_AGENTIC') == 'true':
from utils.retrieval.agentic_graph import execute_graph_chat_stream

router = APIRouter()


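The router swaps in the agentic implementation by re-importing execute_graph_chat_stream when CHAT_AGENTIC=true; because the later import rebinds the same module-level name, the endpoints calling it need no changes, while execute_graph_chat and execute_persona_chat_stream keep coming from utils.retrieval.graph. The same toggle pattern in isolation (module paths mirror the diff):

import os

# Default implementation.
from utils.retrieval.graph import execute_graph_chat_stream

if os.getenv('CHAT_AGENTIC') == 'true':
    # Rebinds the name, shadowing the default import for every caller in this module.
    from utils.retrieval.agentic_graph import execute_graph_chat_stream
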
8 changes: 7 additions & 1 deletion backend/utils/llm/persona.py
@@ -2,7 +2,13 @@

from models.app import App
from models.chat import Message, MessageSender
from langchain.schema import SystemMessage, HumanMessage, AIMessage

try:
from langchain_core.messages import SystemMessage, AIMessage, HumanMessage
except ImportError:
# Fallback for langchain<1.0.0
from langchain.schema import SystemMessage, HumanMessage, AIMessage

from .clients import llm_persona_mini_stream, llm_persona_medium_stream, llm_medium, llm_mini, llm_medium_experiment


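The try/except keeps persona.py importable across the upgrade: langchain_core.messages is where the message classes live in langchain>=1.0, and langchain.schema is the pre-1.0 re-export used as a fallback. A small sketch of the guarded import in use; the message texts are placeholders:

try:
    from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
except ImportError:
    # Fallback for langchain<1.0.0, matching the diff above.
    from langchain.schema import SystemMessage, HumanMessage, AIMessage

# Either import path yields the same classes, so prompt assembly is unchanged.
history = [
    SystemMessage(content="You are the user's persona."),
    HumanMessage(content='Summarize my day.'),
    AIMessage(content='You had two meetings and finished the auth changes.'),
]
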