-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
154 lines (126 loc) · 5.43 KB
/
app.py
File metadata and controls
154 lines (126 loc) · 5.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
from flask import Flask, request, jsonify, render_template
import os
import re
import json
import requests # Import the requests module for HTTP requests
app = Flask(__name__)
# Directory holding one profile file per user, named "<username>.txt".
# Files contain JSON (see save_user_info), despite the .txt extension.
USER_DIR = "users"
os.makedirs(USER_DIR, exist_ok=True)
# Base URL of the local LM Studio server exposing an OpenAI-compatible
# /v1/chat/completions endpoint. NOTE(review): non-standard port — confirm
# it matches the actual LM Studio configuration.
LMSTUDIO_URL = "http://localhost:54949"
def get_user_name_from_message(message):
    """Pull a username out of *message* written as %name,NAME%.

    Searches the message for the %name,NAME% marker and, when found,
    makes sure a profile file exists on disk for that user.

    Args:
        message (str): The chat message to scan for a username marker.

    Returns:
        str or None: The extracted username, or None when no marker
        is present.
    """
    found = re.search(r"%name,(.*?)%", message)
    if not found:
        return None
    username = found.group(1).strip()
    user_path = os.path.join(USER_DIR, f"{username}.txt")
    if not os.path.exists(user_path):
        # First time we see this user: create an empty profile on disk.
        save_user_info(username, {})
    return username
def load_user_info(username):
    """Load stored profile info for *username* from its file.

    Args:
        username (str): Name used to locate "<USER_DIR>/<username>.txt".

    Returns:
        dict: The parsed JSON content; {"info": <raw text>} when the
        file exists but is not valid JSON; {} when no file exists.
    """
    user_file = os.path.join(USER_DIR, f"{username}.txt")
    if os.path.exists(user_file):
        with open(user_file, "r") as f:
            content = f.read().strip()
        try:
            return json.loads(content)
        except json.JSONDecodeError:
            # Bug fix: the original called f.read() a second time here, but
            # the handle was already at EOF, so the fallback always returned
            # {"info": ""}. Reading once up front preserves the raw text.
            return {"info": content}
    return {}
def save_user_info(username, info_dict):
    """Persist *info_dict* as pretty-printed JSON to the user's file.

    Args:
        username (str): Name used to build "<USER_DIR>/<username>.txt".
        info_dict (dict): Profile data to serialize and write.
    """
    path = os.path.join(USER_DIR, f"{username}.txt")
    serialized = json.dumps(info_dict, indent=2)
    with open(path, "w") as f:
        f.write(serialized)
def get_llm_response(prompt):
    """Get a response from the LLM model served by LM Studio.

    Sends *prompt* as a single user message to the chat-completions
    endpoint at LMSTUDIO_URL.

    Args:
        prompt (str): Full prompt text for the model.

    Returns:
        str: The model's reply on success; a canned apology on a
        non-200 HTTP status; a mock echo of the prompt when the
        request itself raises (e.g. server unreachable).
    """
    try:
        payload = {
            "model": "default",
            "messages": [{"role": "user", "content": prompt}],
            "stream": False
        }
        # Bug fix: requests has no default timeout, so a hung LLM server
        # would block this Flask worker forever. LLM generation is slow,
        # hence the generous bound.
        response = requests.post(
            f"{LMSTUDIO_URL}/v1/chat/completions", json=payload, timeout=120
        )
        if response.status_code == 200:
            return response.json()["choices"][0]["message"]["content"]
        print(f"Error: {response.status_code}, {response.text}")
        return "Sorry, I'm having trouble generating a response at the moment."
    except Exception as e:
        print(f"Exception occurred: {str(e)}")
        # Mock response for demonstration purposes
        return f"Mock response to: {prompt[:50]}..."
@app.route("/")
def index():
    """Serve the single-page chat UI."""
    template_name = "index.html"
    return render_template(template_name)
@app.route("/chat", methods=["POST"])
def chat():
    """Handle one chat turn.

    Reads a "message" from the JSON body, builds a prompt that combines
    the message with any stored info for the user named in it, forwards
    the prompt to the LLM, and returns the reply as JSON.
    """
    try:
        payload = request.json
        message = payload.get("message")
        if not message or not message.strip():
            return jsonify({"status": "error", "message": "No message provided"})
        # A %name,NAME% marker in the message identifies the speaker;
        # fall back to a shared "unknown" profile otherwise.
        username = get_user_name_from_message(message) or "unknown"
        user_info = load_user_info(username)
        # Assemble the LLM prompt: who is speaking, what they said, and
        # everything we already know about them.
        prompt = "\n".join([
            f"User: {username}",
            f"Message: {message}",
            "",
            "User information:",
            json.dumps(user_info, indent=2),
            "",
            "Please generate a response.",
        ])
        llm_response = get_llm_response(prompt)
        return jsonify({"status": "success", "response": llm_response})
    except Exception as e:
        print(f"Error in chat: {str(e)}")
        return jsonify({"status": "error", "message": str(e)})
@app.route("/end_chat", methods=["POST"])
def end_chat():
    """Finish a conversation: distill it into facts and store them.

    Asks the LLM to summarize the posted "messageHistory" as JSON, then
    merges the result into the named user's stored profile. Merge rule:
    new keys are added; when both old and new values are lists they are
    concatenated; any other clash keeps the previously stored value.
    """
    try:
        payload = request.json
        history = payload.get("messageHistory", "")
        username = payload.get("username", "unknown")
        if not history or not history.strip():
            return jsonify({"status": "error", "message": "No message history provided"})
        # Ask the model to distill the conversation into JSON facts.
        prompt = f"Please extract and summarize the important information about the user from this conversation. Return the result in JSON format with appropriate keys:\n\n{history}"
        llm_summary = get_llm_response(prompt)
        try:
            summary = json.loads(llm_summary)
        except (json.JSONDecodeError, TypeError):
            # Model didn't produce valid JSON; keep the raw text instead.
            summary = {"info": llm_summary}
        if summary:
            stored = load_user_info(username)
            for key, value in summary.items():
                if key not in stored:
                    stored[key] = value
                elif isinstance(stored[key], list) and isinstance(value, list):
                    # Both sides are lists: append the new items.
                    stored[key] = stored[key] + value
                # Any other conflict leaves the stored value untouched.
            save_user_info(username, stored)
        return jsonify({"status": "success"})
    except Exception as e:
        print(f"Error in end_chat: {str(e)}")
        return jsonify({"status": "error", "message": str(e)})
if __name__ == "__main__":
    # Use a standard port for consistency
    PORT = 56627
    # NOTE(review): 0.0.0.0 binds all interfaces, exposing the app to the
    # local network — confirm that is intended before deploying.
    app.run(host="0.0.0.0", port=PORT)