Debug studio #1831
Changes from all commits
The first change adds the new entry point to a README-style list of scripts:

```diff
@@ -9,4 +9,5 @@
 - `bench_serving.py`: Benchmark online serving with dynamic requests.
 - `global_config.py`: The global configs and constants.
 - `launch_server.py`: The entry point for launching the local server.
+- `launch_debug_server.py`: The entry point for launching the debug server + web app
 - `utils.py`: Common utilities.
```

Review comment on the `launch_debug_server.py` line: This is an experimental feature for frontend language only, so please move it under …
The remaining changes instrument the Anthropic backend's `generate` and `generate_stream` methods with request/response debug logging. New imports:

```diff
@@ -1,3 +1,5 @@
+import uuid
+from datetime import datetime
 from typing import List, Optional, Union
 
 import numpy as np
```

In `generate`, a request record keyed by a fresh uuid is logged before the API call:

```diff
@@ -42,6 +44,20 @@ def generate(
         else:
             system = ""
 
+        debug_request_id = str(uuid.uuid4())
+        s.log_debug(
+            [
+                {
+                    "id": debug_request_id,
+                    "requestPrompt": str(
+                        [{"role": "system", "content": system}] + messages
+                    ),
+                    "requestTimestamp": datetime.now().isoformat(),
+                    "requestMetadata": sampling_params.to_anthropic_kwargs(),
+                }
+            ]
+        )
+
         ret = self.client.messages.create(
             model=self.model_name,
             system=system,
```
After the call returns, the matching response record is logged under the same id:

```diff
@@ -50,6 +66,17 @@ def generate(
         )
         comp = ret.content[0].text
 
+        s.log_debug(
+            [
+                {
+                    "id": debug_request_id,
+                    "responseContent": comp,
+                    "responseTimestamp": datetime.now().isoformat(),
+                    "responseMetadata": ret.to_json(),
+                }
+            ]
+        )
+
         return comp, {}
 
     def generate_stream(
```
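For reference, the two records emitted per call share an `id` and split the remaining fields between the request and response halves. The `TypedDict` below is only an illustration of the shape implied by the diff; no such type exists in the PR:

```python
from typing import TypedDict


class DebugLogRecord(TypedDict, total=False):
    """Illustrative shape of one s.log_debug() record (hypothetical type).

    A request record carries the request* fields; the matching response
    record reuses the same id and carries the response* fields.
    """

    id: str                 # uuid4 string shared by a request/response pair
    requestPrompt: str      # str([{"role": "system", ...}] + messages)
    requestTimestamp: str   # datetime.now().isoformat()
    requestMetadata: dict   # sampling_params.to_anthropic_kwargs()
    responseContent: str    # completion text
    responseTimestamp: str  # datetime.now().isoformat()
    responseMetadata: str   # Anthropic message serialized with to_json()
```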
`generate_stream` logs the same kind of request record before opening the stream:

```diff
@@ -67,6 +94,20 @@ def generate_stream(
         else:
             system = ""
 
+        debug_request_id = str(uuid.uuid4())
+        debug_obj = s.log_debug(
+            [
+                {
+                    "id": debug_request_id,
+                    "requestPrompt": str(
+                        [{"role": "system", "content": system}] + messages
+                    ),
+                    "requestTimestamp": datetime.now().isoformat(),
+                    "requestMetadata": sampling_params.to_anthropic_kwargs(),
+                }
+            ]
+        )
+
         with self.client.messages.stream(
             model=self.model_name,
             system=system,
```

Review comment on lines +97 to +109: This is not efficient enough. When debug is turned off, you still run the code to construct the argument, which takes some time. Please minimize the overhead and do not construct any objects when debug is turned off.
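One way to address this comment is to gate the whole construction behind a cheap flag check so the non-debug path does no extra work. The sketch below is only an illustration: `debug_enabled` is an assumed attribute on the stream executor `s`, and `log_request_if_debug` is a hypothetical helper; neither appears in the PR.

```python
import uuid
from datetime import datetime


def log_request_if_debug(s, system, messages, sampling_params):
    """Build and log the request record only when debugging is on.

    Hypothetical helper; `s.debug_enabled` is an assumed flag. Returns the
    correlation id, or None when debug is off so nothing is constructed.
    """
    if not getattr(s, "debug_enabled", False):
        return None  # no uuid, no dict, no str() of the prompt on the hot path
    debug_request_id = str(uuid.uuid4())
    s.log_debug(
        [
            {
                "id": debug_request_id,
                "requestPrompt": str(
                    [{"role": "system", "content": system}] + messages
                ),
                "requestTimestamp": datetime.now().isoformat(),
                "requestMetadata": sampling_params.to_anthropic_kwargs(),
            }
        ]
    )
    return debug_request_id
```

An alternative with the same effect is to have `log_debug` accept a zero-argument callable and invoke it only when debugging is enabled, so the record is built lazily.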
After the text stream is exhausted, the final message is fetched and logged as the response record:

```diff
@@ -75,3 +116,15 @@ def generate_stream(
         ) as stream:
             for text in stream.text_stream:
                 yield text, {}
+            final_message = stream.get_final_message()
+            final_message_json = final_message.to_json()
+            s.log_debug(
+                [
+                    {
+                        "id": debug_request_id,
+                        "responseContent": final_message.content[0].text,
+                        "responseTimestamp": datetime.now().isoformat(),
+                        "responseMetadata": final_message_json,
+                    }
+                ]
+            )
```
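Because each call emits two records that share an `id`, the debug web app can pair them back up by that key. The pairing function below is hypothetical and shown only to illustrate the correlation scheme; the viewer's actual logic is not part of this diff.

```python
from collections import defaultdict


def pair_debug_records(records):
    """Merge request/response records that share an id into single rows.

    Hypothetical illustration of how a viewer could join the two halves
    emitted per generate()/generate_stream() call.
    """
    paired = defaultdict(dict)
    for record in records:
        paired[record["id"]].update(record)  # request and response halves merge
    return list(paired.values())


# Example: the two records logged for one call collapse into a single row.
rows = pair_debug_records([
    {"id": "abc", "requestPrompt": "...", "requestTimestamp": "2024-01-01T00:00:00"},
    {"id": "abc", "responseContent": "...", "responseTimestamp": "2024-01-01T00:00:01"},
])
```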