-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathuw_llm.py
More file actions
23 lines (20 loc) · 957 Bytes
/
uw_llm.py
File metadata and controls
23 lines (20 loc) · 957 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
import requests
from backend.constants import ServiceConfig
BASE_URL = ServiceConfig.NEBULA_BASE_URL # Base URL of the Nebula LLM service; all endpoints below are relative to it
def generate(prompt: str, reasoning: bool = False, *, timeout: float = 60.0) -> str:
    """Send *prompt* to the LLM service's /generate endpoint and return its text result.

    Args:
        prompt: The text prompt to send to the model.
        reasoning: Whether to request the model's reasoning mode.
        timeout: Seconds to wait for the HTTP response before raising
            requests.Timeout (previously the call could hang forever).

    Returns:
        The "result" field of the JSON response.

    Raises:
        requests.HTTPError: If the service returns a non-2xx status.
        requests.Timeout: If the service does not respond within *timeout*.
        ValueError: If the JSON response lacks a "result" field.
    """
    response = requests.post(
        f"{BASE_URL}/generate",
        # Lowercase the boolean for consistency with generate_vision below;
        # form-encoding a raw Python bool would send "True"/"False".
        data={"prompt": prompt, "reasoning": str(reasoning).lower()},
        timeout=timeout,
    )
    response.raise_for_status()
    data = response.json()
    if "result" not in data:
        raise ValueError("LLM API response missing 'result' field")
    return data["result"]
def generate_vision(prompt: str, image_path: str, fast: bool = False, *, timeout: float = 120.0) -> str:
    """Send *prompt* plus an image file to the /generate_vision endpoint and return the text result.

    Args:
        prompt: The text prompt to send alongside the image.
        image_path: Path to the image file to upload.
        fast: Whether to request the service's fast mode.
        timeout: Seconds to wait for the HTTP response before raising
            requests.Timeout (previously the call could hang forever).

    Returns:
        The "result" field of the JSON response.

    Raises:
        OSError: If *image_path* cannot be opened.
        requests.HTTPError: If the service returns a non-2xx status.
        requests.Timeout: If the service does not respond within *timeout*.
        ValueError: If the JSON response lacks a "result" field.
    """
    # Keep the file handle open for the duration of the upload.
    with open(image_path, "rb") as img:
        files = {"file": img}
        form = {"prompt": prompt, "fast": str(fast).lower()}
        response = requests.post(
            f"{BASE_URL}/generate_vision",
            data=form,
            files=files,
            timeout=timeout,
        )
    response.raise_for_status()
    data = response.json()
    if "result" not in data:
        raise ValueError("LLM API response missing 'result' field")
    return data["result"]