Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

added deepl(x) translation support #42

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.ja.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ docker run --name raycast \
```sh
docker run --name raycast \
-e OPENAI_API_KEY=$OPENAI_API_KEY \
-e OPENAI_API_BASE=https://your-resource.openai.azure.com \
-e OPENAI_BASE_URL=https://your-resource.openai.azure.com \
-e OPENAI_API_VERSION=2023-05-15 \
-e OPENAI_API_TYPE=azure \
-e AZURE_DEPLOYMENT_ID=your-deployment-id \
Expand Down
13 changes: 12 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ Simply modify the corresponding environment variables.
```sh
docker run --name raycast \
-e OPENAI_API_KEY=$OPENAI_API_KEY \
-e OPENAI_API_BASE=https://your-resource.openai.azure.com \
-e OPENAI_BASE_URL=https://your-resource.openai.azure.com \
-e OPENAI_API_VERSION=2023-05-15 \
-e OPENAI_API_TYPE=azure \
-e AZURE_DEPLOYMENT_ID=your-deployment-id \
Expand Down Expand Up @@ -208,6 +208,17 @@ ALLOWED_USERS="[email protected],[email protected]"

The email addresses are the Raycast user email addresses, separated by commas.

#### 7. Use deepl(x) as Translation Service

If you want to use deepl(x) as the translation service instead of the default OpenAI, set the `TRANSLATION_MODEL`, `DEEPLX_API_TOKEN`, and `DEEPLX_BASE_URL` environment variables:

```env
TRANSLATION_MODEL=deeplx
DEEPLX_API_TOKEN=<your deeplx api key>
DEEPLX_BASE_URL=<your deeplx hosted url>
```


### Notes

1. DNS Designation
Expand Down
2 changes: 1 addition & 1 deletion README.zh.md
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ docker run --name raycast \
```sh
docker run --name raycast \
-e OPENAI_API_KEY=$OPENAI_API_KEY \
-e OPENAI_API_BASE=https://your-resource.openai.azure.com \
-e OPENAI_BASE_URL=https://your-resource.openai.azure.com \
-e OPENAI_API_VERSION=2023-05-15 \
-e OPENAI_API_TYPE=azure \
-e AZURE_DEPLOYMENT_ID=your-deployment-id \
Expand Down
32 changes: 18 additions & 14 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from fastapi.responses import StreamingResponse

from app.middleware import AuthMiddleware
from app.models import DEFAULT_MODELS, MODELS_AVAILABLE, get_bot
from app.models import DEFAULT_MODELS, MODELS_AVAILABLE, get_bot, get_trans_bot
from app.sync import router as sync_router
from app.utils import (
ProxyRequest,
Expand Down Expand Up @@ -52,19 +52,23 @@ async def chat_completions(request: Request):

@app.api_route("/api/v1/translations", methods=["POST"])
async def proxy_translations(request: Request):
    """Handle Raycast translation requests.

    Routes to the deeplx backend when TRANSLATION_MODEL=deeplx, otherwise
    streams a translation from the default chat model.
    """
    # Parse the body once, before branching: the deeplx branch needs it too
    # (the original only assigned raycast_data in the else branch — NameError).
    raycast_data = await request.json()
    logger.debug(f"Received translation request: {raycast_data}")

    translation_model = os.environ.get("TRANSLATION_MODEL")
    if translation_model == "deeplx":
        # translationBot.translate_completions returns a ready JSON string;
        # wrap it in a raw Response so FastAPI does not JSON-encode it again.
        content = await get_trans_bot(translation_model).translate_completions(
            raycast_data
        )
        return Response(
            status_code=200, content=content, media_type="application/json"
        )

    model_name = raycast_data.get("model")
    result = []
    async for content in get_bot(model_name).translate_completions(
        raycast_data=raycast_data
    ):
        if content:
            result.append(content)
    translated_text = "".join(result)
    res = {"data": {"translations": [{"translatedText": translated_text}]}}
    return Response(
        status_code=200, content=json_dumps(res), media_type="application/json"
    )


@app.api_route("/api/v1/me", methods=["GET"])
Expand Down
77 changes: 76 additions & 1 deletion app/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,19 @@
import logging
import os

import httpx
import anthropic
import google.generativeai as genai
import openai
from google.generativeai import GenerativeModel

from app.utils import json_dumps
from app.utils import json_dumps, pass_through_request, ProxyRequest

logger = logging.getLogger(__name__)

MAX_TOKENS = os.environ.get("MAX_TOKENS", 1024)

http_client = httpx.AsyncClient(verify=False)

class ChatBotAbc(abc.ABC):

Expand Down Expand Up @@ -560,10 +562,70 @@ def get_models(self):
]
return {"default_models": default_models, "models": models}

class translationBot(ChatBotAbc):
@classmethod
def is_start_available(cls):
return os.environ.get("DEEPLX_BASE_URL"); os.environ.get("DEEPLX_API_TOKEN"); False

def translate_completions(self, raycast_data: dict):
return self._transDeeplx(raycast_data["q"], raycast_data.get("source"), raycast_data["target"])

async def _transDeeplx(self, text, source, target):
url = os.environ.get("DEEPLX_BASE_URL")
deeplx_base_url = os.environ.get("DEEPLX_BASE_URL")
deeplx_api_token = os.environ.get("DEEPLX_API_TOKEN")

text = text.replace('\n', '\n')
# if not deeplx_api_token:
# deeplx_api_token = ""
# deeplHeader = {"Authorization": f"Bearer {deeplx_api_token}"}
body = { "text": text,
"target_lang": target,
}
if source:
body["source_lang"] = source

try:
req = ProxyRequest(
deeplx_base_url, "POST", '', json.dumps(body), query_params={}
# deeplx_base_url, "POST", headers, json.dumps(body), query_params={}
)
resp = await pass_through_request(http_client, req, nohttps=True, noheaders=True)
resp = json.loads(resp.content.decode("utf-8"))
try:
# translated_text = resp["alternatives"][0]
translated_text = resp["data"]
# translated_text = translated_text.replace('\\n', '\n')
# print(translated_text)
res = {"data": {"translations": [{"translatedText": translated_text}]}}
except TypeError:
# res = {"error": {"message": "Failed to translate"}}
# res = {"data": {"translations": [{"translatedText": "Failed to translate"}]}}
logger.warn(f'Text failed to translate: {text}, DEBUG: {translated_text}')
res = {"data": {"translations": [{"translatedText": text}]}}

if not source:
res["data"]["translations"][0]["detectedSourceLanguage"] = resp["source_lang"].lower()

return json.dumps(res)
except Exception as e:
logger.error(f"DEEPLX error: {e}")


    async def chat_completions(self, raycast_data: dict):
        # NOTE(review): this method body appears truncated in the diff view —
        # there is no return/yield, and `__build_messages` is not defined on
        # translationBot in the visible code. Confirm against the full file.
        messages = self.__build_messages(raycast_data)
        model = raycast_data["model"]
        # TEMPERATURE from the environment is a string here; presumably the
        # (missing) remainder converts it — TODO confirm.
        temperature = os.environ.get("TEMPERATURE", 0.5)


# Registry of chat bots, populated below at import time:
#   model id -> bot instance, plus the available/default model metadata.
MODELS_DICT = {}
MODELS_AVAILABLE = []
DEFAULT_MODELS = {}

# Parallel registry for translation-only backends (e.g. deeplx).
MODELS_TRANS_DICT = {}
MODELS_TRANS_AVAILABLE = []
DEFAULT_TRANS_MODELS = {}

if GeminiChatBot.is_start_available():
logger.info("Google API is available")
_bot = GeminiChatBot()
Expand All @@ -586,8 +648,21 @@ def get_models(self):
DEFAULT_MODELS.update(_models["default_models"])
MODELS_DICT.update({model["model"]: _bot for model in _models["models"]})

# Register the deeplx translation backend when its env vars are configured.
if translationBot.is_start_available():
    logger.info("DeepL API is available")
    _bot = translationBot()
    # NOTE(review): translationBot does not define get_models() anywhere in
    # this diff; unless the full file adds it or a base class provides one,
    # this call raises AttributeError at import time — verify.
    _models = _bot.get_models()
    MODELS_TRANS_AVAILABLE.extend(_models["models"])
    DEFAULT_TRANS_MODELS.update(_models["default_models"])
    MODELS_TRANS_DICT.update({model["model"]: _bot for model in _models["models"]})


def get_bot(model_id):
    """Return the chat bot registered for *model_id*.

    Falls back to the first registered bot when *model_id* is falsy; returns
    None for an unknown id.
    """
    if model_id:
        return MODELS_DICT.get(model_id)
    return next(iter(MODELS_DICT.values()))

def get_trans_bot(model_trans_id):
    """Return the translation bot registered for *model_trans_id*.

    Falls back to the first registered translation bot when no id is given;
    returns None for an unknown id.
    """
    if not model_trans_id:
        # Bug fix: the original fell back to MODELS_DICT (chat bots), which
        # could hand back a bot without the deeplx translation interface.
        return next(iter(MODELS_TRANS_DICT.values()))
    # NOTE(review): callers pass the TRANSLATION_MODEL env value ("deeplx");
    # confirm that translationBot.get_models() registers that exact key,
    # otherwise this returns None and the endpoint will fail.
    return MODELS_TRANS_DICT.get(model_trans_id)
Loading