diff --git a/crowdin_api/api_resources/ai/enums.py b/crowdin_api/api_resources/ai/enums.py
index d107581..eb0fd85 100644
--- a/crowdin_api/api_resources/ai/enums.py
+++ b/crowdin_api/api_resources/ai/enums.py
@@ -42,3 +42,43 @@ class EditAIProviderPath(Enum):
 class DatasetPurpose(Enum):
     TRAINING = "training"
     VALIDATION = "validation"
+
+
+class EditAiCustomPlaceholderPatchPath(Enum):
+    DESCRIPTION = "/description"
+    PLACEHOLDER = "/placeholder"
+    VALUE = "/value"
+
+
+class AiPromptFineTuningJobStatus(Enum):
+    CREATED = "created"
+    IN_PROGRESS = "in_progress"
+    CANCELED = "canceled"
+    FAILED = "failed"
+    FINISHED = "finished"
+
+
+class AiToolType(Enum):
+    FUNCTION = "function"
+
+
+class AiReportType(Enum):
+    TOKENS_USAGE_RAW_DATA = "tokens-usage-raw-data"
+
+
+class EditAiSettingsPatchPath(Enum):
+    ASSIST_ACTION_AI_PROMPT_ID = "/assistActionAiPromptId"
+    EDITOR_SUGGESTION_AI_PROMPT_ID = "/editorSuggestionAiPromptId"
+    SHORTCUTS = "/shortcuts"
+
+
+class ListAiPromptFineTuningJobsOrderBy(Enum):
+    CREATED_AT = "createdAt"
+    UPDATED_AT = "updatedAt"
+    STARTED_AT = "startedAt"
+    FINISHED_AT = "finishedAt"
+
+
+class AiReportFormat(Enum):
+    CSV = "csv"
+    JSON = "json"
diff --git a/crowdin_api/api_resources/ai/resource.py b/crowdin_api/api_resources/ai/resource.py
index 4bd8fab..adf325e 100644
--- a/crowdin_api/api_resources/ai/resource.py
+++ b/crowdin_api/api_resources/ai/resource.py
@@ -1,7 +1,7 @@
 from typing import Iterable, Optional, Union
 
 from crowdin_api.api_resources.abstract.resources import BaseResource
-from crowdin_api.api_resources.ai.enums import AIPromptAction
+from crowdin_api.api_resources.ai.enums import AIPromptAction, AiPromptFineTuningJobStatus
 from crowdin_api.api_resources.ai.types import (
     AddAIPromptRequestScheme,
     AddAIProviderReqeustScheme,
@@ -11,7 +11,14 @@
     OtherChatProxy,
     GenerateAIPromptFineTuningDatasetRequest,
     CreateAIPromptFineTuningJobRequest,
+    AddAiCustomPlaceholderRequest,
+    EditAiCustomPlaceholderPatch,
+    GenerateAiPromptCompletionRequest,
+    GenerateAiReportRequest,
+    EditAiSettingsPatch,
 )
+from crowdin_api.sorting import Sorting
+from crowdin_api.utils import convert_enum_collection_to_string_if_exists
 
 
 class AIResource(BaseResource):
@@ -270,6 +277,52 @@ def get_ai_prompt_fine_tuning_dataset_generation_status(
             path=self.get_ai_prompt_fine_tuning_datasets_path(user_id, ai_prompt_id, job_identifier),
         )
 
+    def list_ai_prompt_fine_tuning_events(
+        self,
+        user_id: int,
+        ai_prompt_id: int,
+        job_identifier: str,
+    ):
+        """
+        List AI Prompt Fine-Tuning Events
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.jobs.events.getMany
+        """
+
+        return self.requester.request(
+            method="get",
+            path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/fine-tuning/jobs/{job_identifier}/events",
+        )
+
+    def list_ai_prompt_fine_tuning_jobs(
+        self,
+        user_id: int,
+        statuses: Optional[Iterable[AiPromptFineTuningJobStatus]] = None,
+        order_by: Optional[Sorting] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+    ):
+        """
+        List AI Prompt Fine-Tuning Jobs
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.jobs.getMany
+        """
+
+        params = {
+            "statuses": convert_enum_collection_to_string_if_exists(statuses),
+            "orderBy": order_by,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        return self.requester.request(
+            method="get",
+            path=f"users/{user_id}/ai/prompts/fine-tuning/jobs",
+            params=params
+        )
+
     def
create_ai_prompt_fine_tuning_job(
         self,
         user_id: int,
@@ -325,6 +378,282 @@ def download_ai_prompt_fine_tuning_dataset(
             path=self.get_ai_prompt_fine_tuning_datasets_path(user_id, ai_prompt_id, job_identifier) + "/download",
         )
 
+    def get_ai_custom_placeholders_path(self, user_id: int, custom_placeholder_id: Optional[int] = None):
+        if custom_placeholder_id is not None:
+            return f"users/{user_id}/ai/settings/custom-placeholders/{custom_placeholder_id}"
+
+        return f"users/{user_id}/ai/settings/custom-placeholders"
+
+    def list_ai_custom_placeholders(self, user_id: int):
+        """
+        List AI Custom Placeholders
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.ai.prompt.custom.placeholders.getMany
+        """
+
+        return self.requester.request(
+            method="get",
+            path=self.get_ai_custom_placeholders_path(user_id)
+        )
+
+    def add_ai_custom_placeholder(self, user_id: int, body: AddAiCustomPlaceholderRequest):
+        """
+        Add AI Custom Placeholder
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.settings.custom-placeholders.post
+        """
+
+        return self.requester.request(
+            method="post",
+            path=self.get_ai_custom_placeholders_path(user_id),
+            request_data=body,
+        )
+
+    def get_ai_custom_placeholder(self, user_id: int, ai_custom_placeholder_id: int):
+        """
+        Get AI Custom Placeholder
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.settings.custom-placeholders.get
+        """
+
+        return self.requester.request(
+            method="get",
+            path=self.get_ai_custom_placeholders_path(user_id, ai_custom_placeholder_id),
+        )
+
+    def delete_ai_custom_placeholder(self, user_id: int, ai_custom_placeholder_id: int):
+        """
+        Delete AI Custom Placeholder
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.settings.custom-placeholders.delete
+        """
+
+        return self.requester.request(
+            method="delete",
+            path=self.get_ai_custom_placeholders_path(user_id, ai_custom_placeholder_id),
+        )
+
+    def edit_ai_custom_placeholder(
+        self,
+        user_id: int,
+        ai_custom_placeholder_id: int,
+        patches: Iterable[EditAiCustomPlaceholderPatch]
+    ):
+        """
+        Edit AI Custom Placeholder
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.settings.custom-placeholders.patch
+        """
+
+        return self.requester.request(
+            method="patch",
+            path=self.get_ai_custom_placeholders_path(user_id, ai_custom_placeholder_id),
+            request_data=patches,
+        )
+
+    def clone_ai_prompt(
+        self,
+        user_id: int,
+        ai_prompt_id: int,
+        name: Optional[str] = None,
+    ):
+        """
+        Clone AI Prompt
+
+        Link to documentation:
+        https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.prompts.clones.post
+        """
+
+        return self.requester.request(
+            method="post",
+            path=self.get_ai_path(user_id, ai_prompt_id) + "/clones",
+            request_data={
+                "name": name
+            },
+        )
+
+    def get_ai_prompt_completions_path(
+        self,
+        user_id: int,
+        ai_prompt_id: int,
+        completion_id: Optional[str] = None,
+    ):
+        if completion_id is not None:
+            return f"users/{user_id}/ai/prompts/{ai_prompt_id}/completions/{completion_id}"
+        return f"users/{user_id}/ai/prompts/{ai_prompt_id}/completions"
+
+    def generate_ai_prompt_completion(
+        self,
+        user_id: int,
+        ai_prompt_id: int,
+        request: GenerateAiPromptCompletionRequest
+    ):
+        """
+        Generate AI Prompt Completion
+
+        Link to documentation:
+
https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.ai.prompts.completions.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_prompt_completions_path(user_id, ai_prompt_id), + request_data=request, + ) + + def get_ai_prompt_completion_status( + self, + user_id: int, + ai_prompt_id: int, + completion_id: str + ): + """ + Get AI Prompt Completion Status + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.prompts.completions.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_prompt_completions_path(user_id, ai_prompt_id, completion_id), + ) + + def cancel_ai_prompt_completion( + self, + user_id: int, + ai_prompt_id: int, + completion_id: str + ): + """ + Cancel AI Prompt Completion + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.prompts.completions.delete + """ + + return self.requester.request( + method="delete", + path=self.get_ai_prompt_completions_path(user_id, ai_prompt_id, completion_id), + ) + + def download_ai_prompt_completion( + self, + user_id: int, + ai_prompt_id: int, + completion_id: str + ): + """ + Download AI Prompt Completion + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.prompts.completions.download.download + """ + + return self.requester.request( + method="get", + path=self.get_ai_prompt_completions_path(user_id, ai_prompt_id, completion_id) + "/download", + ) + + def get_ai_reports_path(self, user_id: int, ai_report_id: Optional[str] = None): + if ai_report_id is not None: + return f"users/{user_id}/ai/reports/{ai_report_id}" + return f"users/{user_id}/ai/reports" + + def generate_ai_report( + self, + user_id: int, + request: GenerateAiReportRequest + ): + """ + Generate AI Report + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.reports.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_reports_path(user_id), + request_data=request, + ) + + def check_ai_report_generation_status( + self, + user_id: int, + ai_report_id: str, + ): + """ + Check AI Report Generation Status + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.reports.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_reports_path(user_id, ai_report_id), + ) + + def download_ai_report( + self, + user_id: int, + ai_report_id: str + ): + """ + Download AI Report + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.reports.download.download + """ + + return self.requester.request( + method="get", + path=self.get_ai_reports_path(user_id, ai_report_id) + "/download", + ) + + def get_ai_settings_path(self, user_id: int): + return f"users/{user_id}/ai/settings" + + def get_ai_settings( + self, + user_id: int, + ): + """ + Get AI Settings + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.settings.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_settings_path(user_id), + ) + + def edit_ai_settings( + self, + user_id: int, + patches: Iterable[EditAiSettingsPatch] + ): + """ + Edit AI Settings + + Link to documentation: + https://support.crowdin.com/developer/api/v2/#tag/AI/operation/api.users.ai.settings.patch + """ + + return self.requester.request( + method="patch", + 
path=self.get_ai_settings_path(user_id), + request_data=patches, + ) + class EnterpriseAIResource(BaseResource): """ @@ -520,3 +849,410 @@ def create_ai_proxy_chat_completion( + "/chat/completions", request_data=request_data, ) + + def get_ai_custom_placeholders_path(self, ai_custom_placeholder_id: Optional[int] = None): + if ai_custom_placeholder_id is not None: + return f"ai/settings/custom-placeholders/{ai_custom_placeholder_id}" + return "ai/settings/custom-placeholders" + + def list_ai_custom_placeholders(self): + """ + List AI Custom Placeholders + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.custom.placeholders.getMany + """ + + return self.requester.request( + method="get", + path=self.get_ai_custom_placeholders_path() + ) + + def add_ai_custom_placeholder(self, body: AddAiCustomPlaceholderRequest): + """ + Add AI Custom Placeholder + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.settings.custom-placeholders.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_custom_placeholders_path(), + request_data=body, + ) + + def get_ai_custom_placeholder(self, ai_custom_placeholder_id: int): + """ + Get AI Custom Placeholder + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.settings.custom-placeholders.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_custom_placeholders_path(ai_custom_placeholder_id), + ) + + def delete_ai_custom_placeholder(self, ai_custom_placeholder_id: int): + """ + Delete AI Custom Placeholder + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.settings.custom-placeholders.delete + """ + + return self.requester.request( + method="delete", + path=self.get_ai_custom_placeholders_path(ai_custom_placeholder_id), + ) + + def edit_ai_custom_placeholder( + self, + ai_custom_placeholder_id: int, + patches: Iterable[EditAiCustomPlaceholderPatch] + ): + """ + Edit AI Custom Placeholder + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.settings.custom-placeholders.patch + """ + + return self.requester.request( + method="patch", + path=self.get_ai_custom_placeholders_path(ai_custom_placeholder_id), + request_data=patches, + ) + + def get_ai_prompt_fine_tuning_datasets_path( + self, + ai_prompt_id: int, + job_identifier: Optional[str] = None + ): + if job_identifier is not None: + return f"ai/prompts/{ai_prompt_id}/fine-tuning/datasets/{job_identifier}" + return f"ai/prompts/{ai_prompt_id}/fine-tuning/datasets" + + def generate_ai_prompt_fine_tuning_dataset( + self, + ai_prompt_id: int, + request_data: GenerateAIPromptFineTuningDatasetRequest, + ): + """ + Generate AI Prompt Fine-Tuning Dataset + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.datasets.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_prompt_fine_tuning_datasets_path(ai_prompt_id), + request_data=request_data, + ) + + def get_ai_prompt_fine_tuning_dataset_generation_status( + self, + ai_prompt_id: int, + job_identifier: str + ): + """ + Get AI Prompt Fine-Tuning Dataset Generation Status + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.datasets.get + """ + + 
return self.requester.request( + method="get", + path=self.get_ai_prompt_fine_tuning_datasets_path(ai_prompt_id, job_identifier), + ) + + def list_ai_prompt_fine_tuning_events( + self, + ai_prompt_id: int, + job_identifier: str, + ): + """ + List AI Prompt Fine-Tuning Events + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.jobs.events.getMany + """ + + return self.requester.request( + method="get", + path=f"ai/prompts/{ai_prompt_id}/fine-tuning/jobs/{job_identifier}/events", + ) + + def list_ai_prompt_fine_tuning_jobs( + self, + statuses: Optional[Iterable[AiPromptFineTuningJobStatus]] = None, + order_by: Optional[Sorting] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + ): + """ + List AI Prompt Fine-Tuning Jobs + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.jobs.getMany + """ + + params = { + "statuses": convert_enum_collection_to_string_if_exists(statuses), + "orderBy": order_by, + "limit": limit, + "offset": offset, + } + + return self.requester.request( + method="get", + path="ai/prompts/fine-tuning/jobs", + params=params + ) + + def get_ai_prompt_fine_tuning_jobs_path( + self, + ai_prompt_id: int, + job_identifier: Optional[str] = None + ): + if job_identifier is not None: + return f"ai/prompts/{ai_prompt_id}/fine-tuning/jobs/{job_identifier}" + return f"ai/prompts/{ai_prompt_id}/fine-tuning/jobs" + + def create_ai_prompt_fine_tuning_job( + self, + ai_prompt_id: int, + request_data: CreateAIPromptFineTuningJobRequest + ): + """ + Create AI Prompt Fine-Tuning Job + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.jobs.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_prompt_fine_tuning_jobs_path(ai_prompt_id), + request_data=request_data, + ) + + def get_ai_prompt_fine_tuning_job_status( + self, + ai_prompt_id: int, + job_identifier: str + ): + """ + Get AI Prompt Fine-Tuning Job Status + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.jobs.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_prompt_fine_tuning_jobs_path(ai_prompt_id, job_identifier), + ) + + def download_ai_prompt_fine_tuning_dataset( + self, + ai_prompt_id: int, + job_identifier: str + ): + """ + Download AI Prompt Fine-Tuning Dataset + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.fine-tuning.datasets.download.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_prompt_fine_tuning_datasets_path(ai_prompt_id, job_identifier) + "/download", + ) + + def clone_ai_prompt( + self, + ai_prompt_id: int, + name: Optional[str] = None, + ): + """ + Clone AI Prompt + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.clones.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_path(ai_prompt_id) + "/clones", + request_data={ + "name": name + }, + ) + + def get_ai_prompt_completions_path( + self, + ai_prompt_id: int, + completion_id: Optional[str] = None + ): + if completion_id is not None: + return f"ai/prompts/{ai_prompt_id}/completions/{completion_id}" + return f"ai/prompts/{ai_prompt_id}/completions" + + def generate_ai_prompt_completion( 
+ self, + ai_prompt_id: int, + request: GenerateAiPromptCompletionRequest + ): + """ + Generate AI Prompt Completion + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.completions.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_prompt_completions_path(ai_prompt_id), + request_data=request, + ) + + def get_ai_prompt_completion_status( + self, + ai_prompt_id: int, + completion_id: str + ): + """ + Get AI Prompt Completion Status + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.completions.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_prompt_completions_path(ai_prompt_id, completion_id), + ) + + def cancel_ai_prompt_completion( + self, + ai_prompt_id: int, + completion_id: str + ): + """ + Cancel AI Prompt Completion + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.completions.delete + """ + + return self.requester.request( + method="delete", + path=self.get_ai_prompt_completions_path(ai_prompt_id, completion_id), + ) + + def download_ai_prompt_completion( + self, + ai_prompt_id: int, + completion_id: str + ): + """ + Download AI Prompt Completion + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.prompts.completions.download.download + """ + + return self.requester.request( + method="get", + path=self.get_ai_prompt_completions_path(ai_prompt_id, completion_id) + "/download", + ) + + def get_ai_reports_path(self, ai_report_id: Optional[str] = None): + if ai_report_id is not None: + return f"ai/reports/{ai_report_id}" + return "ai/reports" + + def generate_ai_report( + self, + request: GenerateAiReportRequest + ): + """ + Generate AI Report + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.reports.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_reports_path(), + request_data=request, + ) + + def check_ai_report_generation_status( + self, + ai_report_id: str, + ): + """ + Check AI Report Generation Status + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.reports.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_reports_path(ai_report_id), + ) + + def download_ai_report( + self, + ai_report_id: str + ): + """ + Download AI Report + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.reports.download.download + """ + + return self.requester.request( + method="get", + path=self.get_ai_reports_path(ai_report_id) + "/download", + ) + + def get_ai_settings(self): + """ + Get AI Settings + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.settings.get + """ + + return self.requester.request( + method="get", + path="ai/settings", + ) + + def edit_ai_settings( + self, + patches: Iterable[EditAiSettingsPatch] + ): + """ + Edit AI Settings + + Link to documentation: + https://support.crowdin.com/developer/enterprise/api/v2/#tag/AI/operation/api.ai.settings.patch + """ + + return self.requester.request( + method="patch", + path="ai/settings", + request_data=patches, + ) diff --git a/crowdin_api/api_resources/ai/tests/test_ai_resources.py 
b/crowdin_api/api_resources/ai/tests/test_ai_resources.py index 3f98197..567a3dd 100644 --- a/crowdin_api/api_resources/ai/tests/test_ai_resources.py +++ b/crowdin_api/api_resources/ai/tests/test_ai_resources.py @@ -2,16 +2,37 @@ from unittest import mock import pytest -from crowdin_api.api_resources.ai.enums import AIPromptAction, AIProviderType, DatasetPurpose + +from crowdin_api.api_resources.ai.enums import ( + AIPromptAction, + AIProviderType, + DatasetPurpose, + AiPromptFineTuningJobStatus, + ListAiPromptFineTuningJobsOrderBy, + EditAiCustomPlaceholderPatchPath, + AiToolType, + AiReportFormat, + EditAiSettingsPatchPath +) from crowdin_api.api_resources.ai.resource import AIResource, EnterpriseAIResource from crowdin_api.api_resources.ai.types import ( AIPromptOperation, EditAIPromptPath, CreateAIPromptFineTuningJobRequest, HyperParameters, - TrainingOptions, GenerateAIPromptFineTuningDatasetRequest + TrainingOptions, + GenerateAIPromptFineTuningDatasetRequest, + GenerateAiPromptCompletionRequest, + PreTranslateActionAiPromptContextResources, + AiTool, + AiToolObject, + AiToolFunction, + GenerateAiReportRequest, + GeneralReportSchema ) +from crowdin_api.api_resources.enums import PatchOperation from crowdin_api.requester import APIRequester +from crowdin_api.sorting import Sorting, SortingRule, SortingOrder class TestAIResources: @@ -420,10 +441,8 @@ def test_create_ai_proxy_chat_completion(self, m_request, base_absolut_url): projectIds=[1], tmIds=[2, 3], purpose=DatasetPurpose.TRAINING.value, - dateFrom=datetime(2019, 9, 23, 11, 26, 54, - tzinfo=timezone.utc).isoformat(), - dateTo=datetime(2019, 9, 23, 11, 26, 54, - tzinfo=timezone.utc).isoformat(), + dateFrom=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), + dateTo=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), maxFileSize=20, minExamplesCount=2, maxExamplesCount=10 @@ -495,10 +514,8 @@ def test_get_ai_prompt_fine_tuning_dataset_generation_status(self, m_request, ba trainingOptions=TrainingOptions( projectIds=[1], tmIds=[2], - dateFrom=datetime(2019, 9, 23, 11, 26, 54, - tzinfo=timezone.utc).isoformat(), - dateTo=datetime(2019, 9, 23, 11, 26, 54, - tzinfo=timezone.utc).isoformat(), + dateFrom=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), + dateTo=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), maxFileSize=10, minExamplesCount=200, maxExamplesCount=300 @@ -582,41 +599,23 @@ def test_download_ai_prompt_fine_tuning_dataset( path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/fine-tuning/datasets/{job_identifier}/download", ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_prompt_fine_tuning_events(self, m_request, base_absolut_url): + m_request.return_value = "response" -class TestEnterpriseAIResources: - resource_class = EnterpriseAIResource - - def get_resource(self, base_absolut_url): - return self.resource_class(requester=APIRequester(base_url=base_absolut_url)) - - def test_resource_with_id(self, base_absolut_url): - project_id = 1 - resource = self.resource_class( - requester=APIRequester(base_url=base_absolut_url), project_id=project_id - ) - assert resource.get_project_id() == project_id - - @pytest.mark.parametrize( - "in_params, path", - ( - ({}, "ai/prompts"), - ({"aiPromptId": 1}, "ai/prompts/1"), - ), - ) - def test_get_ai_path(self, in_params, path, base_absolut_url): - resource = self.get_resource(base_absolut_url) - assert resource.get_ai_path(**in_params) == path + user_id = 1 + ai_prompt_id = 2 + 
job_identifier = "id" - @pytest.mark.parametrize( - "in_params, path", - ( - ({}, "ai/providers"), - ({"aiProviderId": 1}, "ai/providers/1"), - ), - ) - def test_get_ai_provider_path(self, in_params, path, base_absolut_url): resource = self.get_resource(base_absolut_url) - assert resource.get_ai_provider_path(**in_params) == path + assert ( + resource.list_ai_prompt_fine_tuning_events(user_id, ai_prompt_id, job_identifier) + == "response" + ) + m_request.assert_called_once_with( + method="get", + path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/fine-tuning/jobs/{job_identifier}/events", + ) @pytest.mark.parametrize( "incoming_data, request_params", @@ -624,326 +623,1410 @@ def test_get_ai_provider_path(self, in_params, path, base_absolut_url): ( {}, { - "projectId": None, - "action": None, - "limit": 25, - "offset": 0, + "statuses": None, + "orderBy": None, + "limit": None, + "offset": None, }, ), ( { - "projectId": 1, - "action": AIPromptAction.ASSIST, - "limit": 20, - "offset": 2, + "statuses": [ + AiPromptFineTuningJobStatus.CREATED, + AiPromptFineTuningJobStatus.IN_PROGRESS, + AiPromptFineTuningJobStatus.FINISHED + ], + "order_by": Sorting([ + SortingRule(ListAiPromptFineTuningJobsOrderBy.UPDATED_AT, SortingOrder.DESC), + SortingRule(ListAiPromptFineTuningJobsOrderBy.STARTED_AT, SortingOrder.DESC) + ]), + "limit": 10, + "offset": 2 }, { - "projectId": 1, - "action": AIPromptAction.ASSIST, - "limit": 20, - "offset": 2, + "statuses": "created,in_progress,finished", + "orderBy": Sorting([ + SortingRule(ListAiPromptFineTuningJobsOrderBy.UPDATED_AT, SortingOrder.DESC), + SortingRule(ListAiPromptFineTuningJobsOrderBy.STARTED_AT, SortingOrder.DESC) + ]), + "limit": 10, + "offset": 2 }, ), ), ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_list_ai_prompts( - self, m_request, incoming_data, request_params, base_absolut_url - ): + def test_list_ai_prompt_fine_tuning_jobs(self, m_request, incoming_data, request_params, base_absolut_url): m_request.return_value = "response" + user_id = 1 + resource = self.get_resource(base_absolut_url) - assert resource.list_ai_prompts(**incoming_data) == "response" + assert resource.list_ai_prompt_fine_tuning_jobs(user_id, **incoming_data) == "response" + m_request.assert_called_once_with( method="get", - path=resource.get_ai_path(), + path=f"users/{user_id}/ai/prompts/fine-tuning/jobs", params=request_params, ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_custom_placeholders(self, m_request, base_absolut_url): + m_request.return_value = "response" + + user_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_custom_placeholders(user_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"users/{user_id}/ai/settings/custom-placeholders", + ) + @pytest.mark.parametrize( - "incoming_data, request_data", + "incoming_data, request_params", ( ( { - "name": "basic", - "action": AIPromptAction.ASSIST, - "aiProviderId": 1, - "aiModelId": "gpt-3.5-turbo-instruct", - "config": {"mode": "advanced", "prompt": "test prompt"}, - }, - { - "name": "basic", - "action": AIPromptAction.ASSIST, - "aiProviderId": 1, - "aiModelId": "gpt-3.5-turbo-instruct", - "config": {"mode": "advanced", "prompt": "test prompt"}, - }, - ), - ( - { - "name": "basic", - "action": AIPromptAction.ASSIST, - "aiProviderId": 1, - "aiModelId": "gpt-3.5-turbo-instruct", - "isEnabled": False, - "enabledProjectIds": [1, 2, 3], - "config": { - "mode": "advanced", - "prompt": "test prompt", - 
"screenshot": True, - }, + "description": "Product description", + "placeholder": "%custom:productDescription%", + "value": "The product is the professional consulting service" }, { - "name": "basic", - "action": AIPromptAction.ASSIST, - "aiProviderId": 1, - "aiModelId": "gpt-3.5-turbo-instruct", - "isEnabled": False, - "enabledProjectIds": [1, 2, 3], - "config": { - "mode": "advanced", - "prompt": "test prompt", - "screenshot": True, - }, + "description": "Product description", + "placeholder": "%custom:productDescription%", + "value": "The product is the professional consulting service" }, ), ), ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_add_ai_prompt( - self, m_request, incoming_data, request_data, base_absolut_url - ): + def test_add_ai_custom_placeholder(self, m_request, incoming_data, request_params, base_absolut_url): m_request.return_value = "response" + user_id = 1 + resource = self.get_resource(base_absolut_url) - assert resource.add_ai_prompt(request_data=incoming_data) == "response" + assert resource.add_ai_custom_placeholder(user_id, incoming_data) == "response" + m_request.assert_called_once_with( method="post", - path=resource.get_ai_path(), - request_data=request_data, + path=f"users/{user_id}/ai/settings/custom-placeholders", + request_data=request_params, ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_get_ai_prompt(self, m_request, base_absolut_url): + def test_get_ai_custom_placeholder(self, m_request, base_absolut_url): m_request.return_value = "response" - aiPromptId = 1 + user_id = 1 + ai_custom_placeholder_id = 2 + resource = self.get_resource(base_absolut_url) - assert resource.get_ai_prompt(aiPromptId=aiPromptId) == "response" + assert resource.get_ai_custom_placeholder(user_id, ai_custom_placeholder_id) == "response" + m_request.assert_called_once_with( method="get", - path=resource.get_ai_path(aiPromptId=aiPromptId), + path=f"users/{user_id}/ai/settings/custom-placeholders/{ai_custom_placeholder_id}", ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_delete_ai_prompt(self, m_request, base_absolut_url): + def test_delete_ai_custom_placeholder(self, m_request, base_absolut_url): m_request.return_value = "response" - aiPromptId = 1 + user_id = 1 + ai_custom_placeholder_id = 2 + resource = self.get_resource(base_absolut_url) - assert resource.delete_ai_prompt(aiPromptId=aiPromptId) == "response" + assert resource.delete_ai_custom_placeholder(user_id, ai_custom_placeholder_id) == "response" + m_request.assert_called_once_with( method="delete", - path=resource.get_ai_path(aiPromptId=aiPromptId), + path=f"users/{user_id}/ai/settings/custom-placeholders/{ai_custom_placeholder_id}", ) + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + [ + { + "op": PatchOperation.REPLACE.value, + "path": EditAiCustomPlaceholderPatchPath.DESCRIPTION.value, + "value": "New description" + }, + { + "op": PatchOperation.REPLACE.value, + "path": EditAiCustomPlaceholderPatchPath.VALUE.value, + "value": "The product is the professional consulting service" + } + ], + [ + { + "op": "replace", + "path": "/description", + "value": "New description" + }, + { + "op": "replace", + "path": "/value", + "value": "The product is the professional consulting service" + } + ], + ), + ), + ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_edit_ai_prompt(self, m_request, base_absolut_url): + def test_edit_ai_custom_placeholder(self, m_request, incoming_data, request_params, base_absolut_url): 
m_request.return_value = "response" - aiPromptId = 1 - request_data = [ - { - "op": AIPromptOperation.REPLACE, - "path": EditAIPromptPath.NAME, - "value": "test", - } - ] + user_id = 1 + ai_custom_placeholder_id = 2 + resource = self.get_resource(base_absolut_url) - assert ( - resource.edit_ai_prompt(aiPromptId=aiPromptId, request_data=request_data) - == "response" - ) + assert resource.edit_ai_custom_placeholder(user_id, ai_custom_placeholder_id, incoming_data) == "response" + m_request.assert_called_once_with( method="patch", - path=resource.get_ai_path(aiPromptId=aiPromptId), - request_data=request_data, + path=f"users/{user_id}/ai/settings/custom-placeholders/{ai_custom_placeholder_id}", + request_data=request_params, ) - @pytest.mark.parametrize( - "incoming_data, request_params", - ( - ( - {}, - { - "limit": 25, - "offset": 0, - }, - ), - ( - { - "limit": 20, - "offset": 2, - }, - { - "limit": 20, - "offset": 2, - }, - ), - ), - ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_list_ai_providers( - self, m_request, incoming_data, request_params, base_absolut_url - ): + def test_clone_ai_prompt(self, m_request, base_absolut_url): m_request.return_value = "response" + user_id = 1 + ai_prompt_id = 2 + name = "name" + resource = self.get_resource(base_absolut_url) - assert resource.list_ai_providers(**incoming_data) == "response" + assert resource.clone_ai_prompt(user_id, ai_prompt_id, name) == "response" + m_request.assert_called_once_with( - method="get", - path=resource.get_ai_provider_path(), - params=request_params, + method="post", + path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/clones", + request_data={ + "name": name + }, ) @pytest.mark.parametrize( - "incoming_data, request_data", + "incoming_data, request_params", ( ( - {"name": "basic", "type": AIProviderType.OPEN_AI}, - {"name": "basic", "type": AIProviderType.OPEN_AI}, - ), - ( + GenerateAiPromptCompletionRequest( + resources=PreTranslateActionAiPromptContextResources( + projectId=1, + sourceLanguageId="en", + targetLanguageId="uk", + stringIds=[1, 2, 3], + overridePromptValues={ + "property1": "string" + } + ), + tools=[ + AiToolObject( + tool=AiTool( + type=AiToolType.FUNCTION.value, + function=AiToolFunction( + name="Name", + description="Description", + parameters={} + ) + ) + ) + ], + tool_choice="string" + ), { - "name": "basic", - "type": AIProviderType.OPEN_AI, - "credentials": {"apiKey": "test-api-key"}, - "aiProviderId": 1, - "aiModelId": "gpt-3.5-turbo-instruct", - "enabledProjectIds": [1, 2, 3], - "config": { - "actionRules": [ - { - "action": AIPromptAction.PRE_TRANSLATE, - "availableAiModelIds": ["gpt-3.5-turbo-instruct"], - } - ] + "resources": { + "projectId": 1, + "sourceLanguageId": "en", + "targetLanguageId": "uk", + "stringIds": [1, 2, 3], + "overridePromptValues": { + "property1": "string" + } }, - "isEnabled": True, - "useSystemCredentials": False, - }, - { - "name": "basic", - "type": AIProviderType.OPEN_AI, - "credentials": {"apiKey": "test-api-key"}, - "aiProviderId": 1, - "aiModelId": "gpt-3.5-turbo-instruct", - "enabledProjectIds": [1, 2, 3], - "config": { - "actionRules": [ - { - "action": AIPromptAction.PRE_TRANSLATE, - "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + "tools": [ + { + "tool": { + "type": "function", + "function": { + "name": "Name", + "description": "Description", + "parameters": {} + } } - ] - }, - "isEnabled": True, - "useSystemCredentials": False, + } + ], + "tool_choice": "string" }, ), ), ) @mock.patch("crowdin_api.requester.APIRequester.request") 
- def test_add_ai_provider( - self, m_request, incoming_data, request_data, base_absolut_url - ): + def test_generate_ai_prompt_completion(self, m_request, incoming_data, request_params, base_absolut_url): m_request.return_value = "response" + user_id = 1 + ai_prompt_id = 2 + resource = self.get_resource(base_absolut_url) - assert resource.add_ai_provider(request_data=incoming_data) == "response" + assert resource.generate_ai_prompt_completion(user_id, ai_prompt_id, incoming_data) == "response" + m_request.assert_called_once_with( method="post", - path=resource.get_ai_provider_path(), - request_data=request_data, + path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/completions", + request_data=request_params, ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_get_ai_provider(self, m_request, base_absolut_url): + def test_get_ai_prompt_completion_status(self, m_request, base_absolut_url): m_request.return_value = "response" - aiProviderId = 1 + user_id = 1 + ai_prompt_id = 2 + completion_id = "id" + resource = self.get_resource(base_absolut_url) - assert resource.get_ai_provider(aiProviderId=aiProviderId) == "response" + assert resource.get_ai_prompt_completion_status(user_id, ai_prompt_id, completion_id) == "response" + m_request.assert_called_once_with( method="get", - path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/completions/{completion_id}", ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_delete_ai_provider(self, m_request, base_absolut_url): + def test_cancel_ai_prompt_completion(self, m_request, base_absolut_url): m_request.return_value = "response" - aiProviderId = 1 + user_id = 1 + ai_prompt_id = 2 + completion_id = "id" + resource = self.get_resource(base_absolut_url) - assert resource.delete_ai_provider(aiProviderId=aiProviderId) == "response" + assert resource.cancel_ai_prompt_completion(user_id, ai_prompt_id, completion_id) == "response" + m_request.assert_called_once_with( method="delete", - path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/completions/{completion_id}", ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_edit_ai_provider(self, m_request, base_absolut_url): + def test_download_ai_prompt_completion(self, m_request, base_absolut_url): m_request.return_value = "response" - aiProviderId = 1 - request_data = [ - { - "op": AIPromptOperation.REPLACE, - "path": EditAIPromptPath.NAME, - "value": "test", - } - ] + user_id = 1 + ai_prompt_id = 2 + completion_id = "id" + resource = self.get_resource(base_absolut_url) - assert ( - resource.edit_ai_provider( - aiProviderId=aiProviderId, request_data=request_data - ) - == "response" + assert resource.download_ai_prompt_completion(user_id, ai_prompt_id, completion_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"users/{user_id}/ai/prompts/{ai_prompt_id}/completions/{completion_id}/download", ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + GenerateAiReportRequest( + type="tokens-usage-raw-data", + schema=GeneralReportSchema( + dateFrom=datetime(2024, 1, 23, 7, 0, 14, tzinfo=timezone.utc).isoformat(), + dateTo=datetime(2024, 9, 27, 7, 0, 14, tzinfo=timezone.utc).isoformat(), + format=AiReportFormat.JSON.value, + projectIds=[1, 2, 3], + promptIds=[4, 5, 6], + userIds=[7, 8, 9] + ) + ), + { + "type": "tokens-usage-raw-data", + "schema": { + "dateFrom": "2024-01-23T07:00:14+00:00", + 
"dateTo": "2024-09-27T07:00:14+00:00", + "format": "json", + "projectIds": [1, 2, 3], + "promptIds": [4, 5, 6], + "userIds": [7, 8, 9] + } + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_generate_ai_report(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + user_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.generate_ai_report(user_id, incoming_data) == "response" + m_request.assert_called_once_with( - method="patch", - path=resource.get_ai_provider_path(aiProviderId=aiProviderId), - request_data=request_data, + method="post", + path=f"users/{user_id}/ai/reports", + request_data=request_params, ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_list_ai_provider_models(self, m_request, base_absolut_url): + def test_check_ai_report_generation_status(self, m_request, base_absolut_url): m_request.return_value = "response" - aiProviderId = 1 + user_id = 1 + ai_report_id = "id" + resource = self.get_resource(base_absolut_url) - assert resource.list_ai_provider_models(aiProviderId=aiProviderId) == "response" + assert resource.check_ai_report_generation_status(user_id, ai_report_id) == "response" + m_request.assert_called_once_with( method="get", - path=resource.get_ai_provider_path(aiProviderId=aiProviderId) + "/models", + path=f"users/{user_id}/ai/reports/{ai_report_id}", ) @mock.patch("crowdin_api.requester.APIRequester.request") - def test_create_ai_proxy_chat_completion(self, m_request, base_absolut_url): + def test_download_ai_report(self, m_request, base_absolut_url): m_request.return_value = "response" - aiProviderId = 1 - request_data = {"model": "string", "stream": True} + user_id = 1 + ai_report_id = "id" + resource = self.get_resource(base_absolut_url) - assert ( - resource.create_ai_proxy_chat_completion( - aiProviderId=aiProviderId, request_data=request_data - ) - == "response" + assert resource.download_ai_report(user_id, ai_report_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"users/{user_id}/ai/reports/{ai_report_id}/download", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_settings(self, m_request, base_absolut_url): + m_request.return_value = "response" + + user_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_settings(user_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"users/{user_id}/ai/settings", + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + [ + { + "op": PatchOperation.REPLACE.value, + "path": EditAiSettingsPatchPath.ASSIST_ACTION_AI_PROMPT_ID.value, + "value": 1 + }, + { + "op": PatchOperation.REPLACE.value, + "path": EditAiSettingsPatchPath.EDITOR_SUGGESTION_AI_PROMPT_ID.value, + "value": 2 + } + ], + [ + { + "op": "replace", + "path": "/assistActionAiPromptId", + "value": 1 + }, + { + "op": "replace", + "path": "/editorSuggestionAiPromptId", + "value": 2 + } + ], + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_settings(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + user_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.edit_ai_settings(user_id, incoming_data) == "response" + + m_request.assert_called_once_with( + method="patch", + path=f"users/{user_id}/ai/settings", + request_data=request_params, + ) + + +class 
TestEnterpriseAIResources: + resource_class = EnterpriseAIResource + + def get_resource(self, base_absolut_url): + return self.resource_class(requester=APIRequester(base_url=base_absolut_url)) + + def test_resource_with_id(self, base_absolut_url): + project_id = 1 + resource = self.resource_class( + requester=APIRequester(base_url=base_absolut_url), project_id=project_id + ) + assert resource.get_project_id() == project_id + + @pytest.mark.parametrize( + "in_params, path", + ( + ({}, "ai/prompts"), + ({"aiPromptId": 1}, "ai/prompts/1"), + ), + ) + def test_get_ai_path(self, in_params, path, base_absolut_url): + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_path(**in_params) == path + + @pytest.mark.parametrize( + "in_params, path", + ( + ({}, "ai/providers"), + ({"aiProviderId": 1}, "ai/providers/1"), + ), + ) + def test_get_ai_provider_path(self, in_params, path, base_absolut_url): + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_provider_path(**in_params) == path + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + "projectId": None, + "action": None, + "limit": 25, + "offset": 0, + }, + ), + ( + { + "projectId": 1, + "action": AIPromptAction.ASSIST, + "limit": 20, + "offset": 2, + }, + { + "projectId": 1, + "action": AIPromptAction.ASSIST, + "limit": 20, + "offset": 2, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_prompts( + self, m_request, incoming_data, request_params, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_prompts(**incoming_data) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_path(), + params=request_params, ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + ), + ( + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "isEnabled": False, + "enabledProjectIds": [1, 2, 3], + "config": { + "mode": "advanced", + "prompt": "test prompt", + "screenshot": True, + }, + }, + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "isEnabled": False, + "enabledProjectIds": [1, 2, 3], + "config": { + "mode": "advanced", + "prompt": "test prompt", + "screenshot": True, + }, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_prompt( + self, m_request, incoming_data, request_data, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.add_ai_prompt(request_data=incoming_data) == "response" m_request.assert_called_once_with( method="post", - path=resource.get_ai_provider_path(aiProviderId=aiProviderId) - + "/chat/completions", + path=resource.get_ai_path(), request_data=request_data, ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiPromptId = 1 + resource = 
self.get_resource(base_absolut_url) + assert resource.get_ai_prompt(aiPromptId=aiPromptId) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_path(aiPromptId=aiPromptId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiPromptId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.delete_ai_prompt(aiPromptId=aiPromptId) == "response" + m_request.assert_called_once_with( + method="delete", + path=resource.get_ai_path(aiPromptId=aiPromptId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiPromptId = 1 + request_data = [ + { + "op": AIPromptOperation.REPLACE, + "path": EditAIPromptPath.NAME, + "value": "test", + } + ] + resource = self.get_resource(base_absolut_url) + assert ( + resource.edit_ai_prompt(aiPromptId=aiPromptId, request_data=request_data) + == "response" + ) + m_request.assert_called_once_with( + method="patch", + path=resource.get_ai_path(aiPromptId=aiPromptId), + request_data=request_data, + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + "limit": 25, + "offset": 0, + }, + ), + ( + { + "limit": 20, + "offset": 2, + }, + { + "limit": 20, + "offset": 2, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_providers( + self, m_request, incoming_data, request_params, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_providers(**incoming_data) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(), + params=request_params, + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + {"name": "basic", "type": AIProviderType.OPEN_AI}, + {"name": "basic", "type": AIProviderType.OPEN_AI}, + ), + ( + { + "name": "basic", + "type": AIProviderType.OPEN_AI, + "credentials": {"apiKey": "test-api-key"}, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "enabledProjectIds": [1, 2, 3], + "config": { + "actionRules": [ + { + "action": AIPromptAction.PRE_TRANSLATE, + "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + } + ] + }, + "isEnabled": True, + "useSystemCredentials": False, + }, + { + "name": "basic", + "type": AIProviderType.OPEN_AI, + "credentials": {"apiKey": "test-api-key"}, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "enabledProjectIds": [1, 2, 3], + "config": { + "actionRules": [ + { + "action": AIPromptAction.PRE_TRANSLATE, + "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + } + ] + }, + "isEnabled": True, + "useSystemCredentials": False, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_provider( + self, m_request, incoming_data, request_data, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.add_ai_provider(request_data=incoming_data) == "response" + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_provider_path(), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + resource = 
self.get_resource(base_absolut_url) + assert resource.get_ai_provider(aiProviderId=aiProviderId) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.delete_ai_provider(aiProviderId=aiProviderId) == "response" + m_request.assert_called_once_with( + method="delete", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + request_data = [ + { + "op": AIPromptOperation.REPLACE, + "path": EditAIPromptPath.NAME, + "value": "test", + } + ] + resource = self.get_resource(base_absolut_url) + assert ( + resource.edit_ai_provider( + aiProviderId=aiProviderId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( + method="patch", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_provider_models(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_provider_models(aiProviderId=aiProviderId) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId) + "/models", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_create_ai_proxy_chat_completion(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + request_data = {"model": "string", "stream": True} + resource = self.get_resource(base_absolut_url) + assert ( + resource.create_ai_proxy_chat_completion( + aiProviderId=aiProviderId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId) + + "/chat/completions", + request_data=request_data, + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + GenerateAIPromptFineTuningDatasetRequest( + projectIds=[1], + tmIds=[2, 3], + purpose=DatasetPurpose.TRAINING.value, + dateFrom=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), + dateTo=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), + maxFileSize=20, + minExamplesCount=2, + maxExamplesCount=10 + ), + { + "projectIds": [1], + "tmIds": [2, 3], + "purpose": "training", + "dateFrom": "2019-09-23T11:26:54+00:00", + "dateTo": "2019-09-23T11:26:54+00:00", + "maxFileSize": 20, + "minExamplesCount": 2, + "maxExamplesCount": 10 + } + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_generate_ai_prompt_fine_tuning_dataset(self, m_request, incoming_data, request_data, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + + resource = self.get_resource(base_absolut_url) + assert ( + resource.generate_ai_prompt_fine_tuning_dataset(ai_prompt_id, request_data=incoming_data) + == "response" + ) + m_request.assert_called_once_with( + method="post", + path=f"ai/prompts/{ai_prompt_id}/fine-tuning/datasets", + 
request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_prompt_fine_tuning_dataset_generation_status(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + job_identifier = "id" + + resource = self.get_resource(base_absolut_url) + assert ( + resource.get_ai_prompt_fine_tuning_dataset_generation_status(ai_prompt_id, job_identifier) + == "response" + ) + m_request.assert_called_once_with( + method="get", + path=f"ai/prompts/{ai_prompt_id}/fine-tuning/datasets/{job_identifier}", + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + CreateAIPromptFineTuningJobRequest( + dryRun=False, + hyperparameters=HyperParameters( + batchSize=1, + learningRateMultiplier=2.0, + nEpochs=100, + ), + trainingOptions=TrainingOptions( + projectIds=[1], + tmIds=[2], + dateFrom=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), + dateTo=datetime(2019, 9, 23, 11, 26, 54, tzinfo=timezone.utc).isoformat(), + maxFileSize=10, + minExamplesCount=200, + maxExamplesCount=300 + ) + ), + { + "dryRun": False, + "hyperparameters": { + "batchSize": 1, + "learningRateMultiplier": 2.0, + "nEpochs": 100, + }, + "trainingOptions": { + "projectIds": [1], + "tmIds": [2], + "dateFrom": "2019-09-23T11:26:54+00:00", + "dateTo": "2019-09-23T11:26:54+00:00", + "maxFileSize": 10, + "minExamplesCount": 200, + "maxExamplesCount": 300 + } + } + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_create_ai_prompt_fine_tuning_job(self, m_request, incoming_data, request_data, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + + resource = self.get_resource(base_absolut_url) + assert ( + resource.create_ai_prompt_fine_tuning_job(ai_prompt_id, request_data=incoming_data) + == "response" + ) + m_request.assert_called_once_with( + method="post", + path=f"ai/prompts/{ai_prompt_id}/fine-tuning/jobs", + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_prompt_fine_tuning_job_status(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + job_identifier = "id" + + resource = self.get_resource(base_absolut_url) + assert ( + resource.get_ai_prompt_fine_tuning_job_status(ai_prompt_id, job_identifier) + == "response" + ) + m_request.assert_called_once_with( + method="get", + path=f"ai/prompts/{ai_prompt_id}/fine-tuning/jobs/{job_identifier}", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_download_ai_prompt_fine_tuning_dataset( + self, + m_request, + base_absolut_url + ): + m_request.return_value = "response" + + ai_prompt_id = 1 + job_identifier = "id" + + resource = self.get_resource(base_absolut_url) + assert ( + resource.download_ai_prompt_fine_tuning_dataset(ai_prompt_id, job_identifier) + == "response" + ) + m_request.assert_called_once_with( + method="get", + path=f"ai/prompts/{ai_prompt_id}/fine-tuning/datasets/{job_identifier}/download", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_prompt_fine_tuning_events(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + job_identifier = "id" + + resource = self.get_resource(base_absolut_url) + assert ( + resource.list_ai_prompt_fine_tuning_events(ai_prompt_id, job_identifier) + == "response" + ) + m_request.assert_called_once_with( + method="get", + 
path=f"ai/prompts/{ai_prompt_id}/fine-tuning/jobs/{job_identifier}/events", + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + "statuses": None, + "orderBy": None, + "limit": None, + "offset": None, + }, + ), + ( + { + "statuses": [ + AiPromptFineTuningJobStatus.CREATED, + AiPromptFineTuningJobStatus.IN_PROGRESS, + AiPromptFineTuningJobStatus.FINISHED + ], + "order_by": Sorting([ + SortingRule(ListAiPromptFineTuningJobsOrderBy.UPDATED_AT, SortingOrder.DESC), + SortingRule(ListAiPromptFineTuningJobsOrderBy.STARTED_AT, SortingOrder.DESC) + ]), + "limit": 10, + "offset": 2 + }, + { + "statuses": "created,in_progress,finished", + "orderBy": Sorting([ + SortingRule(ListAiPromptFineTuningJobsOrderBy.UPDATED_AT, SortingOrder.DESC), + SortingRule(ListAiPromptFineTuningJobsOrderBy.STARTED_AT, SortingOrder.DESC) + ]), + "limit": 10, + "offset": 2 + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_prompt_fine_tuning_jobs(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_prompt_fine_tuning_jobs(**incoming_data) == "response" + + m_request.assert_called_once_with( + method="get", + path="ai/prompts/fine-tuning/jobs", + params=request_params, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_custom_placeholders(self, m_request, base_absolut_url): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_custom_placeholders() == "response" + + m_request.assert_called_once_with( + method="get", + path="ai/settings/custom-placeholders", + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + { + "description": "Product description", + "placeholder": "%custom:productDescription%", + "value": "The product is the professional consulting service" + }, + { + "description": "Product description", + "placeholder": "%custom:productDescription%", + "value": "The product is the professional consulting service" + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_custom_placeholder(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.add_ai_custom_placeholder(incoming_data) == "response" + + m_request.assert_called_once_with( + method="post", + path="ai/settings/custom-placeholders", + request_data=request_params, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_custom_placeholder(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_custom_placeholder_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_custom_placeholder(ai_custom_placeholder_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"ai/settings/custom-placeholders/{ai_custom_placeholder_id}", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_custom_placeholder(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_custom_placeholder_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.delete_ai_custom_placeholder(ai_custom_placeholder_id) == "response" + + m_request.assert_called_once_with( + method="delete", + 
path=f"ai/settings/custom-placeholders/{ai_custom_placeholder_id}", + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + [ + { + "op": PatchOperation.REPLACE.value, + "path": EditAiCustomPlaceholderPatchPath.DESCRIPTION.value, + "value": "New description" + }, + { + "op": PatchOperation.REPLACE.value, + "path": EditAiCustomPlaceholderPatchPath.VALUE.value, + "value": "The product is the professional consulting service" + } + ], + [ + { + "op": "replace", + "path": "/description", + "value": "New description" + }, + { + "op": "replace", + "path": "/value", + "value": "The product is the professional consulting service" + } + ], + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_custom_placeholder(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + ai_custom_placeholder_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.edit_ai_custom_placeholder(ai_custom_placeholder_id, incoming_data) == "response" + + m_request.assert_called_once_with( + method="patch", + path=f"ai/settings/custom-placeholders/{ai_custom_placeholder_id}", + request_data=request_params, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_clone_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + name = "name" + + resource = self.get_resource(base_absolut_url) + assert resource.clone_ai_prompt(ai_prompt_id, name) == "response" + + m_request.assert_called_once_with( + method="post", + path=f"ai/prompts/{ai_prompt_id}/clones", + request_data={ + "name": name + }, + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + GenerateAiPromptCompletionRequest( + resources=PreTranslateActionAiPromptContextResources( + projectId=1, + sourceLanguageId="en", + targetLanguageId="uk", + stringIds=[1, 2, 3], + overridePromptValues={ + "property1": "string" + } + ), + tools=[ + AiToolObject( + tool=AiTool( + type=AiToolType.FUNCTION.value, + function=AiToolFunction( + name="Name", + description="Description", + parameters={} + ) + ) + ) + ], + tool_choice="string" + ), + { + "resources": { + "projectId": 1, + "sourceLanguageId": "en", + "targetLanguageId": "uk", + "stringIds": [1, 2, 3], + "overridePromptValues": { + "property1": "string" + } + }, + "tools": [ + { + "tool": { + "type": "function", + "function": { + "name": "Name", + "description": "Description", + "parameters": {} + } + } + } + ], + "tool_choice": "string" + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_generate_ai_prompt_completion(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + + resource = self.get_resource(base_absolut_url) + assert resource.generate_ai_prompt_completion(ai_prompt_id, incoming_data) == "response" + + m_request.assert_called_once_with( + method="post", + path=f"ai/prompts/{ai_prompt_id}/completions", + request_data=request_params, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_prompt_completion_status(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + completion_id = "id" + + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_prompt_completion_status(ai_prompt_id, completion_id) == "response" + + m_request.assert_called_once_with( + method="get", + 
path=f"ai/prompts/{ai_prompt_id}/completions/{completion_id}", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_cancel_ai_prompt_completion(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + completion_id = "id" + + resource = self.get_resource(base_absolut_url) + assert resource.cancel_ai_prompt_completion(ai_prompt_id, completion_id) == "response" + + m_request.assert_called_once_with( + method="delete", + path=f"ai/prompts/{ai_prompt_id}/completions/{completion_id}", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_download_ai_prompt_completion(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_prompt_id = 1 + completion_id = "id" + + resource = self.get_resource(base_absolut_url) + assert resource.download_ai_prompt_completion(ai_prompt_id, completion_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"ai/prompts/{ai_prompt_id}/completions/{completion_id}/download", + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + GenerateAiReportRequest( + type="tokens-usage-raw-data", + schema=GeneralReportSchema( + dateFrom=datetime(2024, 1, 23, 7, 0, 14, tzinfo=timezone.utc).isoformat(), + dateTo=datetime(2024, 9, 27, 7, 0, 14, tzinfo=timezone.utc).isoformat(), + format=AiReportFormat.JSON.value, + projectIds=[1, 2, 3], + promptIds=[4, 5, 6], + userIds=[7, 8, 9] + ) + ), + { + "type": "tokens-usage-raw-data", + "schema": { + "dateFrom": "2024-01-23T07:00:14+00:00", + "dateTo": "2024-09-27T07:00:14+00:00", + "format": "json", + "projectIds": [1, 2, 3], + "promptIds": [4, 5, 6], + "userIds": [7, 8, 9] + } + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_generate_ai_report(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.generate_ai_report(incoming_data) == "response" + + m_request.assert_called_once_with( + method="post", + path="ai/reports", + request_data=request_params, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_check_ai_report_generation_status(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_report_id = "id" + + resource = self.get_resource(base_absolut_url) + assert resource.check_ai_report_generation_status(ai_report_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"ai/reports/{ai_report_id}", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_download_ai_report(self, m_request, base_absolut_url): + m_request.return_value = "response" + + ai_report_id = "id" + + resource = self.get_resource(base_absolut_url) + assert resource.download_ai_report(ai_report_id) == "response" + + m_request.assert_called_once_with( + method="get", + path=f"ai/reports/{ai_report_id}/download", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_settings(self, m_request, base_absolut_url): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_settings() == "response" + + m_request.assert_called_once_with( + method="get", + path="ai/settings", + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + [ + { + "op": PatchOperation.REPLACE.value, + "path": EditAiSettingsPatchPath.ASSIST_ACTION_AI_PROMPT_ID.value, + "value": 1 + }, + { + 
"op": PatchOperation.REPLACE.value, + "path": EditAiSettingsPatchPath.EDITOR_SUGGESTION_AI_PROMPT_ID.value, + "value": 2 + } + ], + [ + { + "op": "replace", + "path": "/assistActionAiPromptId", + "value": 1 + }, + { + "op": "replace", + "path": "/editorSuggestionAiPromptId", + "value": 2 + } + ], + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_settings(self, m_request, incoming_data, request_params, base_absolut_url): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.edit_ai_settings(incoming_data) == "response" + + m_request.assert_called_once_with( + method="patch", + path="ai/settings", + request_data=request_params, + ) diff --git a/crowdin_api/api_resources/ai/types.py b/crowdin_api/api_resources/ai/types.py index ecdbe1c..ce13ed7 100644 --- a/crowdin_api/api_resources/ai/types.py +++ b/crowdin_api/api_resources/ai/types.py @@ -6,7 +6,11 @@ AIProviderType, EditAIPromptPath, EditAIProviderPath, + EditAiCustomPlaceholderPatchPath, + AiToolType, + EditAiSettingsPatchPath, ) +from crowdin_api.api_resources.enums import PatchOperation from crowdin_api.typing import TypedDict @@ -192,3 +196,94 @@ class CreateAIPromptFineTuningJobRequest(TypedDict): hyperparameters: Optional[HyperParameters] trainingOptions: TrainingOptions validationOptions: Optional[ValidationOptions] + + +class AddAiCustomPlaceholderRequest(TypedDict): + description: str + placeholder: str + value: str + + +class EditAiCustomPlaceholderPatch(TypedDict): + op: PatchOperation + path: EditAiCustomPlaceholderPatchPath + value: Any + + +class AiToolFunction(TypedDict): + description: Optional[str] + name: str + parameters: Any + + +class AiTool(TypedDict): + type: AiToolType + function: AiToolFunction + + +class AiToolObject(TypedDict): + tool: AiTool + + +class AiPromptContextResources(TypedDict): + pass + + +class PreTranslateActionAiPromptContextResources(AiPromptContextResources): + projectId: int + sourceLanguageId: Optional[str] + targetLanguageId: Optional[str] + stringIds: Optional[Iterable[int]] + overridePromptValues: Optional[Dict[str, str]] + + +class AssistActionAiPromptContextResources(AiPromptContextResources): + projectId: int + sourceLanguageId: Optional[str] + targetLanguageId: Optional[str] + stringIds: Optional[Iterable[int]] + filteredStringIds: Optional[Iterable[int]] + overridePromptValues: Optional[Dict[str, str]] + + +class QaCheckActionAiPromptContextResources(AiPromptContextResources): + projectId: int + sourceLanguageId: Optional[str] + targetLanguageId: Optional[str] + stringIds: Optional[Iterable[int]] + overridePromptValues: Optional[Dict[str, str]] + + +class CustomActionAiPromptContextResources(AiPromptContextResources): + projectId: int + sourceLanguageId: Optional[str] + targetLanguageId: Optional[str] + stringIds: Optional[Iterable[int]] + overridePromptValues: Optional[Dict[str, str]] + customInstruction: Optional[str] + + +class GenerateAiPromptCompletionRequest(TypedDict): + resources: AiPromptContextResources + tools: Optional[Iterable[AiToolObject]] + tool_choice: Any + + +class GeneralReportSchema(TypedDict): + dateFrom: str + dateTo: str + format: Optional[str] + projectIds: Optional[Iterable[int]] + promptIds: Optional[Iterable[int]] + userIds: Optional[Iterable[int]] + + +class GenerateAiReportRequest(TypedDict): + type: str + schema: GeneralReportSchema + + +class EditAiSettingsPatch(TypedDict): + op: PatchOperation + path: EditAiSettingsPatchPath + value: Any diff --git 
a/crowdin_api/utils.py b/crowdin_api/utils.py index cedc9c5..7f78bf3 100644 --- a/crowdin_api/utils.py +++ b/crowdin_api/utils.py @@ -17,3 +17,9 @@ def convert_to_query_string( def convert_enum_to_string_if_exists(value: Optional[Enum]) -> Optional[str]: return value.value if value is not None else None + + +def convert_enum_collection_to_string_if_exists(value: Optional[Iterable[Enum]]) -> Optional[str]: + if value is None: + return None + return ','.join([item.value for item in value if isinstance(item, Enum)])
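
For reference, a minimal usage sketch of the endpoints introduced above. It assumes a configured CrowdinClient that exposes the user-scoped AIResource as client.ai; the accessor name, token, and IDs are illustrative, SortingRule/SortingOrder are assumed to live alongside Sorting in crowdin_api.sorting, and generate_ai_report's user-scoped signature is inferred by analogy with the enterprise test in this diff rather than shown directly.

from datetime import datetime, timezone

from crowdin_api import CrowdinClient
from crowdin_api.api_resources.ai.enums import (
    AiPromptFineTuningJobStatus,
    AiReportFormat,
    ListAiPromptFineTuningJobsOrderBy,
)
from crowdin_api.sorting import Sorting, SortingOrder, SortingRule

# Illustrative client setup; token and accessor are assumptions, not part of the diff.
client = CrowdinClient(token="personal-access-token")
ai = client.ai

user_id = 1
ai_prompt_id = 2

# List finished fine-tuning jobs for the user, most recently updated first.
jobs = ai.list_ai_prompt_fine_tuning_jobs(
    user_id,
    statuses=[AiPromptFineTuningJobStatus.FINISHED],
    order_by=Sorting(
        [SortingRule(ListAiPromptFineTuningJobsOrderBy.UPDATED_AT, SortingOrder.DESC)]
    ),
    limit=10,
)

# Register a custom placeholder that prompts can reference as %custom:productDescription%.
placeholder = ai.add_ai_custom_placeholder(
    user_id,
    {
        "description": "Product description",
        "placeholder": "%custom:productDescription%",
        "value": "The product is the professional consulting service",
    },
)

# Request a tokens-usage report for January 2024 in JSON format (signature assumed,
# mirroring the enterprise test_generate_ai_report case above).
report = ai.generate_ai_report(
    user_id,
    {
        "type": "tokens-usage-raw-data",
        "schema": {
            "dateFrom": datetime(2024, 1, 1, tzinfo=timezone.utc).isoformat(),
            "dateTo": datetime(2024, 1, 31, tzinfo=timezone.utc).isoformat(),
            "format": AiReportFormat.JSON.value,
        },
    },
)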