diff --git a/toolkits/community/todoist/Makefile b/toolkits/community/todoist/Makefile
new file mode 100644
index 000000000..fe0ad4bd5
--- /dev/null
+++ b/toolkits/community/todoist/Makefile
@@ -0,0 +1,55 @@
+.PHONY: help
+
+help:
+ @echo "🛠️ github Commands:\n"
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
+
+.PHONY: install
+install: ## Install the uv environment and install all packages with dependencies
+ @echo "🚀 Creating virtual environment and installing all packages using uv"
+ @uv sync --active --all-extras --no-sources
+ @if [ -f .pre-commit-config.yaml ]; then uv run --no-sources pre-commit install; fi
+ @echo "✅ All packages and dependencies installed via uv"
+
+.PHONY: install-local
+install-local: ## Install the uv environment and install all packages with dependencies with local Arcade sources
+ @echo "🚀 Creating virtual environment and installing all packages using uv"
+ @uv sync --active --all-extras
+ @if [ -f .pre-commit-config.yaml ]; then uv run pre-commit install; fi
+ @echo "✅ All packages and dependencies installed via uv"
+
+.PHONY: build
+build: clean-build ## Build wheel file using uv
+ @echo "🚀 Creating wheel file"
+ uv build
+
+.PHONY: clean-build
+clean-build: ## clean build artifacts
+ @echo "🗑️ Cleaning dist directory"
+ rm -rf dist
+
+.PHONY: test
+test: ## Test the code with pytest
+ @echo "🚀 Testing code: Running pytest"
+ @uv run --no-sources pytest -W ignore -vv --cov --cov-config=pyproject.toml --cov-report=xml
+
+.PHONY: coverage
+coverage: ## Generate coverage report
+ @echo "coverage report"
+ @uv run --no-sources coverage report
+ @echo "Generating coverage report"
+ @uv run --no-sources coverage html
+
+.PHONY: bump-version
+bump-version: ## Bump the version in the pyproject.toml file by a patch version
+ @echo "🚀 Bumping version in pyproject.toml"
+ uv version --no-sources --bump patch
+
+.PHONY: check
+check: ## Run code quality tools.
+ @if [ -f .pre-commit-config.yaml ]; then\
+ echo "🚀 Linting code: Running pre-commit";\
+ uv run --no-sources pre-commit run -a;\
+ fi
+ @echo "🚀 Static type checking: Running mypy"
+ @uv run --no-sources mypy --config-file=pyproject.toml
diff --git a/toolkits/community/todoist/README.md b/toolkits/community/todoist/README.md
new file mode 100644
index 000000000..8b3761746
--- /dev/null
+++ b/toolkits/community/todoist/README.md
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+# Arcade Todoist Toolkit
+
+Allows an agent to connect to and interact with Todoist.
+
+## Features
+
+- `get_projects`: list the user's projects
+- `get_all_tasks`: list all tasks, with pagination support
+- `get_tasks_by_project`: list tasks in a specific project, by project ID or name
+- `get_tasks_by_filter`: search tasks using a Todoist filter query
+- `create_task`: create a task in the inbox or in a specific project
+- `close_task`: mark a task as completed by its ID
+- `delete_task`: delete a task by its ID
+
+## Development
+
+Read the docs on how to create a toolkit [here](https://docs.arcade.dev/home/build-tools/create-a-toolkit).
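+
+## Usage sketch
+
+A minimal sketch of calling the tools directly from Python. This is illustrative only: it assumes a `ToolContext` that already carries a valid Todoist OAuth token (the test suite constructs one the same way, with a fake token), and the placeholder token string below is not real.
+
+```python
+import asyncio
+
+from arcade_tdk import ToolContext
+from arcade_todoist.tools.projects import get_projects
+from arcade_todoist.tools.tasks import create_task
+
+
+async def main() -> None:
+    # Hypothetical context; replace the placeholder with a real Todoist OAuth token.
+    context = ToolContext(authorization={"token": "<todoist-oauth-token>"})
+
+    # List the user's projects, then create a task in the inbox (project=None).
+    print(await get_projects(context=context))
+    print(await create_task(context=context, description="Buy groceries", project=None))
+
+
+asyncio.run(main())
+```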
diff --git a/toolkits/community/todoist/arcade_todoist/__init__.py b/toolkits/community/todoist/arcade_todoist/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/toolkits/community/todoist/arcade_todoist/errors.py b/toolkits/community/todoist/arcade_todoist/errors.py
new file mode 100644
index 000000000..21265ae84
--- /dev/null
+++ b/toolkits/community/todoist/arcade_todoist/errors.py
@@ -0,0 +1,59 @@
+from arcade_tdk.errors import ToolExecutionError
+
+
+class ProjectNotFoundError(ToolExecutionError):
+ """Raised when a project is not found."""
+
+ def __init__(self, project_name: str, partial_matches: list[str] | None = None):
+ if partial_matches:
+ matches_str = "', '".join(partial_matches)
+ super().__init__(
+ "Project not found",
+ developer_message=(
+ f"Project '{project_name}' not found, but found partial matches: "
+ f"{matches_str}. "
+ f"Please specify the exact project name."
+ ),
+ )
+ else:
+ super().__init__(
+ "Project not found",
+ developer_message=(
+ f"Project '{project_name}' not found. "
+ f"Ask the user to create the project first."
+ ),
+ )
+
+
+class TaskNotFoundError(ToolExecutionError):
+ """Raised when a task is not found."""
+
+ def __init__(self, task_description: str, partial_matches: list[str] | None = None):
+ if partial_matches:
+ matches_str = "', '".join(partial_matches)
+ super().__init__(
+ "Task not found",
+ developer_message=(
+ f"Task '{task_description}' not found, but found partial matches: "
+ f"{matches_str}. "
+ f"Please specify the exact task description."
+ ),
+ )
+ else:
+ super().__init__(
+ "Task not found", developer_message=f"Task '{task_description}' not found."
+ )
+
+
+class MultipleTasksFoundError(ToolExecutionError):
+ """Raised when multiple tasks match the search criteria."""
+
+ def __init__(self, task_description: str, task_matches: list[dict]):
+ matches_str = "', '".join([task["content"] for task in task_matches])
+ super().__init__(
+ "Multiple tasks found",
+ developer_message=(
+ f"Multiple tasks found for '{task_description}': '{matches_str}'. "
+ f"Please specify the exact task description to choose one."
+ ),
+ )
diff --git a/toolkits/community/todoist/arcade_todoist/tools/__init__.py b/toolkits/community/todoist/arcade_todoist/tools/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/toolkits/community/todoist/arcade_todoist/tools/projects.py b/toolkits/community/todoist/arcade_todoist/tools/projects.py
new file mode 100644
index 000000000..2c023dade
--- /dev/null
+++ b/toolkits/community/todoist/arcade_todoist/tools/projects.py
@@ -0,0 +1,35 @@
+from typing import Annotated
+
+import httpx
+from arcade_tdk import ToolContext, tool
+from arcade_tdk.auth import OAuth2
+
+from arcade_todoist.utils import get_headers, get_url, parse_projects
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read"],
+ ),
+)
+async def get_projects(
+ context: ToolContext,
+) -> Annotated[dict, "The projects object returned by the Todoist API."]:
+ """
+ Get all projects from the Todoist API. Use this when the user wants to see, list, or browse
+ their projects. Do NOT use this for creating tasks - use create_task instead even if a
+ project name is mentioned.
+ """
+
+ async with httpx.AsyncClient() as client:
+ url = get_url(context=context, endpoint="projects")
+ headers = get_headers(context=context)
+
+ response = await client.get(url, headers=headers)
+
+ response.raise_for_status()
+
+ projects = parse_projects(response.json()["results"])
+
+ return {"projects": projects}
diff --git a/toolkits/community/todoist/arcade_todoist/tools/tasks.py b/toolkits/community/todoist/arcade_todoist/tools/tasks.py
new file mode 100644
index 000000000..3f73578f2
--- /dev/null
+++ b/toolkits/community/todoist/arcade_todoist/tools/tasks.py
@@ -0,0 +1,324 @@
+from typing import Annotated
+
+import httpx
+from arcade_tdk import ToolContext, tool
+from arcade_tdk.auth import OAuth2
+
+from arcade_todoist.utils import (
+ get_headers,
+ get_tasks_with_pagination,
+ get_url,
+ parse_task,
+ parse_tasks,
+ resolve_project_id,
+)
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read"],
+ ),
+)
+async def get_all_tasks(
+ context: ToolContext,
+ limit: Annotated[
+ int,
+ "Number of tasks to return (min: 1, default: 50, max: 200). "
+ "Default is 50 which should be sufficient for most use cases.",
+ ] = 50,
+ next_page_token: Annotated[
+ str | None,
+ "Token for pagination. Use None for the first page, or the token returned "
+ "from a previous call to get the next page of results.",
+ ] = None,
+) -> Annotated[dict, "The tasks object with pagination info returned by the Todoist API."]:
+ """
+ Get all tasks from the Todoist API with pagination support. Use this when the user wants
+ to see, list, view, or browse ALL their existing tasks. For getting tasks from a specific
+ project, use get_tasks_by_project instead.
+
+ The response includes both tasks and a next_page_token. If next_page_token is not None,
+ there are more tasks available and you can call this function again with that token.
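+
+ Example of draining all pages (illustrative sketch; `context` is a valid ToolContext):
+
+     tasks: list[dict] = []
+     token = None
+     while True:
+         page = await get_all_tasks(context=context, limit=50, next_page_token=token)
+         tasks.extend(page["tasks"])
+         token = page["next_page_token"]
+         if token is None:
+             break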
+ """
+ return await get_tasks_with_pagination(
+ context=context, limit=limit, next_page_token=next_page_token
+ )
+
+
+async def _get_tasks_by_project_id(
+ context: ToolContext,
+ project_id: Annotated[str, "The ID of the project to get tasks from."],
+ limit: Annotated[
+ int,
+ "Number of tasks to return (min: 1, default: 50, max: 200). "
+ "Default is 50 which should be sufficient for most use cases.",
+ ] = 50,
+ next_page_token: Annotated[
+ str | None,
+ "Token for pagination. Use None for the first page, or the token returned "
+ "from a previous call to get the next page of results.",
+ ] = None,
+) -> Annotated[dict, "The tasks object with pagination info returned by the Todoist API."]:
+ """
+ Internal utility function to get tasks from a specific project by project ID with
+ pagination support.
+
+ Args:
+ context: ToolContext for API access
+ project_id: The ID of the project to get tasks from
+ limit: Number of tasks to return (min: 1, default: 50, max: 200)
+ next_page_token: Token for pagination, use None for first page
+
+ Returns:
+ Dict containing tasks and next_page_token for pagination
+ """
+ return await get_tasks_with_pagination(
+ context=context, limit=limit, next_page_token=next_page_token, project_id=project_id
+ )
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read"],
+ ),
+)
+async def get_tasks_by_project(
+ context: ToolContext,
+ project: Annotated[str, "The ID or name of the project to get tasks from."],
+ limit: Annotated[
+ int,
+ "Number of tasks to return (min: 1, default: 50, max: 200). "
+ "Default is 50 which should be sufficient for most use cases.",
+ ] = 50,
+ next_page_token: Annotated[
+ str | None,
+ "Token for pagination. Use None for the first page, or the token returned "
+ "from a previous call to get the next page of results.",
+ ] = None,
+) -> Annotated[dict, "The tasks object with pagination info returned by the Todoist API."]:
+ """
+ Get tasks from a specific project by project ID or name with pagination support.
+ Use this when the user wants to see tasks from a specific project.
+
+ The function will first try to find a project with the given ID, and if that doesn't exist,
+ it will search for a project with the given name.
+
+ The response includes both tasks and a next_page_token. If next_page_token is not None,
+ there are more tasks available and you can call this function again with that token.
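+
+ Examples (illustrative only; the values mirror the eval cases):
+
+     await get_tasks_by_project(context=context, project="12345")     # by project ID
+     await get_tasks_by_project(context=context, project="Personal")  # by project name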
+ """
+ project_id = await resolve_project_id(context=context, project=project)
+
+ return await _get_tasks_by_project_id(
+ context=context, project_id=project_id, limit=limit, next_page_token=next_page_token
+ )
+
+
+async def _create_task_in_project(
+ context: ToolContext,
+ description: Annotated[str, "The title of the task to be created."],
+ project_id: Annotated[
+ str | None, "The ID of the project to add the task to. Use None to add to inbox."
+ ],
+) -> Annotated[dict, "The task object returned by the Todoist API."]:
+ """
+ Internal utility function to create a new task in a specific project by project ID.
+
+ Args:
+ context: ToolContext for API access
+ description: The title of the task to be created
+ project_id: The ID of the project to add the task to, use None to add to inbox
+
+ Returns:
+ Dict containing the created task object
+ """
+
+ async with httpx.AsyncClient() as client:
+ url = get_url(context=context, endpoint="tasks")
+ headers = get_headers(context=context)
+
+ response = await client.post(
+ url,
+ headers=headers,
+ json={
+ "content": description,
+ "project_id": project_id,
+ },
+ )
+
+ response.raise_for_status()
+
+ task = parse_task(response.json())
+
+ return task
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read_write"],
+ ),
+)
+async def create_task(
+ context: ToolContext,
+ description: Annotated[str, "The title of the task to be created."],
+ project: Annotated[
+ str | None,
+ "The ID or name of the project to add the task to. Use the project ID or name if "
+ "user mentions a specific project. Leave as None to add to inbox.",
+ ] = None,
+) -> Annotated[dict, "The task object returned by the Todoist API."]:
+ """
+ Create a new task for the user. Use this whenever the user wants to create, add, or make a task.
+ If the user mentions a specific project, pass the project ID or name as project.
+ If no project is mentioned, leave project as None to add to inbox.
+
+ The function will first try to find a project with the given ID, and if that doesn't exist,
+ it will search for a project with the given name.
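+
+ Examples (illustrative only; the values mirror the eval cases):
+
+     await create_task(context=context, description="Buy groceries")  # goes to the inbox
+     await create_task(context=context, description="Check the email", project="Personal")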
+ """
+
+ project_id = None
+ if project is not None:
+ project_id = await resolve_project_id(context=context, project=project)
+
+ return await _create_task_in_project(
+ context=context, description=description, project_id=project_id
+ )
+
+
+async def _close_task_by_task_id(
+ context: ToolContext,
+ task_id: Annotated[str, "The id of the task to be closed."],
+) -> Annotated[dict, "The task object returned by the Todoist API."]:
+ """
+ Internal utility function to close a task by its ID.
+
+ Args:
+ context: ToolContext for API access
+ task_id: The ID of the task to be closed
+
+ Returns:
+ Dict with success message
+ """
+
+ async with httpx.AsyncClient() as client:
+ url = get_url(context=context, endpoint=f"tasks/{task_id}/close")
+ headers = get_headers(context=context)
+
+ response = await client.post(url, headers=headers)
+
+ response.raise_for_status()
+
+ return {"message": "Task closed successfully"}
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read_write"],
+ ),
+)
+async def close_task(
+ context: ToolContext,
+ task_id: Annotated[str, "The exact ID of the task to be closed."],
+) -> Annotated[dict, "The task object returned by the Todoist API."]:
+ """
+ Close a task by its exact ID. Use this whenever the user wants to
+ mark a task as completed, done, or closed.
+ """
+
+ return await _close_task_by_task_id(context=context, task_id=task_id)
+
+
+async def _delete_task_by_task_id(
+ context: ToolContext,
+ task_id: Annotated[str, "The id of the task to be deleted."],
+) -> Annotated[dict, "The task object returned by the Todoist API."]:
+ """
+ Internal utility function to delete a task by its ID.
+
+ Args:
+ context: ToolContext for API access
+ task_id: The ID of the task to be deleted
+
+ Returns:
+ Dict with success message
+ """
+
+ async with httpx.AsyncClient() as client:
+ url = get_url(context=context, endpoint=f"tasks/{task_id}")
+ headers = get_headers(context=context)
+
+ response = await client.delete(url, headers=headers)
+
+ response.raise_for_status()
+
+ return {"message": "Task deleted successfully"}
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read_write"],
+ ),
+)
+async def delete_task(
+ context: ToolContext,
+ task_id: Annotated[str, "The exact ID of the task to be deleted."],
+) -> Annotated[dict, "The task object returned by the Todoist API."]:
+ """
+ Delete a task by its exact ID. Use this whenever the user wants to
+ delete a task.
+ """
+
+ return await _delete_task_by_task_id(context=context, task_id=task_id)
+
+
+@tool(
+ requires_auth=OAuth2(
+ id="todoist",
+ scopes=["data:read"],
+ ),
+)
+async def get_tasks_by_filter(
+ context: ToolContext,
+ filter_query: Annotated[
+ str,
+ "The filter query to search tasks.",
+ ],
+ limit: Annotated[
+ int,
+ "Number of tasks to return (min: 1, default: 50, max: 200). "
+ "Default is 50 which should be sufficient for most use cases.",
+ ] = 50,
+ next_page_token: Annotated[
+ str | None,
+ "Token for pagination. Use None for the first page, or the token returned "
+ "from a previous call to get the next page of results.",
+ ] = None,
+) -> Annotated[dict, "The tasks object with pagination info returned by the Todoist API."]:
+ """
+ Get tasks by filter query with pagination support.
+ Use this when the user wants to search for specific tasks.
+
+ The response includes both tasks and a next_page_token. If next_page_token is not None,
+ there are more tasks available and you can call this function again with that token.
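+
+ Example filter queries (Todoist filter syntax; the values below also appear in the tests/evals):
+
+     "today"                   # tasks due today
+     "#Work & search:report"   # tasks in the Work project whose content contains "report"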
+ """
+ async with httpx.AsyncClient() as client:
+ url = get_url(context=context, endpoint="tasks/filter")
+ headers = get_headers(context=context)
+
+ params = {"query": filter_query, "limit": limit}
+ if next_page_token:
+ params["cursor"] = next_page_token
+
+ response = await client.get(url, headers=headers, params=params)
+ response.raise_for_status()
+
+ data = response.json()
+ tasks = parse_tasks(data["results"])
+ next_cursor = data.get("next_cursor")
+
+ return {"tasks": tasks, "next_page_token": next_cursor}
diff --git a/toolkits/community/todoist/arcade_todoist/utils.py b/toolkits/community/todoist/arcade_todoist/utils.py
new file mode 100644
index 000000000..26cbe13ab
--- /dev/null
+++ b/toolkits/community/todoist/arcade_todoist/utils.py
@@ -0,0 +1,265 @@
+from typing import Any
+
+import httpx
+from arcade_tdk import ToolContext
+from arcade_tdk.errors import ToolExecutionError
+
+from arcade_todoist.errors import MultipleTasksFoundError, ProjectNotFoundError, TaskNotFoundError
+
+
+class TodoistAuthError(ToolExecutionError):
+ """Raised when Todoist authentication token is missing."""
+
+ def __init__(self):
+ super().__init__("No token found")
+
+
+def get_headers(context: ToolContext) -> dict[str, str]:
+ """
+ Build headers for the Todoist API requests.
+ """
+
+ token = context.get_auth_token_or_empty()
+
+ if not token:
+ raise TodoistAuthError()
+
+ return {
+ "Authorization": f"Bearer {token}",
+ "Content-Type": "application/json",
+ }
+
+
+def get_url(
+ context: ToolContext,
+ endpoint: str,
+ api_version: str = "v1",
+) -> str:
+ """
+ Build the URL for the Todoist API request.
+ """
+
+ base_url = "https://api.todoist.com"
+
+ return f"{base_url}/api/{api_version}/{endpoint}"
+
+
+def parse_project(project: dict[str, Any]) -> dict[str, Any]:
+ """
+ Parse the project object returned by the Todoist API.
+ """
+
+ return {
+ "id": project["id"],
+ "name": project["name"],
+ "created_at": project["created_at"],
+ }
+
+
+def parse_projects(projects: list[dict[str, Any]]) -> list[dict[str, Any]]:
+ """
+ Parse the projects object returned by the Todoist API.
+ """
+
+ return [parse_project(project) for project in projects]
+
+
+def parse_task(task: dict[str, Any]) -> dict[str, Any]:
+ """
+ Parse the task object returned by the Todoist API.
+ """
+
+ return {
+ "id": task["id"],
+ "content": task["content"],
+ "added_at": task["added_at"],
+ "checked": task["checked"],
+ "project_id": task["project_id"],
+ }
+
+
+def parse_tasks(tasks: list[dict[str, Any]]) -> list[dict[str, Any]]:
+ """
+ Parse the tasks object returned by the Todoist API.
+ """
+
+ return [parse_task(task) for task in tasks]
+
+
+async def get_tasks_with_pagination(
+ context: ToolContext,
+ limit: int = 50,
+ next_page_token: str | None = None,
+ project_id: str | None = None,
+) -> dict:
+ """
+ Utility function to get tasks with pagination support.
+
+ Args:
+ context: ToolContext for API access
+ limit: Number of tasks to return (min: 1, default: 50, max: 200)
+ next_page_token: Token for pagination, use None for first page
+ project_id: Optional project ID to filter tasks by project
+
+ Returns:
+ Dict containing tasks and next_page_token for pagination
+ """
+
+ async with httpx.AsyncClient() as client:
+ url = get_url(context=context, endpoint="tasks")
+ headers = get_headers(context=context)
+
+ params = {"limit": limit}
+ if next_page_token:
+ params["cursor"] = next_page_token
+ if project_id:
+ params["project_id"] = project_id
+
+ response = await client.get(url, headers=headers, params=params)
+ response.raise_for_status()
+
+ data = response.json()
+ tasks = parse_tasks(data["results"])
+ next_cursor = data.get("next_cursor")
+
+ return {"tasks": tasks, "next_page_token": next_cursor}
+
+
+async def resolve_project_id(context: ToolContext, project: str) -> str:
+ """
+ Utility function to resolve a project identifier to project ID.
+
+ Args:
+ context: ToolContext for API access
+ project: Project ID or name to resolve
+
+ Returns:
+ The project ID
+
+ Raises:
+ ProjectNotFoundError: If the project is not found
+ """
+ from arcade_todoist.tools.projects import get_projects
+
+ projects = await get_projects(context=context)
+
+ for proj in projects["projects"]:
+ if proj["id"] == project:
+ return project
+
+ for proj in projects["projects"]:
+ if proj["name"].lower() == project.lower():
+ return proj["id"]
+
+ partial_matches = []
+ for proj in projects["projects"]:
+ if project.lower() in proj["name"].lower():
+ partial_matches.append(proj["name"])
+
+ if partial_matches:
+ raise ProjectNotFoundError(project, partial_matches)
+ else:
+ raise ProjectNotFoundError(project)
+
+
+def _check_exact_id_match(tasks: list[dict], task: str) -> str | None:
+ """Check if task matches any task ID exactly."""
+ for task_obj in tasks:
+ if task_obj["id"] == task:
+ return task
+ return None
+
+
+def _find_exact_content_matches(tasks: list[dict], task: str) -> list[dict]:
+ """Find tasks with exact content match (case-insensitive)."""
+ return [task_obj for task_obj in tasks if task_obj["content"].lower() == task.lower()]
+
+
+def _find_partial_content_matches(tasks: list[dict], task: str) -> list[dict]:
+ """Find tasks with partial content match (case-insensitive)."""
+ return [task_obj for task_obj in tasks if task.lower() in task_obj["content"].lower()]
+
+
+def _handle_task_matches(task: str, exact_matches: list[dict], partial_matches: list[dict]) -> str:
+ """Handle task matching logic and raise appropriate errors."""
+ if len(exact_matches) == 1:
+ return exact_matches[0]["id"]
+ elif len(exact_matches) > 1:
+ _raise_multiple_tasks_error(task, exact_matches)
+
+ if len(partial_matches) == 1:
+ return partial_matches[0]["id"]
+ elif len(partial_matches) > 1:
+ _raise_multiple_tasks_error(task, partial_matches)
+
+ # At this point there are no exact matches and no usable partial matches:
+ # a single partial match returned above and multiple partial matches raised.
+ _raise_task_not_found_error(task)
+
+
+def _raise_multiple_tasks_error(task: str, matches: list[dict]) -> None:
+ """Raise MultipleTasksFoundError."""
+ raise MultipleTasksFoundError(task, matches)
+
+
+def _raise_task_not_found_error(task: str, suggestions: list[str] | None = None) -> None:
+ """Raise TaskNotFoundError."""
+ if suggestions:
+ raise TaskNotFoundError(task, suggestions)
+ raise TaskNotFoundError(task)
+
+
+def _resolve_task_from_task_list(tasks: list[dict], task: str) -> str:
+ """Resolve task ID from a list of tasks."""
+ # Check for exact ID match first
+ exact_id = _check_exact_id_match(tasks, task)
+ if exact_id:
+ return exact_id
+
+ # Check for exact and partial content matches
+ exact_matches = _find_exact_content_matches(tasks, task)
+ partial_matches = _find_partial_content_matches(tasks, task)
+
+ return _handle_task_matches(task, exact_matches, partial_matches)
+
+
+async def resolve_task_id(context: ToolContext, task: str) -> str:
+ """
+ Utility function to resolve a task identifier to task ID.
+
+ Args:
+ context: ToolContext for API access
+ task: Task ID or description/content to resolve
+
+ Returns:
+ The task ID
+
+ Raises:
+ TaskNotFoundError: If the task is not found
+ MultipleTasksFoundError: If multiple tasks match the criteria
+ """
+ from arcade_todoist.tools.tasks import get_tasks_by_filter
+
+ try:
+ tasks = await get_tasks_by_filter(context=context, filter_query=f"search: {task}")
+ return _resolve_task_from_task_list(tasks["tasks"], task)
+
+ except (TaskNotFoundError, MultipleTasksFoundError):
+ # Re-raise these specific errors
+ raise
+ except Exception as err:
+ # If search filter fails, fall back to getting all tasks
+ try:
+ from arcade_todoist.tools.tasks import get_all_tasks
+
+ tasks = await get_all_tasks(context=context)
+ return _resolve_task_from_task_list(tasks["tasks"], task)
+ except (TaskNotFoundError, MultipleTasksFoundError):
+ # Re-raise these specific errors from the fallback
+ raise
+ except Exception:
+ # If both methods fail, raise the original error
+ raise TaskNotFoundError(task) from err
diff --git a/toolkits/community/todoist/evals/eval_todoist.py b/toolkits/community/todoist/evals/eval_todoist.py
new file mode 100644
index 000000000..143889b4f
--- /dev/null
+++ b/toolkits/community/todoist/evals/eval_todoist.py
@@ -0,0 +1,235 @@
+import arcade_todoist
+from arcade_evals import (
+ EvalRubric,
+ EvalSuite,
+ ExpectedToolCall,
+ tool_eval,
+)
+from arcade_evals.critic import BinaryCritic, SimilarityCritic
+from arcade_tdk import ToolCatalog
+from arcade_todoist.tools.projects import get_projects
+from arcade_todoist.tools.tasks import (
+ close_task,
+ create_task,
+ delete_task,
+ get_all_tasks,
+ get_tasks_by_filter,
+ get_tasks_by_project,
+)
+
+rubric = EvalRubric(
+ fail_threshold=0.85,
+ warn_threshold=0.95,
+)
+
+catalog = ToolCatalog()
+catalog.add_module(arcade_todoist)
+
+
+@tool_eval()
+def todoist_eval_suite() -> EvalSuite:
+ suite = EvalSuite(
+ name="todoist Tools Evaluation",
+ system_message=(
+ "You are an AI assistant with access to todoist tools. "
+ "Use them to help the user with their tasks."
+ ),
+ catalog=catalog,
+ rubric=rubric,
+ )
+
+ suite.add_case(
+ name="Getting the projects",
+ user_message="Get all my projects",
+ expected_tool_calls=[ExpectedToolCall(func=get_projects, args={})],
+ rubric=rubric,
+ critics=[],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Getting all the tasks",
+ user_message="Get all my tasks from across the board",
+ expected_tool_calls=[ExpectedToolCall(func=get_all_tasks, args={})],
+ rubric=rubric,
+ critics=[],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Getting tasks from a specific project with project id",
+ user_message="What are my tasks in the project with id '12345'?",
+ expected_tool_calls=[
+ ExpectedToolCall(func=get_tasks_by_project, args={"project": "12345"})
+ ],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="project", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Getting tasks from a specific project with project name",
+ user_message="What do I have left to do in the 'Personal' project?",
+ expected_tool_calls=[
+ ExpectedToolCall(func=get_tasks_by_project, args={"project": "Personal"})
+ ],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="project", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Create a task for the inbox",
+ user_message="Hey! create a task to 'Buy groceries'",
+ expected_tool_calls=[
+ ExpectedToolCall(
+ func=create_task, args={"description": "Buy groceries", "project": None}
+ )
+ ],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="description", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Create a task for the a specific project",
+ user_message="Hey! create a task to 'Check the email' in the 'Personal' project",
+ expected_tool_calls=[
+ ExpectedToolCall(
+ func=create_task,
+ args={"description": "Check the email", "project": "Personal"},
+ )
+ ],
+ rubric=rubric,
+ critics=[
+ SimilarityCritic(critic_field="description", weight=0.5),
+ SimilarityCritic(critic_field="project", weight=0.5),
+ ],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Close a task by ID",
+ user_message="Mark task with ID '12345' as completed",
+ expected_tool_calls=[ExpectedToolCall(func=close_task, args={"task_id": "12345"})],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="task_id", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Close a task by ID",
+ user_message="I'm done with task ID 'task_456'",
+ expected_tool_calls=[ExpectedToolCall(func=close_task, args={"task_id": "task_456"})],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="task_id", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Complete a task by id",
+ user_message="Please close task with id abc123, I finished it",
+ expected_tool_calls=[ExpectedToolCall(func=close_task, args={"task_id": "abc123"})],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="task_id", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Delete a task by ID",
+ user_message="Delete task with ID 'task_456'",
+ expected_tool_calls=[ExpectedToolCall(func=delete_task, args={"task_id": "task_456"})],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="task_id", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Remove a task by ID",
+ user_message="I want to remove task with ID task_789 completely",
+ expected_tool_calls=[ExpectedToolCall(func=delete_task, args={"task_id": "task_789"})],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="task_id", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Getting limited number of all tasks",
+ user_message="Get only 10 of my tasks from across the board",
+ expected_tool_calls=[ExpectedToolCall(func=get_all_tasks, args={"limit": 10})],
+ rubric=rubric,
+ critics=[BinaryCritic(critic_field="limit", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Getting limited tasks from specific project",
+ user_message="Show me only 5 tasks from the 'Work' project",
+ expected_tool_calls=[
+ ExpectedToolCall(func=get_tasks_by_project, args={"project": "Work", "limit": 5})
+ ],
+ rubric=rubric,
+ critics=[
+ SimilarityCritic(critic_field="project", weight=0.5),
+ BinaryCritic(critic_field="limit", weight=0.5),
+ ],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Search tasks using filter query",
+ user_message="Use filter search to find all tasks that contain the word 'meeting'",
+ expected_tool_calls=[
+ ExpectedToolCall(func=get_tasks_by_filter, args={"filter_query": "meeting"})
+ ],
+ rubric=rubric,
+ critics=[
+ SimilarityCritic(critic_field="filter_query", weight=1),
+ ],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Search tasks with project filter",
+ user_message="Use the filter search to find tasks in project 'Work' that contain 'report'",
+ expected_tool_calls=[
+ ExpectedToolCall(
+ func=get_tasks_by_filter, args={"filter_query": "#Work & search:report"}
+ )
+ ],
+ rubric=rubric,
+ critics=[SimilarityCritic(critic_field="filter_query", weight=1)],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Search tasks with limit",
+ user_message="Use filter search to find the first 3 tasks that contain 'urgent'",
+ expected_tool_calls=[
+ ExpectedToolCall(func=get_tasks_by_filter, args={"filter_query": "urgent", "limit": 3})
+ ],
+ rubric=rubric,
+ critics=[
+ SimilarityCritic(critic_field="filter_query", weight=0.5),
+ BinaryCritic(critic_field="limit", weight=0.5),
+ ],
+ additional_messages=[],
+ )
+
+ suite.add_case(
+ name="Create task with project ID",
+ user_message="Create a task 'Review documents' in project with ID 'proj_123'",
+ expected_tool_calls=[
+ ExpectedToolCall(
+ func=create_task, args={"description": "Review documents", "project": "proj_123"}
+ )
+ ],
+ rubric=rubric,
+ critics=[
+ SimilarityCritic(critic_field="description", weight=0.6),
+ SimilarityCritic(critic_field="project", weight=0.4),
+ ],
+ additional_messages=[],
+ )
+
+ return suite
diff --git a/toolkits/community/todoist/pyproject.toml b/toolkits/community/todoist/pyproject.toml
new file mode 100644
index 000000000..3be6f4d70
--- /dev/null
+++ b/toolkits/community/todoist/pyproject.toml
@@ -0,0 +1,52 @@
+[build-system]
+requires = [ "hatchling",]
+build-backend = "hatchling.build"
+
+[project]
+name = "arcade_todoist"
+version = "0.1.1"
+description = "Allow agent to connect and interact with Todoist"
+requires-python = ">=3.10"
+dependencies = [
+ "arcade-tdk>=2.0.0,<3.0.0",
+]
+
+
+[project.optional-dependencies]
+dev = [
+ "arcade-ai[evals]>=2.1.4",
+ "arcade-serve>=2.0.0,<3.0.0",
+ "pytest>=8.3.0,<8.4.0",
+ "pytest-cov>=4.0.0,<4.1.0",
+ "pytest-mock>=3.11.1,<3.12.0",
+ "pytest-asyncio>=0.24.0,<0.25.0",
+ "mypy>=1.5.1,<1.6.0",
+ "pre-commit>=3.4.0,<3.5.0",
+ "tox>=4.11.1,<4.12.0",
+ "ruff>=0.7.4,<0.8.0",
+]
+
+# Tell Arcade.dev that this package is a toolkit
+[project.entry-points.arcade_toolkits]
+toolkit_name = "arcade_todoist"
+
+[tool.mypy]
+files = [ "arcade_todoist/**/*.py",]
+python_version = "3.10"
+disallow_untyped_defs = "True"
+disallow_any_unimported = "True"
+no_implicit_optional = "True"
+check_untyped_defs = "True"
+warn_return_any = "True"
+warn_unused_ignores = "True"
+show_error_codes = "True"
+ignore_missing_imports = "True"
+
+[tool.pytest.ini_options]
+testpaths = [ "tests",]
+
+[tool.coverage.report]
+skip_empty = true
+
+[tool.hatch.build.targets.wheel]
+packages = [ "arcade_todoist",]
diff --git a/toolkits/community/todoist/tests/__init__.py b/toolkits/community/todoist/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/toolkits/community/todoist/tests/conftest.py b/toolkits/community/todoist/tests/conftest.py
new file mode 100644
index 000000000..812ae77ad
--- /dev/null
+++ b/toolkits/community/todoist/tests/conftest.py
@@ -0,0 +1,14 @@
+import pytest
+from arcade_tdk import ToolContext
+
+
+@pytest.fixture
+def tool_context() -> ToolContext:
+ return ToolContext(authorization={"token": "test_token"})
+
+
+@pytest.fixture
+def httpx_mock(mocker):
+ mock_client = mocker.patch("httpx.AsyncClient", autospec=True)
+ async_mock_client = mock_client.return_value.__aenter__.return_value
+ return async_mock_client
diff --git a/toolkits/community/todoist/tests/fakes.py b/toolkits/community/todoist/tests/fakes.py
new file mode 100644
index 000000000..140e1ce2b
--- /dev/null
+++ b/toolkits/community/todoist/tests/fakes.py
@@ -0,0 +1,323 @@
+"""
+This module contains all mock data used across the test suite, organized by category:
+- API response formats (what Todoist API returns)
+- Parsed response formats (what our functions return after processing)
+- Test scenario specific data
+"""
+
+PROJECTS_API_RESPONSE = {
+ "results": [
+ {
+ "id": "project_123",
+ "name": "Work Project",
+ "created_at": "2021-01-01",
+ "can_assign_tasks": True,
+ "child_order": 0,
+ "color": "blue",
+ "creator_uid": "user_123",
+ "is_archived": False,
+ "is_deleted": False,
+ "is_favorite": True,
+ },
+ {
+ "id": "project_456",
+ "name": "Personal Tasks",
+ "created_at": "2021-01-01",
+ "can_assign_tasks": True,
+ "child_order": 1,
+ "color": "red",
+ "creator_uid": "user_123",
+ "is_archived": False,
+ "is_deleted": False,
+ "is_favorite": False,
+ },
+ ]
+}
+
+PROJECTS_PARSED_RESPONSE = {
+ "projects": [
+ {"id": "project_123", "name": "Work Project", "created_at": "2021-01-01"},
+ {"id": "project_456", "name": "Personal Tasks", "created_at": "2021-01-01"},
+ ]
+}
+
+SINGLE_TASK_API_RESPONSE = {
+ "results": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": True,
+ "description": "Description of the task",
+ }
+ ],
+ "next_cursor": None,
+}
+
+TASKS_WITH_PAGINATION_API_RESPONSE = {
+ "results": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": True,
+ "description": "Description of the task",
+ }
+ ],
+ "next_cursor": "next_page_cursor_123",
+}
+
+MULTIPLE_TASKS_API_RESPONSE = {
+ "results": [
+ {
+ "id": "1",
+ "content": "Buy groceries",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": False,
+ "description": "Need to buy weekly groceries",
+ },
+ {
+ "id": "2",
+ "content": "Grocery shopping",
+ "added_at": "2021-01-01",
+ "priority": 2,
+ "project_id": "project_456",
+ "checked": False,
+ "description": "Similar to grocery task",
+ },
+ {
+ "id": "3",
+ "content": "Meeting notes",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": False,
+ "description": "Take notes during meeting",
+ },
+ ],
+ "next_cursor": None,
+}
+
+PROJECT_SPECIFIC_TASKS_API_RESPONSE = {
+ "results": [
+ {
+ "id": "1",
+ "content": "Buy groceries",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": False,
+ "description": "Need to buy weekly groceries",
+ },
+ {
+ "id": "3",
+ "content": "Meeting notes",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": False,
+ "description": "Take notes during meeting",
+ },
+ ],
+ "next_cursor": None,
+}
+
+EMPTY_TASKS_API_RESPONSE = {
+ "results": [],
+ "next_cursor": None,
+}
+
+CREATE_TASK_API_RESPONSE = {
+ "id": "2",
+ "content": "New Task",
+ "added_at": "2024-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ "priority": 1,
+ "description": "A new task description",
+}
+
+CUSTOM_LIMIT_TASK_API_RESPONSE = {
+ "results": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": False,
+ "description": "Description",
+ },
+ ],
+ "next_cursor": None,
+}
+
+PAGINATED_TASKS_API_RESPONSE = {
+ "results": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "priority": 1,
+ "project_id": "project_123",
+ "checked": False,
+ "description": "Description",
+ },
+ ],
+ "next_cursor": "next_page_token_456",
+}
+
+SINGLE_TASK_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": True,
+ }
+ ],
+ "next_page_token": None,
+}
+
+TASKS_WITH_PAGINATION_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": True,
+ }
+ ],
+ "next_page_token": "next_page_cursor_123",
+}
+
+MULTIPLE_TASKS_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Buy groceries",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ {
+ "id": "2",
+ "content": "Grocery shopping",
+ "added_at": "2021-01-01",
+ "project_id": "project_456",
+ "checked": False,
+ },
+ {
+ "id": "3",
+ "content": "Meeting notes",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ ],
+ "next_page_token": None,
+}
+
+PROJECT_SPECIFIC_TASKS_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Buy groceries",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ {
+ "id": "3",
+ "content": "Meeting notes",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ ],
+ "next_page_token": None,
+}
+
+EMPTY_TASKS_PARSED_RESPONSE = {
+ "tasks": [],
+ "next_page_token": None,
+}
+
+CREATE_TASK_PARSED_RESPONSE = {
+ "id": "2",
+ "content": "New Task",
+ "added_at": "2024-01-01",
+ "project_id": "project_123",
+ "checked": False,
+}
+
+CUSTOM_LIMIT_TASK_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ ],
+ "next_page_token": None,
+}
+
+PAGINATED_TASKS_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Task 1",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ ],
+ "next_page_token": "next_page_token_456",
+}
+
+PARTIAL_MATCH_TASKS_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "1",
+ "content": "Complete task A",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ {
+ "id": "2",
+ "content": "Complete task B",
+ "added_at": "2021-01-01",
+ "project_id": "project_456",
+ "checked": False,
+ },
+ ],
+ "next_page_token": None,
+}
+
+SINGLE_MATCH_TASK_PARSED_RESPONSE = {
+ "tasks": [
+ {
+ "id": "3",
+ "content": "Meeting notes",
+ "added_at": "2021-01-01",
+ "project_id": "project_123",
+ "checked": False,
+ },
+ ],
+ "next_page_token": None,
+}
+
+CLOSE_TASK_SUCCESS_RESPONSE = {"message": "Task closed successfully"}
+
+DELETE_TASK_SUCCESS_RESPONSE = {"message": "Task deleted successfully"}
diff --git a/toolkits/community/todoist/tests/test_projects.py b/toolkits/community/todoist/tests/test_projects.py
new file mode 100644
index 000000000..1425a9460
--- /dev/null
+++ b/toolkits/community/todoist/tests/test_projects.py
@@ -0,0 +1,20 @@
+from unittest.mock import MagicMock
+
+import pytest
+from arcade_todoist.tools.projects import get_projects
+
+from tests.fakes import PROJECTS_API_RESPONSE, PROJECTS_PARSED_RESPONSE
+
+
+@pytest.mark.asyncio
+async def test_get_projects_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = PROJECTS_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await get_projects(context=tool_context)
+
+ assert result == PROJECTS_PARSED_RESPONSE
+
+ httpx_mock.get.assert_called_once()
diff --git a/toolkits/community/todoist/tests/test_tasks.py b/toolkits/community/todoist/tests/test_tasks.py
new file mode 100644
index 000000000..fff4cd353
--- /dev/null
+++ b/toolkits/community/todoist/tests/test_tasks.py
@@ -0,0 +1,556 @@
+from unittest.mock import MagicMock
+
+import httpx
+import pytest
+from arcade_tdk.errors import ToolExecutionError
+from arcade_todoist.errors import ProjectNotFoundError
+from arcade_todoist.tools.tasks import (
+ _close_task_by_task_id,
+ _create_task_in_project,
+ _delete_task_by_task_id,
+ _get_tasks_by_project_id,
+ close_task,
+ create_task,
+ delete_task,
+ get_all_tasks,
+ get_tasks_by_filter,
+ get_tasks_by_project,
+)
+
+from tests.fakes import (
+ CLOSE_TASK_SUCCESS_RESPONSE,
+ CREATE_TASK_API_RESPONSE,
+ CREATE_TASK_PARSED_RESPONSE,
+ CUSTOM_LIMIT_TASK_API_RESPONSE,
+ CUSTOM_LIMIT_TASK_PARSED_RESPONSE,
+ DELETE_TASK_SUCCESS_RESPONSE,
+ EMPTY_TASKS_API_RESPONSE,
+ EMPTY_TASKS_PARSED_RESPONSE,
+ PAGINATED_TASKS_API_RESPONSE,
+ PAGINATED_TASKS_PARSED_RESPONSE,
+ PROJECT_SPECIFIC_TASKS_API_RESPONSE,
+ PROJECT_SPECIFIC_TASKS_PARSED_RESPONSE,
+ PROJECTS_PARSED_RESPONSE,
+ SINGLE_TASK_API_RESPONSE,
+ SINGLE_TASK_PARSED_RESPONSE,
+ TASKS_WITH_PAGINATION_API_RESPONSE,
+ TASKS_WITH_PAGINATION_PARSED_RESPONSE,
+)
+
+
+@pytest.mark.asyncio
+async def test_get_all_tasks_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = SINGLE_TASK_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await get_all_tasks(context=tool_context)
+
+ assert result == SINGLE_TASK_PARSED_RESPONSE
+
+ httpx_mock.get.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_get_all_tasks_failure(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 404
+ mock_response.json.return_value = {}
+ mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
+ message="Not Found", request=httpx.Request("GET", "http://test.com"), response=mock_response
+ )
+ httpx_mock.get.return_value = mock_response
+
+ with pytest.raises(ToolExecutionError):
+ await get_all_tasks(context=tool_context)
+
+ httpx_mock.get.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_create_task_in_project_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = CREATE_TASK_API_RESPONSE
+ httpx_mock.post.return_value = mock_response
+
+ result = await _create_task_in_project(
+ context=tool_context, description="New Task", project_id="project_123"
+ )
+
+ assert result == CREATE_TASK_PARSED_RESPONSE
+
+ httpx_mock.post.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_create_task_in_project_failure(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 400
+ mock_response.json.return_value = {}
+ mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
+ message="Bad Request",
+ request=httpx.Request("POST", "http://test.com"),
+ response=mock_response,
+ )
+ httpx_mock.post.return_value = mock_response
+
+ with pytest.raises(httpx.HTTPStatusError):
+ await _create_task_in_project(
+ context=tool_context, description="New Task", project_id="project_123"
+ )
+
+ httpx_mock.post.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_close_task_by_task_id_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = {}
+ httpx_mock.post.return_value = mock_response
+
+ result = await _close_task_by_task_id(context=tool_context, task_id="task_123")
+
+ assert result == CLOSE_TASK_SUCCESS_RESPONSE
+
+ httpx_mock.post.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_close_task_by_task_id_failure(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 404
+ mock_response.json.return_value = {}
+ mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
+ message="Not Found",
+ request=httpx.Request("POST", "http://test.com"),
+ response=mock_response,
+ )
+ httpx_mock.post.return_value = mock_response
+
+ with pytest.raises(httpx.HTTPStatusError):
+ await _close_task_by_task_id(context=tool_context, task_id="task_123")
+
+ httpx_mock.post.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_delete_task_by_task_id_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = {}
+ httpx_mock.delete.return_value = mock_response
+
+ result = await _delete_task_by_task_id(context=tool_context, task_id="task_123")
+
+ assert result == DELETE_TASK_SUCCESS_RESPONSE
+
+ httpx_mock.delete.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_delete_task_by_task_id_failure(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 404
+ mock_response.json.return_value = {}
+ mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
+ message="Not Found",
+ request=httpx.Request("DELETE", "http://test.com"),
+ response=mock_response,
+ )
+ httpx_mock.delete.return_value = mock_response
+
+ with pytest.raises(httpx.HTTPStatusError):
+ await _delete_task_by_task_id(context=tool_context, task_id="task_123")
+
+ httpx_mock.delete.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_create_task_success_exact_project_match(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ mock_create_task_in_project = mocker.patch("arcade_todoist.tools.tasks._create_task_in_project")
+ mock_create_task_in_project.return_value = CREATE_TASK_PARSED_RESPONSE
+
+ result = await create_task(context=tool_context, description="New Task", project="Work Project")
+
+ assert result == CREATE_TASK_PARSED_RESPONSE
+ mock_get_projects.assert_called_once_with(context=tool_context)
+ mock_create_task_in_project.assert_called_once_with(
+ context=tool_context, description="New Task", project_id="project_123"
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_task_success_no_project(tool_context, mocker) -> None:
+ mock_create_task_in_project = mocker.patch("arcade_todoist.tools.tasks._create_task_in_project")
+ mock_create_task_in_project.return_value = CREATE_TASK_PARSED_RESPONSE
+
+ result = await create_task(context=tool_context, description="New Task", project=None)
+
+ assert result == CREATE_TASK_PARSED_RESPONSE
+ mock_create_task_in_project.assert_called_once_with(
+ context=tool_context, description="New Task", project_id=None
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_task_project_not_found(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ with pytest.raises(ProjectNotFoundError) as exc_info:
+ await create_task(
+ context=tool_context, description="New Task", project="Nonexistent Project"
+ )
+
+ assert "Project not found" in str(exc_info.value)
+
+
+@pytest.mark.asyncio
+async def test_close_task_success_with_id(tool_context, mocker) -> None:
+ mock_close_task_by_task_id = mocker.patch("arcade_todoist.tools.tasks._close_task_by_task_id")
+ mock_close_task_by_task_id.return_value = CLOSE_TASK_SUCCESS_RESPONSE
+
+ result = await close_task(context=tool_context, task_id="1")
+
+ assert result == CLOSE_TASK_SUCCESS_RESPONSE
+ mock_close_task_by_task_id.assert_called_once_with(context=tool_context, task_id="1")
+
+
+@pytest.mark.asyncio
+async def test_delete_task_success_with_id(tool_context, mocker) -> None:
+ mock_delete_task_by_task_id = mocker.patch("arcade_todoist.tools.tasks._delete_task_by_task_id")
+ mock_delete_task_by_task_id.return_value = DELETE_TASK_SUCCESS_RESPONSE
+
+ result = await delete_task(context=tool_context, task_id="3")
+
+ assert result == DELETE_TASK_SUCCESS_RESPONSE
+ mock_delete_task_by_task_id.assert_called_once_with(context=tool_context, task_id="3")
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_id_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = PROJECT_SPECIFIC_TASKS_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await _get_tasks_by_project_id(context=tool_context, project_id="project_123")
+
+ assert result == PROJECT_SPECIFIC_TASKS_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"limit": 50, "project_id": "project_123"}
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_id_empty_result(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = EMPTY_TASKS_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await _get_tasks_by_project_id(context=tool_context, project_id="empty_project")
+
+ assert result == EMPTY_TASKS_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"limit": 50, "project_id": "empty_project"}
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_id_failure(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 404
+ mock_response.json.return_value = {}
+ mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
+ message="Not Found", request=httpx.Request("GET", "http://test.com"), response=mock_response
+ )
+ httpx_mock.get.return_value = mock_response
+
+ with pytest.raises(httpx.HTTPStatusError):
+ await _get_tasks_by_project_id(context=tool_context, project_id="project_123")
+
+ httpx_mock.get.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_name_success(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ mock_get_tasks_by_project_id = mocker.patch(
+ "arcade_todoist.tools.tasks._get_tasks_by_project_id"
+ )
+ mock_get_tasks_by_project_id.return_value = PROJECT_SPECIFIC_TASKS_PARSED_RESPONSE
+
+ result = await get_tasks_by_project(context=tool_context, project="Work Project")
+
+ assert result == PROJECT_SPECIFIC_TASKS_PARSED_RESPONSE
+ mock_get_projects.assert_called_once_with(context=tool_context)
+ mock_get_tasks_by_project_id.assert_called_once_with(
+ context=tool_context, project_id="project_123", limit=50, next_page_token=None
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_name_not_found(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ with pytest.raises(ProjectNotFoundError) as exc_info:
+ await get_tasks_by_project(context=tool_context, project="Nonexistent Project")
+
+ assert "Project not found" in str(exc_info.value)
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_name_partial_match(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ with pytest.raises(ProjectNotFoundError) as exc_info:
+ await get_tasks_by_project(context=tool_context, project="Work")
+
+ assert "Project not found" in str(exc_info.value)
+
+
+@pytest.mark.asyncio
+async def test_get_all_tasks_with_custom_limit(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = SINGLE_TASK_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await get_all_tasks(context=tool_context, limit=25)
+
+ assert result == SINGLE_TASK_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"limit": 25}
+
+
+@pytest.mark.asyncio
+async def test_get_all_tasks_with_pagination(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = TASKS_WITH_PAGINATION_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await get_all_tasks(context=tool_context, next_page_token="page_token_123") # noqa: S106
+
+ assert result == TASKS_WITH_PAGINATION_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"limit": 50, "cursor": "page_token_123"}
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_id_with_custom_limit(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = CUSTOM_LIMIT_TASK_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await _get_tasks_by_project_id(
+ context=tool_context, project_id="project_123", limit=100
+ )
+
+ assert result == CUSTOM_LIMIT_TASK_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"limit": 100, "project_id": "project_123"}
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_id_with_pagination(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = PAGINATED_TASKS_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await _get_tasks_by_project_id(
+ context=tool_context,
+ project_id="project_123",
+ limit=25,
+ next_page_token="previous_page_token", # noqa: S106
+ )
+
+ assert result == PAGINATED_TASKS_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {
+ "limit": 25,
+ "cursor": "previous_page_token",
+ "project_id": "project_123",
+ }
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_name_with_pagination(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ mock_get_tasks_by_project_id = mocker.patch(
+ "arcade_todoist.tools.tasks._get_tasks_by_project_id"
+ )
+ mock_get_tasks_by_project_id.return_value = PAGINATED_TASKS_PARSED_RESPONSE
+
+ result = await get_tasks_by_project(
+ context=tool_context,
+ project="Work Project",
+ limit=10,
+ next_page_token="some_token", # noqa: S106
+ )
+
+ assert result == PAGINATED_TASKS_PARSED_RESPONSE
+ mock_get_projects.assert_called_once_with(context=tool_context)
+ mock_get_tasks_by_project_id.assert_called_once_with(
+ context=tool_context,
+ project_id="project_123",
+ limit=10,
+ next_page_token="some_token", # noqa: S106
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_with_id(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ mock_get_tasks_by_project_id = mocker.patch(
+ "arcade_todoist.tools.tasks._get_tasks_by_project_id"
+ )
+ mock_get_tasks_by_project_id.return_value = CUSTOM_LIMIT_TASK_PARSED_RESPONSE
+
+ result = await get_tasks_by_project(context=tool_context, project="project_123")
+
+ assert result == CUSTOM_LIMIT_TASK_PARSED_RESPONSE
+ mock_get_projects.assert_called_once_with(context=tool_context)
+ mock_get_tasks_by_project_id.assert_called_once_with(
+ context=tool_context, project_id="project_123", limit=50, next_page_token=None
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_project_with_name(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ mock_get_tasks_by_project_id = mocker.patch(
+ "arcade_todoist.tools.tasks._get_tasks_by_project_id"
+ )
+ mock_get_tasks_by_project_id.return_value = CUSTOM_LIMIT_TASK_PARSED_RESPONSE
+
+ result = await get_tasks_by_project(context=tool_context, project="Work Project")
+
+ assert result == CUSTOM_LIMIT_TASK_PARSED_RESPONSE
+ mock_get_projects.assert_called_once_with(context=tool_context)
+ mock_get_tasks_by_project_id.assert_called_once_with(
+ context=tool_context, project_id="project_123", limit=50, next_page_token=None
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_task_with_project_id(tool_context, mocker) -> None:
+ mock_get_projects = mocker.patch("arcade_todoist.tools.projects.get_projects")
+ mock_get_projects.return_value = PROJECTS_PARSED_RESPONSE
+
+ mock_create_task_in_project = mocker.patch("arcade_todoist.tools.tasks._create_task_in_project")
+ mock_create_task_in_project.return_value = CREATE_TASK_PARSED_RESPONSE
+
+ result = await create_task(context=tool_context, description="New Task", project="project_123")
+
+ assert result == CREATE_TASK_PARSED_RESPONSE
+ mock_get_projects.assert_called_once_with(context=tool_context)
+ mock_create_task_in_project.assert_called_once_with(
+ context=tool_context, description="New Task", project_id="project_123"
+ )
+
+
+@pytest.mark.asyncio
+async def test_close_task_with_task_id(tool_context, mocker) -> None:
+ mock_close_task_by_task_id = mocker.patch("arcade_todoist.tools.tasks._close_task_by_task_id")
+ mock_close_task_by_task_id.return_value = CLOSE_TASK_SUCCESS_RESPONSE
+
+ result = await close_task(context=tool_context, task_id="1")
+
+ assert result == CLOSE_TASK_SUCCESS_RESPONSE
+ mock_close_task_by_task_id.assert_called_once_with(context=tool_context, task_id="1")
+
+
+@pytest.mark.asyncio
+async def test_delete_task_with_task_id(tool_context, mocker) -> None:
+ mock_delete_task_by_task_id = mocker.patch("arcade_todoist.tools.tasks._delete_task_by_task_id")
+ mock_delete_task_by_task_id.return_value = DELETE_TASK_SUCCESS_RESPONSE
+
+ result = await delete_task(context=tool_context, task_id="3")
+
+ assert result == DELETE_TASK_SUCCESS_RESPONSE
+ mock_delete_task_by_task_id.assert_called_once_with(context=tool_context, task_id="3")
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_filter_success(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = SINGLE_TASK_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await get_tasks_by_filter(context=tool_context, filter_query="today")
+
+ assert result == SINGLE_TASK_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"query": "today", "limit": 50}
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_filter_with_pagination(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 200
+ mock_response.json.return_value = TASKS_WITH_PAGINATION_API_RESPONSE
+ httpx_mock.get.return_value = mock_response
+
+ result = await get_tasks_by_filter(
+ context=tool_context,
+ filter_query="p1",
+ limit=25,
+ next_page_token="page_token_123", # noqa: S106
+ )
+
+ assert result == TASKS_WITH_PAGINATION_PARSED_RESPONSE
+ httpx_mock.get.assert_called_once()
+
+ call_args = httpx_mock.get.call_args
+ assert call_args[1]["params"] == {"query": "p1", "limit": 25, "cursor": "page_token_123"}
+
+
+@pytest.mark.asyncio
+async def test_get_tasks_by_filter_failure(tool_context, httpx_mock) -> None:
+ mock_response = MagicMock()
+ mock_response.status_code = 400
+ mock_response.json.return_value = {}
+ mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
+ message="Bad Request",
+ request=httpx.Request("GET", "http://test.com"),
+ response=mock_response,
+ )
+ httpx_mock.get.return_value = mock_response
+
+ with pytest.raises(ToolExecutionError):
+ await get_tasks_by_filter(context=tool_context, filter_query="invalid filter")
+
+ httpx_mock.get.assert_called_once()
diff --git a/toolkits/community/todoist/.pre-commit-config.yaml b/toolkits/community/todoist/.pre-commit-config.yaml
new file mode 100644
index 000000000..1aa88316d
--- /dev/null
+++ b/toolkits/community/todoist/.pre-commit-config.yaml
@@ -0,0 +1,18 @@
+files: ^.*/todoist/.*
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: "v4.4.0"
+ hooks:
+ - id: check-case-conflict
+ - id: check-merge-conflict
+ - id: check-toml
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.6.7
+ hooks:
+ - id: ruff
+ args: [--fix]
+ - id: ruff-format
diff --git a/toolkits/community/todoist/.ruff.toml b/toolkits/community/todoist/.ruff.toml
new file mode 100644
index 000000000..9519fe6c3
--- /dev/null
+++ b/toolkits/community/todoist/.ruff.toml
@@ -0,0 +1,44 @@
+target-version = "py310"
+line-length = 100
+fix = true
+
+[lint]
+select = [
+ # flake8-2020
+ "YTT",
+ # flake8-bandit
+ "S",
+ # flake8-bugbear
+ "B",
+ # flake8-builtins
+ "A",
+ # flake8-comprehensions
+ "C4",
+ # flake8-debugger
+ "T10",
+ # flake8-simplify
+ "SIM",
+ # isort
+ "I",
+ # mccabe
+ "C90",
+ # pycodestyle
+ "E", "W",
+ # pyflakes
+ "F",
+ # pygrep-hooks
+ "PGH",
+ # pyupgrade
+ "UP",
+ # ruff
+ "RUF",
+ # tryceratops
+ "TRY",
+]
+
+[lint.per-file-ignores]
+"**/tests/*" = ["S101"]
+
+[format]
+preview = true
+skip-magic-trailing-comma = false