|
| 1 | +from pathlib import Path |
| 2 | +from typing import List, Mapping, Optional |
| 3 | +import functools |
| 4 | +import json |
| 5 | +import operator |
| 6 | +import shutil |
| 7 | + |
| 8 | +import requests |
| 9 | + |
| 10 | + |
class APIHelperError(Exception):
    """Base class for all errors raised by this API helper module."""
| 13 | + |
class DataUnavailable(APIHelperError):
    """Raised when the requested data is missing, not ready, or otherwise unobtainable."""
| 16 | + |
class InvalidResponse(APIHelperError):
    """Raised when an API response is well-formed HTTP but violates our expectations."""
| 19 | + |
class FileAlreadyExists(APIHelperError):
    """Raised when a download would overwrite an existing file without permission."""
| 22 | + |
| 23 | + |
def query_api(url: str, params: Mapping[str, str], debug_requests: bool = False) -> dict:
    """Perform a GET request against a JSON API and return the decoded body.

    :param url: Full endpoint URL.
    :param params: Query-string parameters to send with the request.
    :param debug_requests: When True, print the request and the (pretty-printed) response.
    :raises requests.HTTPError: If the server responds with an error status code.
    """
    if debug_requests:
        print(f'REQUEST URL: {url}')
        if len(params) > 0:
            print(f'QUERY: {params}')

    response = requests.get(url, params=params)
    response.raise_for_status()

    # Decode the body exactly once and reuse it (the original parsed it twice:
    # once for debug printing and again for the return value).
    json_response = response.json()

    if debug_requests:
        print('========== RESPONSE ==========')
        if json_response is not None:
            print(json.dumps(json_response, indent=4))
        else:
            # The body was literally JSON `null`; show the raw payload instead.
            print(response.content)
        print('==============================')

    return json_response
| 43 | + |
| 44 | + |
def download_file(url: str, target_path: Path, overwrite: bool = False):
    """Download the resource at ``url`` and store it at ``target_path``.

    :param url: Location of the file to download.
    :param target_path: Destination path on the local filesystem.
    :param overwrite: When False, refuse to replace an existing file.
    :raises FileAlreadyExists: If ``target_path`` exists and ``overwrite`` is False.
    :raises requests.HTTPError: If the server responds with an error status code.
    """
    if not overwrite and target_path.exists():
        raise FileAlreadyExists(f"Refusing to overwrite existing file: '{target_path}'.")

    with requests.get(url, stream=True) as response:
        # Bug fix: without this, an HTTP error page (404/500/...) would be
        # silently written to disk as if it were the requested file.
        response.raise_for_status()
        # Bug fix: `.raw` bypasses requests' transparent decompression, so a
        # gzip/deflate-encoded transfer would be saved still compressed unless
        # we ask urllib3 to decode it while copying.
        response.raw.decode_content = True
        with open(target_path, 'wb') as target_file:
            shutil.copyfileobj(response.raw, target_file)
| 52 | + |
| 53 | + |
class Github:
    """Minimal client for the GitHub REST API, scoped to a single repository."""

    BASE_URL = 'https://api.github.com'

    project_slug: str      # "<owner>/<repo>" identifier of the repository
    debug_requests: bool   # forwarded to query_api() to enable request tracing

    def __init__(self, project_slug: str, debug_requests: bool):
        self.project_slug = project_slug
        self.debug_requests = debug_requests

    def pull_request(self, pr_id: int) -> dict:
        """Fetch and return the metadata of a single pull request."""
        endpoint = f'{self.BASE_URL}/repos/{self.project_slug}/pulls/{pr_id}'
        return query_api(endpoint, {}, self.debug_requests)
| 70 | + |
| 71 | + |
class CircleCI:
    """Minimal client for the CircleCI API v2, scoped to a single GitHub project."""

    # None might be a more logical default for max_pages but in most cases we'll actually
    # want some limit to prevent flooding the API with requests in case of a bug.
    DEFAULT_MAX_PAGES = 10
    BASE_URL = 'https://circleci.com/api/v2'

    project_slug: str      # "<owner>/<repo>" identifier of the repository
    debug_requests: bool   # forwarded to query_api() to enable request tracing

    def __init__(self, project_slug: str, debug_requests: bool):
        self.project_slug = project_slug
        self.debug_requests = debug_requests

    def paginated_query_api_iterator(self, url: str, params: Mapping[str, str], max_pages: Optional[int] = DEFAULT_MAX_PAGES):
        """Yield the ``items`` list of each page of a paginated endpoint.

        Stops after ``max_pages`` pages (no limit when None) or as soon as the
        API reports that there is no next page.

        :param url: Full endpoint URL (without pagination parameters).
        :param params: Query parameters; must not contain 'page-token', which
            is managed internally by this method.
        :param max_pages: Upper bound on the number of requests issued.
        """
        assert 'page-token' not in params

        page_count = 0
        next_page_token = None
        # Annotation fix: max_pages is Optional[int] — the loop condition below
        # explicitly supports None meaning "no limit".
        while max_pages is None or page_count < max_pages:
            if next_page_token is not None:
                # Copy rather than mutate: the caller's mapping stays untouched.
                params = {**params, 'page-token': next_page_token}

            json_response = query_api(url, params, self.debug_requests)

            yield json_response['items']
            next_page_token = json_response['next_page_token']
            page_count += 1
            if next_page_token is None:
                break

    def paginated_query_api(self, url: str, params: Mapping[str, str], max_pages: Optional[int] = DEFAULT_MAX_PAGES):
        """Return the concatenation of ``items`` from up to ``max_pages`` pages."""
        return functools.reduce(operator.add, self.paginated_query_api_iterator(url, params, max_pages), [])

    def pipelines(
        self,
        branch: Optional[str] = None,
        commit_hash: Optional[str] = None,
        excluded_trigger_types: Optional[List[str]] = None,
    ) -> List[dict]:
        """Return the first batch of pipelines matching the given filters.

        Pages are scanned in API order; the first page that yields at least one
        match is returned, so at most one page's worth of matches comes back.

        :param branch: Only pipelines for this branch (server-side filter).
        :param commit_hash: Only pipelines for this revision (client-side filter).
        :param excluded_trigger_types: Skip pipelines whose trigger type is listed.
        """
        if excluded_trigger_types is None:
            excluded_trigger_types = []

        for items in self.paginated_query_api_iterator(
            f'{self.BASE_URL}/project/gh/{self.project_slug}/pipeline',
            {'branch': branch} if branch is not None else {},
            # Consistency fix: use the class constant instead of a hard-coded 10.
            max_pages=self.DEFAULT_MAX_PAGES,
        ):
            matching_items = [
                item
                for item in items
                if (
                    (commit_hash is None or item['vcs']['revision'] == commit_hash) and
                    item['trigger']['type'] not in excluded_trigger_types
                )
            ]
            if len(matching_items) > 0:
                return matching_items

        return []

    def workflows(self, pipeline_id: str) -> dict:
        """Return all workflow items of the given pipeline."""
        return self.paginated_query_api(f'{self.BASE_URL}/pipeline/{pipeline_id}/workflow', {})

    def jobs(self, workflow_id: str) -> Mapping[str, dict]:
        """Return the workflow's jobs, keyed by job name.

        :raises InvalidResponse: If two jobs in the workflow share a name.
        """
        items = self.paginated_query_api(f'{self.BASE_URL}/workflow/{workflow_id}/job', {})
        jobs_by_name = {job['name']: job for job in items}

        assert len(jobs_by_name) <= len(items)
        if len(jobs_by_name) < len(items):
            raise InvalidResponse("Job names in the workflow are not unique.")

        return jobs_by_name

    def job(self, workflow_id: str, name: str, require_success: bool = False) -> dict:
        """Return a single job of the workflow by name.

        :raises DataUnavailable: If the job is missing, or (when
            ``require_success`` is True) its status is not 'success'.
        """
        jobs = self.jobs(workflow_id)
        if name not in jobs:
            raise DataUnavailable(f"Job {name} is not present in the workflow.")

        if require_success and jobs[name]['status'] != 'success':
            raise DataUnavailable(
                f"Job {name} has failed or is still running. "
                f"Current status: {jobs[name]['status']}."
            )

        return jobs[name]

    def artifacts(self, job_number: int) -> Mapping[str, dict]:
        """Return the job's artifacts, keyed by artifact path.

        :raises InvalidResponse: If two artifacts of the job share a path.
        """
        items = self.paginated_query_api(f'{self.BASE_URL}/project/gh/{self.project_slug}/{job_number}/artifacts', {})
        artifacts_by_name = {artifact['path']: artifact for artifact in items}

        assert len(artifacts_by_name) <= len(items)
        if len(artifacts_by_name) < len(items):
            raise InvalidResponse("Names of artifacts attached to the job are not unique.")

        return artifacts_by_name

    @staticmethod
    def latest_item(items: List[dict]) -> Optional[dict]:
        """Return the item with the newest 'created_at' timestamp, or None if empty.

        Annotation fix: the parameter must be a list of item dicts (iterating a
        plain dict here would yield string keys, and ``item['created_at']``
        would fail), and the result is None for an empty input.
        """
        sorted_items = sorted(items, key=lambda item: item['created_at'], reverse=True)
        return sorted_items[0] if len(sorted_items) > 0 else None
0 commit comments