Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,8 @@ pip install git+https://gitlab.com/meltano/tap-gitlab.git
"fetch_pipelines_extended": false,
"fetch_retried_jobs": false,
"fetch_group_variables": false,
"fetch_project_variables": false
"fetch_project_variables": false,
"fetch_bridges": false
}
```

Expand All @@ -99,6 +100,8 @@ pip install git+https://gitlab.com/meltano/tap-gitlab.git

If `fetch_project_variables` is true (defaults to false), then Project-level CI/CD variables will be retrieved for each available / specified project. This feature is treated as an opt-in to prevent users from accidentally extracting any potential secrets stored as Project-level CI/CD variables.

If `fetch_bridges` is true (defaults to false), then pipelines triggered from other pipelines (which GitLab calls "bridges") will be retrieved.

4. [Optional] Create the initial state file

You can provide a JSON file that contains a date for the API endpoints
Expand Down
43 changes: 42 additions & 1 deletion tap_gitlab/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@
'fetch_retried_jobs': False,
'fetch_group_variables': False,
'fetch_project_variables': False,
# NOTE: 'bridges' is GitLab's name for jobs that trigger downstream pipelines
'fetch_bridges': False,
}
STATE = {}
CATALOG = None
Expand Down Expand Up @@ -205,7 +207,13 @@ def load_schema(entity):
'schema': load_schema('group_variables'),
'key_properties': ['group_id', 'key'],
'replication_method': 'FULL_TABLE',
}
},
"bridges": {
"url": "/projects/{id}/pipelines/{secondary_id}/bridges",
"schema": load_schema("bridges"),
"key_properties": ["id"],
"replication_method": "FULL_TABLE",
},
}

ULTIMATE_RESOURCES = ("epics", "epic_issues")
Expand All @@ -214,6 +222,7 @@ def load_schema(entity):
'pipelines_extended',
'group_variables',
'project_variables',
'bridges',
)

LOGGER = singer.get_logger()
Expand Down Expand Up @@ -733,8 +742,39 @@ def sync_pipelines(project):
# it's pipeline's updated_at is changed.
sync_jobs(project, transformed_row)

sync_bridges(project, transformed_row)

singer.write_state(STATE)


def sync_bridges(project, pipeline):
    """Sync the bridges (trigger jobs) of a single pipeline.

    Bridges are GitLab's name for jobs that trigger downstream pipelines.
    For each bridge record written, the downstream pipeline (when present)
    also has its extended details and jobs synced.

    No-op unless the 'bridges' stream is selected in the catalog
    (opt-in via the `fetch_bridges` config flag).

    Args:
        project: dict with at least an "id" key for the GitLab project.
        pipeline: dict with at least an "id" key for the parent pipeline.
    """
    entity = "bridges"
    stream = CATALOG.get_stream(entity)
    if not stream.is_selected():
        return

    mdata = metadata.to_map(stream.metadata)
    url = get_url(entity=entity, id=project["id"], secondary_id=pipeline["id"])

    with Transformer(pre_hook=format_timestamp) as transformer:
        for row in gen_request(url):
            row["project_id"] = project["id"]
            transformed_row = transformer.transform(
                row, RESOURCES[entity]["schema"], mdata
            )

            singer.write_record(entity, transformed_row, time_extracted=utils.now())

            # downstream_pipeline may be null (bridge did not create one);
            # use .get() so a field dropped by the transform/field-selection
            # does not raise KeyError.
            downstream_pipeline = transformed_row.get("downstream_pipeline")
            if downstream_pipeline:
                # Sync additional details of the downstream pipeline using the
                # get-a-single-pipeline endpoint:
                # https://docs.gitlab.com/ee/api/pipelines.html#get-a-single-pipeline
                sync_pipelines_extended(project, downstream_pipeline)

                # Sync all jobs attached to the downstream pipeline.
                # Although jobs cannot be queried by updated_at, if a job
                # changes, its pipeline's updated_at is changed.
                sync_jobs(project, downstream_pipeline)

def sync_pipelines_extended(project, pipeline):
entity = "pipelines_extended"
stream = CATALOG.get_stream(entity)
Expand Down Expand Up @@ -938,6 +978,7 @@ def main_impl():
CONFIG['fetch_retried_jobs'] = truthy(CONFIG['fetch_retried_jobs'])
CONFIG['fetch_group_variables'] = truthy(CONFIG['fetch_group_variables'])
CONFIG['fetch_project_variables'] = truthy(CONFIG['fetch_project_variables'])
CONFIG['fetch_bridges'] = truthy(CONFIG['fetch_bridges'])

if '/api/' not in CONFIG['api_url']:
CONFIG['api_url'] += '/api/v4'
Expand Down
Loading