diff --git a/dagster/Pipfile b/dagster/Pipfile
new file mode 100644
index 0000000..c398b0d
--- /dev/null
+++ b/dagster/Pipfile
@@ -0,0 +1,11 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+
+[dev-packages]
+
+[requires]
+python_version = "3.10"
diff --git a/dagster/readme.md b/dagster/readme.md
new file mode 100644
index 0000000..0bd6d08
--- /dev/null
+++ b/dagster/readme.md
@@ -0,0 +1,68 @@
+
+# Dagster Orchestration Layer
+
+## Getting started
+Set up a virtual environment and install the dependencies:
+```bash
+cd stargazer
+pip install -e ".[dev]"
+```
+
+If you try to kick off a run immediately, it will fail, as there is no source data to ingest/transform, nor is there an active Airbyte connection. To get everything set up properly, read on.
+
+## Set up local Postgres
+
+We'll use a local Postgres instance as the destination for our data. Think of the "destination" as a data warehouse (something like Snowflake).
+
+To get a Postgres instance with the required source and destination databases running on your machine, run:
+
+```bash
+docker pull postgres
+docker run --name local-postgres -p 5432:5432 -e POSTGRES_PASSWORD=password -d postgres
+```
+
+## Set up Airbyte
+
+Next, get Airbyte running locally. The full instructions can be found [here](https://docs.airbyte.com/deploying-airbyte/local-deployment), but if you just want to run some commands (in a separate terminal):
+
+```bash
+git clone https://github.com/airbytehq/airbyte.git
+cd airbyte
+docker-compose up
+```
+
+Once you've done this, you should be able to go to http://localhost:8000 and see Airbyte's UI.
+
+## Set up data and connections
+
+Check what the reconciler would create or change in your Airbyte instance:
+
+```bash
+dagster-me check --module assets_modern_data_stack.assets.stargazer:airbyte_reconciler
+```
+
+Then apply it, which creates the GitHub source, the Postgres destination, and the connection between them:
+
+```bash
+dagster-me apply --module assets_modern_data_stack.assets.stargazer:airbyte_reconciler
+```
+
+➡️ Make sure the environment variable `AIRBYTE_PASSWORD` is set on your machine (the default password is `password`), and [create](https://github.com/settings/tokens) a token `AIRBYTE_PERSONAL_GITHUB_TOKEN` for fetching the stargazers from the public repositories.
+
+## Start Dagit, the Dagster UI
+
+To start the Dagster UI, run:
+```bash
+dagit
+```
+
+You'll see the Airbyte and dbt assets that this demo creates automatically.
+
+Click "Materialize" inside Dagit to sync the Airbyte connection and run dbt.
+
+
+## Start the BI Dashboard with Metabase
+
+Start it in a separate shell and follow the [Metabase readme](../../visualization/metabase/readme.md).
+
+
+See the step-by-step guide on the [Airbyte blog](https://airbyte.com/blog/).
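+For quick reference, here is a minimal shell setup for the environment variables used throughout this demo, assuming the local defaults above (the token value is a placeholder):
+
+```bash
+# Airbyte UI/API password (default for a local docker-compose install)
+export AIRBYTE_PASSWORD=password
+# GitHub personal access token for the Airbyte GitHub source (placeholder value)
+export AIRBYTE_PERSONAL_GITHUB_TOKEN=<your-github-token>
+# Password of the local Postgres destination started above
+export POSTGRES_PASSWORD=password
+```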
+
+
+
+
diff --git a/dagster/stargazer/assets_modern_data_stack/__init__.py b/dagster/stargazer/assets_modern_data_stack/__init__.py
new file mode 100644
index 0000000..db588df
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack/__init__.py
@@ -0,0 +1 @@
+from .repository import assets_modern_data_stack
diff --git a/dagster/stargazer/assets_modern_data_stack/assets/__init__.py b/dagster/stargazer/assets_modern_data_stack/assets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dagster/stargazer/assets_modern_data_stack/assets/stargazer.py b/dagster/stargazer/assets_modern_data_stack/assets/stargazer.py
new file mode 100644
index 0000000..c4aeeee
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack/assets/stargazer.py
@@ -0,0 +1,158 @@
+from dagster import asset
+from dagster_airbyte import (
+    AirbyteManagedElementReconciler,
+    airbyte_resource,
+    AirbyteConnection,
+    AirbyteSyncMode,
+    load_assets_from_connections,
+)
+from dagster_airbyte.managed.generated.sources import GithubSource
+from dagster_airbyte.managed.generated.destinations import (
+    LocalJsonDestination,
+    PostgresDestination,
+)
+from typing import List
+from dagster_dbt import load_assets_from_dbt_project
+
+
+from bs4 import BeautifulSoup
+import os
+import requests
+
+import asyncio
+import aiohttp
+from ..utils.constants import DBT_PROJECT_DIR
+
+
+AIRBYTE_PERSONAL_GITHUB_TOKEN = os.environ.get(
+    "AIRBYTE_PERSONAL_GITHUB_TOKEN", "please-set-your-token"
+)
+POSTGRES_PASSWORD = os.environ.get("POSTGRES_PASSWORD", "please-set-your-password")
+
+
+airbyte_instance = airbyte_resource.configured(
+    {
+        "host": "localhost",
+        "port": "8000",
+        "username": "airbyte",
+        "password": {"env": "AIRBYTE_PASSWORD"},
+    }
+)
+# two other possibilities to initialize the airbyte instance:
+# airbyte_assets = load_assets_from_airbyte_project(
+#     project_dir="../../../../airbyte/test",
+# )
+
+# airbyte_assets = with_resources(
+#     [load_assets_from_airbyte_project(project_dir="path/to/airbyte/project")],
+#     {"airbyte": airbyte_instance},
+# )
+
+
+async def get(url, session):
+    try:
+        # return the url only if the request succeeds with status 200
+        async with session.get(url) as response:
+            if response.status == 200:
+                return url
+            else:
+                return None
+
+    except Exception as e:
+        print("Unable to get url {} due to {}.".format(url, e.__class__))
+
+
+async def check_websites_exists(urls) -> List[str]:
+    async with aiohttp.ClientSession() as session:
+        # fetch all urls concurrently and keep only the ones that resolved
+        tasks = [get(url, session) for url in urls]
+        results = await asyncio.gather(*tasks)
+        results = [result for result in results if result is not None]
+        return results
+        # print("Finalized all. Return is a list of len {} outputs.".format(len(results)))
+
+
+def get_awesome_repo_list() -> str:
+    url = "https://github.com/igorbarinov/awesome-data-engineering"
+    html = requests.get(url)
+    soup = BeautifulSoup(html.text, "html.parser")
+    # collect all links that point at github.com
+    links = [
+        link.get("href")
+        for link in soup.find_all("a")
+        if link.get("href", "").startswith("https://github.com")
+    ]
+    # remove links that start with the awesome-list url itself or end with github.com
+    links = [
+        link
+        for link in links
+        if not link.startswith(url) and not link.endswith("github.com")
+    ]
+    # strip a trailing slash if present
+    links = [link[:-1] if link.endswith("/") else link for link in links]
+    # keep only links of the form https://github.com/<org>/<repo>
+    links = [link for link in links if len(link.split("/")) == 5]
+    # check which links still exist, in parallel, to save time
+    existing_links = asyncio.run(check_websites_exists(links))
+    # remove `https://github.com/` from the links
+    links = [link.replace("https://github.com/", "") for link in existing_links]
+
+    # Airbyte runs into timeouts when checking too many repos, so the list is
+    # limited here to make this demo work for you
+    links = links[0:10]
+
+    # return the links as a single blank-space-separated string
+    return " ".join(links)
+
+
+gh_awesome_de_list_source = GithubSource(
+    name="gh_awesome_de_list",
+    credentials=GithubSource.PATCredentials(AIRBYTE_PERSONAL_GITHUB_TOKEN),
+    start_date="2020-01-01T00:00:00Z",
+    repository=get_awesome_repo_list(),  # e.g. "prometheus/haproxy_exporter"
+    page_size_for_large_streams=100,
+)
+
+postgres_destination = PostgresDestination(
+    name="postgres",
+    host="localhost",
+    port=5432,
+    database="postgres",
+    schema="public",
+    username="postgres",
+    password=POSTGRES_PASSWORD,
+    ssl_mode=PostgresDestination.Disable(),
+)
+
+stargazer_connection = AirbyteConnection(
+    name="fetch_stargazer",
+    source=gh_awesome_de_list_source,
+    destination=postgres_destination,
+    stream_config={"stargazers": AirbyteSyncMode.incremental_append_dedup()},
+    normalize_data=True,
+)
+
+airbyte_reconciler = AirbyteManagedElementReconciler(
+    airbyte=airbyte_instance,
+    connections=[stargazer_connection],
+)
+
+# load Airbyte assets from the pythonic connection definition above
+airbyte_assets = load_assets_from_connections(
+    airbyte=airbyte_instance,
+    connections=[stargazer_connection],
+    key_prefix=["postgres"],
+)
+
+# prepare assets based on the existing dbt project
+dbt_assets = load_assets_from_dbt_project(
+    project_dir=DBT_PROJECT_DIR, io_manager_key="db_io_manager", key_prefix="postgres"
+)
+
+
+# @asset(
+#     description="The metabase dashboard where the stargazers are visualized",
+#     metadata={"dashboard_url": "http://localhost:3000/dashboard/1-airbyte-sync-status"},
+# )
+# def metabase_dashboard(mart_gh_cumulative):
+#     return "test"
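A quick way to sanity-check the scraping helpers in `stargazer.py` outside Dagster is a throwaway script like the sketch below (hypothetical, not part of the PR; importing the module also builds the Airbyte and dbt definitions, so it needs the full dev install plus the environment variables from the readme):

```python
# Hypothetical smoke test for the scraping helpers in stargazer.py.
import asyncio

from assets_modern_data_stack.assets.stargazer import (
    check_websites_exists,
    get_awesome_repo_list,
)

# Returns a blank-space-separated string of "org/repo" slugs, capped at 10.
repos = get_awesome_repo_list()
print(repos)

# The async checker can also be driven directly with full URLs.
urls = ["https://github.com/" + slug for slug in repos.split()]
live = asyncio.run(check_websites_exists(urls))
print(f"{len(live)} of {len(urls)} repos are reachable")
```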
diff --git a/dagster/stargazer/assets_modern_data_stack/db_io_manager.py b/dagster/stargazer/assets_modern_data_stack/db_io_manager.py
new file mode 100644
index 0000000..7cef125
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack/db_io_manager.py
@@ -0,0 +1,34 @@
+import pandas as pd
+
+from dagster import IOManager, io_manager
+
+
+class DbIOManager(IOManager):
+    """Sample IOManager to handle loading the contents of tables as pandas DataFrames.
+
+    Does not handle cases where data is written to different schemas for different outputs, and
+    uses the name of the asset key as the table name.
+    """
+
+    def __init__(self, con_string: str):
+        self._con = con_string
+
+    def handle_output(self, context, obj):
+        if isinstance(obj, pd.DataFrame):
+            # write df to table
+            obj.to_sql(name=context.asset_key.path[-1], con=self._con, if_exists="replace")
+        elif obj is None:
+            # dbt has already written the data to this table
+            pass
+        else:
+            raise ValueError(f"Unsupported object type {type(obj)} for DbIOManager.")
+
+    def load_input(self, context) -> pd.DataFrame:
+        """Load the contents of a table as a pandas DataFrame."""
+        model_name = context.asset_key.path[-1]
+        return pd.read_sql(f"SELECT * FROM {model_name}", con=self._con)
+
+
+@io_manager(config_schema={"con_string": str})
+def db_io_manager(context):
+    return DbIOManager(context.resource_config["con_string"])
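To illustrate the IO manager contract above (a hypothetical sketch, not part of the PR): Dagster normally invokes `handle_output` and `load_input` itself, but since `DbIOManager` only reads `context.asset_key.path`, the context can be stubbed to try it standalone against the local Postgres from the readme (SQLAlchemy must be installed for pandas' SQL helpers):

```python
# Hypothetical standalone exercise of DbIOManager; in practice Dagster calls these hooks.
import types

import pandas as pd

from assets_modern_data_stack.db_io_manager import DbIOManager

manager = DbIOManager(con_string="postgresql://postgres:password@localhost:5432/postgres")

# Stub only the slice of Dagster's context that DbIOManager touches.
ctx = types.SimpleNamespace(asset_key=types.SimpleNamespace(path=["postgres", "demo_table"]))

manager.handle_output(ctx, pd.DataFrame({"x": [1, 2, 3]}))  # writes table "demo_table"
print(manager.load_input(ctx))  # reads the same table back
```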
diff --git a/dagster/stargazer/assets_modern_data_stack/repository.py b/dagster/stargazer/assets_modern_data_stack/repository.py
new file mode 100644
index 0000000..e146b3a
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack/repository.py
@@ -0,0 +1,27 @@
+from dagster_airbyte import airbyte_resource
+from dagster_dbt import dbt_cli_resource
+
+from dagster import (
+    repository,
+    with_resources,
+)
+
+# from . import assets
+from .db_io_manager import db_io_manager
+from .utils.constants import DBT_CONFIG, POSTGRES_CONFIG
+from .assets.stargazer import airbyte_assets, dbt_assets  # , metabase_dashboard
+
+
+@repository
+def assets_modern_data_stack():
+    return [
+        airbyte_assets,
+        with_resources(
+            dbt_assets,  # load_assets_from_package_module(assets),
+            resource_defs={
+                "dbt": dbt_cli_resource.configured(DBT_CONFIG),
+                "db_io_manager": db_io_manager.configured(POSTGRES_CONFIG),
+            },
+        ),
+        # metabase_dashboard,
+    ]
diff --git a/dagster/stargazer/assets_modern_data_stack/utils/constants.py b/dagster/stargazer/assets_modern_data_stack/utils/constants.py
new file mode 100644
index 0000000..d1bea31
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack/utils/constants.py
@@ -0,0 +1,28 @@
+from dagster_postgres.utils import get_conn_string
+from dagster._utils import file_relative_path
+
+
+# AIRBYTE_CONFIG = {"host": "localhost", "port": "8000"}
+DBT_PROJECT_DIR = file_relative_path(__file__, "../../transformation_dbt")
+DBT_PROFILES_DIR = file_relative_path(__file__, "../../transformation_dbt/config")
+
+
+DBT_CONFIG = {"project_dir": DBT_PROJECT_DIR, "profiles_dir": DBT_PROFILES_DIR}
+
+# Source database used only by the optional utils/setup_airbyte.py helper script.
+# NOTE: these values are an assumption (the same local Postgres as the destination);
+# adjust them if your source database lives elsewhere.
+PG_SOURCE_CONFIG = {
+    "username": "postgres",
+    "password": "password",
+    "host": "localhost",
+    "port": 5432,
+    "database": "postgres",
+}
+
+PG_DESTINATION_CONFIG = {
+    "username": "postgres",
+    "password": "password",
+    "host": "localhost",
+    "port": 5432,
+    "database": "postgres",
+}
+
+POSTGRES_CONFIG = {
+    "con_string": get_conn_string(
+        username=PG_DESTINATION_CONFIG["username"],
+        password=PG_DESTINATION_CONFIG["password"],
+        hostname=PG_DESTINATION_CONFIG["host"],
+        port=str(PG_DESTINATION_CONFIG["port"]),
+        db_name=PG_DESTINATION_CONFIG["database"],
+    )
+}
diff --git a/dagster/stargazer/assets_modern_data_stack/utils/setup_airbyte.py b/dagster/stargazer/assets_modern_data_stack/utils/setup_airbyte.py
new file mode 100644
index 0000000..ffed26c
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack/utils/setup_airbyte.py
@@ -0,0 +1,155 @@
+# pylint: disable=print-call
+"""
+A basic script that will create tables in the source postgres database, then automatically
+create an Airbyte Connection between the source database and destination database.
+"""
+import random
+from typing import Any, Dict, Mapping
+
+import numpy as np
+import pandas as pd
+from dagster_airbyte import AirbyteResource
+from dagster_postgres.utils import get_conn_string
+
+import dagster._check as check
+
+from .constants import PG_DESTINATION_CONFIG, PG_SOURCE_CONFIG
+
+# configures the number of records for each table
+N_USERS = 100
+N_ORDERS = 10000
+
+
+def _safe_request(client: AirbyteResource, endpoint: str, data: Dict[str, object]) -> Mapping[str, Any]:
+    response = client.make_request(endpoint, data)
+    assert response, "Request returned null response"
+    return response
+
+
+def _create_ab_source(client: AirbyteResource) -> str:
+    workspace_id = _safe_request(client, "/workspaces/list", data={})["workspaces"][0]["workspaceId"]
+
+    # get the latest available Postgres source definition
+    source_defs = _safe_request(client, "/source_definitions/list_latest", data={"workspaceId": workspace_id})
+    postgres_definitions = [sd for sd in source_defs["sourceDefinitions"] if sd["name"] == "Postgres"]
+    if not postgres_definitions:
+        raise check.CheckError("Expected at least one Postgres source definition.")
+    source_definition_id = postgres_definitions[0]["sourceDefinitionId"]
+
+    # create Postgres source
+    source_id = _safe_request(
+        client,
+        "/sources/create",
+        data={
+            "sourceDefinitionId": source_definition_id,
+            "connectionConfiguration": dict(**PG_SOURCE_CONFIG, ssl=False),
+            "workspaceId": workspace_id,
+            "name": "Source Database",
+        },
+    )["sourceId"]
+    print(f"Created Airbyte Source: {source_id}")
+    return source_id
+
+
+def _create_ab_destination(client: AirbyteResource) -> str:
+    workspace_id = _safe_request(client, "/workspaces/list", data={})["workspaces"][0]["workspaceId"]
+
+    # get the latest available Postgres destination definition
+    destination_defs = _safe_request(client, "/destination_definitions/list_latest", data={"workspaceId": workspace_id})
+    postgres_definitions = [dd for dd in destination_defs["destinationDefinitions"] if dd["name"] == "Postgres"]
+    if not postgres_definitions:
+        raise check.CheckError("Expected at least one Postgres destination definition.")
+    destination_definition_id = postgres_definitions[0]["destinationDefinitionId"]
+
+    # create Postgres destination
+    destination_id = _safe_request(
+        client,
+        "/destinations/create",
+        data={
+            "destinationDefinitionId": destination_definition_id,
+            "connectionConfiguration": dict(**PG_DESTINATION_CONFIG, schema="public", ssl=False),
+            "workspaceId": workspace_id,
+            "name": "Destination Database",
+        },
+    )["destinationId"]
+    print(f"Created Airbyte Destination: {destination_id}")
+    return destination_id
+
+
+def setup_airbyte():
+    client = AirbyteResource(host="localhost", port="8000", use_https=False, request_timeout=50)
+    source_id = _create_ab_source(client)
+    destination_id = _create_ab_destination(client)
+
+    source_catalog = _safe_request(client, "/sources/discover_schema", data={"sourceId": source_id})["catalog"]
+
+    # create a connection between the new source and destination
+    connection_id = _safe_request(
+        client,
+        "/connections/create",
+        data={
+            "name": "Example Connection",
+            "sourceId": source_id,
+            "destinationId": destination_id,
+            "syncCatalog": source_catalog,
+            "prefix": "",
+            "status": "active",
+        },
+    )["connectionId"]
+
+    print(f"Created Airbyte Connection: {connection_id}")
+
+
+def _random_dates():
+    start = pd.to_datetime("2021-01-01")
+    end = pd.to_datetime("2022-01-01")
+
+    start_u = start.value // 10**9
+    end_u = end.value // 10**9
+
+    dist = np.random.standard_exponential(size=N_ORDERS) / 10
+
+    clipped_flipped_dist = 1 - dist[dist <= 1]
+    clipped_flipped_dist = clipped_flipped_dist[:-1]
+
+    if len(clipped_flipped_dist) < N_ORDERS:
+        clipped_flipped_dist = np.append(clipped_flipped_dist, clipped_flipped_dist[: N_ORDERS - len(clipped_flipped_dist)])
+
+    return pd.to_datetime((clipped_flipped_dist * (end_u - start_u)) + start_u, unit="s")
+
+
+def add_data():
+    con_string = get_conn_string(
+        username=PG_SOURCE_CONFIG["username"],
+        password=PG_SOURCE_CONFIG["password"],
+        hostname=PG_SOURCE_CONFIG["host"],
+        port=str(PG_SOURCE_CONFIG["port"]),
+        db_name=PG_SOURCE_CONFIG["database"],
+    )
+
+    users = pd.DataFrame(
+        {
+            "user_id": range(N_USERS),
+            "is_bot": [random.choice([True, False]) for _ in range(N_USERS)],
+        }
+    )
+
+    users.to_sql("users", con=con_string, if_exists="replace")
+    print("Created users table.")
+
+    orders = pd.DataFrame(
+        {
+            "user_id": [random.randint(0, N_USERS - 1) for _ in range(N_ORDERS)],
+            "order_time": _random_dates(),
+            "order_value": np.random.normal(loc=100.0, scale=15.0, size=N_ORDERS),
+        }
+    )
+
+    orders.to_sql("orders", con=con_string, if_exists="replace")
+    print("Created orders table.")
+
+
+if __name__ == "__main__":
+    add_data()
+    setup_airbyte()
diff --git a/dagster/stargazer/assets_modern_data_stack_tests/__init__.py b/dagster/stargazer/assets_modern_data_stack_tests/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack_tests/__init__.py
@@ -0,0 +1 @@
+
diff --git a/dagster/stargazer/assets_modern_data_stack_tests/test_assets.py b/dagster/stargazer/assets_modern_data_stack_tests/test_assets.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack_tests/test_assets.py
@@ -0,0 +1 @@
+
diff --git a/dagster/stargazer/assets_modern_data_stack_tests/test_repo_loads.py b/dagster/stargazer/assets_modern_data_stack_tests/test_repo_loads.py
new file mode 100644
index 0000000..df2d134
--- /dev/null
+++ b/dagster/stargazer/assets_modern_data_stack_tests/test_repo_loads.py
@@ -0,0 +1,11 @@
+from assets_modern_data_stack import assets_modern_data_stack
+
+
+def test_repo_can_load():
+    assets_modern_data_stack.load_all_definitions()
+
+    # The repo should expose the default "__ASSET_JOB" asset job plus the "all_assets" job
+    assert [job.name for job in assets_modern_data_stack.get_all_jobs()] == [
+        "__ASSET_JOB",
+        "all_assets",
+    ]
diff --git a/dagster/stargazer/setup.cfg b/dagster/stargazer/setup.cfg
new file mode 100644
index 0000000..2f1222e
--- /dev/null
+++ b/dagster/stargazer/setup.cfg
@@ -0,0 +1,2 @@
+[metadata]
+name = assets_modern_data_stack
diff --git a/dagster/stargazer/setup.py b/dagster/stargazer/setup.py
new file mode 100644
index 0000000..188e40b
--- /dev/null
+++ b/dagster/stargazer/setup.py
@@ -0,0 +1,23 @@
+from setuptools import find_packages, setup
+
+setup(
+    name="assets_modern_data_stack",
+    packages=find_packages(exclude=["assets_modern_data_stack_tests"]),
+    package_data={"assets_modern_data_stack": ["transformation_dbt/*"]},
+    install_requires=[
+        "dagster",
+        "dagster-airbyte",
+        "dagster-managed-elements",
+        "dagster-dbt",
+        "dagster-postgres",
+        "pandas",
+        "numpy",
+        "scipy",
+        "dbt-core",
+        "dbt-postgres",
+        "aiohttp",
+        "requests",
+        "beautifulsoup4",
+    ],
+    extras_require={"dev": ["dagit", "pytest", "black"]},
+)
diff --git a/dagster/stargazer/transformation_dbt/.gitignore b/dagster/stargazer/transformation_dbt/.gitignore
new file mode 100644
index
0000000..49f147c --- /dev/null +++ b/dagster/stargazer/transformation_dbt/.gitignore @@ -0,0 +1,4 @@ + +target/ +dbt_packages/ +logs/ diff --git a/dagster/stargazer/transformation_dbt/.sqlfluff b/dagster/stargazer/transformation_dbt/.sqlfluff new file mode 100644 index 0000000..291b94e --- /dev/null +++ b/dagster/stargazer/transformation_dbt/.sqlfluff @@ -0,0 +1,97 @@ +[sqlfluff] +templater = jinja +verbose = 3 +dialect = postgres + +[sqlfluff:templater:dbt] +profile = airbyte_warehouse + +[sqlfluff:indentation] +indented_joins = False +indented_using_on = True +template_blocks_indent = False + +[sqlfluff:templater] +unwrap_wrapped_queries = True + +[sqlfluff:templater:jinja] +apply_dbt_builtins = True +load_macros_from_path = macros +library_path = dbt_packages + +[sqlfluff:templater:jinja:macros] +# Macros provided as builtins for dbt projects +dbt_ref = {% macro ref(model_ref) %}{{model_ref}}{% endmacro %} +dbt_source = {% macro source(source_name, table) %}{{source_name}}_{{table}}{% endmacro %} +dbt_config = {% macro config() %}{% for k in kwargs %}{% endfor %}{% endmacro %} +dbt_var = {% macro var(variable, default='') %}item{% endmacro %} +dbt_is_incremental = {% macro is_incremental() %}True{% endmacro %} + +# Some rules can be configured directly from the config common to other rules. +[sqlfluff:rules] +tab_space_size = 4 +max_line_length = 150 +indent_unit = space +comma_style = trailing +allow_scalar = True +single_table_references = consistent +unquoted_identifiers_policy = all + +# Some rules have their own specific config. +[sqlfluff:rules:L007] # Keywords +operator_new_lines = after + +[sqlfluff:rules:L010] # Keywords +capitalisation_policy = consistent + +[sqlfluff:rules:L011] # Aliasing +aliasing = explicit + +[sqlfluff:rules:L012] # Aliasing +aliasing = explicit + +[sqlfluff:rules:L014] # Unquoted identifiers +extended_capitalisation_policy = lower +unquoted_identifiers_policy = all + +[sqlfluff:rules:L016] +ignore_comment_lines = False + +[sqlfluff:rules:L026] +force_enable = False + +[sqlfluff:rules:L028] +force_enable = False + +[sqlfluff:rules:L029] # Keyword identifiers +unquoted_identifiers_policy = aliases +quoted_identifiers_policy = none + +[sqlfluff:rules:L030] # Function names +capitalisation_policy = lower + +[sqlfluff:rules:L038] +select_clause_trailing_comma = forbid + +[sqlfluff:rules:L040] # Null & Boolean Literals +capitalisation_policy = lower + +[sqlfluff:rules:L042] +# By default, allow subqueries in from clauses, but not join clauses. +forbid_subquery_in = join + +[sqlfluff:rules:L047] # Consistent syntax to count all rows +prefer_count_1 = False +prefer_count_0 = False + +[sqlfluff:rules:L052] # Semi-colon formatting approach. +multiline_newline = False +require_final_semicolon = False + +[sqlfluff:rules:L054] # GROUP BY/ORDER BY column references. 
+group_by_and_order_by_style = consistent
+
+[sqlfluff:rules:L057]  # Special characters in identifiers
+unquoted_identifiers_policy = all
+quoted_identifiers_policy = all
+allow_space_in_identifier = False
diff --git a/dagster/stargazer/transformation_dbt/.sqlfluffignore b/dagster/stargazer/transformation_dbt/.sqlfluffignore
new file mode 100644
index 0000000..e14ee6c
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/.sqlfluffignore
@@ -0,0 +1,5 @@
+target/
+dbt_packages/
+macros/
+logs/
+.venv
diff --git a/dagster/stargazer/transformation_dbt/config/.user.yml b/dagster/stargazer/transformation_dbt/config/.user.yml
new file mode 100644
index 0000000..9ecfc60
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/config/.user.yml
@@ -0,0 +1 @@
+id: 79b07b4c-491a-4de7-be99-c3d4c8a0cb07
diff --git a/dagster/stargazer/transformation_dbt/config/profiles.yml b/dagster/stargazer/transformation_dbt/config/profiles.yml
new file mode 100644
index 0000000..f9f6596
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/config/profiles.yml
@@ -0,0 +1,12 @@
+stargazers:
+  target: dev
+  outputs:
+    dev:
+      type: postgres
+      threads: 1
+      host: localhost
+      port: 5432
+      user: postgres
+      pass: password
+      dbname: postgres
+      schema: public
diff --git a/dagster/stargazer/transformation_dbt/dbt_project.yml b/dagster/stargazer/transformation_dbt/dbt_project.yml
new file mode 100644
index 0000000..001f972
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/dbt_project.yml
@@ -0,0 +1,53 @@
+name: 'stargazers'
+version: '1.0'
+config-version: 2
+
+# This setting configures which "profile" dbt uses for this project. Profiles contain
+# database connection information; for this project they live in the config/profiles.yml file.
+profile: 'stargazers'
+
+# These configurations specify where dbt should look for different types of files.
+# The `model-paths` config, for example, states that models in this project can be
+# found in the "models/" directory.
+model-paths: ["models"]
+docs-paths: ["docs"]
+analysis-paths: ["analysis"]
+test-paths: ["tests"]
+seed-paths: ["seed"]
+macro-paths: ["macros"]
+
+target-path: "target"  # directory which will store compiled SQL files
+log-path: "logs"  # directory which will store dbt logs
+
+clean-targets:  # directories to be removed by `dbt clean`
+  - "target"
+  - "logs"
+
+quoting:
+  database: true
+  schema: true
+  identifier: true
+
+dispatch:
+  - macro_namespace: dbt_utils
+    search_order: ['airbyte_warehouse', 'dbt_utils']
+
+vars:
+  'dbt_date:time_zone': 'America/Los_Angeles'
+  get_max_dt_lookback_window: 4  # how many days to look back when computing the max dt for incremental runs
+
+# Using these configurations, you can enable or disable models, change how they
+# are materialized, and more!
+models:
+  stargazers:
+    # When the schema changes on models we are building, fail and rerun dbt with the --full-refresh flag explicitly
+    +on_schema_change: "fail"
+    +materialized: view
+    staging:
+      +tags: staging
+      +materialized: view
+      +schema: staging
+    core:
+      +materialized: view
+      +schema: core
+    mart:
+      +materialized: view
+      +schema: mart
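The dbt project below can also be run by hand, which is handy when iterating on the SQL models. A minimal sketch, assuming the local Postgres from the readme is running and you are inside `dagster/stargazer/transformation_dbt` (the bundled `config/profiles.yml` provides the `stargazers` profile referenced above):

```bash
# Pull the packages declared in packages.yml, then build the models.
dbt deps --profiles-dir config
dbt run --profiles-dir config
```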
diff --git a/dagster/stargazer/transformation_dbt/models/mart/mart_gh_cumulative.sql b/dagster/stargazer/transformation_dbt/models/mart/mart_gh_cumulative.sql
new file mode 100644
index 0000000..7f81b9a
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/models/mart/mart_gh_cumulative.sql
@@ -0,0 +1,25 @@
+with stargazers as (
+    select *
+    from {{ source('postgres', 'stargazers') }}
+),
+
+users as (
+    select *
+    from {{ source('postgres', 'stargazers_user') }}
+),
+
+per_month as (
+    select
+        repository,
+        to_char(s.starred_at, 'YYYY-MM') as starred_at_month,
+        count(*) as sum_stars
+    from stargazers s
+    left outer join users su on s.user_id = su.id
+    group by repository, to_char(s.starred_at, 'YYYY-MM')
+)
+
+select
+    repository,
+    starred_at_month,
+    sum_stars,
+    sum(s.sum_stars) over (partition by repository order by starred_at_month) as cumulative_stargazers
+from per_month s
+order by repository, starred_at_month
diff --git a/dagster/stargazer/transformation_dbt/models/mart/mart_gh_stargazer.sql b/dagster/stargazer/transformation_dbt/models/mart/mart_gh_stargazer.sql
new file mode 100644
index 0000000..9993b23
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/models/mart/mart_gh_stargazer.sql
@@ -0,0 +1,11 @@
+with stargazers as (
+    select *
+    from {{ source('postgres', 'stargazers') }}
+),
+
+users as (
+    select *
+    from {{ source('postgres', 'stargazers_user') }}
+)
+
+select s.repository, s.starred_at, su.login as user_login
+from stargazers s
+left outer join users su on s.user_id = su.id
diff --git a/dagster/stargazer/transformation_dbt/models/mart/schema.yml b/dagster/stargazer/transformation_dbt/models/mart/schema.yml
new file mode 100644
index 0000000..0f2c162
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/models/mart/schema.yml
@@ -0,0 +1,17 @@
+version: 2
+
+models:
+  - name: mart_gh_cumulative
+    description: Cumulative count of stars per repository and month
+    columns:
+      - name: repository
+      - name: starred_at_month
+      - name: sum_stars
+      - name: cumulative_stargazers
+
+  - name: mart_gh_stargazer
+    description: One record per star given on a repository, with the user login
+    columns:
+      - name: repository
+      - name: starred_at
+      - name: user_login
diff --git a/dagster/stargazer/transformation_dbt/models/staging/sources.yml b/dagster/stargazer/transformation_dbt/models/staging/sources.yml
new file mode 100644
index 0000000..5fdd58b
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/models/staging/sources.yml
@@ -0,0 +1,11 @@
+version: 2
+sources:
+- name: postgres
+  database: postgres
+  schema: public
+  tables:
+  - name: stargazers
+    identifier: _airbyte_raw_stargazers
+    description: Stars given on GitHub repositories
+  - name: stargazers_user
+    description: GitHub users that starred the GitHub repositories
diff --git a/dagster/stargazer/transformation_dbt/packages.yml b/dagster/stargazer/transformation_dbt/packages.yml
new file mode 100644
index 0000000..cca7fdf
--- /dev/null
+++ b/dagster/stargazer/transformation_dbt/packages.yml
@@ -0,0 +1,9 @@
+# Add dependencies here. These will get pulled in during the `dbt deps` process.
+ +packages: + - package: calogica/dbt_expectations + version: [">=0.5.0", "<0.6.0"] + - package: dbt-labs/codegen + version: 0.7.0 + - package: dbt-labs/dbt_external_tables + version: 0.8.0 \ No newline at end of file diff --git a/dagster/stargazer/transformation_dbt/profiles.yml b/dagster/stargazer/transformation_dbt/profiles.yml new file mode 100644 index 0000000..f9f6596 --- /dev/null +++ b/dagster/stargazer/transformation_dbt/profiles.yml @@ -0,0 +1,12 @@ +stargazers: + target: dev + outputs: + dev: + type: postgres + threads: 1 + host: localhost + port: 5432 + user: postgres + pass: password + dbname: postgres + schema: public diff --git a/dagster/stargazer/workspace.yaml b/dagster/stargazer/workspace.yaml new file mode 100644 index 0000000..bd953f6 --- /dev/null +++ b/dagster/stargazer/workspace.yaml @@ -0,0 +1,2 @@ +load_from: + - python_package: assets_modern_data_stack diff --git a/visualization/metabase/metabase.db.mv.db b/visualization/metabase/metabase.db.mv.db index 778f843..c872914 100644 Binary files a/visualization/metabase/metabase.db.mv.db and b/visualization/metabase/metabase.db.mv.db differ diff --git a/visualization/metabase/metabase.db.trace.db b/visualization/metabase/metabase.db.trace.db index c0cce96..1f87f6b 100644 --- a/visualization/metabase/metabase.db.trace.db +++ b/visualization/metabase/metabase.db.trace.db @@ -1150,3 +1150,249 @@ org.h2.jdbc.JdbcSQLException: Database is already closed (to disable automatic c at metabase.core$destroy_BANG_.invoke(core.clj:72) at clojure.lang.AFn.run(AFn.java:22) at java.base/java.lang.Thread.run(Thread.java:833) +2022-12-13 10:26:21 jdbc[11]: exception +org.h2.jdbc.JdbcSQLException: Database is already closed (to disable automatic closing at VM shutdown, add ";DB_CLOSE_ON_EXIT=FALSE" to the db URL) [90121-197] + at org.h2.message.DbException.getJdbcSQLException(DbException.java:357) + at org.h2.message.DbException.get(DbException.java:179) + at org.h2.message.DbException.get(DbException.java:155) + at org.h2.message.DbException.get(DbException.java:144) + at org.h2.jdbc.JdbcConnection.checkClosed(JdbcConnection.java:1526) + at org.h2.jdbc.JdbcConnection.checkClosed(JdbcConnection.java:1502) + at org.h2.jdbc.JdbcConnection.prepareStatement(JdbcConnection.java:302) + at com.mchange.v2.c3p0.impl.NewProxyConnection.prepareStatement(NewProxyConnection.java:567) + at clojure.java.jdbc$prepare_statement.invokeStatic(jdbc.clj:679) + at clojure.java.jdbc$prepare_statement.invoke(jdbc.clj:626) + at clojure.java.jdbc$db_query_with_resultset_STAR_.invokeStatic(jdbc.clj:1112) + at clojure.java.jdbc$db_query_with_resultset_STAR_.invoke(jdbc.clj:1093) + at clojure.java.jdbc$query.invokeStatic(jdbc.clj:1182) + at clojure.java.jdbc$query.invoke(jdbc.clj:1144) + at toucan.db$query.invokeStatic(db.clj:308) + at toucan.db$query.doInvoke(db.clj:304) + at clojure.lang.RestFn.invoke(RestFn.java:410) + at toucan.db$simple_select.invokeStatic(db.clj:414) + at toucan.db$simple_select.invoke(db.clj:403) + at toucan.db$simple_select_one.invokeStatic(db.clj:440) + at toucan.db$simple_select_one.invoke(db.clj:429) + at toucan.db$select_one.invokeStatic(db.clj:670) + at toucan.db$select_one.doInvoke(db.clj:664) + at clojure.lang.RestFn.applyTo(RestFn.java:139) + at clojure.core$apply.invokeStatic(core.clj:669) + at clojure.core$apply.invoke(core.clj:662) + at toucan.db$select_one_field.invokeStatic(db.clj:682) + at toucan.db$select_one_field.doInvoke(db.clj:675) + at clojure.lang.RestFn.invoke(RestFn.java:442) + at 
metabase.models.setting.cache$cache_out_of_date_QMARK_.invokeStatic(cache.clj:111) + at metabase.models.setting.cache$cache_out_of_date_QMARK_.invoke(cache.clj:92) + at metabase.models.setting.cache$restore_cache_if_needed_BANG_.invokeStatic(cache.clj:161) + at metabase.models.setting.cache$restore_cache_if_needed_BANG_.invoke(cache.clj:141) + at metabase.models.setting$db_or_cache_value.invokeStatic(setting.clj:422) + at metabase.models.setting$db_or_cache_value.invoke(setting.clj:409) + at metabase.models.setting$get_raw_value.invokeStatic(setting.clj:463) + at metabase.models.setting$get_raw_value.invoke(setting.clj:437) + at metabase.models.setting$get_raw_value.invokeStatic(setting.clj:477) + at metabase.models.setting$get_raw_value.invoke(setting.clj:437) + at metabase.models.setting$fn__30726.invokeStatic(setting.clj:496) + at metabase.models.setting$fn__30726.invoke(setting.clj:494) + at clojure.lang.MultiFn.invoke(MultiFn.java:234) + at clojure.lang.Var.invoke(Var.java:388) + at metabase.util.i18n.impl$fn__3811$f__3812.invoke(impl.clj:211) + at metabase.util.i18n.impl$site_locale_from_setting.invokeStatic(impl.clj:222) + at metabase.util.i18n.impl$site_locale_from_setting.invoke(impl.clj:215) + at metabase.util.i18n$site_locale_string.invokeStatic(i18n.clj:37) + at metabase.util.i18n$site_locale_string.invoke(i18n.clj:32) + at metabase.util.i18n$site_locale.invokeStatic(i18n.clj:50) + at metabase.util.i18n$site_locale.invoke(i18n.clj:47) + at metabase.util.i18n$translate_site_locale.invokeStatic(i18n.clj:69) + at metabase.util.i18n$translate_site_locale.invoke(i18n.clj:66) + at metabase.util.i18n.SiteLocalizedString.toString(i18n.clj:94) + at clojure.core$str.invokeStatic(core.clj:555) + at clojure.core$str.invoke(core.clj:546) + at metabase.core$destroy_BANG_.invokeStatic(core.clj:79) + at metabase.core$destroy_BANG_.invoke(core.clj:76) + at clojure.lang.AFn.run(AFn.java:22) + at java.base/java.lang.Thread.run(Thread.java:833) +2022-12-13 10:26:21 jdbc[11]: exception +org.h2.jdbc.JdbcSQLException: Database is already closed (to disable automatic closing at VM shutdown, add ";DB_CLOSE_ON_EXIT=FALSE" to the db URL) [90121-197] + at org.h2.message.DbException.getJdbcSQLException(DbException.java:357) + at org.h2.message.DbException.get(DbException.java:179) + at org.h2.message.DbException.get(DbException.java:155) + at org.h2.message.DbException.get(DbException.java:144) + at org.h2.jdbc.JdbcConnection.checkClosed(JdbcConnection.java:1526) + at org.h2.jdbc.JdbcConnection.checkClosed(JdbcConnection.java:1502) + at org.h2.jdbc.JdbcConnection.getAutoCommit(JdbcConnection.java:476) + at com.mchange.v2.c3p0.impl.C3P0ImplUtils.resetTxnState(C3P0ImplUtils.java:245) + at com.mchange.v2.c3p0.impl.NewPooledConnection.reset(NewPooledConnection.java:461) + at com.mchange.v2.c3p0.impl.NewPooledConnection.markClosedProxyConnection(NewPooledConnection.java:417) + at com.mchange.v2.c3p0.impl.NewProxyConnection.close(NewProxyConnection.java:87) + at clojure.java.jdbc$db_query_with_resultset_STAR_.invokeStatic(jdbc.clj:1111) + at clojure.java.jdbc$db_query_with_resultset_STAR_.invoke(jdbc.clj:1093) + at clojure.java.jdbc$query.invokeStatic(jdbc.clj:1182) + at clojure.java.jdbc$query.invoke(jdbc.clj:1144) + at toucan.db$query.invokeStatic(db.clj:308) + at toucan.db$query.doInvoke(db.clj:304) + at clojure.lang.RestFn.invoke(RestFn.java:410) + at toucan.db$simple_select.invokeStatic(db.clj:414) + at toucan.db$simple_select.invoke(db.clj:403) + at toucan.db$simple_select_one.invokeStatic(db.clj:440) + at 
toucan.db$simple_select_one.invoke(db.clj:429)
+	at toucan.db$select_one.invokeStatic(db.clj:670)
+	at toucan.db$select_one.doInvoke(db.clj:664)
+	at clojure.lang.RestFn.applyTo(RestFn.java:139)
+	at clojure.core$apply.invokeStatic(core.clj:669)
+	at clojure.core$apply.invoke(core.clj:662)
+	at toucan.db$select_one_field.invokeStatic(db.clj:682)
+	at toucan.db$select_one_field.doInvoke(db.clj:675)
+	at clojure.lang.RestFn.invoke(RestFn.java:442)
+	at metabase.models.setting.cache$cache_out_of_date_QMARK_.invokeStatic(cache.clj:111)
+	at metabase.models.setting.cache$cache_out_of_date_QMARK_.invoke(cache.clj:92)
+	at metabase.models.setting.cache$restore_cache_if_needed_BANG_.invokeStatic(cache.clj:161)
+	at metabase.models.setting.cache$restore_cache_if_needed_BANG_.invoke(cache.clj:141)
+	at metabase.models.setting$db_or_cache_value.invokeStatic(setting.clj:422)
+	at metabase.models.setting$db_or_cache_value.invoke(setting.clj:409)
+	at metabase.models.setting$get_raw_value.invokeStatic(setting.clj:463)
+	at metabase.models.setting$get_raw_value.invoke(setting.clj:437)
+	at metabase.models.setting$get_raw_value.invokeStatic(setting.clj:477)
+	at metabase.models.setting$get_raw_value.invoke(setting.clj:437)
+	at metabase.models.setting$fn__30726.invokeStatic(setting.clj:496)
+	at metabase.models.setting$fn__30726.invoke(setting.clj:494)
+	at clojure.lang.MultiFn.invoke(MultiFn.java:234)
+	at clojure.lang.Var.invoke(Var.java:388)
+	at metabase.util.i18n.impl$fn__3811$f__3812.invoke(impl.clj:211)
+	at metabase.util.i18n.impl$site_locale_from_setting.invokeStatic(impl.clj:222)
+	at metabase.util.i18n.impl$site_locale_from_setting.invoke(impl.clj:215)
+	at metabase.util.i18n$site_locale_string.invokeStatic(i18n.clj:37)
+	at metabase.util.i18n$site_locale_string.invoke(i18n.clj:32)
+	at metabase.util.i18n$site_locale.invokeStatic(i18n.clj:50)
+	at metabase.util.i18n$site_locale.invoke(i18n.clj:47)
+	at metabase.util.i18n$translate_site_locale.invokeStatic(i18n.clj:69)
+	at metabase.util.i18n$translate_site_locale.invoke(i18n.clj:66)
+	at metabase.util.i18n.SiteLocalizedString.toString(i18n.clj:94)
+	at clojure.core$str.invokeStatic(core.clj:555)
+	at clojure.core$str.invoke(core.clj:546)
+	at metabase.core$destroy_BANG_.invokeStatic(core.clj:79)
+	at metabase.core$destroy_BANG_.invoke(core.clj:76)
+	at clojure.lang.AFn.run(AFn.java:22)
+	at java.base/java.lang.Thread.run(Thread.java:833)