Skip to content

Commit ca070f1

Browse files
authored
Feature/logging setup (#221)
* Add LOGGING config
* Add colorlog for local development
* Add helper function for logging extra
* Use log_extra with logging extra
1 parent d919a45 commit ca070f1

File tree

18 files changed

+165
-27
lines changed

18 files changed

+165
-27
lines changed

apps/etl/extraction/sources/base/extract.py

+5-2
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from celery.utils.log import get_task_logger
33

44
from apps.etl.models import ExtractionData
5+
from main.logging import log_extra
56

67
logger = get_task_logger(__name__)
78

@@ -61,7 +62,9 @@ def pull_data(self, source: int, retry_count: int, timeout: int = 30, ext_object
6162
instance_obj.save()
6263

6364
if not response.status_code == 204: # bypass exception when content is empty
64-
logger.error("Request failed with status", exc_info=True, extra={"response_code": response.status_code})
65+
logger.error(
66+
"Request failed with status", exc_info=True, extra=log_extra({"response_code": response.status_code})
67+
)
6568
raise Exception("Request failed")
6669

6770
resp_status = ExtractionData.Status.SUCCESS
@@ -80,6 +83,6 @@ def pull_data(self, source: int, retry_count: int, timeout: int = 30, ext_object
8083
"resp_text": "",
8184
}
8285
except requests.exceptions.RequestException:
83-
logger.error("Extraction failed", exc_info=True, extra={"source": source})
86+
logger.error("Extraction failed", exc_info=True, extra=log_extra({"source": source}))
8487
# FIXME: Check if this creates duplicate entry in Sentry. if yes, remove this.
8588
raise

apps/etl/extraction/sources/base/handler.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
)
1212
from apps.etl.models import ExtractionData
1313
from main.celery import app
14+
from main.logging import log_extra
1415

1516
logger = logging.getLogger(__name__)
1617

@@ -154,9 +155,11 @@ def handle_extraction(cls, url: str, params: dict, headers: dict, source: int) -
154155
logger.error(
155156
"extraction failed",
156157
exc_info=True,
157-
extra={
158-
"source": instance.source,
159-
},
158+
extra=log_extra(
159+
{
160+
"source": instance.source,
161+
}
162+
),
160163
)
161164
raise
162165

apps/etl/extraction/sources/desinventar/extract.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from apps.etl.extraction.sources.base.utils import manage_duplicate_file_content
88
from apps.etl.models import ExtractionData
99
from main.celery import app
10+
from main.logging import log_extra
1011

1112
logger = logging.getLogger(__name__)
1213

@@ -91,9 +92,11 @@ def handle_extraction(cls, url: str, params: dict, headers: dict, source: int) -
9192
logger.error(
9293
"extraction failed",
9394
exc_info=True,
94-
extra={
95-
"source": instance.source,
96-
},
95+
extra=log_extra(
96+
{
97+
"source": instance.source,
98+
}
99+
),
97100
)
98101
raise
99102

apps/etl/extraction/sources/emdat/extract.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from django.core.files.base import ContentFile
1010

1111
from apps.etl.models import ExtractionData, HazardType
12+
from main.logging import log_extra
1213

1314
logger = logging.getLogger(__name__)
1415

@@ -151,6 +152,6 @@ def import_hazard_data(variables, **kwargs):
151152
# Set extraction status to Fail
152153
emdat_instance.status = ExtractionData.Status.FAILED
153154
emdat_instance.save(update_fields=["status"])
154-
logger.error("Extraction failed", exc_info=True, extra={"source": ExtractionData.Source.EMDAT})
155+
logger.error("Extraction failed", exc_info=True, extra=log_extra({"source": ExtractionData.Source.EMDAT}))
155156
# FIXME: Check if this creates duplicate entry in Sentry. if yes, remove this.
156157
raise

apps/etl/extraction/sources/gfd/extract.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from apps.etl.extraction.sources.base.utils import manage_duplicate_file_content
1414
from apps.etl.models import ExtractionData
1515
from main.celery import app
16+
from main.logging import log_extra
1617

1718
logger = logging.getLogger(__name__)
1819

@@ -144,9 +145,11 @@ def handle_extraction(cls, url: str, source: int, start_date, end_date) -> int:
144145
logger.error(
145146
"extraction failed",
146147
exc_info=True,
147-
extra={
148-
"source": instance.source,
149-
},
148+
extra=log_extra(
149+
{
150+
"source": instance.source,
151+
}
152+
),
150153
)
151154
raise
152155

apps/etl/extraction/sources/idu/extract.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
)
1414
from apps.etl.models import ExtractionData
1515
from main.celery import app
16+
from main.logging import log_extra
1617

1718
logger = logging.getLogger(__name__)
1819

@@ -166,8 +167,10 @@ def handle_extraction(url) -> dict:
166167
logger.error(
167168
"IDU extraction failed",
168169
exc_info=True,
169-
extra={
170-
"source": ExtractionData.Source.IDU,
171-
},
170+
extra=log_extra(
171+
{
172+
"source": ExtractionData.Source.IDU,
173+
}
174+
),
172175
)
173176
raise

apps/etl/extraction/sources/ifrc_event/extract.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from apps.etl.extraction.sources.base.utils import manage_duplicate_file_content
1010
from apps.etl.models import ExtractionData
1111
from main.celery import app
12+
from main.logging import log_extra
1213

1314
logger = logging.getLogger(__name__)
1415

@@ -119,9 +120,11 @@ def handle_extraction(cls, url: str, params: dict, headers: dict, source: int) -
119120
logger.error(
120121
"extraction failed",
121122
exc_info=True,
122-
extra={
123-
"source": instance.source,
124-
},
123+
extra=log_extra(
124+
{
125+
"source": instance.source,
126+
}
127+
),
125128
)
126129
raise
127130

apps/etl/transform/sources/desinventar.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from apps.etl.models import ExtractionData, Transform
1010
from apps.etl.transform.sources.handler import BaseTransformerHandler
1111
from main.celery import app
12+
from main.logging import log_extra
1213

1314
logger = logging.getLogger(__name__)
1415

@@ -55,7 +56,7 @@ def handle_transformation(cls, extraction_id, country_code, iso3):
5556
logger.info("Transformation ended")
5657

5758
except Exception as e:
58-
logger.error("Transformation failed", exc_info=True, extra={"extraction_id": extraction_obj.id})
59+
logger.error("Transformation failed", exc_info=True, extra=log_extra({"extraction_id": extraction_obj.id}))
5960
transform_obj.status = Transform.Status.FAILED
6061
transform_obj.save(update_fields=["status"])
6162
# FIXME: Check if this creates duplicate entry in Sentry. if yes, remove this.

apps/etl/transform/sources/emdat.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
from apps.etl.models import ExtractionData, PyStacLoadData, Transform
1111
from apps.etl.utils import read_file_data
12+
from main.logging import log_extra
1213
from main.managers import BulkCreateManager
1314

1415
logger = logging.getLogger(__name__)
@@ -68,7 +69,9 @@ def transform_data(source, transformer, data_source, extraction_id, data):
6869
transform_obj.status = Transform.Status.SUCCESS
6970
transform_obj.save(update_fields=["status"])
7071
except Exception as e:
71-
logger.error("Transformation failed", exc_info=True, extra={"extraction_id": ext_instance.id, "source": source})
72+
logger.error(
73+
"Transformation failed", exc_info=True, extra=log_extra({"extraction_id": ext_instance.id, "source": source})
74+
)
7275
# update transformation status to success
7376
transform_obj.status = Transform.Status.FAILED
7477
transform_obj.save(update_fields=["status"])

apps/etl/transform/sources/gdacs.py

+4-3
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212

1313
from apps.etl.models import ExtractionData, PyStacLoadData, Transform
1414
from apps.etl.utils import read_file_data
15+
from main.logging import log_extra
1516
from main.managers import BulkCreateManager
1617

1718
logger = logging.getLogger(__name__)
@@ -45,7 +46,7 @@ def transform_event_data(event_extraction_data):
4546
transform_obj.status = Transform.Status.SUCCESS
4647
transform_obj.save(update_fields=["status"])
4748
except Exception as e:
48-
logger.error("Gdacs transformation failed", exc_info=True, extra={"extraction_id": gdacs_instance.id})
49+
logger.error("Gdacs transformation failed", exc_info=True, extra=log_extra({"extraction_id": gdacs_instance.id}))
4950

5051
transform_obj.status = Transform.Status.FAILED
5152
transform_obj.save(update_fields=["status"])
@@ -109,7 +110,7 @@ def transform_geo_data(geo_data_extraction_id, event_extraction_id):
109110
transform_obj.save(update_fields=["status"])
110111

111112
except Exception as e:
112-
logger.error("Gdacs transformation failed", exc_info=True, extra={"extraction_id": gdacs_instance.id})
113+
logger.error("Gdacs transformation failed", exc_info=True, extra=log_extra({"extraction_id": gdacs_instance.id}))
113114

114115
transform_obj.status = Transform.Status.FAILED
115116
transform_obj.save(update_fields=["status"])
@@ -152,7 +153,7 @@ def transform_impact_data(event_data):
152153
transform_obj.status = Transform.Status.SUCCESS
153154
transform_obj.save(update_fields=["status"])
154155
except Exception as e:
155-
logger.error("Gdacs transformation failed", exc_info=True, extra={"extraction_id": gdacs_instance.id})
156+
logger.error("Gdacs transformation failed", exc_info=True, extra=log_extra({"extraction_id": gdacs_instance.id}))
156157

157158
transform_obj.status = Transform.Status.FAILED
158159
transform_obj.save(update_fields=["status"])

apps/etl/transform/sources/glide.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
from apps.etl.models import ExtractionData, PyStacLoadData, Transform
88
from apps.etl.utils import read_file_data
9+
from main.logging import log_extra
910
from main.managers import BulkCreateManager
1011

1112
logger = logging.getLogger(__name__)
@@ -41,7 +42,7 @@ def transform_glide_event_data(extraction_id):
4142
transform_obj.status = Transform.Status.SUCCESS
4243
transform_obj.save(update_fields=["status"])
4344
except Exception as e:
44-
logger.error("Glide transformation failed", exc_info=True, extra={"extraction_id": glide_instance.id})
45+
logger.error("Glide transformation failed", exc_info=True, extra=log_extra({"extraction_id": glide_instance.id}))
4546
transform_obj.status = Transform.Status.FAILED
4647
transform_obj.save(update_fields=["status"])
4748
# FIXME: Check if this creates duplicate entry in Sentry. if yes, remove this.

apps/etl/transform/sources/handler.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
from apps.etl.models import ExtractionData, PyStacLoadData, Transform
99
from main.celery import app
10+
from main.logging import log_extra
1011
from main.managers import BulkCreateManager
1112

1213
logger = logging.getLogger(__name__)
@@ -64,7 +65,7 @@ def handle_transformation(cls, extraction_id: int, geocoder: Optional[MontyGeoCo
6465
logger.info("Transformation ended")
6566

6667
except Exception as e:
67-
logger.error("Transformation failed", exc_info=True, extra={"extraction_id": extraction_obj.id})
68+
logger.error("Transformation failed", exc_info=True, extra=log_extra({"extraction_id": extraction_obj.id}))
6869
transform_obj.status = Transform.Status.FAILED
6970
transform_obj.save(update_fields=["status"])
7071
# FIXME: Check if this creates duplicate entry in Sentry. if yes, remove this.

apps/etl/transform/sources/pdc.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
from apps.etl.models import ExtractionData, Transform
88
from main.celery import app
9+
from main.logging import log_extra
910

1011
from .handler import BaseTransformerHandler
1112

@@ -73,7 +74,7 @@ def handle_transformation(cls, extraction_id, geo_json_id):
7374
logger.info("Transformation ended")
7475

7576
except Exception as e:
76-
logger.error("Transformation failed", exc_info=True, extra={"extraction_id": extraction_obj.id})
77+
logger.error("Transformation failed", exc_info=True, extra=log_extra({"extraction_id": extraction_obj.id}))
7778
transform_obj.status = Transform.Status.FAILED
7879
transform_obj.save(update_fields=["status"])
7980
# FIXME: Check if this creates duplicate entry in Sentry. if yes, remove this.

apps/etl/transform/sources/usgs.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
from apps.etl.models import ExtractionData, PyStacLoadData, Transform
1010
from apps.etl.utils import read_file_data
11+
from main.logging import log_extra
1112
from main.managers import BulkCreateManager
1213

1314
logger = logging.getLogger(__name__)
@@ -45,7 +46,7 @@ def transform_usgs_event_data(extraction_id):
4546
transform_obj.status = Transform.Status.SUCCESS
4647
transform_obj.save(update_fields=["status"])
4748
except Exception as e:
48-
logger.error("usgs transformation failed", exc_info=True, extra={"extraction_id": usgs_instance.id})
49+
logger.error("usgs transformation failed", exc_info=True, extra=log_extra({"extraction_id": usgs_instance.id}))
4950
transform_obj.status = Transform.Status.FAILED
5051
transform_obj.save(update_fields=["status"])
5152
raise e

main/logging.py

+24
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
import logging
2+
3+
EXTRA_CONTEXT_KEY = "CONTEXT"
4+
5+
6+
def log_render_extra_context(record: logging.LogRecord):
7+
"""
8+
Append extra->context to logs
9+
NOTE: This will appear in logs when used with logger.xxx(..., extra={'context': {..content}})
10+
"""
11+
extra_str = ""
12+
if extra_raw := getattr(record, EXTRA_CONTEXT_KEY, None):
13+
extra_str = f" - EXTRA:{str(extra_raw)}"
14+
record.custom_extra = extra_str
15+
return True
16+
17+
18+
def log_extra(extra: dict):
19+
"""
20+
Basic helper function to view extra argument in logs using log_render_extra_context
21+
"""
22+
return {
23+
EXTRA_CONTEXT_KEY: extra,
24+
}

0 commit comments

Comments (0)