Skip to content

Commit 037db4e

Browse files
committed
[run-tests] log cache key
1 parent 6af651d commit 037db4e

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

bigquery_etl/dryrun.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -255,6 +255,8 @@ def _get_cache_key(self, sql):
             "cache_ttl_hours": self.cache_ttl_hours,
         }

+        print(f"Generated cache key for {self.sqlfile} ({cache_params})")
+
         cache_string = json.dumps(cache_params, sort_keys=True)
         return hashlib.sha256(cache_string.encode("utf-8")).hexdigest()[:16]

@@ -286,7 +288,7 @@ def _load_from_cache(self, cache_key):
         try:
             with open(cache_file_path, "rb") as f:
                 cached_result = pickle.load(f)
-                print(f"Using cached dry run result for {self.sqlfile}")
+                print(f"Using cached dry run result for {self.sqlfile} ({cache_key})")
                 return cached_result
         except (pickle.PickleError, OSError, EOFError) as e:
             print(f"Failed to load cache for {self.sqlfile}: {e}")
@@ -309,7 +311,7 @@ def _save_to_cache(self, cache_key, result):
             with open(temp_file_path, "wb") as f:
                 pickle.dump(result, f)
             os.rename(temp_file_path, cache_file_path)
-            print(f"Cached dry run result for {self.sqlfile}")
+            print(f"Cached dry run result for {self.sqlfile} ({cache_key})")
         except (pickle.PickleError, OSError) as e:
             print(f"Failed to cache result for {self.sqlfile}: {e}")
             # clean up temp file if it exists

0 commit comments

Comments
 (0)