@@ -255,6 +255,8 @@ def _get_cache_key(self, sql):
             "cache_ttl_hours": self.cache_ttl_hours,
         }
 
+        print(f"Generated cache key for {self.sqlfile} ({cache_params})")
+
         cache_string = json.dumps(cache_params, sort_keys=True)
         return hashlib.sha256(cache_string.encode("utf-8")).hexdigest()[:16]
 
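Note on the key scheme this hunk logs: the key is derived deterministically from the serialized parameters, so equal params always map to the same cache entry. A minimal standalone sketch of that scheme (the make_cache_key helper and the sample params are illustrative, not part of the change):

import hashlib
import json

def make_cache_key(cache_params: dict) -> str:
    # sort_keys=True makes the JSON form canonical, so logically equal
    # param dicts always serialize, and therefore hash, identically.
    cache_string = json.dumps(cache_params, sort_keys=True)
    # Truncated SHA-256 hex digest, as in _get_cache_key above.
    return hashlib.sha256(cache_string.encode("utf-8")).hexdigest()[:16]

# Key insertion order does not affect the result:
assert (make_cache_key({"sql": "SELECT 1", "cache_ttl_hours": 24})
        == make_cache_key({"cache_ttl_hours": 24, "sql": "SELECT 1"}))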
@@ -286,7 +288,7 @@ def _load_from_cache(self, cache_key):
         try:
             with open(cache_file_path, "rb") as f:
                 cached_result = pickle.load(f)
-            print(f"Using cached dry run result for {self.sqlfile}")
+            print(f"Using cached dry run result for {self.sqlfile} ({cache_key})")
             return cached_result
         except (pickle.PickleError, OSError, EOFError) as e:
             print(f"Failed to load cache for {self.sqlfile}: {e}")
@@ -309,7 +311,7 @@ def _save_to_cache(self, cache_key, result):
             with open(temp_file_path, "wb") as f:
                 pickle.dump(result, f)
             os.rename(temp_file_path, cache_file_path)
-            print(f"Cached dry run result for {self.sqlfile}")
+            print(f"Cached dry run result for {self.sqlfile} ({cache_key})")
         except (pickle.PickleError, OSError) as e:
             print(f"Failed to cache result for {self.sqlfile}: {e}")
             # clean up temp file if it exists
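The save path in this last hunk uses the write-to-temp-then-rename pattern: because os.rename is atomic within a single POSIX filesystem, a reader of the cache file never observes a half-written pickle. A standalone sketch of the same pattern under those assumptions (function name and cleanup details are illustrative, not the PR's code):

import os
import pickle
import tempfile

def atomic_pickle_dump(obj, cache_file_path):
    # Create the temp file in the destination directory so the final
    # rename stays on one filesystem and remains atomic.
    fd, temp_file_path = tempfile.mkstemp(dir=os.path.dirname(cache_file_path) or ".")
    try:
        with os.fdopen(fd, "wb") as f:
            pickle.dump(obj, f)
        # os.replace would also work and overwrites on Windows too;
        # os.rename mirrors the diff above.
        os.rename(temp_file_path, cache_file_path)
    except (pickle.PickleError, OSError):
        # Clean up the temp file if anything failed before the rename.
        if os.path.exists(temp_file_path):
            os.remove(temp_file_path)
        raise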