1010import os
1111import time
1212import multiprocessing
13+ from pathlib import Path
1314
1415from mkdocs import __version__ as mkdocs_version
1516from mkdocs .config import config_options
2829
2930from packaging .version import Version
3031
# Absolute path to the directory containing this plugin module. Used by
# on_post_build to locate the bundled js/css assets (timeago files) that
# are copied into the built site_dir.
HERE = Path(__file__).parent.absolute()
3233
3334
3435class GitRevisionDateLocalizedPlugin (BasePlugin ):
@@ -144,19 +145,18 @@ def on_config(self, config: config_options.Config, **kwargs) -> Dict[str, Any]:
144145
145146 return config
146147
147-
148148 def parallel_compute_commit_timestamps (self , files , original_source : Optional [Dict ] = None , is_first_commit = False ):
149149 pool = multiprocessing .Pool (processes = min (10 , multiprocessing .cpu_count ()))
150150 results = []
151- for file in files :
152- if file .is_documentation_page ():
153- abs_src_path = file .abs_src_path
151+ for f in files :
152+ if f .is_documentation_page ():
153+ abs_src_path = f .abs_src_path
154154 # Support plugins like monorep that might have moved the files from the original source that is under git
155155 if original_source and abs_src_path in original_source :
156156 abs_src_path = original_source [abs_src_path ]
157- result = pool . apply_async (
158- self . util . get_git_commit_timestamp , args = ( abs_src_path , is_first_commit )
159- )
157+ assert Path ( abs_src_path ). exists ()
158+ abs_src_path = str ( Path ( abs_src_path ). absolute () )
159+ result = pool . apply_async ( self . util . get_git_commit_timestamp , args = ( abs_src_path , is_first_commit ) )
160160 results .append ((abs_src_path , result ))
161161 pool .close ()
162162 pool .join ()
@@ -173,18 +173,21 @@ def on_files(self, files: Files, config: MkDocsConfig):
173173 """
174174 if not self .config .get ("enabled" ) or not self .config .get ("enable_parallel_processing" ):
175175 return
176-
176+
177177 # Support monorepo/techdocs, which copies the docs_dir to a temporary directory
178- if "monorepo" in config .get (' plugins' , {}):
179- original_source = config .get (' plugins' ).get (' monorepo' ).merger .files_source_dir
178+ if "monorepo" in config .get (" plugins" , {}):
179+ original_source = config .get (" plugins" ).get (" monorepo" ).merger .files_source_dir
180180 else :
181181 original_source = None
182182
183- if not self .last_revision_commits :
184- self .parallel_compute_commit_timestamps (files = files , original_source = original_source , is_first_commit = False )
185- if not self .created_commits :
186- self .parallel_compute_commit_timestamps (files = files , original_source = original_source , is_first_commit = True )
187-
183+ try :
184+ if not self .last_revision_commits :
185+ self .parallel_compute_commit_timestamps (files = files , original_source = original_source , is_first_commit = False )
186+ if not self .created_commits :
187+ self .parallel_compute_commit_timestamps (files = files , original_source = original_source , is_first_commit = True )
188+ except Exception as e :
189+ logging .warning (f"Parallel processing failed: { str (e )} .\n To fall back to serial processing, use 'enable_parallel_processing: False' setting." )
190+
188191
189192 def on_page_markdown (self , markdown : str , page : Page , config : config_options .Config , files , ** kwargs ) -> str :
190193 """
@@ -240,7 +243,9 @@ def on_page_markdown(self, markdown: str, page: Page, config: config_options.Con
240243 if getattr (page .file , "generated_by" , None ):
241244 last_revision_hash , last_revision_timestamp = "" , int (time .time ())
242245 else :
243- last_revision_hash , last_revision_timestamp = self .last_revision_commits .get (page .file .abs_src_path , (None , None ))
246+ last_revision_hash , last_revision_timestamp = self .last_revision_commits .get (
247+ str (Path (page .file .abs_src_path ).absolute ()), (None , None )
248+ )
244249 if last_revision_timestamp is None :
245250 last_revision_hash , last_revision_timestamp = self .util .get_git_commit_timestamp (
246251 path = page .file .abs_src_path ,
@@ -314,8 +319,10 @@ def on_page_markdown(self, markdown: str, page: Page, config: config_options.Con
314319 if getattr (page .file , "generated_by" , None ):
315320 first_revision_hash , first_revision_timestamp = "" , int (time .time ())
316321 else :
317- first_revision_hash , first_revision_timestamp = self .created_commits .get (page .file .abs_src_path , (None , None ))
318- if first_revision_timestamp is None :
322+ first_revision_hash , first_revision_timestamp = self .created_commits .get (
323+ str (Path (page .file .abs_src_path ).absolute ()), (None , None )
324+ )
325+ if first_revision_timestamp is None :
319326 first_revision_hash , first_revision_timestamp = self .util .get_git_commit_timestamp (
320327 path = page .file .abs_src_path ,
321328 is_first_commit = True ,
@@ -374,8 +381,8 @@ def on_post_build(self, config: Dict[str, Any], **kwargs) -> None:
374381 "js/timeago_mkdocs_material.js" ,
375382 "css/timeago.css" ,
376383 ]
377- for file in files :
378- dest_file_path = os . path . join (config ["site_dir" ], file )
379- src_file_path = os . path . join ( HERE , file )
380- assert os . path . exists (src_file_path )
381- copy_file (src_file_path , dest_file_path )
384+ for f in files :
385+ dest_file_path = Path (config ["site_dir" ]) / f
386+ src_file_path = HERE / f
387+ assert src_file_path . exists ()
388+ copy_file (str ( src_file_path ), str ( dest_file_path ) )