File tree: 1 file changed, +10 −5 lines changed

Original file line number | Diff line number | Diff line change
@@ -1133,13 +1133,18 @@ def unpin_memory(tensor):
11331133 if not is_device_cpu (tensor .device ):
11341134 return False
11351135
1136- if not tensor .is_pinned ():
1137- # NOTE: CUDA does detect when a tensor is already pinned and would
1138- # error below, but there are proven cases where this also queues an error
1139- # on the GPU asynchronously. So don't trust the CUDA API and guard here
1136+ ptr = tensor .data_ptr ()
1137+ size = tensor .numel () * tensor .element_size ()
1138+
1139+ size_stored = PINNED_MEMORY .get (ptr , None )
1140+ if size_stored is None :
1141+ logging .warning ("Tried to unpin tensor not pinned by ComfyUI" )
1142+ return False
1143+
1144+ if size != size_stored :
1145+ logging .warning ("Size of pinned tensor changed" )
11401146 return False
11411147
1142- ptr = tensor .data_ptr ()
11431148 if torch .cuda .cudart ().cudaHostUnregister (ptr ) == 0 :
11441149 TOTAL_PINNED_MEMORY -= PINNED_MEMORY .pop (ptr )
11451150 if len (PINNED_MEMORY ) == 0 :
You can’t perform that action at this time.
0 commit comments