@@ -126,12 +126,13 @@ def _load_state_dict_hook(
126
126
**kwargs (Dict[str, Any]): Additional keyword arguments.
127
127
128
128
Raises:
129
- ValueError: if loaded local or global embedding n_tokens_per_tile is not derived
130
- from a squared grid.
131
- ValueError: if after interpolation, the shape of the loaded local embedding
132
- is not compatible with the current embedding.
133
- ValueError: if after interpolation, the shape of the loaded global embedding
134
- is not compatible with the current embedding.
129
+ ValueError:
130
+ If loaded local or global embedding n_tokens_per_tile is not derived
131
+ from a square grid, **or**
132
+ if after interpolation, the shape of the loaded local embedding
133
+ is not compatible with the current embedding, **or**
134
+ if after interpolation, the shape of the loaded global embedding
135
+ is not compatible with the current embedding.
135
136
"""
136
137
137
138
# process local_token_positional_embedding
@@ -530,9 +531,10 @@ def _load_state_dict_hook(
530
531
**kwargs (Dict[str, Any]): Additional keyword arguments.
531
532
532
533
Raises:
533
- ValueError: if the shape of the loaded embedding is not compatible with the current embedding.
534
- ValueError: if max_num_tiles_x, max_num_tiles_y are not equal.
535
- ValueError: if after interpolation, the shape of the loaded embedding is not compatible with the current embedding.
534
+ ValueError:
535
+ If the shape of the loaded embedding is not compatible with the current embedding, **or**
536
+ if ``max_num_tiles_x``, ``max_num_tiles_y`` are not equal, **or**
537
+ if after interpolation, the shape of the loaded embedding is not compatible with the current embedding.
536
538
"""
537
539
538
540
embedding = state_dict.get(prefix + "embedding")
0 commit comments