Commit f045950

fix cyclic imports

1 parent: d90c1da

112 files changed: +713, −694 lines


internlm/__init__.py (+1 −2)

@@ -1,8 +1,7 @@
 from .initialize.initialize_trainer import initialize_trainer
-from .initialize.launch import get_default_parser, launch_from_slurm, launch_from_torch
+from .initialize.launch import launch_from_slurm, launch_from_torch

 __all__ = [
-    "get_default_parser",
     "initialize_trainer",
     "launch_from_slurm",
     "launch_from_torch",

internlm/checkpoint/checkpoint_manager.py (+2 −2)

@@ -16,8 +16,8 @@
     auto_resume_sanity_check,
     ckpt_info_sanity_check,
 )
-from internlm.model.base_model import BaseModel
-from internlm.model.registry import model_initializer
+from internlm.model.model_implementations.registry import model_initializer
+from internlm.model.model_implementations.transformers.base_model import BaseModel
 from internlm.monitor import send_alert_message
 from internlm.solver.optimizer import HybridZeroOptimizer, HybridZeroOptimizer_v2
 from internlm.utils.common import get_current_device

internlm/checkpoint/components.py (+1 −1)

@@ -9,7 +9,7 @@
 from internlm.core.context import ParallelMode
 from internlm.core.context import global_context as gpc
 from internlm.core.trainer import TrainState
-from internlm.model.moe import MoE
+from internlm.model.model_ops.moe import MoE
 from internlm.solver.optimizer import HybridZeroOptimizer, HybridZeroOptimizer_v2
 from internlm.utils.common import get_current_device
 from internlm.utils.lazy import LazyObject

internlm/checkpoint/load_funcs.py (+3 −3)

@@ -1,8 +1,8 @@
 # Copyright (c) InternLM. All rights reserved.

-from internlm.model.modeling_internlm import InternLM1
-from internlm.model.modeling_internlm2 import InternLM2
-from internlm.model.modeling_llama import Llama2
+from internlm.model.model_implementations.transformers.modeling_internlm import InternLM1
+from internlm.model.model_implementations.transformers.modeling_internlm2 import InternLM2
+from internlm.model.model_implementations.transformers.modeling_llama import Llama2
 from internlm.utils.logger import get_logger

 logger = get_logger(__file__)

internlm/core/context/__init__.py (+1 −1)

@@ -1,11 +1,11 @@
+from .config import Config
 from .parallel_context import (
     IS_REPLICA_EXPERT_DATA_PARALLEL,
     IS_REPLICA_ZERO_PARALLEL,
     IS_TENSOR_EXPERT_DATA_PARALLEL,
     IS_TENSOR_ZERO_PARALLEL,
     IS_WEIGHT_EXPERT_DATA_PARALLEL,
     IS_WEIGHT_ZERO_PARALLEL,
-    Config,
     ParallelContext,
     global_context,
 )

internlm/core/context/config.py (new file, +95)

@@ -0,0 +1,95 @@
+import inspect
+import sys
+from importlib.machinery import SourceFileLoader
+from pathlib import Path
+
+
+class Config(dict):
+    """This is a wrapper class for dict objects so that values of which can be
+    accessed as attributes.
+
+    Args:
+        config (dict): The dict object to be wrapped.
+    """
+
+    def __init__(self, config: dict = None):  # pylint: disable=W0231
+        if config is not None:
+            for k, v in config.items():
+                self._add_item(k, v)
+
+    def __missing__(self, key):
+        raise KeyError(key)
+
+    def __getattr__(self, key):
+        try:
+            value = super().__getitem__(key)
+            return value
+        except KeyError:
+            raise AttributeError(key)
+
+    def __setattr__(self, key, value):
+        super().__setitem__(key, value)
+
+    def _add_item(self, key, value):
+        if isinstance(value, dict):
+            self.__setattr__(key, Config(value))
+        else:
+            self.__setattr__(key, value)
+
+    def update(self, config):
+        assert isinstance(config, (Config, dict)), "can only update dictionary or Config objects."
+        for k, v in config.items():
+            self._add_item(k, v)
+        return self
+
+    @staticmethod
+    def from_file(filename: str):
+        """Reads a python file and constructs a corresponding :class:`Config` object.
+
+        Args:
+            filename (str): Name of the file to construct the return object.
+
+        Returns:
+            :class:`Config`: A :class:`Config` object constructed with information in the file.
+
+        Raises:
+            AssertionError: Raises an AssertionError if the file does not exist, or the file is not .py file
+        """
+
+        # check config path
+        if isinstance(filename, str):
+            filepath = Path(filename).absolute()
+        elif isinstance(filename, Path):
+            filepath = filename.absolute()
+
+        assert filepath.exists(), f"{filename} is not found, please check your configuration path"
+
+        # check extension
+        extension = filepath.suffix
+        assert extension == ".py", "only .py files are supported"
+
+        # import the config as module
+        remove_path = False
+        if filepath.parent not in sys.path:
+            sys.path.insert(0, (filepath))
+            remove_path = True
+
+        module_name = filepath.stem
+        source_file = SourceFileLoader(fullname=str(module_name), path=str(filepath))
+        module = source_file.load_module()  # pylint: disable=W4902,E1120,W1505
+
+        # load into config
+        config = Config()
+
+        for k, v in module.__dict__.items():
+            if k.startswith("__") or inspect.ismodule(v) or inspect.isclass(v):
+                continue
+            else:
+                config._add_item(k, v)
+
+        # remove module
+        del sys.modules[module_name]
+        if remove_path:
+            sys.path.pop(0)
+
+        return config
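
Note: a minimal usage sketch of the relocated Config class (the keys and the
config path below are illustrative, not from this commit):

    from internlm.core.context import Config

    # Nested dicts are wrapped recursively, so values read as attributes.
    cfg = Config({"model": {"num_layers": 32}, "data": {"micro_num": 4}})
    assert cfg.model.num_layers == 32
    assert cfg["data"].micro_num == 4  # plain dict access still works

    # update() accepts a dict or another Config.
    cfg.update({"parallel": {"zero1": 8}})

    # from_file() executes a .py file and keeps every top-level name that is
    # not a dunder, a module, or a class:
    # cfg = Config.from_file("configs/demo_config.py")  # hypothetical path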

internlm/core/context/parallel_context.py (+1 −95)

@@ -3,19 +3,16 @@

 # adopted from https://github.com/hpcaitech/ColossalAI/blob/main/colossalai/context

-import inspect
 import random
 import socket
-import sys
-from importlib.machinery import SourceFileLoader
-from pathlib import Path
 from typing import Union

 import numpy as np
 import torch
 import torch.distributed as dist

 from internlm.accelerator import get_accelerator
+from internlm.core.context import Config
 from internlm.utils.common import SingletonMeta
 from internlm.utils.logger import get_logger
 from internlm.utils.timeout import LLM_NCCL_TIMEOUT

@@ -46,97 +43,6 @@
 internlm_accelerator = get_accelerator()


-class Config(dict):
-    """This is a wrapper class for dict objects so that values of which can be
-    accessed as attributes.
-
-    Args:
-        config (dict): The dict object to be wrapped.
-    """
-
-    def __init__(self, config: dict = None):  # pylint: disable=W0231
-        if config is not None:
-            for k, v in config.items():
-                self._add_item(k, v)
-
-    def __missing__(self, key):
-        raise KeyError(key)
-
-    def __getattr__(self, key):
-        try:
-            value = super().__getitem__(key)
-            return value
-        except KeyError:
-            raise AttributeError(key)
-
-    def __setattr__(self, key, value):
-        super().__setitem__(key, value)
-
-    def _add_item(self, key, value):
-        if isinstance(value, dict):
-            self.__setattr__(key, Config(value))
-        else:
-            self.__setattr__(key, value)
-
-    def update(self, config):
-        assert isinstance(config, (Config, dict)), "can only update dictionary or Config objects."
-        for k, v in config.items():
-            self._add_item(k, v)
-        return self
-
-    @staticmethod
-    def from_file(filename: str):
-        """Reads a python file and constructs a corresponding :class:`Config` object.
-
-        Args:
-            filename (str): Name of the file to construct the return object.
-
-        Returns:
-            :class:`Config`: A :class:`Config` object constructed with information in the file.
-
-        Raises:
-            AssertionError: Raises an AssertionError if the file does not exist, or the file is not .py file
-        """
-
-        # check config path
-        if isinstance(filename, str):
-            filepath = Path(filename).absolute()
-        elif isinstance(filename, Path):
-            filepath = filename.absolute()
-
-        assert filepath.exists(), f"{filename} is not found, please check your configuration path"
-
-        # check extension
-        extension = filepath.suffix
-        assert extension == ".py", "only .py files are supported"
-
-        # import the config as module
-        remove_path = False
-        if filepath.parent not in sys.path:
-            sys.path.insert(0, (filepath))
-            remove_path = True
-
-        module_name = filepath.stem
-        source_file = SourceFileLoader(fullname=str(module_name), path=str(filepath))
-        module = source_file.load_module()  # pylint: disable=W4902,E1120,W1505
-
-        # load into config
-        config = Config()
-
-        for k, v in module.__dict__.items():
-            if k.startswith("__") or inspect.ismodule(v) or inspect.isclass(v):
-                continue
-            else:
-                config._add_item(k, v)
-
-        # remove module
-        del sys.modules[module_name]
-        if remove_path:
-            sys.path.pop(0)
-
-        return config
-
-
 class ParallelContext(metaclass=SingletonMeta):
     """This class provides interface functions for users to get the parallel context,
     such as the global rank, the local rank, the world size, etc. of each device.
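
Note: the two hunks above are the heart of the fix. Config used to live in
parallel_context.py, so any module that needed only Config also pulled in
everything parallel_context.py imports; now it lives in a leaf module that the
package __init__ binds before the heavy submodule executes. A minimal sketch of
the same layout, with a hypothetical package name (the real import graph is
larger than shown):

    # --- pkg/config.py (leaf module: imports nothing from pkg) ---
    class Config(dict):
        ...

    # --- pkg/__init__.py (binds Config before the heavy module runs) ---
    from .config import Config
    from .parallel_context import ParallelContext

    # --- pkg/parallel_context.py (runs second, so pkg.Config is already
    # --- bound and importing it through the package cannot recurse) ---
    from pkg import Config

    class ParallelContext:
        ...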

internlm/core/parallel/comm/isp.py (+9 −5)

@@ -25,9 +25,8 @@
     expandKVPacked,
     reduce_scatter_raw,
 )
-from internlm.model.modules.embedding import Embedding1D
-from internlm.model.modules.linear import ParallelLinearWithCommExt
-from internlm.model.modules.utils import is_moe_param
+from internlm.model.model_ops.modules.linear import ParallelLinearWithCommExt
+from internlm.model.model_ops.modules.utils import is_moe_param
 from internlm.utils.common import SchedulerHook, UniqueChainMap, get_current_device
 from internlm.utils.utils import (
     CuSeqlenType,

@@ -179,14 +178,19 @@ class EmbeddingWeightParallelCommunicator:
     """

     def __init__(self, parallel_mode: ParallelMode) -> None:
+        from internlm.model.model_ops.modules.embedding import Embedding1D
+
+        self.embedding1d_cls = Embedding1D
         self.parallel_mode = parallel_mode
         self.gather_dim = 0

         self._cur_micro_step = 0
         self._num_micro_step = gpc.config.data.micro_num

-    def register_module_hook(self, module: Embedding1D) -> None:
-        assert isinstance(module, Embedding1D), "Embbeding weight parallel communicator is only support Embedding1D"
+    def register_module_hook(self, module: nn.Module) -> None:
+        assert isinstance(
+            module, self.embedding1d_cls
+        ), "Embbeding weight parallel communicator is only support Embedding1D"

         module.weight.evo_tensor = None
         self.gather_dim = 0 if module.vocab_parallel else 1
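
Note: the __init__ hunk above uses the other standard cure for cycles: defer
the import to call time. The Embedding1D import now runs when a communicator is
constructed, after all modules have finished importing, and the annotation is
loosened to nn.Module so the concrete type is not needed at module scope; the
isinstance check still enforces it at runtime. A minimal sketch of the pattern,
with hypothetical module names:

    # --- comm.py (must not import embedding.py at module scope) ---
    class Communicator:
        def __init__(self):
            # Deferred: runs at construction time, after both modules have
            # fully executed, so the comm <-> embedding cycle never bites.
            from embedding import Embedding1D

            self.embedding_cls = Embedding1D

        def register(self, module):
            assert isinstance(module, self.embedding_cls)

    # --- embedding.py (free to import comm at module scope) ---
    import comm

    class Embedding1D:
        ...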

internlm/core/parallel/comm/tensor.py (+2 −2)

@@ -23,8 +23,8 @@
     reduce_scatter_raw,
     split_forward_gather_backward,
 )
-from internlm.model.modules.embedding import Embedding1D
-from internlm.model.moe.moe import MoE
+from internlm.model.model_ops.modules.embedding import Embedding1D
+from internlm.model.model_ops.moe.moe import MoE

 # input gather dim
 _GATHER_DIM = 1  # shape: [batch, seqlen, dim] or [1, packlen, dim]

internlm/core/parallel/comm/zero.py (+2 −2)

@@ -12,8 +12,8 @@
 from internlm.core.context import global_context as gpc
 from internlm.core.naive_amp import unwrap_naive_amp
 from internlm.core.parallel.comm.isp import ISPCommunicatorWrapper
-from internlm.model.modules.embedding import Embedding1D
-from internlm.model.modules.linear import ScaleColumnParallelLinear
+from internlm.model.model_ops.modules.embedding import Embedding1D
+from internlm.model.model_ops.modules.linear import ScaleColumnParallelLinear
 from internlm.solver.optimizer.utils import flatten

internlm/core/trainer_builder.py (+3 −3)

@@ -17,14 +17,14 @@
 from internlm.data.train_state import get_train_state
 from internlm.eval.evaluation import evaluate_on_val_dls
 from internlm.initialize.initialize_trainer import initialize_trainer
-from internlm.model.losses.ce_loss import InternLoss
-from internlm.model.metrics import AccPerplex
+from internlm.model.model_ops.losses.ce_loss import InternLoss
+from internlm.model.model_ops.metrics import AccPerplex
+from internlm.model_inject.inject import inject_model
 from internlm.monitor.monitor import send_alert_message
 from internlm.train.pipeline import (
     get_scheduler_hooks,
     initialize_llm_profile,
     initialize_optimizer,
-    inject_model,
     load_new_batch,
     record_current_batch_training_metrics,
 )

internlm/env.py (new file, +1)

@@ -0,0 +1 @@
+VERSION = "0.5.3"

internlm/eval/evaluation.py (+1 −1)

@@ -9,7 +9,7 @@
 from internlm.core.context import global_context as gpc
 from internlm.core.parallel.shard import split_data_for_sequence_parallel
 from internlm.core.scheduler.pipeline_scheduler_1f1b import get_tensor_shape
-from internlm.model.metrics import AccPerplex, SchedulerMetricHook
+from internlm.model.model_ops.metrics import AccPerplex, SchedulerMetricHook
 from internlm.utils.common import get_current_device
 from internlm.utils.parallel import is_using_isp

internlm/initialize/__init__.py (−2)

@@ -1,14 +1,12 @@
 from .initialize_trainer import initialize_trainer
 from .launch import (
-    get_default_parser,
     initialize_distributed_env,
     launch_from_slurm,
     launch_from_torch,
     try_bind_numa,
 )

 __all__ = [
-    "get_default_parser",
     "initialize_trainer",
     "launch_from_slurm",
     "launch_from_torch",

0 commit comments