Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion lazyllm/module/llms/automodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,12 @@ class AutoModel:
def __new__(cls, model: Optional[str] = None, *, config_id: Optional[str] = None, source: Optional[str] = None, # noqa C901
type: Optional[str] = None, config: Union[str, bool] = True, **kwargs: Any):
# check and accommodate user params
model = model or kwargs.pop('base_model', kwargs.pop('embed_model_name', None))
model = model or kwargs.pop('base_model', kwargs.pop('embed_model_name', kwargs.pop('model_name', None)))
if model in lazyllm.online.chat:
if source is not None:
raise ValueError(
f'`{model!r}` is a recognised source name; pass it as `source=` and '
f'do not also set `source={source!r}`.')
source, model = model, None

if not model:
Expand Down
5 changes: 1 addition & 4 deletions lazyllm/module/llms/online_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,7 @@ def __new__(self, model: Optional[str] = None, source: Optional[str] = None, *,
embed_kwargs = params.copy()
embed_kwargs.pop('function', None)
embed_kwargs.setdefault('type', 'rerank' if resolved_type == LLMType.RERANK else 'embed')
return OnlineEmbeddingModule(source=source,
embed_url=url,
embed_model_name=model,
**embed_kwargs)
return OnlineEmbeddingModule(model=model, source=source, url=url, **embed_kwargs)

if resolved_type in list(self._MULTI_TYPE_TO_FUNCTION.keys()):
multi_kwargs = params.copy()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from lazyllm.components.utils.file_operate import _delete_old_files, _image_to_base64
from lazyllm.components.utils.downloader.model_downloader import LLMType
from ....servermodule import LLMBase, StaticParams
from .utils import LazyLLMOnlineBase
from .utils import LazyLLMOnlineBase, resolve_online_params

class LazyLLMOnlineChatModuleBase(LazyLLMOnlineBase, LLMBase):
TRAINABLE_MODEL_LIST = []
Expand Down Expand Up @@ -126,9 +126,10 @@ def forward(self, __input: Union[Dict, str] = None, *, llm_chat_history: List[Li
# TODO(dengyuang): if current forward set stream_output = False but self._stream = True, will use stream = True
stream_output = stream_output or self._stream
__input, files = self._get_files(__input, lazyllm_files)
runtime_base_url = url or kw.pop('base_url', None)
runtime_url = self._get_chat_url(runtime_base_url) if runtime_base_url else self._chat_url
runtime_model = model or kw.pop('model_name', None) or self._model_name
model, _, url, kw = resolve_online_params(model, None, url, kw,
model_aliases='model_name', url_aliases='base_url')
runtime_url = self._get_chat_url(url) if url else self._chat_url
runtime_model = model or self._model_name

params = {'input': __input, 'history': llm_chat_history, 'return_dict': True}
if tools: params['tools'] = tools
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from concurrent.futures import ThreadPoolExecutor, as_completed, wait
from lazyllm import LOG
from lazyllm.components.utils.downloader.model_downloader import LLMType
from .utils import LazyLLMOnlineBase
from .utils import LazyLLMOnlineBase, resolve_online_params
from lazyllm.components.utils.downloader import ModelManager


Expand Down Expand Up @@ -41,8 +41,12 @@ def batch_size(self, value: int):

def forward(self, input: Union[List, str], url: str = None, model: str = None, **kwargs
) -> Union[List[float], List[List[float]]]:
runtime_url = url or kwargs.pop('base_url', kwargs.pop('embed_url', None)) or self._embed_url
runtime_model = model or kwargs.pop('model_name', kwargs.pop('embed_model_name', None)) or self._embed_model_name
model, _, url, kwargs = resolve_online_params(
model, None, url, kwargs,
model_aliases=('model_name', 'embed_model_name', 'embed_name'),
url_aliases=('base_url', 'embed_url'))
runtime_url = url or self._embed_url
runtime_model = model or self._embed_model_name

if runtime_model is not None:
kwargs['model'] = runtime_model
Expand Down
23 changes: 9 additions & 14 deletions lazyllm/module/llms/onlinemodule/base/onlineMultiModalBase.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from typing import List, Dict, Union, Optional
import lazyllm
from ....servermodule import LLMBase
from .utils import LazyLLMOnlineBase
from .utils import LazyLLMOnlineBase, resolve_online_params
import base64
from pathlib import Path
import requests
Expand Down Expand Up @@ -29,23 +29,18 @@ def type(self):
return 'MultiModal'

def _forward(self, input: Union[Dict, str] = None, files: List[str] = None, **kwargs):
'''Forward method to be implemented by subclasses'''
raise NotImplementedError(f'Subclass {self.__class__.__name__} must implement this method')

def forward(self, input: Union[Dict, str] = None, *, lazyllm_files=None,
url: str = None, model: str = None, **kwargs):
'''Main forward method with file handling'''
try:
input, files = self._get_files(input, lazyllm_files or kwargs.pop('files', None))
runtime_url = url or kwargs.pop('base_url', None) or self._base_url
runtime_model = model or kwargs.pop('model_name', None) or self._model_name
call_params = {'input': input, **kwargs}
if files: call_params['files'] = files
return self._forward(**call_params, model=runtime_model, url=runtime_url)

except Exception as e:
lazyllm.LOG.error(f'Error in {self.__class__.__name__}.forward: {str(e)}')
raise
input, files = self._get_files(input, lazyllm_files or kwargs.pop('files', None))
model, _, url, kwargs = resolve_online_params(model, None, url, kwargs,
model_aliases='model_name', url_aliases='base_url')
runtime_url = url or self._base_url
runtime_model = model or self._model_name
call_params = {'input': input, **kwargs}
if files: call_params['files'] = files
return self._forward(**call_params, model=runtime_model, url=runtime_url)

def __repr__(self):
return lazyllm.make_repr('Module', 'OnlineMultiModalModule',
Expand Down
27 changes: 27 additions & 0 deletions lazyllm/module/llms/onlinemodule/base/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,33 @@ def select_source_with_default_key(available_models, explicit_source: Optional[s
f'You can set one of those environment: {excepted}')


def resolve_online_params(
    model: Optional[str], source: Optional[str], url: Optional[str], extra: dict,
    *, model_aliases: Union[str, tuple] = (), url_aliases: Union[str, tuple] = (), source_registry=None,
) -> tuple:
    '''Normalise the (model, source, url) triple shared by all online modules.

    Legacy keyword aliases (e.g. ``model_name`` / ``base_url``) are popped out of
    ``extra`` into the canonical parameters. Supplying both a canonical parameter
    and one of its aliases raises ``ValueError``.

    If ``source_registry`` is given (a container supporting ``in``, or a
    predicate callable) and ``model`` is actually a recognised source name, it
    is moved into ``source`` — but only when ``source`` was not already set.
    If the caller also passed an explicit ``source`` (e.g. ``source='dynamic'``)
    the call is ambiguous and a ``ValueError`` is raised instead of silently
    clobbering the requested routing.

    Returns a ``(model, source, url, remaining_kwargs)`` tuple; ``extra`` is
    never mutated.
    '''
    if isinstance(model_aliases, str): model_aliases = (model_aliases,)
    if isinstance(url_aliases, str): url_aliases = (url_aliases,)
    remaining = dict(extra)
    for alias in model_aliases:
        if (val := remaining.pop(alias, None)) is not None:
            if model is not None:
                raise ValueError(f'Conflicting parameters: `model` and `{alias}` are both provided. Use `model` only.')
            model = val
    for alias in url_aliases:
        if (val := remaining.pop(alias, None)) is not None:
            if url is not None:
                raise ValueError(f'Conflicting parameters: `url` and `{alias}` are both provided. Use `url` only.')
            url = val
    if source_registry is not None and model is not None:
        # `source_registry` may be a container (membership test) or a predicate.
        _in = (lambda x: x in source_registry) if hasattr(source_registry, '__contains__') else source_registry
        if _in(model):
            # `model` is really a source name. Moving it into `source` is only
            # safe when `source` is unset; previously the swap also ran when
            # `source` was set to a non-registry value (e.g. 'dynamic'), which
            # broke dynamic routing by overwriting it with the model name.
            if source is not None:
                raise ValueError(f'{model!r} is a recognised source name; pass it as source= and '
                                 f'do not also set source={source!r}.')
            source, model = model, None
    return model, source, url, remaining


def check_and_add_config(key, description, cfg=config):
    '''Register `key` on `cfg` with an empty-string default if not already present.

    Args:
        key: Config key; registered and looked up in lowercase.
        description: Human-readable description stored with the config entry.
        cfg: Target config object (defaults to the module-level `config`).

    Fix: the membership check previously consulted the global `config` instead
    of `cfg`, so a key missing from a custom `cfg` but already present in the
    global config was silently never added to `cfg`.
    '''
    if key.lower() not in cfg.get_all_configs():
        cfg.add(key, str, '', f'{key.upper()}', description=description)
Expand Down
19 changes: 11 additions & 8 deletions lazyllm/module/llms/onlinemodule/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from ...servermodule import LLMBase, StaticParams
from .base import OnlineChatModuleBase
from .base.utils import select_source_with_default_key
from .base.utils import select_source_with_default_key, resolve_online_params
from .dynamic_router import _DynamicSourceRouterMixin, dynamic_model_config_context


Expand All @@ -32,35 +32,38 @@ class OnlineChatModule(_DynamicSourceRouterMixin, LLMBase, metaclass=_ChatModule
_dynamic_module_slot = 'chat'
_dynamic_source_error = 'No source is configured for dynamic LLM source.'

def __new__(cls, model: str = None, source: str = None, base_url: str = None, stream: bool = True,
def __new__(cls, model: str = None, source: str = None, url: str = None, stream: bool = True,
return_trace: bool = False, skip_auth: bool = False, type: Optional[str] = None,
api_key: str = None, static_params: Optional[StaticParams] = None, id: Optional[str] = None,
name: Optional[str] = None, group_id: Optional[str] = None, dynamic_auth: bool = False, **kwargs):
if model in lazyllm.online.chat and source is None: source, model = model, source
model, source, url, kwargs = resolve_online_params(
model, source, url, kwargs, url_aliases='base_url', source_registry=lazyllm.online.chat)
Comment on lines +39 to +40
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

For consistency with AutoModel and other online modules, OnlineChatModule should also resolve model_name and base_model aliases into the model parameter. Currently, these remain in kwargs, which might lead to unexpected behavior or duplicate parameters being passed to the underlying supplier.

        model, source, url, kwargs = resolve_online_params(
            model, source, url, kwargs, model_aliases=('model_name', 'base_model'),
            url_aliases='base_url', source_registry=lazyllm.online.chat)

if cls._should_use_dynamic(source, dynamic_auth, skip_auth):
return super().__new__(cls)

if source is None and api_key is not None:
raise ValueError('No source is given but an api_key is provided.')
source, default_key = select_source_with_default_key(lazyllm.online.chat, source, LLMType.CHAT)
api_key = api_key if api_key is not None else default_key
if skip_auth and not base_url:
raise KeyError('base_url must be set for local serving.')
if skip_auth and not url:
raise ValueError('url must be set for local serving.')

type = cls._resolve_type_name(type, model, options=[LLMType.LLM, LLMType.CHAT, LLMType.VLM])
return getattr(lazyllm.online.chat, source)(
base_url=base_url, model=model, stream=stream, return_trace=return_trace,
base_url=url, model=model, stream=stream, return_trace=return_trace,
api_key=api_key, skip_auth=skip_auth, type=type, **kwargs)

def __init__(self, model: str = None, source: str = None, base_url: str = None, stream: bool = True,
def __init__(self, model: str = None, source: str = None, url: str = None, stream: bool = True,
return_trace: bool = False, skip_auth: bool = False, type: Optional[str] = None,
api_key: str = None, static_params: Optional[StaticParams] = None, id: Optional[str] = None,
name: Optional[str] = None, group_id: Optional[str] = None, dynamic_auth: bool = False, **kwargs):
model, source, url, kwargs = resolve_online_params(
model, source, url, kwargs, url_aliases='base_url', source_registry=lazyllm.online.chat)
normalized_type = self._resolve_type_name(type, model, options=[LLMType.LLM, LLMType.CHAT, LLMType.VLM])
_DynamicSourceRouterMixin.__init__(self, id=id, name=name, group_id=group_id, return_trace=return_trace)
LLMBase.__init__(self, stream=stream, type=normalized_type, static_params=static_params)
self._kwargs = kwargs
self._base_url = base_url
self._base_url = url
self._model_name = model
self._skip_auth = skip_auth
self._init_dynamic_auth(api_key, dynamic_auth)
Expand Down
8 changes: 5 additions & 3 deletions lazyllm/module/llms/onlinemodule/dynamic_router.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,14 +68,16 @@ def _get_supplier(self):
self._suppliers[supplier_key] = self._build_supplier(source, skip_auth)
return self._suppliers[supplier_key]

_URL_ALIASES = frozenset(('base_url', 'embed_url'))
_MODEL_ALIASES = frozenset(('model_name', 'embed_model_name', 'embed_name'))

    def _merge_dynamic_forward_kwargs(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
        '''Inject `url`/`model` defaults from the dynamic config bucket.

        Bucket values are used only as fallbacks: if the caller already
        supplied `url`/`model` directly, or via any recognised legacy alias
        (`_URL_ALIASES` / `_MODEL_ALIASES`), the bucket value is not applied.
        The input dict is never mutated; a copy is returned when merging.
        '''
        bucket = self.__class__._get_dynamic_bucket()
        if not bucket: return kwargs
        out = dict(kwargs)
        # Respect an explicit URL under any spelling before falling back to the bucket.
        if 'url' not in out and not (self._URL_ALIASES & out.keys()):
            if (u := bucket.get('url')) is not None: out['url'] = u
        # Same precedence rule for the model name and its aliases.
        if 'model' not in out and not (self._MODEL_ALIASES & out.keys()):
            if (m := bucket.get('model')) is not None: out['model'] = m
        return out

Expand Down
34 changes: 23 additions & 11 deletions lazyllm/module/llms/onlinemodule/embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from lazyllm.components.utils.downloader.model_downloader import LLMType
from lazyllm.common.bind import _MetaBind
from .base import OnlineEmbeddingModuleBase
from .base.utils import select_source_with_default_key
from .base.utils import select_source_with_default_key, resolve_online_params
from .supplier.doubao import DoubaoEmbed, DoubaoMultimodalEmbed
from .map_model_type import get_model_type
from .dynamic_router import _DynamicSourceRouterMixin, dynamic_model_config_context
Expand Down Expand Up @@ -52,36 +52,48 @@ def _create_supplier(source: str, type_name: str, embed_model_name: str, params:
return getattr(lazyllm.online.rerank, source)(**params)
raise ValueError('Unknown type of online embedding module.')

def __new__(cls, source: str = None, embed_url: str = None, embed_model_name: str = None,
    @staticmethod
    def _is_embed_source(name: str) -> bool:
        # True if `name` is a registered online embedding OR rerank source;
        # used as the source_registry predicate when resolving parameters.
        return name in lazyllm.online.embed or name in lazyllm.online.rerank

def __new__(cls, model: str = None, source: str = None, url: str = None,
return_trace: bool = False, api_key: str = None, dynamic_auth: bool = False,
skip_auth: bool = False, id: Optional[str] = None, name: Optional[str] = None,
group_id: Optional[str] = None, type: Optional[str] = None, batch_size: int = 32, **kwargs):
model, source, url, kwargs = resolve_online_params(
model, source, url, kwargs,
model_aliases=('embed_model_name', 'model_name'), url_aliases=('embed_url', 'base_url'),
source_registry=OnlineEmbeddingModule._is_embed_source)
if cls._should_use_dynamic(source, dynamic_auth, skip_auth):
return super().__new__(cls)
if source is None and api_key is not None:
raise ValueError('No source is given but an api_key is provided.')
type_name = OnlineEmbeddingModule._resolve_type_name(type, embed_model_name)
type_name = OnlineEmbeddingModule._resolve_type_name(type, model)
if type_name == 'embed':
source, default_key = select_source_with_default_key(lazyllm.online.embed, source, LLMType.EMBED)
elif type_name == 'rerank':
source, default_key = select_source_with_default_key(lazyllm.online.rerank, source, LLMType.RERANK)
else:
raise ValueError('Unknown type of online embedding module.')
api_key = api_key if api_key is not None else default_key
if skip_auth and not embed_url:
raise KeyError('embed_url must be set for local serving.')
params = {'embed_url': embed_url, 'embed_model_name': embed_model_name, 'return_trace': return_trace,
if skip_auth and not url:
raise ValueError('url must be set for local serving.')
params = {'embed_url': url, 'embed_model_name': model, 'return_trace': return_trace,
'batch_size': batch_size, 'api_key': api_key, 'skip_auth': skip_auth, **kwargs}
return OnlineEmbeddingModule._create_supplier(source, type_name, embed_model_name, params)
return OnlineEmbeddingModule._create_supplier(source, type_name, model, params)

def __init__(self, source: str = None, embed_url: str = None, embed_model_name: str = None,
def __init__(self, model: str = None, source: str = None, url: str = None,
return_trace: bool = False, api_key: str = None, dynamic_auth: bool = False,
skip_auth: bool = False, id: Optional[str] = None, name: Optional[str] = None,
group_id: Optional[str] = None, type: Optional[str] = None, batch_size: int = 32, **kwargs):
model, source, url, kwargs = resolve_online_params(
model, source, url, kwargs,
model_aliases=('embed_model_name', 'model_name'), url_aliases=('embed_url', 'base_url'),
source_registry=OnlineEmbeddingModule._is_embed_source)
_DynamicSourceRouterMixin.__init__(self, id=id, name=name, group_id=group_id, return_trace=return_trace)
self._embed_url = embed_url
self._embed_model_name = embed_model_name
self._type = type
self._embed_url = url
self._embed_model_name = model
self._type = OnlineEmbeddingModule._resolve_type_name(type, model)
self._skip_auth = skip_auth
self._kwargs = kwargs
self._batch_size = batch_size
Expand Down
Loading
Loading