4 changes: 2 additions & 2 deletions .github/workflows/main.yaml
@@ -18,14 +18,14 @@ concurrency:
  cancel-in-progress: true

env:
  WEAVIATE_125: 1.25.34
  WEAVIATE_126: 1.26.17
  WEAVIATE_127: 1.27.27
  WEAVIATE_128: 1.28.16
  WEAVIATE_129: 1.29.9
  WEAVIATE_130: 1.30.12
  WEAVIATE_131: 1.31.5
  WEAVIATE_132: 1.32.0-rc.1-098c594
  WEAVIATE_133: 1.33.0-dev-472b1e1.amd64

jobs:
  lint-and-format:
@@ -278,7 +278,7 @@ jobs:
      fail-fast: false
      matrix:
        version: [
          { extra: "1.62.0", server: $WEAVIATE_125},
          { extra: "1.62.0", server: $WEAVIATE_133},
          { extra: "1.66.2", server: $WEAVIATE_126},
          { extra: "1.70.0", server: $WEAVIATE_127},
          { extra: "1.59.5", server: $WEAVIATE_128},
38 changes: 38 additions & 0 deletions integration/test_vectors.py
@@ -684,6 +684,44 @@ def test_same_target_vector_multiple_input_combinations(
    assert sorted([obj.uuid for obj in objs]) == sorted([uuid2, uuid1])


@pytest.mark.parametrize(
    "near_vector,target_vector",
    [
        ({"first": [0, 0], "second": [[0, 1, 0], [0, 0, 1]]}, ["first", "second"]),
    ],
)
def test_multi_target_vector_return(
    collection_factory: CollectionFactory,
    near_vector: Dict[str, Union[Sequence[float], Sequence[Sequence[float]], _ListOfVectorsQuery]],
    target_vector: List[str],
) -> None:
    collection = collection_factory(
        properties=[],
        vector_config=[
            wvc.config.Configure.Vectors.self_provided(name="first"),
            wvc.config.Configure.Vectors.self_provided(name="second"),
        ],
    )

    if collection._connection._weaviate_version.is_lower_than(1, 33, 0):
        pytest.skip("Multi vector per target is not supported in versions lower than 1.33.0")

    uuid1 = collection.data.insert({}, vector={"first": [1, 0], "second": [0, 1, 0]})
    uuid2 = collection.data.insert({}, vector={"first": [0, 1], "second": [1, 0, 0]})
    objs = collection.query.near_vector(
        near_vector,
        target_vector=wvc.query.TargetVectors.sum(target_vector),
        return_metadata=wvc.query.MetadataQuery.full(),
    ).objects

    assert objs[0].uuid == uuid1
    assert objs[1].uuid == uuid2
    assert objs[0].metadata.distance == 2.0
    assert objs[0].metadata.multi_target_distances == {"first": [1.0], "second": [0.0, 1.0]}
    assert objs[1].metadata.distance == 3.0
    assert objs[1].metadata.multi_target_distances == {"first": [1.0], "second": [1.0, 1.0]}


def test_deprecated_syntax(collection_factory: CollectionFactory):
    dummy = collection_factory("dummy")
    if dummy._connection._weaviate_version.is_at_least(
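For orientation, here is a minimal usage sketch of what the new test exercises from the caller's side. The connection helper, collection name, and vectors are illustrative assumptions; the query surface (`near_vector`, `TargetVectors.sum`, `MetadataQuery.full`) and the new `multi_target_distances` field mirror the test above.

```python
# Illustrative sketch only: connection, collection name, and vectors are assumptions;
# the query calls mirror the integration test above.
import weaviate
import weaviate.classes as wvc

with weaviate.connect_to_local() as client:
    collection = client.collections.get("MultiVectorDemo")  # hypothetical collection

    # One query vector for target "first", two query vectors for target "second".
    result = collection.query.near_vector(
        {"first": [0, 0], "second": [[0, 1, 0], [0, 0, 1]]},
        target_vector=wvc.query.TargetVectors.sum(["first", "second"]),
        return_metadata=wvc.query.MetadataQuery.full(),
    )

    for obj in result.objects:
        # `distance` is the combined score across targets; `multi_target_distances`
        # breaks it down per named target, one value per query vector supplied.
        print(obj.uuid, obj.metadata.distance, obj.metadata.multi_target_distances)
```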
2 changes: 2 additions & 0 deletions weaviate/collections/classes/internal.py
@@ -69,6 +69,7 @@ class MetadataReturn:
    creation_time: Optional[datetime.datetime] = None
    last_update_time: Optional[datetime.datetime] = None
    distance: Optional[float] = None
    multi_target_distances: Optional[Dict[str, List[float]]] = None
    certainty: Optional[float] = None
    score: Optional[float] = None
    explain_score: Optional[str] = None
@@ -86,6 +87,7 @@ def _is_empty(self) -> bool:
                self.explain_score is None,
                self.is_consistent is None,
                self.rerank_score is None,
                self.multi_target_distances is None,
            ]
        )

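A small sketch of how the `MetadataReturn` change behaves, assuming (as the surrounding context lines suggest) that `_is_empty` returns `all(<field> is None, ...)`: metadata carrying only the new per-target distances is no longer treated as empty.

```python
# Sketch under the assumption that _is_empty() is all(<field> is None, ...),
# as the diff context above suggests.
from weaviate.collections.classes.internal import MetadataReturn

assert MetadataReturn()._is_empty()
assert not MetadataReturn(multi_target_distances={"first": [0.25, 0.75]})._is_empty()
```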
13 changes: 13 additions & 0 deletions weaviate/collections/queries/base_executor.py
@@ -112,6 +112,7 @@ def __extract_metadata_for_object(
        meta = MetadataReturn(
            distance=add_props.distance if add_props.distance_present else None,
            certainty=add_props.certainty if add_props.certainty_present else None,
            multi_target_distances=self.__extract_multi_target_distances(add_props),
            creation_time=(
                self.__retrieve_timestamp(add_props.creation_time_unix)
                if add_props.creation_time_unix_present
@@ -129,6 +130,18 @@
        )
        return meta

    def __extract_multi_target_distances(
        self,
        add_props: "search_get_pb2.MetadataResult",
    ) -> Optional[Dict[str, List[float]]]:
        if add_props.multi_target_distances is None or len(add_props.multi_target_distances) == 0:
            return None

        distances: Dict[str, List[float]] = {}
        for key, value in add_props.multi_target_distances.items():
            distances[key] = _ByteOps.decode_float32s(value.multi_target_distances)
        return distances

    def __extract_metadata_for_group_by_object(
        self,
        add_props: "search_get_pb2.MetadataResult",
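For readers unfamiliar with `_ByteOps.decode_float32s`, here is a rough standalone sketch of the decoding step the new helper relies on. The assumption is that each per-target distance list arrives as packed little-endian float32 bytes; the packing format is not stated in this PR.

```python
# Standalone sketch of the decoding assumed above: packed little-endian float32s
# turned back into a Python list. _ByteOps.decode_float32s is expected to do the
# equivalent; the byte layout is an assumption, not taken from this PR.
import struct
from typing import List

def decode_float32s(data: bytes) -> List[float]:
    # Unpack len(data) / 4 consecutive little-endian float32 values.
    return list(struct.unpack(f"<{len(data) // 4}f", data))

packed = struct.pack("<3f", 0.0, 1.0, 2.0)
assert decode_float32s(packed) == [0.0, 1.0, 2.0]
```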
2 changes: 1 addition & 1 deletion weaviate/collections/queries/near_vector/query/executor.py
@@ -359,7 +359,7 @@ def resp(
        ) -> QuerySearchReturnType[Properties, References, TProperties, TReferences]:
            return cast(
                Any,
                self._result_to_generative_return(
                self._result_to_query_return(
                    res,
                    _QueryOptions.from_input(
                        return_metadata,
49 changes: 49 additions & 0 deletions weaviate/proto/v1/v1613/v1/file_replication_pb2.py

Some generated files are not rendered by default.

116 changes: 116 additions & 0 deletions weaviate/proto/v1/v1613/v1/file_replication_pb2.pyi
@@ -0,0 +1,116 @@
from google.protobuf.internal import containers as _containers
from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Optional as _Optional, Union as _Union

DESCRIPTOR: _descriptor.FileDescriptor

class CompressionType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
    __slots__ = ()
    COMPRESSION_TYPE_UNSPECIFIED: _ClassVar[CompressionType]
    COMPRESSION_TYPE_GZIP: _ClassVar[CompressionType]
    COMPRESSION_TYPE_ZLIB: _ClassVar[CompressionType]
    COMPRESSION_TYPE_DEFLATE: _ClassVar[CompressionType]
COMPRESSION_TYPE_UNSPECIFIED: CompressionType
COMPRESSION_TYPE_GZIP: CompressionType
COMPRESSION_TYPE_ZLIB: CompressionType
COMPRESSION_TYPE_DEFLATE: CompressionType

class PauseFileActivityRequest(_message.Message):
    __slots__ = ("index_name", "shard_name", "schema_version")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    SCHEMA_VERSION_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    schema_version: int
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ..., schema_version: _Optional[int] = ...) -> None: ...

class PauseFileActivityResponse(_message.Message):
    __slots__ = ("index_name", "shard_name")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ...) -> None: ...

class ResumeFileActivityRequest(_message.Message):
    __slots__ = ("index_name", "shard_name")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ...) -> None: ...

class ResumeFileActivityResponse(_message.Message):
    __slots__ = ("index_name", "shard_name")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ...) -> None: ...

class ListFilesRequest(_message.Message):
    __slots__ = ("index_name", "shard_name")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ...) -> None: ...

class ListFilesResponse(_message.Message):
    __slots__ = ("index_name", "shard_name", "file_names")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    FILE_NAMES_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    file_names: _containers.RepeatedScalarFieldContainer[str]
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ..., file_names: _Optional[_Iterable[str]] = ...) -> None: ...

class GetFileMetadataRequest(_message.Message):
    __slots__ = ("index_name", "shard_name", "file_name")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    FILE_NAME_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    file_name: str
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ..., file_name: _Optional[str] = ...) -> None: ...

class FileMetadata(_message.Message):
    __slots__ = ("index_name", "shard_name", "file_name", "size", "crc32")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    FILE_NAME_FIELD_NUMBER: _ClassVar[int]
    SIZE_FIELD_NUMBER: _ClassVar[int]
    CRC32_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    file_name: str
    size: int
    crc32: int
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ..., file_name: _Optional[str] = ..., size: _Optional[int] = ..., crc32: _Optional[int] = ...) -> None: ...

class GetFileRequest(_message.Message):
    __slots__ = ("index_name", "shard_name", "file_name", "compression")
    INDEX_NAME_FIELD_NUMBER: _ClassVar[int]
    SHARD_NAME_FIELD_NUMBER: _ClassVar[int]
    FILE_NAME_FIELD_NUMBER: _ClassVar[int]
    COMPRESSION_FIELD_NUMBER: _ClassVar[int]
    index_name: str
    shard_name: str
    file_name: str
    compression: CompressionType
    def __init__(self, index_name: _Optional[str] = ..., shard_name: _Optional[str] = ..., file_name: _Optional[str] = ..., compression: _Optional[_Union[CompressionType, str]] = ...) -> None: ...

class FileChunk(_message.Message):
    __slots__ = ("offset", "data", "eof")
    OFFSET_FIELD_NUMBER: _ClassVar[int]
    DATA_FIELD_NUMBER: _ClassVar[int]
    EOF_FIELD_NUMBER: _ClassVar[int]
    offset: int
    data: bytes
    eof: bool
    def __init__(self, offset: _Optional[int] = ..., data: _Optional[bytes] = ..., eof: bool = ...) -> None: ...
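The stub above only declares message and enum types. Purely as an illustration, and not something this PR wires into the public client API, messages generated from it could be constructed as follows; the field and enum names come from the stub, while the values and the usage itself are hypothetical.

```python
# Hypothetical usage of the generated messages; field and enum names come from the
# stub above, but nothing in this PR calls them like this.
from weaviate.proto.v1.v1613.v1 import file_replication_pb2 as fr

request = fr.GetFileRequest(
    index_name="Articles",        # illustrative index/shard/file names
    shard_name="shard-0",
    file_name="segment-001.db",
    compression=fr.COMPRESSION_TYPE_GZIP,
)

chunk = fr.FileChunk(offset=0, data=b"\x00\x01", eof=True)
print(request.compression, chunk.eof)
```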