Added sonar exclusions/inclusions for tests #11006
GitHub Actions / JUnit Test Report
failed
Sep 5, 2023 in 0s
4824 tests run, 2484 passed, 2339 skipped, 1 failed.
Annotations
Check failure on line 1697 in deeplake/core/vectorstore/test_deeplake_vectorstore.py
github-actions / JUnit Test Report
test_deeplake_vectorstore.test_multiple_embeddings
Failed: Timeout >300.0s
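Editor's note: this failure text matches what the pytest-timeout plugin raises when a test exceeds its time budget. A minimal sketch of how such a limit is typically declared, assuming pytest-timeout is in use; the 300-second figure is taken from the message above, and the project's actual configuration may set it globally rather than per test:

import pytest

@pytest.mark.timeout(300)  # hypothetical per-test limit: fail with "Timeout >300.0s" after 300 s
def test_example():
    ...

A suite-wide default can equivalently be set with timeout = 300 in pytest.ini or in the [tool.pytest.ini_options] table of pyproject.toml.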
Raw output
local_path = './hub_pytest/test_deeplake_vectorstore/test_multiple_embeddings'

    @pytest.mark.slow
    def test_multiple_embeddings(local_path):
        vector_store = DeepLakeVectorStore(
            path=local_path,
            overwrite=True,
            tensor_params=[
                {
                    "name": "text",
                    "htype": "text",
                },
                {
                    "name": "embedding_1",
                    "htype": "embedding",
                },
                {
                    "name": "embedding_2",
                    "htype": "embedding",
                },
            ],
        )
        with pytest.raises(AssertionError):
            vector_store.add(
                text=texts,
                embedding_function=[embedding_fn, embedding_fn],
                embedding_data=[texts],
                embedding_tensor=["embedding_1", "embedding_2"],
            )
        with pytest.raises(AssertionError):
            vector_store.add(
                text=texts,
                embedding_function=[embedding_fn, embedding_fn],
                embedding_data=[texts, texts],
                embedding_tensor=["embedding_1"],
            )
        with pytest.raises(AssertionError):
            vector_store.add(
                text=texts,
                embedding_function=[embedding_fn],
                embedding_data=[texts, texts],
                embedding_tensor=["embedding_1", "embedding_2"],
            )
        vector_store.add(
            text=texts,
            embedding_function=[embedding_fn, embedding_fn],
            embedding_data=[texts, texts],
            embedding_tensor=["embedding_1", "embedding_2"],
        )
        vector_store.add(
            text=texts, embedding_1=(embedding_fn, texts), embedding_2=(embedding_fn, texts)
        )
        vector_store.add(
            text=texts,
            embedding_function=embedding_fn,
            embedding_data=[texts, texts],
            embedding_tensor=["embedding_1", "embedding_2"],
        )
        # test with initial embedding function
        vector_store.embedding_function = embedding_fn
        vector_store.add(
            text=texts,
            embedding_data=[texts, texts],
            embedding_tensor=["embedding_1", "embedding_2"],
        )
        number_of_data = 1000
        _texts, embeddings, ids, metadatas, _ = utils.create_data(
            number_of_data=number_of_data, embedding_dim=EMBEDDING_DIM
        )
        vector_store.add(
            text=25 * _texts,
            embedding_function=[embedding_fn3, embedding_fn3],
            embedding_data=[25 * _texts, 25 * _texts],
            embedding_tensor=["embedding_1", "embedding_2"],
        )
>       vector_store.add(
            text=25 * _texts,
            embedding_1=(embedding_fn3, 25 * _texts),
            embedding_2=(embedding_fn3, 25 * _texts),
        )
deeplake/core/vectorstore/test_deeplake_vectorstore.py:1697:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake/core/vectorstore/deeplake_vectorstore.py:277: in add
    dataset_utils.extend_or_ingest_dataset(
deeplake/core/vectorstore/vector_search/dataset/dataset.py:460: in extend_or_ingest_dataset
    extend(
deeplake/core/vectorstore/vector_search/dataset/dataset.py:445: in extend
    dataset.extend(processed_tensors)
deeplake/core/dataset/dataset.py:3131: in extend
    self.append(
deeplake/util/invalid_view_op.py:22: in inner
    return callable(x, *args, **kwargs)
deeplake/core/dataset/dataset.py:3173: in append
    self._append_or_extend(
deeplake/core/dataset/dataset.py:3047: in _append_or_extend
    tensor.append(v)
deeplake/util/invalid_view_op.py:22: in inner
    return callable(x, *args, **kwargs)
deeplake/core/tensor.py:404: in append
    self.extend([sample], progressbar=False)
deeplake/util/invalid_view_op.py:22: in inner
    return callable(x, *args, **kwargs)
deeplake/core/tensor.py:316: in extend
    self.chunk_engine.extend(
deeplake/core/chunk_engine.py:1080: in extend
    self._extend(samples, progressbar, pg_callback=pg_callback)
deeplake/core/chunk_engine.py:1015: in _extend
    start_chunk=self.last_appended_chunk(allow_copy=False),
deeplake/core/chunk_engine.py:555: in last_appended_chunk
    chunk = self.get_chunk(chunk_key)
deeplake/core/chunk_engine.py:577: in get_chunk
    if not partial_chunk_bytes and isinstance(chunk.data_bytes, PartialReader):
deeplake/core/chunk/chunk_compressed_chunk.py:571: in data_bytes
    self._compress()
deeplake/core/chunk/chunk_compressed_chunk.py:562: in _compress
    self._data_bytes = compress_bytes(self.decompressed_bytes, self.compression)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
buffer = b'0163c264-4c33-11ee-af92-7715898d59320163c265-4c33-11ee-af92-7715898d59320163c266-4c33-11ee-af92-7715898d59320163c267...15898d593257c12881-4c33-11ee-af92-7715898d593257c12882-4c33-11ee-af92-7715898d593257c12883-4c33-11ee-af92-7715898d5932'
compression = 'lz4'

    def compress_bytes(
        buffer: Union[bytes, memoryview], compression: Optional[str]
    ) -> bytes:
        if not buffer:
            return b""
        if compression == "lz4":
            if not buffer:
                return b""
>           return numcodecs.lz4.compress(buffer)
E           Failed: Timeout >300.0s

deeplake/core/compression.py:162: Failed
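Editor's note: the stack ends in Deep Lake's LZ4 path, where a chunk of concatenated UUID strings is re-compressed via numcodecs. A minimal sketch for probing that call in isolation, assuming the hang is reproducible outside the vector store; the buffer construction and size below are illustrative guesses based on the truncated buffer value above, not the exact failing payload:

import uuid
import numcodecs.lz4

# Approximate the failing buffer: tens of thousands of concatenated
# time-based (version 1) UUID strings, as seen in the locals above.
# The count is an assumption, not the real chunk size.
buffer = "".join(str(uuid.uuid1()) for _ in range(25_000)).encode()

compressed = numcodecs.lz4.compress(buffer)  # the call that exceeded 300 s in CI
print(f"{len(buffer)} -> {len(compressed)} bytes")

If this returns promptly, the 300 s budget was more plausibly consumed by the cumulative work of the preceding add calls (each ingesting 25 * 1000 rows) than by a single slow compression.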