Don't default to verbose=False on test datasets
nvoxland committed Aug 15, 2023
1 parent 63dee3c commit 62046db
Showing 3 changed files with 29 additions and 53 deletions.
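
The substantive change is in deeplake/tests/dataset_fixtures.py: the test fixtures no longer inject verbose=False into deeplake.dataset(...) when the caller did not pass it, so tests now run under deeplake's own default for the flag. The changes in the two test_pytorch.py files are formatting-only rewraps plus a comment-style fix. A minimal before/after sketch of the fixture change (the kwargs handling is taken from the diff below; the function names here are illustrative and the lock_enabled handling is omitted for brevity):

import deeplake

# Before this commit: the fixture silently forced verbose=False
# unless the test passed its own value.
def generate_local_ds_old(local_path, **kwargs):
    if kwargs.get("verbose") is None:
        kwargs["verbose"] = False
    return deeplake.dataset(local_path, **kwargs)

# After this commit: deeplake.dataset's own default for verbose applies;
# tests that care can still pass verbose=... explicitly.
def generate_local_ds_new(local_path, **kwargs):
    return deeplake.dataset(local_path, **kwargs)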
59 changes: 25 additions & 34 deletions deeplake/enterprise/test_pytorch.py
@@ -133,9 +133,7 @@ def test_pytorch_transform(local_auth_ds):
         ds.create_tensor("image", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
         ds.image.extend(([i * np.ones((i + 1, i + 1)) for i in range(16)]))
         ds.checkout("alt", create=True)
-        ds.create_tensor(
-            "image2", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE
-        )
+        ds.create_tensor("image2", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
         ds.image2.extend(np.array([i * np.ones((12, 12)) for i in range(16)]))
 
     dl = (
@@ -174,17 +172,15 @@ def test_pytorch_transform_dict(local_auth_ds):
     with ds:
         ds.create_tensor("image", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
         ds.image.extend(([i * np.ones((i + 1, i + 1)) for i in range(16)]))
-        ds.create_tensor(
-            "image2", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE
-        )
+        ds.create_tensor("image2", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
         ds.image2.extend(np.array([i * np.ones((12, 12)) for i in range(16)]))
-        ds.create_tensor(
-            "image3", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE
-        )
+        ds.create_tensor("image3", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
         ds.image3.extend(np.array([i * np.ones((12, 12)) for i in range(16)]))
 
     dl = (
-        ds.dataloader().transform({"image": double, "image2": None}).pytorch(num_workers=0)
+        ds.dataloader()
+        .transform({"image": double, "image2": None})
+        .pytorch(num_workers=0)
     )
 
     assert len(dl.dataset) == 16
@@ -251,7 +247,9 @@ def test_custom_tensor_order(local_auth_ds):
     with pytest.raises(TensorDoesNotExistError):
         dl = ds.dataloader().pytorch(tensors=["c", "d", "e"], num_workers=0)
 
-    dl = ds.dataloader().pytorch(tensors=["c", "d", "a"], return_index=False, num_workers=0)
+    dl = ds.dataloader().pytorch(
+        tensors=["c", "d", "a"], return_index=False, num_workers=0
+    )
 
     for i, batch in enumerate(dl):
         c1, d1, a1 = batch
@@ -284,12 +282,8 @@ def test_custom_tensor_order(local_auth_ds):
 def test_readonly_with_two_workers(local_auth_ds):
     ds = local_auth_ds
     with ds:
-        ds.create_tensor(
-            "images", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE
-        )
-        ds.create_tensor(
-            "labels", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE
-        )
+        ds.create_tensor("images", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
+        ds.create_tensor("labels", max_chunk_size=PYTORCH_TESTS_MAX_CHUNK_SIZE)
         ds.images.extend(np.ones((10, 12, 12)))
         ds.labels.extend(np.ones(10))
 
@@ -327,17 +321,16 @@ def test_groups(local_auth_ds, compressed_image_paths):
     img1 = deeplake.read(compressed_image_paths["jpeg"][0])
     img2 = deeplake.read(compressed_image_paths["png"][0])
     with ds:
-        ds.create_tensor(
-            "images/jpegs/cats", htype="image", sample_compression="jpeg"
-        )
-        ds.create_tensor(
-            "images/pngs/flowers", htype="image", sample_compression="png"
-        )
+        ds.create_tensor("images/jpegs/cats", htype="image", sample_compression="jpeg")
+        ds.create_tensor("images/pngs/flowers", htype="image", sample_compression="png")
         for _ in range(10):
             ds.images.jpegs.cats.append(img1)
             ds.images.pngs.flowers.append(img2)
 
-    another_ds = deeplake.dataset(ds.path, token=ds.token,)
+    another_ds = deeplake.dataset(
+        ds.path,
+        token=ds.token,
+    )
     dl = another_ds.dataloader().pytorch(return_index=False, num_workers=0)
     for i, (cat, flower) in enumerate(dl):
         assert cat[0].shape == another_ds.images.jpegs.cats[i].numpy().shape
@@ -487,9 +480,7 @@ def test_pytorch_decode(local_auth_ds, compressed_image_paths, compression):
         ds.image.extend(
             np.array([i * np.ones((10, 10, 3), dtype=np.uint8) for i in range(5)])
         )
-        ds.image.extend(
-            [deeplake.read(compressed_image_paths["jpeg"][0])] * 5
-        )
+        ds.image.extend([deeplake.read(compressed_image_paths["jpeg"][0])] * 5)
 
     ptds = ds.dataloader().pytorch(decode_method={"image": "tobytes"}, num_workers=0)
 
@@ -551,11 +542,7 @@ def test_indexes(local_auth_ds, num_workers):
         for i in range(8):
             ds.xyz.append(i * np.ones((2, 2)))
 
-    ptds = (
-        ds.dataloader()
-        .batch(4)
-        .pytorch(num_workers=num_workers, return_index=True)
-    )
+    ptds = ds.dataloader().batch(4).pytorch(num_workers=num_workers, return_index=True)
     if shuffle:
         ptds = ptds.shuffle()
 
@@ -596,7 +583,9 @@ def test_indexes_transform(local_auth_ds, num_workers):
 
 @requires_torch
 @requires_libdeeplake
-@pytest.mark.parametrize("num_workers", [pytest.param(0), pytest.param(2, marks=pytest.mark.skip)])
+@pytest.mark.parametrize(
+    "num_workers", [pytest.param(0), pytest.param(2, marks=pytest.mark.skip)]
+)
 @pytest.mark.slow
 def test_indexes_transform_dict(local_auth_ds, num_workers):
     ds = local_auth_ds
@@ -861,7 +850,9 @@ def test_pytorch_data_decode(local_auth_ds, cat_path):
     ptds = (
         ds.dataloader()
         .transform(identity)
-        .pytorch(decode_method=decode_method, collate_fn=identity_collate, num_workers=0)
+        .pytorch(
+            decode_method=decode_method, collate_fn=identity_collate, num_workers=0
+        )
     )
     for i, batch in enumerate(ptds):
         sample = batch[0]
5 changes: 3 additions & 2 deletions deeplake/integrations/tests/test_pytorch.py
@@ -147,6 +147,7 @@ def test_pytorch_small(local_ds):
 @requires_torch
 def test_pytorch_transform(local_ds):
     import torch
+
     ds = local_ds
 
     with ds:
@@ -397,8 +398,8 @@ def test_corrupt_dataset(local_ds, corrupt_image_paths, compressed_image_paths):
     for (batch,) in dl:
         num_batches += 1
         num_samples += len(batch)
-    #TODO: Why Changed? assert num_samples == 30
-    #TODO: Why Changed? assert num_batches == 15
+    # TODO: Why Changed? assert num_samples == 30
+    # TODO: Why Changed? assert num_batches == 15
 
 
 @requires_torch
18 changes: 1 addition & 17 deletions deeplake/tests/dataset_fixtures.py
@@ -61,7 +61,7 @@
 
 @pytest.fixture
 def memory_ds(memory_path):
-    return deeplake.dataset(memory_path, verbose=False)
+    return deeplake.dataset(memory_path)
 
 
 @pytest.fixture
@@ -79,8 +79,6 @@ def local_ds_generator(local_path):
     def generate_local_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(local_path, **kwargs)
 
@@ -92,8 +90,6 @@ def local_auth_ds_generator(local_path, hub_cloud_dev_token):
     def generate_local_auth_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(local_path, token=hub_cloud_dev_token, **kwargs)
 
@@ -110,8 +106,6 @@ def s3_ds_generator(s3_path):
     def generate_s3_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(s3_path, **kwargs)
 
@@ -128,8 +122,6 @@ def gdrive_ds_generator(gdrive_path, gdrive_creds):
     def generate_gdrive_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(gdrive_path, creds=gdrive_creds, **kwargs)
 
@@ -146,8 +138,6 @@ def gcs_ds_generator(gcs_path, gcs_creds):
     def generate_gcs_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(gcs_path, creds=gcs_creds, **kwargs)
 
@@ -164,8 +154,6 @@ def azure_ds_generator(azure_path):
     def generate_azure_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(azure_path, **kwargs)
 
@@ -182,8 +170,6 @@ def hub_cloud_ds_generator(hub_cloud_path, hub_cloud_dev_token):
    def generate_hub_cloud_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         return deeplake.dataset(hub_cloud_path, token=hub_cloud_dev_token, **kwargs)
 
@@ -195,8 +181,6 @@ def hub_cloud_gcs_ds_generator(gcs_path, gcs_creds, hub_cloud_dev_token):
     def generate_hub_cloud_gcs_ds(**kwargs):
         if kwargs.get("lock_enabled") is None:
             kwargs["lock_enabled"] = False
-        if kwargs.get("verbose") is None:
-            kwargs["verbose"] = False
 
         ds = deeplake.dataset(gcs_path, creds=gcs_creds, **kwargs)
         ds.connect(
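
For context, these generator fixtures are consumed by requesting the fixture and calling it, with any keyword arguments flowing straight into deeplake.dataset. A hypothetical usage sketch (the test body is illustrative, not part of this commit):

def test_reopen(local_ds_generator):
    ds = local_ds_generator()  # verbose is now left to deeplake's default
    ds.create_tensor("labels")
    ds = local_ds_generator(verbose=False)  # opting out explicitly still works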
