Skip to content

Commit

Permalink
cleanup tests
Browse files Browse the repository at this point in the history
  • Loading branch information
malmans2 committed Oct 26, 2023
1 parent 959bdbf commit a0c7ec8
Show file tree
Hide file tree
Showing 7 changed files with 62 additions and 57 deletions.
9 changes: 6 additions & 3 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,8 @@ def set_cache(
request: pytest.FixtureRequest,
s3_server: ThreadedMotoServer,
) -> Iterator[str]:
if hasattr(request, "param") and request.param.lower() == "cads":
param = getattr(request, "param", "file")
if param.lower() == "cads":
test_bucket_name = "test-bucket"
client_kwargs = create_test_bucket(s3_server, test_bucket_name)
with config.set(
Expand All @@ -53,10 +54,12 @@ def set_cache(
cache_files_urlpath=f"s3://{test_bucket_name}",
cache_files_storage_options={"client_kwargs": client_kwargs},
):
yield request.param
else:
yield "cads"
elif param.lower() in ("file", "local"):
with config.set(
cache_db_urlpath="sqlite:///" + str(tmp_path / "cacholote.db"),
cache_files_urlpath=str(tmp_path / "cache_files"),
):
yield "file"
else:
raise ValueError(f"{param=}")
14 changes: 7 additions & 7 deletions tests/test_01_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
does_not_raise = contextlib.nullcontext


def test_change_cache_db_urlpath(tmpdir: pathlib.Path) -> None:
def test_change_cache_db_urlpath(tmp_path: pathlib.Path) -> None:
old_db = config.get().cache_db_urlpath
new_db = "sqlite:///" + str(tmpdir / "dummy.db")
new_db = "sqlite:///" + str(tmp_path / "dummy.db")

with config.set(cache_db_urlpath=new_db):
assert str(config.get().engine.url) == config.get().cache_db_urlpath == new_db
Expand All @@ -33,18 +33,18 @@ def test_change_cache_db_urlpath(tmpdir: pathlib.Path) -> None:
],
)
def test_set_engine_and_sessionmaker(
tmpdir: pathlib.Path, key: str, reset: bool
tmp_path: pathlib.Path, key: str, reset: bool
) -> None:
old_engine = config.get().engine
old_sessionmaker = config.get().sessionmaker

kwargs: Dict[str, Any] = {}
if key == "cache_db_urlpath":
kwargs[key] = "sqlite:///" + str(tmpdir / "dummy.db")
kwargs[key] = "sqlite:///" + str(tmp_path / "dummy.db")
elif key == "create_engine_kwargs":
kwargs[key] = {"pool_recycle": 60}
elif key == "cache_files_urlpath":
kwargs[key] = str(tmpdir / "dummy_files")
kwargs[key] = str(tmp_path / "dummy_files")
else:
raise ValueError

Expand All @@ -67,13 +67,13 @@ def test_set_engine_and_sessionmaker(
assert config.get().sessionmaker is old_sessionmaker


def test_env_variables(tmpdir: pathlib.Path) -> None:
def test_env_variables(tmp_path: pathlib.Path) -> None:
# env variables
old_environ = dict(os.environ)
os.environ["CACHOLOTE_CACHE_DB_URLPATH"] = "sqlite://"

# env file
dotenv_path = tmpdir / ".env.cacholote"
dotenv_path = tmp_path / ".env.cacholote"
with dotenv_path.open("w") as f:
f.write("CACHOLOTE_IO_DELETE_ORIGINAL=TRUE")

Expand Down
10 changes: 5 additions & 5 deletions tests/test_02_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,16 @@ def test_hexdigestify() -> None:
assert res == expected


def test_get_cache_files(tmpdir: pathlib.Path) -> None:
def test_get_cache_files(tmp_path: pathlib.Path) -> None:
assert utils.get_cache_files_fs_dirname() == (
fsspec.filesystem("file"),
str(tmpdir / "cache_files"),
str(tmp_path / "cache_files"),
)


def test_copy_buffered_file(tmpdir: pathlib.Path) -> None:
src = tmpdir / "test0"
dst = tmpdir / "test1"
def test_copy_buffered_file(tmp_path: pathlib.Path) -> None:
src = tmp_path / "test0"
dst = tmp_path / "test1"
with open(src, "wb") as f:
f.write(b"test")
with open(src, "rb") as f_src, open(dst, "wb") as f_dst:
Expand Down
6 changes: 3 additions & 3 deletions tests/test_30_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def cached_error() -> None:
raise ValueError("test error")


def test_cacheable(tmpdir: pathlib.Path) -> None:
def test_cacheable(tmp_path: pathlib.Path) -> None:
con = config.get().engine.raw_connection()
cur = con.cursor()

Expand Down Expand Up @@ -60,7 +60,7 @@ def test_cacheable(tmpdir: pathlib.Path) -> None:


@pytest.mark.parametrize("raise_all_encoding_errors", [True, False])
def test_encode_errors(tmpdir: pathlib.Path, raise_all_encoding_errors: bool) -> None:
def test_encode_errors(tmp_path: pathlib.Path, raise_all_encoding_errors: bool) -> None:
config.set(raise_all_encoding_errors=raise_all_encoding_errors)

cfunc = cache.cacheable(func)
Expand Down Expand Up @@ -145,7 +145,7 @@ def test_expiration_and_return_cache_entry() -> None:
assert third.expiration == datetime.datetime(9999, 12, 31)


def test_tag(tmpdir: pathlib.Path) -> None:
def test_tag(tmp_path: pathlib.Path) -> None:
con = config.get().engine.raw_connection()
cur = con.cursor()

Expand Down
8 changes: 4 additions & 4 deletions tests/test_40_xarray_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,11 @@ def get_grib_ds() -> "xr.Dataset":
@pytest.mark.filterwarnings(
"ignore:distutils Version classes are deprecated. Use packaging.version instead."
)
def test_dictify_xr_dataset(tmpdir: pathlib.Path) -> None:
def test_dictify_xr_dataset(tmp_path: pathlib.Path) -> None:
pytest.importorskip("netCDF4")

# Define readonly dir
readonly_dir = str(tmpdir / "readonly")
readonly_dir = str(tmp_path / "readonly")
fsspec.filesystem("file").mkdir(readonly_dir)
config.set(cache_files_urlpath_readonly=readonly_dir)

Expand All @@ -38,7 +38,7 @@ def test_dictify_xr_dataset(tmpdir: pathlib.Path) -> None:
# Check dict
actual = extra_encoders.dictify_xr_dataset(ds)
href = f"{readonly_dir}/247fd17e087ae491996519c097e70e48.nc"
local_path = f"{tmpdir}/cache_files/247fd17e087ae491996519c097e70e48.nc"
local_path = f"{tmp_path}/cache_files/247fd17e087ae491996519c097e70e48.nc"
expected = {
"type": "python_call",
"callable": "cacholote.extra_encoders:decode_xr_dataset",
Expand Down Expand Up @@ -77,7 +77,7 @@ def test_dictify_xr_dataset(tmpdir: pathlib.Path) -> None:
"ignore:distutils Version classes are deprecated. Use packaging.version instead."
)
def test_xr_cacheable(
tmpdir: pathlib.Path,
tmp_path: pathlib.Path,
xarray_cache_type: str,
ext: str,
importorskip: str,
Expand Down
28 changes: 14 additions & 14 deletions tests/test_50_io_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,23 +21,23 @@ def cached_open(*args: Any, **kwargs: Any) -> fsspec.spec.AbstractBufferedFile:


@pytest.mark.parametrize("io_delete_original", [True, False])
def test_dictify_io_object(tmpdir: pathlib.Path, io_delete_original: bool) -> None:
def test_dictify_io_object(tmp_path: pathlib.Path, io_delete_original: bool) -> None:
# Define readonly dir
readonly_dir = str(tmpdir / "readonly")
readonly_dir = str(tmp_path / "readonly")
fsspec.filesystem("file").mkdir(readonly_dir)
config.set(
io_delete_original=io_delete_original, cache_files_urlpath_readonly=readonly_dir
)

# Create file
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").pipe_file(tmpfile, b"test")
tmp_hash = f"{fsspec.filesystem('file').checksum(tmpfile):x}"

# Check dict and cached file
actual = extra_encoders.dictify_io_object(open(tmpfile, "rb"))
href = f"{readonly_dir}/{tmp_hash}.txt"
local_path = f"{tmpdir}/cache_files/{tmp_hash}.txt"
local_path = f"{tmp_path}/cache_files/{tmp_hash}.txt"
expected = {
"type": "python_call",
"callable": "cacholote.extra_encoders:decode_io_object",
Expand All @@ -63,11 +63,11 @@ def test_dictify_io_object(tmpdir: pathlib.Path, io_delete_original: bool) -> None:

@pytest.mark.parametrize("obj", [io.BytesIO(b"test"), io.StringIO("test")])
def test_dictify_bytes_io_object(
tmpdir: pathlib.Path, obj: Union[io.BytesIO, io.StringIO]
tmp_path: pathlib.Path, obj: Union[io.BytesIO, io.StringIO]
) -> None:
actual = extra_encoders.dictify_io_object(obj)["args"]
obj_hash = hashlib.md5(f"{hash(obj)}".encode()).hexdigest()
local_path = f"{tmpdir}/cache_files/{obj_hash}"
local_path = f"{tmp_path}/cache_files/{obj_hash}"
type = (
"text/plain"
if importlib.util.find_spec("magic")
Expand All @@ -89,7 +89,7 @@ def test_dictify_bytes_io_object(

@pytest.mark.parametrize("set_cache", ["file", "cads"], indirect=True)
def test_copy_from_http_to_cache(
tmpdir: pathlib.Path,
tmp_path: pathlib.Path,
httpserver: pytest_httpserver.HTTPServer,
set_cache: str,
) -> None:
Expand Down Expand Up @@ -120,7 +120,7 @@ def test_copy_from_http_to_cache(


def test_io_corrupted_files(
tmpdir: pathlib.Path, httpserver: pytest_httpserver.HTTPServer
tmp_path: pathlib.Path, httpserver: pytest_httpserver.HTTPServer
) -> None:
# http server
httpserver.expect_request("/test").respond_with_data(b"test")
Expand Down Expand Up @@ -155,13 +155,13 @@ def test_io_corrupted_files(
),
)
def test_io_locker(
tmpdir: pathlib.Path,
tmp_path: pathlib.Path,
lock_timeout: Optional[float],
raises_or_warns: contextlib.nullcontext, # type: ignore[type-arg]
) -> None:
config.set(lock_timeout=lock_timeout, raise_all_encoding_errors=True)
# Create tmpfile
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").touch(tmpfile)

# Acquire lock
Expand All @@ -176,20 +176,20 @@ def test_io_locker(


@pytest.mark.parametrize("set_cache", ["cads"], indirect=True)
def test_content_type(tmpdir: pathlib.Path, set_cache: str) -> None:
tmpfile = str(tmpdir / "test.grib")
def test_content_type(tmp_path: pathlib.Path, set_cache: str) -> None:
tmpfile = str(tmp_path / "test.grib")
fsspec.filesystem("file").touch(tmpfile)
fs, _ = utils.get_cache_files_fs_dirname()
cached_grib = cached_open(tmpfile)
assert fs.info(cached_grib)["ContentType"] == "application/x-grib"


@pytest.mark.parametrize("set_cache", ["cads"], indirect=True)
def test_io_logging(capsys: pytest.CaptureFixture[str], tmpdir: pathlib.Path) -> None:
def test_io_logging(capsys: pytest.CaptureFixture[str], tmp_path: pathlib.Path) -> None:
config.set(logger=structlog.get_logger(), io_delete_original=True)

# Cache file
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").touch(tmpfile)
cached_file = cached_open(tmpfile)
captured = iter(capsys.readouterr().out.splitlines())
Expand Down
44 changes: 23 additions & 21 deletions tests/test_60_clean.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def open_url(url: pathlib.Path) -> fsspec.spec.AbstractBufferedFile:
@pytest.mark.parametrize("method", ["LRU", "LFU"])
@pytest.mark.parametrize("set_cache", ["file", "cads"], indirect=True)
def test_clean_cache_files(
tmpdir: pathlib.Path,
tmp_path: pathlib.Path,
set_cache: str,
method: Literal["LRU", "LFU"],
) -> None:
Expand All @@ -32,12 +32,12 @@ def test_clean_cache_files(

# Create files
for algorithm in ("LRU", "LFU"):
filename = tmpdir / f"{algorithm}.txt"
filename = tmp_path / f"{algorithm}.txt"
fsspec.filesystem("file").pipe_file(filename, b"1")

# Copy to cache
(lru_path,) = {open_url(tmpdir / "LRU.txt").path for _ in range(2)}
lfu_path = open_url(tmpdir / "LFU.txt").path
(lru_path,) = {open_url(tmp_path / "LRU.txt").path for _ in range(2)}
lfu_path = open_url(tmp_path / "LFU.txt").path
assert set(fs.ls(dirname)) == {lru_path, lfu_path}

# Do not clean
Expand All @@ -53,11 +53,13 @@ def test_clean_cache_files(


@pytest.mark.parametrize("delete_unknown_files", [True, False])
def test_delete_unknown_files(tmpdir: pathlib.Path, delete_unknown_files: bool) -> None:
def test_delete_unknown_files(
tmp_path: pathlib.Path, delete_unknown_files: bool
) -> None:
fs, dirname = utils.get_cache_files_fs_dirname()

# Create file
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").pipe_file(tmpfile, b"1")

# Copy to cache
Expand Down Expand Up @@ -94,12 +96,12 @@ def test_delete_unknown_dirs(

@pytest.mark.parametrize("lock_validity_period", [None, 0])
def test_clean_locked_files(
tmpdir: pathlib.Path, lock_validity_period: Optional[float]
tmp_path: pathlib.Path, lock_validity_period: Optional[float]
) -> None:
fs, dirname = utils.get_cache_files_fs_dirname()

# Create file
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").pipe_file(tmpfile, b"1")

# Copy to cache
Expand Down Expand Up @@ -134,7 +136,7 @@ def test_clean_locked_files(
],
)
def test_clean_tagged_files(
tmpdir: pathlib.Path,
tmp_path: pathlib.Path,
tags_to_clean: Optional[Sequence[Optional[str]]],
tags_to_keep: Optional[Sequence[Optional[str]]],
cleaned: Sequence[Optional[str]],
Expand All @@ -143,7 +145,7 @@ def test_clean_tagged_files(

expected_ls = []
for tag in [None, "1", "2"]:
tmpfile = tmpdir / f"test_{tag}.txt"
tmpfile = tmp_path / f"test_{tag}.txt"
fsspec.filesystem("file").pipe_file(tmpfile, b"1")
with config.set(tag=tag):
cached_file = open_url(tmpfile).path
Expand Down Expand Up @@ -174,11 +176,11 @@ def test_clean_tagged_files_wrong_types(wrong_type: Any) -> None:
clean.clean_cache_files(1, tags_to_clean=wrong_type)


def test_delete_cache_entry_and_files(tmpdir: pathlib.Path) -> None:
def test_delete_cache_entry_and_files(tmp_path: pathlib.Path) -> None:
fs, dirname = utils.get_cache_files_fs_dirname()

# Create file
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").pipe_file(tmpfile, b"old")

# Copy to cache
Expand All @@ -200,23 +202,23 @@ def test_delete_cache_entry_and_files(tmpdir: pathlib.Path) -> None:
@pytest.mark.parametrize("check_expiration", [True, False])
@pytest.mark.parametrize("try_decode", [True, False])
def test_clean_invalid_cache_entries(
tmpdir: pathlib.Path, check_expiration: bool, try_decode: bool
tmp_path: pathlib.Path, check_expiration: bool, try_decode: bool
) -> None:
fs, dirname = utils.get_cache_files_fs_dirname()

# Valid cache file
fsspec.filesystem("file").pipe_file(tmpdir / "valid.txt", b"1")
valid = open_url(tmpdir / "valid.txt").path
fsspec.filesystem("file").pipe_file(tmp_path / "valid.txt", b"1")
valid = open_url(tmp_path / "valid.txt").path

# Corrupted cache file
fsspec.filesystem("file").pipe_file(tmpdir / "corrupted.txt", b"1")
corrupted = open_url(tmpdir / "corrupted.txt").path
fsspec.filesystem("file").pipe_file(tmp_path / "corrupted.txt", b"1")
corrupted = open_url(tmp_path / "corrupted.txt").path
fs.touch(corrupted)

# Expired cache file
fsspec.filesystem("file").pipe_file(tmpdir / "expired.txt", b"1")
fsspec.filesystem("file").pipe_file(tmp_path / "expired.txt", b"1")
with config.set(expiration=utils.utcnow() + datetime.timedelta(seconds=0.2)):
expired = open_url(tmpdir / "expired.txt").path
expired = open_url(tmp_path / "expired.txt").path
time.sleep(0.2)

# Clean
Expand All @@ -242,10 +244,10 @@ def test_clean_invalid_cache_entries(


def test_cleaner_logging(
capsys: pytest.CaptureFixture[str], tmpdir: pathlib.Path
capsys: pytest.CaptureFixture[str], tmp_path: pathlib.Path
) -> None:
# Cache file and create unknown
tmpfile = tmpdir / "test.txt"
tmpfile = tmp_path / "test.txt"
fsspec.filesystem("file").pipe_file(tmpfile, b"1")
cached_file = open_url(tmpfile)
fs, dirname = utils.get_cache_files_fs_dirname()
Expand Down

0 comments on commit a0c7ec8

Please sign in to comment.