implement has_key filter for SQLite backend
rabbull authored and GeigerJ2 committed Nov 19, 2024
1 parent dd866ce commit 779cc29
Showing 5 changed files with 19 additions and 15 deletions.
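
For context on the change below: SQLite has no built-in `has_key` operator for JSON columns, but its json_each() table-valued function expands a JSON object into one row per top-level key, so an EXISTS subquery over it can answer whether a document contains a given key. A minimal standalone sketch of that primitive (the `node` table and `attributes` column are illustrative only, not AiiDA's actual schema; assumes an SQLite build with the JSON functions, which is the default in recent releases):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE node (id INTEGER PRIMARY KEY, attributes TEXT)')
conn.executemany(
    'INSERT INTO node (attributes) VALUES (?)',
    [('{"key-1": 1, "key-none": null}',), ('{"other": 2}',)],
)

# json_each() yields one row per top-level key; EXISTS checks whether the wanted key is among them.
rows = conn.execute(
    'SELECT id FROM node'
    ' WHERE EXISTS (SELECT 1 FROM json_each(node.attributes) WHERE json_each.key = ?)',
    ('key-1',),
).fetchall()
print(rows)  # [(1,)] -- only the first row has the top-level key 'key-1'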
src/aiida/storage/sqlite_zip/orm.py (8 changes: 6 additions & 2 deletions)

@@ -17,7 +17,7 @@
 from functools import singledispatch
 from typing import Any, List, Optional, Tuple, Union

-from sqlalchemy import JSON, case, func
+from sqlalchemy import JSON, case, func, select
 from sqlalchemy.orm.util import AliasedClass
 from sqlalchemy.sql import ColumnElement

@@ -289,7 +289,11 @@ def _cast_json_type(comparator: JSON.Comparator, value: Any) -> Tuple[ColumnElem
         raise NotImplementedError('The operator `contains` is not implemented for SQLite-based storage plugins.')

     if operator == 'has_key':
-        raise NotImplementedError('The operator `has_key` is not implemented for SQLite-based storage plugins.')
+        return (
+            select(database_entity)
+            .where(func.json_each(database_entity).table_valued('key', joins_implicitly=True).c.key == value)
+            .exists()
+        )

     if operator == 'in':
         type_filter, casted_entity = _cast_json_type(database_entity, value[0])
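
The construct introduced above can be exercised in isolation. A hedged sketch against an in-memory SQLite database, mirroring the patch's json_each()/EXISTS pattern (the `node` table defined here is hypothetical and stands in for AiiDA's real database model):

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
metadata = sa.MetaData()
node = sa.Table(
    'node',
    metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('attributes', sa.JSON),
)
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(
        node.insert(),
        [{'attributes': {'key-1': 1, 'key-none': None}}, {'attributes': {'other': 2}}],
    )

# As in the patch: treat json_each() as a table-valued function exposing a `key` column
# and wrap it in an EXISTS subquery that correlates with the outer query's JSON column.
keys = sa.func.json_each(node.c.attributes).table_valued('key', joins_implicitly=True)
has_key = sa.select(node.c.attributes).where(keys.c.key == 'key-1').exists()

with engine.connect() as conn:
    print(conn.execute(sa.select(node.c.id).where(has_key)).fetchall())  # [(1,)]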
tests/cmdline/commands/test_calcjob.py (4 changes: 0 additions & 4 deletions)

@@ -241,9 +241,6 @@ def test_calcjob_outputcat(self):
         retrieved.base.repository._repository.put_object_from_filelike(io.BytesIO(b'5\n'), 'aiida.out')
         retrieved.base.repository._update_repository_metadata()

-    # This currently fails with sqlite backend since the filtering relies on the `has_key` filter which is not
-    # implemented in SQLite, see https://github.com/aiidateam/aiida-core/pull/6497
-    @pytest.mark.requires_psql
     def test_calcjob_cleanworkdir_basic(self):
         """Test verdi calcjob cleanworkdir"""
         # Specifying no filtering options and no explicit calcjobs should exit with non-zero status

@@ -269,7 +266,6 @@ def test_calcjob_cleanworkdir_basic(self):
         result = self.cli_runner.invoke(command.calcjob_cleanworkdir, options)
         assert result.exception is not None, result.output

-    @pytest.mark.requires_psql
     def test_calcjob_cleanworkdir_advanced(self):
         # Check applying both p and o filters
         for flag_p in ['-p', '--past-days']:
tests/orm/test_querybuilder.py (1 change: 0 additions & 1 deletion)

@@ -1537,7 +1537,6 @@ def test_iterall_with_store_group(self):
         for pk, pk_clone in zip(pks, [e[1] for e in sorted(pks_clone)]):
             assert orm.load_node(pk) == orm.load_node(pk_clone)

-    @pytest.mark.requires_psql
     @pytest.mark.usefixtures('aiida_profile_clean')
     def test_iterall_persistence(self, manager):
         """Test that mutations made during ``QueryBuilder.iterall`` context are automatically committed and persisted.
tests/storage/sqlite/test_orm.py (20 changes: 13 additions & 7 deletions)

@@ -24,7 +24,7 @@
         ({'attributes.float': {'of_type': 'number'}}, 1),
         ({'attributes.true': {'of_type': 'boolean'}}, 1),
         ({'attributes.false': {'of_type': 'boolean'}}, 1),
-        ({'attributes.null': {'of_type': 'null'}}, 2),
+        ({'attributes.null': {'of_type': 'null'}}, 3),
         ({'attributes.list': {'of_type': 'array'}}, 1),
         ({'attributes.dict': {'of_type': 'object'}}, 1),
         # equality match

@@ -35,7 +35,7 @@
         ({'attributes.false': {'==': False}}, 1),
         ({'attributes.list': {'==': [1, 2]}}, 1),
         ({'attributes.list2': {'==': ['a', 'b']}}, 1),
-        ({'attributes.dict': {'==': {'key1': 1, 'key2': None}}}, 1),
+        ({'attributes.dict': {'==': {'key-1': 1, 'key-none': None}}}, 1),
         # equality non-match
         ({'attributes.text': {'==': 'lmn'}}, 0),
         ({'attributes.integer': {'==': 2}}, 0),

@@ -89,9 +89,11 @@
         ({'attributes.integer': {'in': [5, 6, 7]}}, 0),
         ({'attributes.integer': {'in': [1, 2, 3]}}, 1),
         # object operators
-        # Reenable when ``has_key`` operator is implemented, see https://github.com/aiidateam/aiida-core/issues/6498
-        # ({'attributes.dict': {'has_key': 'k'}}, 0),
-        # ({'attributes.dict': {'has_key': 'key1'}}, 1),
+        ({'attributes.dict': {'has_key': 'non-exist'}}, 0),
+        ({'attributes.dict': {'!has_key': 'non-exist'}}, 3),
+        ({'attributes.dict': {'has_key': 'key-1'}}, 1),
+        ({'attributes.dict': {'has_key': 'key-none'}}, 1),
+        ({'attributes.dict': {'!has_key': 'key-none'}}, 2),
     ),
     ids=json.dumps,
 )

@@ -111,13 +113,17 @@ def test_qb_json_filters(filters, matches):
             'list': [1, 2],
             'list2': ['a', 'b'],
             'dict': {
-                'key1': 1,
-                'key2': None,
+                'key-1': 1,
+                'key-none': None,
             },
         },
         backend=backend,
     ).store()
     Dict({'text2': 'abcxXYZ'}, backend=backend).store()
+
+    # a false dict, added to test `has_key`'s behavior when key is not of json type
+    Dict({'dict': 0xFA15ED1C7}, backend=backend).store()
+
     qbuilder = QueryBuilder(backend=backend)
     qbuilder.append(Dict, filters=filters)
     assert qbuilder.count() == matches
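
From the user's side, the new tests above correspond to QueryBuilder filters like the following. A hedged sketch that assumes an SQLite-backed AiiDA profile is already configured and loadable (for instance one created with `verdi presto`):

from aiida import load_profile, orm

load_profile()

orm.Dict({'dict': {'key-1': 1, 'key-none': None}}).store()
orm.Dict({'dict': {'other': 2}}).store()

# `has_key` matches nodes whose `dict` attribute contains the given top-level key ...
qb = orm.QueryBuilder()
qb.append(orm.Dict, filters={'attributes.dict': {'has_key': 'key-1'}})
print(qb.count())

# ... and `!has_key` is its negation, as covered by the new test cases.
qb = orm.QueryBuilder()
qb.append(orm.Dict, filters={'attributes.dict': {'!has_key': 'key-1'}})
print(qb.count())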
tests/test_nodes.py (1 change: 0 additions & 1 deletion)

@@ -162,7 +162,6 @@ def init_profile(self, aiida_localhost):
         """Initialize the profile."""
         self.computer = aiida_localhost

-    @pytest.mark.requires_psql
     def test_with_subclasses(self):
         from aiida.plugins import DataFactory

