Skip to content

Commit

Permalink
Merge pull request #282 from mindsdb/staging
Browse files Browse the repository at this point in the history
Release 0.6.7
  • Loading branch information
ea-rus authored Jul 27, 2023
2 parents b2ef61e + f7660ba commit c5520b0
Show file tree
Hide file tree
Showing 9 changed files with 205 additions and 104 deletions.
2 changes: 1 addition & 1 deletion mindsdb_sql/__about__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Package metadata for mindsdb_sql.
# NOTE: the scrape carried both the pre- and post-release __version__ lines;
# only the released value is kept (the earlier assignment was dead code).
__title__ = 'mindsdb_sql'
__package_name__ = 'mindsdb_sql'
__version__ = '0.6.7'  # release 0.6.7 (bumped from 0.6.6)
__description__ = "Pure python SQL parser"
__email__ = "[email protected]"
__author__ = 'MindsDB Inc'
Expand Down
8 changes: 7 additions & 1 deletion mindsdb_sql/parser/dialects/mindsdb/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,13 +108,19 @@ def drop_chat_bot(self, p):

# -- triggers --
@_('CREATE TRIGGER identifier ON identifier LPAREN raw_query RPAREN')
@_('CREATE TRIGGER identifier ON identifier COLUMNS ordering_terms LPAREN raw_query RPAREN')
def create_trigger(self, p):
    """Build a CreateTrigger AST node from a parsed CREATE TRIGGER statement.

    Two grammar rules feed this action: one without and one with an
    optional COLUMNS clause. When the clause is present, the ordering
    terms are reduced to their bare column identifiers.
    """
    query_str = tokens_to_string(p.raw_query)

    # COLUMNS clause is optional — only the second rule binds ordering_terms.
    columns = None
    if hasattr(p, 'ordering_terms'):
        columns = [i.field for i in p.ordering_terms]

    return CreateTrigger(
        name=p.identifier0,
        table=p.identifier1,
        query_str=query_str,  # scrape held old/new diff duplicates here; duplicate kwarg removed
        columns=columns
    )

@_('DROP TRIGGER identifier')
Expand Down
14 changes: 13 additions & 1 deletion mindsdb_sql/parser/dialects/mindsdb/trigger.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,13 @@ def __init__(self,
name,
table,
query_str,
columns=None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.name = name
self.table = table
self.query_str = query_str
self.columns = columns

def to_tree(self, *args, level=0, **kwargs):
ind = indent(level)
Expand All @@ -24,16 +26,26 @@ def to_tree(self, *args, level=0, **kwargs):

query_str = f'\n{ind1}query_str={repr(self.query_str)},'

columns_str = ''
if self.columns:
columns_str = ', '.join([k.to_string() for k in self.columns])
columns_str = f'\n{ind1}columns=[{columns_str}],'

out_str = f'{ind}CreateTrigger(' \
f'{name_str}' \
f'{table_str}' \
f'{columns_str}' \
f'{query_str}' \
f'\n{ind})'
return out_str

def get_string(self, *args, **kwargs):
    """Render this trigger as a CREATE TRIGGER SQL statement.

    The optional ``columns`` clause is emitted only when the trigger
    carries an explicit column list; otherwise it is omitted entirely.
    (The scrape contained both pre- and post-change out_str lines; the
    superseded, dead assignment has been removed.)
    """
    columns_str = ''
    if self.columns:
        # e.g. " columns col1, col2" — leading space separates it from the table name
        columns_str = ', '.join([k.to_string() for k in self.columns])
        columns_str = f' columns {columns_str}'

    out_str = f'CREATE TRIGGER {self.name.to_string()} ON {self.table.to_string()}{columns_str} ({self.query_str})'
    return out_str


Expand Down
122 changes: 67 additions & 55 deletions mindsdb_sql/planner/query_planner.py
Original file line number Diff line number Diff line change
Expand Up @@ -698,7 +698,7 @@ def resolve_table(table):
else:
integration = self.default_namespace

if integration is None:
if integration is None and not hasattr(table, 'sub_select'):
raise PlanningException(f'Integration not found for: {table}')

sub_select = getattr(table, 'sub_select', None)
Expand Down Expand Up @@ -837,6 +837,7 @@ def _check_condition(node, **kwargs):
namespace=item['integration'],
dataframe=data_step.result,
predictor=table_name,
params=query.using
))
step_stack.append(predictor_step)
else:
Expand Down Expand Up @@ -925,76 +926,87 @@ def get_aliased_fields(self, targets):
aliased_fields[target.alias.to_string()] = target
return aliased_fields

def plan_join(self, query, integration=None):
def adapt_dbt_query(self, query, integration):
orig_query = query

join = query.from_table
join_left = join.left
join_right = join.right

if isinstance(join_left, Select) and isinstance(join_left.from_table, Identifier):
# dbt query.
# dbt query.

# move latest into subquery
moved_conditions = []
# move latest into subquery
moved_conditions = []

def move_latest(node, **kwargs):
if isinstance(node, BinaryOperation):
if Latest() in node.args:
for arg in node.args:
if isinstance(arg, Identifier):
# remove table alias
arg.parts = [arg.parts[-1]]
moved_conditions.append(node)
def move_latest(node, **kwargs):
if isinstance(node, BinaryOperation):
if Latest() in node.args:
for arg in node.args:
if isinstance(arg, Identifier):
# remove table alias
arg.parts = [arg.parts[-1]]
moved_conditions.append(node)

query_traversal(query.where, move_latest)
query_traversal(query.where, move_latest)

# TODO make project step from query.target
# TODO make project step from query.target

# TODO support complex query. Only one table is supported at the moment.
# if not isinstance(join_left.from_table, Identifier):
# raise PlanningException(f'Statement not supported: {query.to_string()}')
# TODO support complex query. Only one table is supported at the moment.
# if not isinstance(join_left.from_table, Identifier):
# raise PlanningException(f'Statement not supported: {query.to_string()}')

# move properties to upper query
query = join_left
# move properties to upper query
query = join_left

if query.from_table.alias is not None:
table_alias = [query.from_table.alias.parts[0]]
if query.from_table.alias is not None:
table_alias = [query.from_table.alias.parts[0]]
else:
table_alias = query.from_table.parts

# add latest to query.where
for cond in moved_conditions:
if query.where is not None:
query.where = BinaryOperation('and', args=[query.where, cond])
else:
table_alias = query.from_table.parts
query.where = cond

# add latest to query.where
for cond in moved_conditions:
if query.where is not None:
query.where = BinaryOperation('and', args=[query.where, cond])
else:
query.where = cond

def add_aliases(node, is_table, **kwargs):
if not is_table and isinstance(node, Identifier):
if len(node.parts) == 1:
# add table alias to field
node.parts = table_alias + node.parts

query_traversal(query.where, add_aliases)

if isinstance(query.from_table, Identifier):
# DBT workaround: allow use tables without integration.
# if table.part[0] not in integration - take integration name from create table command
if (
integration is not None
and query.from_table.parts[0] not in self.databases
):
# add integration name to table
query.from_table.parts.insert(0, integration)
def add_aliases(node, is_table, **kwargs):
if not is_table and isinstance(node, Identifier):
if len(node.parts) == 1:
# add table alias to field
node.parts = table_alias + node.parts

query_traversal(query.where, add_aliases)

if isinstance(query.from_table, Identifier):
# DBT workaround: allow use tables without integration.
# if table.part[0] not in integration - take integration name from create table command
if (
integration is not None
and query.from_table.parts[0] not in self.databases
):
# add integration name to table
query.from_table.parts.insert(0, integration)

join_left = join_left.from_table

if orig_query.limit is not None:
if query.limit is None or query.limit.value > orig_query.limit.value:
query.limit = orig_query.limit
query.parentheses = False
query.alias = None

return query, join_left

def plan_join(self, query, integration=None):
orig_query = query

join_left = join_left.from_table
join = query.from_table
join_left = join.left
join_right = join.right

if orig_query.limit is not None:
if query.limit is None or query.limit.value > orig_query.limit.value:
query.limit = orig_query.limit
query.parentheses = False
query.alias = None
if isinstance(join_left, Select) and isinstance(join_left.from_table, Identifier):
if self.is_predictor(join_right) and self.get_predictor(join_right).get('timeseries'):
query, join_left = self.adapt_dbt_query(query, integration)

aliased_fields = self.get_aliased_fields(query.targets)

Expand Down
9 changes: 8 additions & 1 deletion mindsdb_sql/planner/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from mindsdb_sql.exceptions import PlanningException
from mindsdb_sql.parser.ast import (Identifier, Operation, Star, Select, BinaryOperation, Constant,
OrderBy, BetweenOperation, NullConstant, TypeCast, Parameter)
OrderBy, UnaryOperation, NullConstant, TypeCast, Parameter)
from mindsdb_sql.parser import ast


Expand Down Expand Up @@ -44,6 +44,13 @@ def recursively_extract_column_values(op, row_dict, predictor):
id = op.args[0]
value = op.args[1]

if (
isinstance(value, UnaryOperation)
and value.op == '-'
and isinstance(value.args[0], Constant)
):
value = Constant(-value.args[0].value)

if not (
isinstance(id, Identifier)
and
Expand Down
2 changes: 2 additions & 0 deletions sly/lex.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,8 @@ def _build(cls):
elif callable(value):
cls._token_funcs[tokname] = value
pattern = getattr(value, 'pattern')
else:
continue

# Form the regular expression component
part = f'(?P<{tokname}>{pattern})'
Expand Down
36 changes: 36 additions & 0 deletions tests/test_parser/test_mindsdb/test_triggers.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,42 @@ def test_create_trigger(self):
assert str(ast) == str(expected_ast)
assert ast.to_tree() == expected_ast.to_tree()

def test_create_trigger_cols(self):
    """CREATE TRIGGER with an explicit COLUMNS clause parses to the expected AST.

    Covers both a single-column and a two-column list; each parsed
    statement must round-trip equal to the hand-built CreateTrigger node.
    """
    for col_names in (['aaa'], ['aaa', 'bbb']):
        sql = f'''
            create trigger proj2.tname on db1.tbl1
            columns {', '.join(col_names)}
            (
               retrain p1
            )
        '''
        parsed = parse_sql(sql, dialect='mindsdb')
        expected = CreateTrigger(
            name=Identifier('proj2.tname'),
            table=Identifier('db1.tbl1'),
            columns=[Identifier(c) for c in col_names],
            query_str="retrain p1",
        )
        assert str(parsed) == str(expected)
        assert parsed.to_tree() == expected.to_tree()

def test_drop_trigger(self):
sql = '''
drop trigger proj1.tname
Expand Down
Loading

0 comments on commit c5520b0

Please sign in to comment.