From ddcd736c47afa4d3a753881c943c19d559f3ff92 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 17 Jul 2024 16:25:28 -0600 Subject: [PATCH 01/53] add or filters to metrics layer spec --- metrics_layer/core/model/filter.py | 6 + metrics_layer/core/sql/query_filter.py | 40 ++++- .../core/sql/single_query_resolve.py | 21 ++- pyproject.toml | 2 +- tests/test_simple_query.py | 157 ++++++++++++++++++ 5 files changed, 222 insertions(+), 4 deletions(-) diff --git a/metrics_layer/core/model/filter.py b/metrics_layer/core/model/filter.py index a218820..792727d 100644 --- a/metrics_layer/core/model/filter.py +++ b/metrics_layer/core/model/filter.py @@ -24,6 +24,12 @@ def get_sql(self, **kwargs): return self.sql_query +class MetricsLayerFilterGroupLogicalOperatorType: + and_ = "AND" + or_ = "OR" + options = [and_, or_] + + class MetricsLayerFilterExpressionType(str, Enum): Unknown = "UNKNOWN" LessThan = "less_than" diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index 6604005..8549aa9 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -12,6 +12,7 @@ Filter, LiteralValueCriterion, MetricsLayerFilterExpressionType, + MetricsLayerFilterGroupLogicalOperatorType, ) from metrics_layer.core.sql.query_design import MetricsLayerDesign from metrics_layer.core.sql.query_errors import ParseError @@ -43,6 +44,9 @@ def __init__( # are properly defined in the design self.design = design self.is_literal_filter = "literal" in definition + # This is a filter with parenthesis like (XYZ or ABC) + self.is_filter_group = "conditions" in definition + if self.design: self.query_type = self.design.query_type else: @@ -51,7 +55,7 @@ def __init__( self.validate(definition) - if not self.is_literal_filter: + if not self.is_literal_filter and not self.is_filter_group: self.expression_type = MetricsLayerFilterExpressionType.parse(definition["expression"]) super().__init__(definition) @@ -70,6 +74,19 @@ def validate(self, definition: Dict) -> None: """ key = definition.get("field", None) filter_literal = definition.get("literal", None) + filter_group_conditions = definition.get("conditions", None) + if filter_group_conditions: + for f in filter_group_conditions: + MetricsLayerFilter(f, self.design, self.filter_type) + + if "logical_operator" not in definition: + raise ParseError(f"Filter group '{definition}' needs a logical_operator.") + elif definition["logical_operator"] not in MetricsLayerFilterGroupLogicalOperatorType.options: + raise ParseError( + f"Filter group '{definition}' needs a valid logical operator. 
Options are:" + f" {MetricsLayerFilterGroupLogicalOperatorType.options}" + ) + return is_boolean_value = str(definition.get("value")).lower() == "true" and key is None if is_boolean_value: @@ -123,11 +140,30 @@ def validate(self, definition: Dict) -> None: if self.field.type == "yesno" and "True" in str(definition["value"]): definition["expression"] = "boolean_true" + def group_sql_query(self, functional_pk: str): + pypika_conditions = [] + for condition in self.conditions: + condition_object = MetricsLayerFilter(condition, self.design, self.filter_type) + if condition_object.is_filter_group: + pypika_conditions.append(condition_object.group_sql_query(functional_pk)) + else: + pypika_conditions.append( + condition_object.criterion( + condition_object.field.sql_query(self.query_type, functional_pk) + ) + ) + if self.logical_operator == MetricsLayerFilterGroupLogicalOperatorType.and_: + return Criterion.all(pypika_conditions) + elif self.logical_operator == MetricsLayerFilterGroupLogicalOperatorType.or_: + return Criterion.any(pypika_conditions) + raise ParseError(f"Invalid logical operator: {self.logical_operator}") + def sql_query(self): if self.is_literal_filter: return LiteralValueCriterion(self.replace_fields_literal_filter()) functional_pk = self.design.functional_pk() - + if self.is_filter_group: + return self.group_sql_query(functional_pk) return self.criterion(self.field.sql_query(self.query_type, functional_pk)) def isin_sql_query(self, cte_alias, field_name, query_generator): diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index af66abc..d10cf81 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -184,14 +184,33 @@ def _is_literal(clause): @staticmethod def parse_identifiers_from_dicts(conditions: list): + flattened_conditions = SingleSQLQueryResolver.flatten_filters(conditions) try: - return [cond["field"] for cond in conditions if "group_by" not in cond] + return [cond["field"] for cond in flattened_conditions if "group_by" not in cond] except KeyError: for cond in conditions: if "field" not in cond: break raise QueryError(f"Identifier was missing required 'field' key: {cond}") + @staticmethod + def flatten_filters(filters: list): + flat_list = [] + + def recurse(filter_obj): + if isinstance(filter_obj, dict): + if "conditions" in filter_obj: + for f in filter_obj["conditions"]: + recurse(f) + else: + flat_list.append(filter_obj) + elif isinstance(filter_obj, list): + for item in filter_obj: + recurse(item) + + recurse(filters) + return flat_list + @staticmethod def _check_for_dict(conditions: list): if isinstance(conditions, dict): diff --git a/pyproject.toml b/pyproject.toml index 147fd76..7356066 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.24" +version = "0.12.25" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index 675d2fa..aa649ea 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -8,6 +8,7 @@ from metrics_layer.core.exceptions import AccessDeniedOrDoesNotExistException from metrics_layer.core.model import Definitions, Project from metrics_layer.core.parse.connections import BaseConnection +from metrics_layer.core.sql.query_errors import ParseError simple_model = { "type": "model", @@ -137,6 +138,12 @@ "name": "waiting", "label": "Between view and order", }, + { + "field_type": "dimension", + "type": "number", + "sql": "${TABLE}.discount_amt", + "name": "discount_amt", + }, { "field_type": "dimension", "type": "yesno", @@ -1683,3 +1690,153 @@ def test_simple_query_with_all(connections): "GROUP BY simple.sales_channel HAVING SUM(simple.revenue)>12 ORDER BY simple_total_revenue ASC;" ) assert query == correct + + +@pytest.mark.query +def test_simple_query_with_or_filters_no_nesting(connections): + project = Project(models=[simple_model], views=[simple_view]) + conn = MetricsLayerConnection(project=project, connections=connections) + query = conn.get_sql_query( + metrics=["total_revenue"], + dimensions=["channel"], + where=[ + { + "logical_operator": "OR", + "conditions": [ + {"field": "channel", "expression": "not_equal_to", "value": "Email"}, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + }, + {"field": "discount_amt", "expression": "greater_than", "value": 1335}, + ], + ) + + correct = ( + "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" + " analytics.orders simple WHERE (simple.sales_channel<>'Email' OR simple.new_vs_repeat='New')" + " AND simple.discount_amt>1335 GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC;" + ) + assert query == correct + + +@pytest.mark.query +def test_simple_query_with_or_filters_single_nesting(connections): + project = Project(models=[simple_model], views=[simple_view]) + conn = MetricsLayerConnection(project=project, connections=connections) + query = conn.get_sql_query( + metrics=["total_revenue"], + dimensions=["channel"], + where=[ + { + "logical_operator": "OR", + "conditions": [ + { + "logical_operator": "AND", + "conditions": [ + {"field": "channel", "expression": "not_equal_to", "value": "Email"}, + {"field": "discount_amt", "expression": "less_than", "value": 0.01}, + ], + }, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + }, + {"field": "discount_amt", "expression": "greater_than", "value": 1335}, + ], + ) + + correct = ( + "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" + " analytics.orders simple WHERE ((simple.sales_channel<>'Email' AND simple.discount_amt<0.01) OR" + " simple.new_vs_repeat='New') AND simple.discount_amt>1335 GROUP BY simple.sales_channel ORDER BY" + " simple_total_revenue DESC;" + ) + assert query == correct + + +@pytest.mark.query +def test_simple_query_with_or_filters_triple_nesting(connections): + project = Project(models=[simple_model], views=[simple_view]) + conn = MetricsLayerConnection(project=project, connections=connections) + query = conn.get_sql_query( + metrics=["total_revenue"], + dimensions=["channel"], + where=[ + {"field": "discount_amt", "expression": "greater_than", "value": 1335}, + { + "logical_operator": "OR", + "conditions": [ + { + "logical_operator": 
"AND", + "conditions": [ + {"field": "channel", "expression": "not_equal_to", "value": "Email"}, + {"field": "discount_amt", "expression": "less_than", "value": 0.01}, + { + "logical_operator": "OR", + "conditions": [ + {"field": "channel", "expression": "equal_to", "value": "Email"}, + {"field": "discount_amt", "expression": "less_than", "value": -100.05}, + { + "logical_operator": "AND", + "conditions": [ + { + "field": "channel", + "expression": "equal_to", + "value": "Facebook", + }, + { + "field": "new_vs_repeat", + "expression": "equal_to", + "value": "Repeat", + }, + ], + }, + ], + }, + ], + }, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + }, + { + "logical_operator": "OR", + "conditions": [ + {"field": "channel", "expression": "not_equal_to", "value": "Email"}, + {"field": "discount_amt", "expression": "less_than", "value": 0.01}, + ], + }, + {"field": "discount_amt", "expression": "greater_than", "value": 13}, + ], + ) + + correct = ( + "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" + " analytics.orders simple WHERE simple.discount_amt>1335 AND ((simple.sales_channel<>'Email' AND" + " simple.discount_amt<0.01 AND (simple.sales_channel='Email' OR simple.discount_amt<-100.05 OR" + " (simple.sales_channel='Facebook' AND simple.new_vs_repeat='Repeat'))) OR" + " simple.new_vs_repeat='New') AND (simple.sales_channel<>'Email' OR simple.discount_amt<0.01) AND" + " simple.discount_amt>13 GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC;" + ) + assert query == correct + + +@pytest.mark.query +def test_simple_query_with_or_filters_errors(connections): + project = Project(models=[simple_model], views=[simple_view]) + conn = MetricsLayerConnection(project=project, connections=connections) + with pytest.raises(ParseError) as exc_info: + conn.get_sql_query( + metrics=["total_revenue"], + dimensions=["channel"], + where=[ + { + "logical_operator": "ORR", + "conditions": [ + {"field": "channel", "expression": "not_equal_to", "value": "Email"}, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + } + ], + ) + + assert exc_info.value + assert "needs a valid logical operator. 
Options are: ['AND', 'OR']" in str(exc_info.value) From 2498a6401679be6e4bfff3f95d2412b615605ec6 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 1 Aug 2024 09:12:08 -0600 Subject: [PATCH 02/53] add error handling and tests for having in or filters --- .../core/sql/single_query_resolve.py | 30 +++++++++- tests/test_simple_query.py | 60 ++++++++++++++++++- 2 files changed, 88 insertions(+), 2 deletions(-) diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index d10cf81..c31400e 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -58,7 +58,7 @@ def get_query(self, semicolon: bool = True): "dimensions": self.dimensions, "funnel": self.funnel, "where": self.parse_where(self.where), - "having": self.having, + "having": self.parse_having(self.having), "order_by": self.order_by, "select_raw_sql": self.select_raw_sql, "limit": self.limit, @@ -110,9 +110,37 @@ def parse_where(self, where: list): w["query_class"] = FunnelQuery( funnel_query, design=self.design, suppress_warnings=self.suppress_warnings ) + if "logical_operator" in w: + field_types = set( + [self.field_lookup[f["field"]].field_type for f in self.flatten_filters(w["conditions"])] + ) + if "measure" in field_types and ( + "dimension" in field_types or "dimension_group" in field_types + ): + raise QueryError( + "Cannot mix dimensions and measures in a compound filter with a logical_operator" + ) where_with_query.append(w) return where_with_query + def parse_having(self, having: list): + if having is None or having == [] or self._is_literal(having): + return having + validated_having = [] + for h in having: + if "logical_operator" in h: + field_types = set( + [self.field_lookup[f["field"]].field_type for f in self.flatten_filters(h["conditions"])] + ) + if "measure" in field_types and ( + "dimension" in field_types or "dimension_group" in field_types + ): + raise QueryError( + "Cannot mix dimensions and measures in a compound filter with a logical_operator" + ) + validated_having.append(h) + return validated_having + def parse_input(self): all_field_names = self.metrics + self.dimensions if len(set(all_field_names)) != len(all_field_names): diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index aa649ea..47b4180 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -5,7 +5,10 @@ import pytest from metrics_layer.core import MetricsLayerConnection -from metrics_layer.core.exceptions import AccessDeniedOrDoesNotExistException +from metrics_layer.core.exceptions import ( + AccessDeniedOrDoesNotExistException, + QueryError, +) from metrics_layer.core.model import Definitions, Project from metrics_layer.core.parse.connections import BaseConnection from metrics_layer.core.sql.query_errors import ParseError @@ -1819,6 +1822,33 @@ def test_simple_query_with_or_filters_triple_nesting(connections): assert query == correct +@pytest.mark.query +def test_simple_query_with_or_filters_having(connections): + project = Project(models=[simple_model], views=[simple_view]) + conn = MetricsLayerConnection(project=project, connections=connections) + query = conn.get_sql_query( + metrics=["total_revenue"], + dimensions=["channel"], + having=[ + { + "logical_operator": "OR", + "conditions": [ + {"field": "average_order_value", "expression": "greater_than", "value": 250}, + {"field": "total_revenue", "expression": "less_than", "value": 25000}, + ], + }, + {"field": "total_revenue", "expression": 
"greater_than", "value": 20000}, + ], + ) + + correct = ( + "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" + " analytics.orders simple GROUP BY simple.sales_channel HAVING (AVG(simple.revenue)>250 OR" + " SUM(simple.revenue)<25000) AND SUM(simple.revenue)>20000 ORDER BY simple_total_revenue DESC;" + ) + assert query == correct + + @pytest.mark.query def test_simple_query_with_or_filters_errors(connections): project = Project(models=[simple_model], views=[simple_view]) @@ -1840,3 +1870,31 @@ def test_simple_query_with_or_filters_errors(connections): assert exc_info.value assert "needs a valid logical operator. Options are: ['AND', 'OR']" in str(exc_info.value) + + +@pytest.mark.query +@pytest.mark.parametrize("filter_type", ["where", "having"]) +def test_simple_query_with_or_filters_invalid_field_types(connections, filter_type): + project = Project(models=[simple_model], views=[simple_view]) + conn = MetricsLayerConnection(project=project, connections=connections) + logical_filter = [ + { + "logical_operator": "OR", + "conditions": [ + {"field": "average_order_value", "expression": "greater_than", "value": 250}, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + }, + ] + + if filter_type == "where": + filter_dict = {"where": logical_filter} + else: + filter_dict = {"having": logical_filter} + with pytest.raises(QueryError) as exc_info: + conn.get_sql_query(metrics=["total_revenue"], dimensions=["channel"], **filter_dict) + + assert exc_info.value + assert "Cannot mix dimensions and measures in a compound filter with a logical_operator" in str( + exc_info.value + ) From bc2506868d86a9cb1bf5e3aa76ecf83be0aeb42d Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Thu, 1 Aug 2024 13:01:12 -0600 Subject: [PATCH 03/53] fix referenced field in sql not changing join graph issue (#213) * fix referenced field in sql not changing join graph issue * bump version --- metrics_layer/core/model/field.py | 20 +++++++++++++++++-- pyproject.toml | 2 +- .../views/test_customers.yml | 5 +++++ tests/test_cli.py | 4 ++-- tests/test_join_query.py | 14 +++++++++++++ tests/test_listing_functions.py | 2 +- 6 files changed, 41 insertions(+), 6 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 93686f8..03f7c1b 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -2667,8 +2667,24 @@ def join_graphs(self): f"Could not find a model in view {self.view.name}, " "please pass the model or set the model_name argument in the view" ) - - base = self.view.project.join_graph.weak_join_graph_hashes(self.view.name) + # We need to pick the most restricted join graph based on all other + # views referenced in the SQL of this field. + sql_references = self.get_referenced_sql_query(strings_only=False) + referenced_views = set([self.view.name]) + + # First, we get the views that are referenced, and put them in a set. 
+ for ref in sql_references: + referenced_views.add(ref.view.name) + + # Then, we get the weakly connected references to EACH view that is referenced in the SQL + base_collection = [] + for view_name in referenced_views: + base_collection.append(set(self.view.project.join_graph.weak_join_graph_hashes(view_name))) + + # Finally, we get intersection of the set of the weakly connected references + # to EACH view that is referenced in the SQL, to get the most restricted + # join graph that can actually be joined to this field + base = list(set.intersection(*base_collection)) if self.is_cumulative(): return base diff --git a/pyproject.toml b/pyproject.toml index 58cabd6..a1c46ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.26" +version = "0.12.27" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/config/metrics_layer_config/views/test_customers.yml b/tests/config/metrics_layer_config/views/test_customers.yml index 13c4180..50e1140 100644 --- a/tests/config/metrics_layer_config/views/test_customers.yml +++ b/tests/config/metrics_layer_config/views/test_customers.yml @@ -95,3 +95,8 @@ fields: field_type: 'measure' type: number sql: '${total_sessions} / (100 * 1.0)' + + - name: unique_user_iphone_sessions + field_type: 'measure' + type: count_distinct + sql: case when ${sessions.session_device} = 'iPhone' then ${customer_id} end diff --git a/tests/test_cli.py b/tests/test_cli.py index 8ed2390..96b6363 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -458,7 +458,7 @@ def test_cli_validate(connection, fresh_project, mocker): def test_cli_validate_broken_canon_date(connection, fresh_project, mocker): # Break something so validation fails project = fresh_project - project._views[2]["fields"][-2]["canon_date"] = "does_not_exist" + project._views[2]["fields"][-3]["canon_date"] = "does_not_exist" project.refresh_cache() project.join_graph @@ -639,7 +639,7 @@ def test_cli_validate_filter_with_no_field(connection, fresh_project, mocker): # Break something so validation fails project = fresh_project - project._views[2]["fields"][-2]["filters"][0] = {"is_churned": None, "value": False} + project._views[2]["fields"][-3]["filters"][0] = {"is_churned": None, "value": False} conn = MetricsLayerConnection(project=project, connections=connection._raw_connections[0]) mocker.patch("metrics_layer.cli.seeding.SeedMetricsLayer._init_profile", lambda profile, target: conn) diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 1dbf98e..62ec7f3 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1008,6 +1008,20 @@ def test_null_filter_handling_metric_filter(connection): assert query == correct +@pytest.mark.query +def test_join_graph_production_with_sql_reference(connection): + sessions_field = connection.project.get_field("unique_user_iphone_sessions") + sessions_no_merged_results = [jg for jg in sessions_field.join_graphs() if "merged_result" not in jg] + + revenue_field = connection.project.get_field("total_item_revenue") + revenue_no_merged_results = [jg for jg in revenue_field.join_graphs() if "merged_result" not in jg] + + # These should NOT overlap in join graphs (without merged results) because the + # first (though on the customers view), references a field in the sessions view, + # which requires a join that the revenue metric does not have as an option + assert 
set(revenue_no_merged_results).isdisjoint(sessions_no_merged_results) + + @pytest.mark.query def test_join_as_label(connection): view = connection.project.get_view("child_account") diff --git a/tests/test_listing_functions.py b/tests/test_listing_functions.py index ee3ca0c..1f56e55 100644 --- a/tests/test_listing_functions.py +++ b/tests/test_listing_functions.py @@ -4,7 +4,7 @@ @pytest.mark.project def test_list_metrics(connection): metrics = connection.list_metrics() - assert len(metrics) == 58 + assert len(metrics) == 59 metrics = connection.list_metrics(view_name="order_lines", names_only=True) assert len(metrics) == 11 From 403141c054d1b276416b030256e8744ca92e3082 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 1 Aug 2024 13:09:26 -0600 Subject: [PATCH 04/53] improve desc length error and increase view to 1024 chars --- metrics_layer/core/model/field.py | 7 ++++--- metrics_layer/core/model/view.py | 5 +++-- tests/test_project_validation.py | 21 +++++++++++++++------ 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 03f7c1b..20d7258 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -1480,9 +1480,10 @@ def collect_errors(self): self._error( self._definition["description"], ( - f"Field {self.name} in view {self.view.name} has a description that is too long." - f" Descriptions must be {description_max_chars} characters or less. It will be" - f" truncated to the first {description_max_chars} characters." + f"Field {self.name} in view {self.view.name} has a description that is too long" + f" ({len(self.description)} characters). Descriptions must be" + f" {description_max_chars} characters or less. It will be truncated to the first" + f" {description_max_chars} characters." ), ) ) diff --git a/metrics_layer/core/model/view.py b/metrics_layer/core/model/view.py index dd91b05..578bed4 100644 --- a/metrics_layer/core/model/view.py +++ b/metrics_layer/core/model/view.py @@ -223,14 +223,15 @@ def collect_errors(self): ) # This value is pulled from the MAX_VIEW_DESCRIPTION_LENGTH constant in Zenlytic - view_description_max_chars = 512 + view_description_max_chars = 1024 if "description" in self._definition and isinstance(self.description, str): if len(self.description) > view_description_max_chars: errors.append( self._error( self._definition["description"], ( - f"View {self.name} has a description that is too long. Descriptions must be" + f"View {self.name} has a description that is too long" + f" ({len(self.description)} characters). Descriptions must be" f" {view_description_max_chars} characters or less. It will be truncated to the" f" first {view_description_max_chars} characters." ), diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index f1afae3..d0acf45 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -1097,11 +1097,20 @@ def test_validation_with_replaced_model_properties(connection, name, value, erro "Third, this is a really long description aimed at testing the warning on the length of the" " description, so I will keep writing more content to make sure I get to the maximum length" " of the description and therefore test the total length max of the description." 
+ "Fourth, this is a really long description aimed at testing the warning on the length of the" + " description, so I will keep writing more content to make sure I get to the maximum length" + " of the description and therefore test the total length max of the description." + "Fifth, this is a really long description aimed at testing the warning on the length of the" + " description, so I will keep writing more content to make sure I get to the maximum length" + " of the description and therefore test the total length max of the description." + "Sixth, this is a really long description aimed at testing the warning on the length of the" + " description, so I will keep writing more content to make sure I get to the maximum length" + " of the description and therefore test the total length max of the description." ), [ - "View order_lines has a description that is too long. " - "Descriptions must be 512 characters or less. It will be truncated to the " - "first 512 characters." + "View order_lines has a description that is too long (1550 characters)." + " Descriptions must be 1024 characters or less. It will be truncated to the " + "first 1024 characters." ], ), ], @@ -2142,9 +2151,9 @@ def test_validation_with_replaced_view_properties(connection, name, value, error " of the description and therefore test the total length max of the description." ), [ - "Field parent_channel in view order_lines has a description that is too long. " - "Descriptions must be 512 characters or less. It will be truncated to the " - "first 512 characters." + "Field parent_channel in view order_lines has a description that is too long (772" + " characters). Descriptions must be 512 characters or less. It will be truncated to the first" + " 512 characters." ], ), ], From cec219d91378efd70f20a8d4338d368299e73d3e Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 1 Aug 2024 13:09:39 -0600 Subject: [PATCH 05/53] Release v0.12.27 From 3fa8a8d3446f7b5b5658b01330400b4604153bbb Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 1 Aug 2024 15:00:03 -0600 Subject: [PATCH 06/53] fix issue with mappings not correctly handled in filters --- metrics_layer/core/sql/resolve.py | 12 +++++- tests/test_join_query.py | 64 +++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+), 1 deletion(-) diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index 95cf72c..b4ee8a3 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -315,7 +315,17 @@ def _replace_dict_or_literal(self, where, to_replace, field): if self._is_literal(where): return where.replace(to_replace, field.id()) else: - return [{**w, "field": field.id()} if w["field"] == to_replace else w for w in where] + result = [] + for w in where: + if "field" in w and w["field"] == to_replace: + result.append({**w, "field": field.id()}) + elif "field" not in w: + result.append( + {**w, "conditions": self._replace_dict_or_literal(w["conditions"], to_replace, field)} + ) + else: + result.append(w) + return result def _get_model_for_query(self, model_name: str = None, metrics: list = [], dimensions: list = []): models = self.project.models() diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 1dbf98e..09c3429 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1041,3 +1041,67 @@ def test_join_as_label_field_level(connection): assert parent_account_name.name == "account_name" assert parent_account_name.label_prefix == "Parent" assert parent_account_name.label == "Parent 
Account Name" + + +@pytest.mark.query +def test_query_with_or_filters_with_mappings(connection): + query = connection.get_sql_query( + metrics=["total_item_revenue"], + dimensions=["channel"], + where=[ + { + "logical_operator": "OR", + "conditions": [ + {"field": "date", "expression": "less_than", "value": "2023-09-02"}, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + }, + {"field": "date", "expression": "greater_than", "value": "2023-09-02"}, + ], + ) + + correct = ( + "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" + " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" + " analytics.orders orders ON order_lines.order_unique_id=orders.id WHERE (DATE_TRUNC('DAY'," + " order_lines.order_date)<'2023-09-02' OR orders.new_vs_repeat='New') AND DATE_TRUNC('DAY'," + " order_lines.order_date)>'2023-09-02' GROUP BY order_lines.sales_channel ORDER BY" + " order_lines_total_item_revenue DESC;" + ) + assert query == correct + + +@pytest.mark.query +def test_query_with_or_filters_with_mappings_nested(connection): + query = connection.get_sql_query( + metrics=["total_item_revenue"], + dimensions=["channel"], + where=[ + { + "logical_operator": "OR", + "conditions": [ + {"field": "date", "expression": "less_than", "value": "2023-09-02"}, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + { + "logical_operator": "AND", + "conditions": [ + {"field": "date", "expression": "less_than", "value": "2023-09-02"}, + {"field": "new_vs_repeat", "expression": "equal_to", "value": "New"}, + ], + }, + ], + }, + {"field": "date", "expression": "greater_than", "value": "2023-09-02"}, + ], + ) + + correct = ( + "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" + " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" + " analytics.orders orders ON order_lines.order_unique_id=orders.id WHERE (DATE_TRUNC('DAY'," + " order_lines.order_date)<'2023-09-02' OR orders.new_vs_repeat='New' OR (DATE_TRUNC('DAY'," + " order_lines.order_date)<'2023-09-02' AND orders.new_vs_repeat='New')) AND DATE_TRUNC('DAY'," + " order_lines.order_date)>'2023-09-02' GROUP BY order_lines.sales_channel ORDER BY" + " order_lines_total_item_revenue DESC;" + ) + assert query == correct From e546d88cf3e1a6f31aa92d7ed424ee9361f6cc1d Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Thu, 1 Aug 2024 19:34:43 -0600 Subject: [PATCH 07/53] Fix/catch invalid reference error in join graphs (#214) * catch invalid ref error in join graphs and provide validation message * bump version --- metrics_layer/core/model/field.py | 27 +++++++++++++-------------- pyproject.toml | 2 +- tests/test_cli.py | 11 ++++++++--- tests/test_project_validation.py | 27 +++++++++++++++++++++++++-- 4 files changed, 47 insertions(+), 20 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 20d7258..37cf1d3 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -2391,16 +2391,6 @@ def collect_errors(self): def collect_sql_errors(self, sql: str, property_name: str, error_func): errors = [] - if not isinstance(sql, str): - errors.append( - error_func( - sql, - ( - f"Field {self.name} in view {self.view.name} has an invalid {property_name} {sql}." - f" {property_name} must be a string." 
- ), - ) - ) if sql and sql == "${" + self.name + "}": error_text = ( f"Field {self.name} references itself in its '{property_name}' property. You need to" @@ -2409,13 +2399,20 @@ def collect_sql_errors(self, sql: str, property_name: str, error_func): ) errors.append(error_func(sql, error_text)) - # TODO improve this with sql parse or sql glot - if self.get_referenced_sql_query(strings_only=False) is None: + refs = self.get_referenced_sql_query(strings_only=False) + if refs is None: error_text = ( f"Field {self.name} in view {self.view.name} contains invalid SQL in property" f" {property_name}. Remove any Looker parameter references from the SQL." ) errors.append(error_func(sql, error_text)) + else: + for ref in refs: + if isinstance(ref, str): + error_text = ( + f"Field {self.name} in view {self.view.name} contains invalid field reference {ref}." + ) + errors.append(error_func(sql, error_text)) return errors def get_referenced_sql_query(self, strings_only=True): @@ -2674,8 +2671,10 @@ def join_graphs(self): referenced_views = set([self.view.name]) # First, we get the views that are referenced, and put them in a set. - for ref in sql_references: - referenced_views.add(ref.view.name) + if sql_references is not None: + for ref in sql_references: + if isinstance(ref, Field): + referenced_views.add(ref.view.name) # Then, we get the weakly connected references to EACH view that is referenced in the SQL base_collection = [] diff --git a/pyproject.toml b/pyproject.toml index a1c46ea..be2fbb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.27" +version = "0.12.28" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_cli.py b/tests/test_cli.py index 96b6363..a6a6f84 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -446,11 +446,14 @@ def test_cli_validate(connection, fresh_project, mocker): # assert result.exit_code == 0 assert ( result.output - == "Found 4 errors in the project:\n\n" + == "Found 7 errors in the project:\n\n" "\nCould not locate reference revenue_dimension in field total_item_costs in view order_lines\n\n" + "\nField total_item_costs in view order_lines contains invalid field reference revenue_dimension.\n\n" "\nCould not locate reference revenue_dimension in field revenue_in_cents in view orders\n\n" "\nCould not locate reference revenue_dimension in field total_revenue in view orders\n\n" "\nDefault date sessions.session_date in view orders is not joinable to the view orders\n\n" + "\nField revenue_in_cents in view orders contains invalid field reference revenue_dimension.\n\n" + "\nField total_revenue in view orders contains invalid field reference revenue_dimension.\n\n" ) @@ -516,8 +519,9 @@ def test_cli_validate_personal_field_view_level_error(connection, fresh_project, assert result.exit_code == 0 assert result.output == ( - "Found 1 error in the project:\n\n" + "Found 2 errors in the project:\n\n" "\nWarning: Could not locate reference some_crazy_ref in field cancelled in view customers\n\n" # noqa + "\nWarning: Field cancelled in view customers contains invalid field reference some_crazy_ref.\n\n" # noqa ) @@ -706,9 +710,10 @@ def test_cli_validate_names(connection, fresh_project, mocker): assert result.exit_code == 0 assert result.output == ( - "Found 3 errors in the project:\n\n" + "Found 4 errors in the project:\n\n" "\nCould not locate reference days_between_orders in field an 
invalid @name\\ in view orders\n\n" "\nField name: an invalid @name\\ is invalid. Please reference the naming conventions (only letters, numbers, or underscores)\n\n" # noqa + "\nField an invalid @name\ in view orders contains invalid field reference days_between_orders.\n\n" "\nField between_orders in view orders is of type duration, but has property timeframes when it should have property intervals\n\n" # noqa ) diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index d0acf45..9247180 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -1635,7 +1635,10 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "total_item_costs", "sql", "${TABL}.mycol", - ["Could not locate reference tabl in field total_item_costs in view order_lines"], + [ + "Could not locate reference tabl in field total_item_costs in view order_lines", + "Field total_item_costs in view order_lines contains invalid field reference tabl.", + ], ), ("total_item_costs", "sql", "${TABLE}.mycol", []), ("total_item_costs", "sql", "${order_date}", []), @@ -1680,7 +1683,10 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "waiting", "sql_end", "${TABL}.mycol", - ["Could not locate reference tabl in field waiting in view order_lines"], + [ + "Could not locate reference tabl in field waiting in view order_lines", + "Field waiting in view order_lines contains invalid field reference tabl.", + ], ), ("waiting", "sql_end", "${TABLE}.mycol", []), ("waiting", "sql_end", "${order_date}", []), @@ -1863,6 +1869,10 @@ def test_validation_with_replaced_view_properties(connection, name, value, error {"name": "fake", "window_choice": "max"}, [ "Could not locate reference order_lines.fake in field total_item_costs in view order_lines", + ( + "Field total_item_costs in view order_lines contains invalid field reference " + "order_lines.fake." + ), ( "Field total_item_costs in view order_lines has an invalid " "non_additive_dimension. The field order_lines.fake referenced in " @@ -1922,6 +1932,10 @@ def test_validation_with_replaced_view_properties(connection, name, value, error {"name": "order_raw", "window_choice": "max", "window_groupings": ["fake"]}, [ "Could not locate reference order_lines.fake in field total_item_costs in view order_lines", + ( + "Field total_item_costs in view order_lines contains invalid field reference " + "order_lines.fake." + ), ( "Field total_item_costs in view order_lines has an invalid " "non_additive_dimension. The field order_lines.fake " @@ -2136,6 +2150,15 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "dimensions of type string that are not hidden." ], ), + ( + "revenue_per_session", + "sql", + "${sessions.number_of_sess} * ${total_item_revenue} / nullif(${total_item_revenue}, 0)", + [ + "Field revenue_per_session in view order_lines contains invalid field " + "reference sessions.number_of_sess." 
+ ], + ), ( "parent_channel", "description", From 9b1c8d611399b93be0efb4ebbfc9dca37ee775a1 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 1 Aug 2024 19:35:02 -0600 Subject: [PATCH 08/53] Release v0.12.28 From 8caf466da58c2f65c5185508760c35f425adff5c Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:59:05 -0600 Subject: [PATCH 09/53] handle array filters correctly in all locations for numeric datatypes (#215) --- metrics_layer/core/model/field.py | 9 +++-- metrics_layer/core/model/filter.py | 25 +++++++++++-- metrics_layer/core/sql/query_design.py | 2 +- metrics_layer/core/sql/query_filter.py | 9 +++-- .../views/test_created_workspace.yml | 4 ++- .../views/test_orders.yml | 13 +++++++ tests/test_cli.py | 8 ++--- tests/test_join_query.py | 35 ++++++++++++++----- tests/test_listing_functions.py | 6 ++-- tests/test_set_functions.py | 2 ++ 10 files changed, 87 insertions(+), 26 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 37cf1d3..01d8c8d 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -286,7 +286,8 @@ def sql(self): "for the view. You can do this by adding the tag 'primary_key: true' to the " "necessary dimension" ) - # You cannot apply a filter to a field that is the same name as the field itself (this doesn't make sense) + # You cannot apply a filter to a field that is the same name + # as the field itself (this doesn't make sense) filters_to_apply = [f for f in definition.get("filters", []) if f.get("field") != self.name] else_0 = False @@ -313,7 +314,7 @@ def sql(self): } ] definition["sql"] = Filter.translate_looker_filters_to_sql( - definition["sql"], filters_to_apply, else_0=else_0 + definition["sql"], filters_to_apply, self.view, else_0=else_0 ) if ( @@ -592,7 +593,9 @@ def _needs_symmetric_aggregate(self, functional_pk: MetricsLayerBase): def _get_sql_distinct_key(self, sql_distinct_key: str, query_type: str, alias_only: bool): if self.filters: - clean_sql_distinct_key = Filter.translate_looker_filters_to_sql(sql_distinct_key, self.filters) + clean_sql_distinct_key = Filter.translate_looker_filters_to_sql( + sql_distinct_key, self.filters, self.view + ) else: clean_sql_distinct_key = sql_distinct_key return self._replace_sql_query(clean_sql_distinct_key, query_type, alias_only=alias_only) diff --git a/metrics_layer/core/model/filter.py b/metrics_layer/core/model/filter.py index a218820..0531b91 100644 --- a/metrics_layer/core/model/filter.py +++ b/metrics_layer/core/model/filter.py @@ -1,6 +1,7 @@ import re from datetime import datetime from enum import Enum +from typing import TYPE_CHECKING import pandas as pd import pendulum @@ -13,6 +14,9 @@ from .base import MetricsLayerBase from .week_start_day_types import WeekStartDayTypes +if TYPE_CHECKING: + from metrics_layer.core.model.view import View + class LiteralValueCriterion(Criterion): def __init__(self, sql_query: str, alias: str = None) -> None: @@ -474,7 +478,7 @@ def _date_to_string(date_obj): return date_obj.strftime("%Y-%m-%dT%H:%M:%S") @staticmethod - def translate_looker_filters_to_sql(sql: str, filters: list, else_0: bool = False): + def translate_looker_filters_to_sql(sql: str, filters: list, view: "View", else_0: bool = False): case_sql = "case when " conditions = [] for f in filters: @@ -482,6 +486,15 @@ def translate_looker_filters_to_sql(sql: str, filters: list, else_0: bool = Fals if not all(k in f for k in ["field", "value"]): continue + if "." 
not in f["field"]: + field_id = f'{view.name}.{f["field"]}' + else: + field_id = f["field"] + try: + field = view.project.get_field(field_id) + field_datatype = field.type + except Exception: + field_datatype = "unknown" filter_dict = Filter._filter_dict( f["field"], f["value"], f.get("week_start_day"), f.get("timezone") ) @@ -494,7 +507,7 @@ def translate_looker_filters_to_sql(sql: str, filters: list, else_0: bool = Fals if filter_obj != {}: field_reference = "${" + f["field"] + "}" condition_value = Filter.sql_query( - field_reference, filter_obj["expression"], filter_obj["value"] + field_reference, filter_obj["expression"], filter_obj["value"], field_datatype ) condition = f"{condition_value}" conditions.append(condition) @@ -510,8 +523,14 @@ def translate_looker_filters_to_sql(sql: str, filters: list, else_0: bool = Fals return case_sql @staticmethod - def sql_query(sql_to_compare: str, expression_type: str, value): + def sql_query(sql_to_compare: str, expression_type: str, value, field_datatype: str): field = LiteralValue(sql_to_compare) + if ( + expression_type + in {MetricsLayerFilterExpressionType.IsIn, MetricsLayerFilterExpressionType.IsNotIn} + and field_datatype == "number" + ): + value = [pd.to_numeric(v) for v in value] criterion_strategies = { MetricsLayerFilterExpressionType.LessThan: lambda f: f < value, MetricsLayerFilterExpressionType.LessOrEqualThan: lambda f: f <= value, diff --git a/metrics_layer/core/sql/query_design.py b/metrics_layer/core/sql/query_design.py index 009fbe9..a1113e5 100644 --- a/metrics_layer/core/sql/query_design.py +++ b/metrics_layer/core/sql/query_design.py @@ -279,7 +279,7 @@ def get_access_filter(self): fields.append(field) for filter_dict in f.filter_dict(): filter_sql = Filter.sql_query( - field_sql, filter_dict["expression"], filter_dict["value"] + field_sql, filter_dict["expression"], filter_dict["value"], field.type ) conditions.append(str(filter_sql)) diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index 6604005..99291e8 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -1,6 +1,7 @@ import datetime from typing import Dict +import pandas as pd from pypika import Criterion, Field, Table from pypika.terms import LiteralValue @@ -172,9 +173,13 @@ def criterion(self, field_sql: str) -> Criterion: value = bigquery_cast(self.field, f["value"]) else: value = f["value"] - criteria.append(Filter.sql_query(field_sql, f["expression"], value)) + criteria.append(Filter.sql_query(field_sql, f["expression"], value, self.field.type)) return Criterion.all(criteria) - return Filter.sql_query(field_sql, self.expression_type, self.value) + if isinstance(self.field, MetricsLayerField): + field_datatype = self.field.type + else: + field_datatype = "unknown" + return Filter.sql_query(field_sql, self.expression_type, self.value, field_datatype) def cte(self, query_class, design_class): if not self.is_group_by: diff --git a/tests/config/metrics_layer_config/views/test_created_workspace.yml b/tests/config/metrics_layer_config/views/test_created_workspace.yml index 72627d4..1ab699b 100644 --- a/tests/config/metrics_layer_config/views/test_created_workspace.yml +++ b/tests/config/metrics_layer_config/views/test_created_workspace.yml @@ -19,11 +19,13 @@ always_filter: value: -NULL - field: context_os value: 1, Google, os:iOS + - field: session_id + value: -1, -44, -087 fields: - name: session_id field_type: 'dimension' - type: string + type: number primary_key: yes hidden: 
yes sql: '${TABLE}.id' diff --git a/tests/config/metrics_layer_config/views/test_orders.yml b/tests/config/metrics_layer_config/views/test_orders.yml index 15334a8..363ca63 100644 --- a/tests/config/metrics_layer_config/views/test_orders.yml +++ b/tests/config/metrics_layer_config/views/test_orders.yml @@ -54,6 +54,11 @@ fields: sql: '${TABLE}.account_id' group_label: "ID's" + - name: anon_id + field_type: 'dimension' + type: number + sql: '${TABLE}.anon_id' + - name: do_not_use field_type: 'dimension' type: string @@ -148,6 +153,14 @@ fields: type: sum sql: ${revenue_dimension} + - name: total_non_merchant_revenue + field_type: measure + type: sum + sql: ${TABLE}.revenue + filters: + - field: anon_id + value: -9, -3, -22, -9082 + - name: total_lifetime_revenue field_type: measure type: cumulative diff --git a/tests/test_cli.py b/tests/test_cli.py index a6a6f84..5741b36 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -434,7 +434,7 @@ def test_cli_validate(connection, fresh_project, mocker): project = fresh_project project._views[1]["default_date"] = "sessions.session_date" sorted_fields = sorted(project._views[1]["fields"], key=lambda x: x["name"]) - sorted_fields[19]["name"] = "rev_broken_dim" + sorted_fields[20]["name"] = "rev_broken_dim" project._views[1]["fields"] = sorted_fields conn = MetricsLayerConnection(project=project, connections=connection._raw_connections[0]) mocker.patch("metrics_layer.cli.seeding.SeedMetricsLayer._init_profile", lambda profile, target: conn) @@ -698,8 +698,8 @@ def test_cli_validate_names(connection, fresh_project, mocker): project = fresh_project sorted_fields = sorted(project._views[1]["fields"], key=lambda x: x["name"]) - sorted_fields[1]["name"] = "an invalid @name\\" - sorted_fields[4]["timeframes"] = ["date", "month", "year"] + sorted_fields[2]["name"] = "an invalid @name\\" + sorted_fields[5]["timeframes"] = ["date", "month", "year"] project._views[1]["fields"] = sorted_fields conn = MetricsLayerConnection(project=project, connections=connection._raw_connections[0]) mocker.patch("metrics_layer.cli.seeding.SeedMetricsLayer._init_profile", lambda profile, target: conn) @@ -742,7 +742,7 @@ def test_cli_validate_model_name_in_view(connection, fresh_project, mocker): def test_cli_validate_two_customer_tags(connection, fresh_project, mocker): # Break something so validation fails sorted_fields = sorted(fresh_project._views[1]["fields"], key=lambda x: x["name"]) - sorted_fields[6]["tags"] = ["customer"] + sorted_fields[7]["tags"] = ["customer"] conn = MetricsLayerConnection(project=fresh_project, connections=connection._raw_connections[0]) mocker.patch("metrics_layer.cli.seeding.SeedMetricsLayer._init_profile", lambda profile, target: conn) mocker.patch("metrics_layer.cli.seeding.SeedMetricsLayer.get_profile", lambda *args: "demo") diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 62ec7f3..5973a4b 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -660,6 +660,24 @@ def test_query_number_measure_w_dimension_reference(connection): assert query == correct +@pytest.mark.query +def test_query_number_as_array_filter(connection): + query = connection.get_sql_query( + metrics=["total_non_merchant_revenue"], + dimensions=[], + where=[ + {"field": "orders.order_date", "expression": "greater_than", "value": "2022-04-03"}, + ], + ) + + correct = ( + "SELECT SUM(case when orders.anon_id NOT IN (9,3,22,9082) then orders.revenue end) as" + " orders_total_non_merchant_revenue FROM analytics.orders orders WHERE 
DATE_TRUNC('DAY'," + " orders.order_date)>'2022-04-03' ORDER BY orders_total_non_merchant_revenue DESC;" + ) + assert query == correct + + @pytest.mark.query @pytest.mark.parametrize("bool_value", ["True", "False"]) def test_query_bool_and_date_filter(connection, bool_value): @@ -844,15 +862,14 @@ def test_always_filter_with_and_without_join(connection): ) correct = ( - "SELECT DATE_TRUNC('DAY', created_workspace.session_date) as created_workspace_created_date," - "COUNT(created_workspace.id) as created_workspace_number_of_workspace_creations " - "FROM analytics.created_workspace created_workspace " - "LEFT JOIN analytics.customers customers " - "ON created_workspace.customer_id=customers.customer_id " - "WHERE NOT (customers.is_churned) AND NOT created_workspace.context_os IS NULL " - "AND created_workspace.context_os IN ('1','Google','os:iOS') " - "GROUP BY DATE_TRUNC('DAY', created_workspace.session_date) " - "ORDER BY created_workspace_number_of_workspace_creations DESC;" + "SELECT DATE_TRUNC('DAY', created_workspace.session_date) as" + " created_workspace_created_date,COUNT(created_workspace.id) as" + " created_workspace_number_of_workspace_creations FROM analytics.created_workspace created_workspace" + " LEFT JOIN analytics.customers customers ON created_workspace.customer_id=customers.customer_id" + " WHERE NOT (customers.is_churned) AND NOT created_workspace.context_os IS NULL AND" + " created_workspace.context_os IN ('1','Google','os:iOS') AND created_workspace.id NOT IN (1,44,87)" + " GROUP BY DATE_TRUNC('DAY', created_workspace.session_date) ORDER BY" + " created_workspace_number_of_workspace_creations DESC;" ) assert query == correct diff --git a/tests/test_listing_functions.py b/tests/test_listing_functions.py index 1f56e55..55e6f80 100644 --- a/tests/test_listing_functions.py +++ b/tests/test_listing_functions.py @@ -4,7 +4,7 @@ @pytest.mark.project def test_list_metrics(connection): metrics = connection.list_metrics() - assert len(metrics) == 59 + assert len(metrics) == 60 metrics = connection.list_metrics(view_name="order_lines", names_only=True) assert len(metrics) == 11 @@ -26,10 +26,10 @@ def test_list_metrics(connection): @pytest.mark.project def test_list_dimensions(connection): dimensions = connection.list_dimensions(show_hidden=True) - assert len(dimensions) == 97 + assert len(dimensions) == 98 dimensions = connection.list_dimensions() - assert len(dimensions) == 63 + assert len(dimensions) == 64 dimensions = connection.list_dimensions(view_name="order_lines", names_only=True, show_hidden=True) dimensions_present = { diff --git a/tests/test_set_functions.py b/tests/test_set_functions.py index a3dbea2..69f8fbe 100644 --- a/tests/test_set_functions.py +++ b/tests/test_set_functions.py @@ -37,6 +37,7 @@ def test_sets(connection): assert _set.field_names() == [ "orders.customer_id", "orders.account_id", + "orders.anon_id", "orders.do_not_use", "orders.order_raw", "orders.order_date", @@ -68,6 +69,7 @@ def test_sets(connection): "orders.number_of_orders", "orders.average_days_between_orders", "orders.total_revenue", + "orders.total_non_merchant_revenue", "orders.total_lifetime_revenue", "orders.cumulative_customers", "orders.cumulative_customers_no_change_grain", From ad82755aa207cadc540e4bc982451f2b3aa7db48 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Fri, 2 Aug 2024 16:55:13 -0600 Subject: [PATCH 10/53] super class query builder and make nulls to be last by default (#217) * super class query builder and make nulls to 
be last by default * fix tests and bump version --- .../sql/query_arbitrary_merged_queries.py | 6 +- metrics_layer/core/sql/query_dialect.py | 167 +++++++++++--- metrics_layer/core/sql/query_generator.py | 22 +- pyproject.toml | 2 +- tests/test_access_grants_queries.py | 6 +- tests/test_arbitrary_merged_results.py | 101 ++++---- tests/test_cumulative_query.py | 10 +- tests/test_field_mappings.py | 40 ++-- tests/test_funnels.py | 2 +- tests/test_join_query.py | 143 ++++++------ tests/test_join_query_raw.py | 4 +- tests/test_merged_results.py | 158 ++++++------- tests/test_mql_parse.py | 42 ++-- tests/test_non_additive_dimensions.py | 217 +++++++++--------- tests/test_simple_query.py | 118 ++++++---- tests/test_symmetric_aggregates.py | 16 +- 16 files changed, 619 insertions(+), 435 deletions(-) diff --git a/metrics_layer/core/sql/query_arbitrary_merged_queries.py b/metrics_layer/core/sql/query_arbitrary_merged_queries.py index 7e51460..2c90ad8 100644 --- a/metrics_layer/core/sql/query_arbitrary_merged_queries.py +++ b/metrics_layer/core/sql/query_arbitrary_merged_queries.py @@ -5,7 +5,7 @@ from metrics_layer.core.model.filter import LiteralValueCriterion from metrics_layer.core.model.join import ZenlyticJoinType from metrics_layer.core.sql.query_base import MetricsLayerQueryBase -from metrics_layer.core.sql.query_dialect import query_lookup +from metrics_layer.core.sql.query_dialect import NullSorting, query_lookup class MetricsLayerMergedQueries(MetricsLayerQueryBase): @@ -51,7 +51,9 @@ def get_query(self, semicolon: bool = True): self._raise_query_error_from_cte(field.id(capitalize_alias=True)) order = Order.desc if order_clause.get("sort", "asc").lower() == "desc" else Order.asc - complete_query = complete_query.orderby(LiteralValue(order_by_alias), order=order) + complete_query = complete_query.orderby( + LiteralValue(order_by_alias), order=order, nulls=NullSorting.last + ) sql = str(complete_query.limit(self.limit)) if semicolon: diff --git a/metrics_layer/core/sql/query_dialect.py b/metrics_layer/core/sql/query_dialect.py index e6bee71..f9a8290 100644 --- a/metrics_layer/core/sql/query_dialect.py +++ b/metrics_layer/core/sql/query_dialect.py @@ -1,21 +1,110 @@ -from pypika import Query -from pypika.dialects import ( - MSSQLQueryBuilder, - PostgreSQLQueryBuilder, - RedShiftQueryBuilder, - SnowflakeQueryBuilder, -) +from enum import Enum +from typing import Any, Optional + +from pypika import Field, Query, Table +from pypika.dialects import MSSQLQueryBuilder, PostgreSQLQueryBuilder, QueryBuilder from pypika.enums import Dialects -from pypika.utils import builder +from pypika.utils import builder, format_quotes from metrics_layer.core.model.definitions import Definitions -SnowflakeQueryBuilder.ALIAS_QUOTE_CHAR = None -RedShiftQueryBuilder.ALIAS_QUOTE_CHAR = None -RedShiftQueryBuilder.QUOTE_CHAR = None PostgreSQLQueryBuilder.ALIAS_QUOTE_CHAR = None PostgreSQLQueryBuilder.QUOTE_CHAR = None -MSSQLQueryBuilder.QUOTE_CHAR = None + + +class NullSorting(Enum): + first = "FIRST" + last = "LAST" + + +class QueryBuilderWithOrderByNullsOption(QueryBuilder): + @builder + def replace_table(self, current_table: Optional[Table], new_table: Optional[Table]) -> "QueryBuilder": + """ + Replaces all occurrences of the specified table with the new table. Useful when reusing fields across + queries. + + :param current_table: + The table instance to be replaces. + :param new_table: + The table instance to replace with. + :return: + A copy of the query with the tables replaced. 
+ """ + self._from = [new_table if table == current_table else table for table in self._from] + self._insert_table = new_table if self._insert_table == current_table else self._insert_table + self._update_table = new_table if self._update_table == current_table else self._update_table + + self._with = [alias_query.replace_table(current_table, new_table) for alias_query in self._with] + self._selects = [select.replace_table(current_table, new_table) for select in self._selects] + self._columns = [column.replace_table(current_table, new_table) for column in self._columns] + self._values = [ + [value.replace_table(current_table, new_table) for value in value_list] + for value_list in self._values + ] + + self._wheres = self._wheres.replace_table(current_table, new_table) if self._wheres else None + self._prewheres = self._prewheres.replace_table(current_table, new_table) if self._prewheres else None + self._groupbys = [groupby.replace_table(current_table, new_table) for groupby in self._groupbys] + self._havings = self._havings.replace_table(current_table, new_table) if self._havings else None + # Adding the slot for nulls first/last is the only change here + self._orderbys = [ + (orderby[0].replace_table(current_table, new_table), orderby[1], orderby[2]) + for orderby in self._orderbys + ] + self._joins = [join.replace_table(current_table, new_table) for join in self._joins] + + if current_table in self._select_star_tables: + self._select_star_tables.remove(current_table) + self._select_star_tables.add(new_table) + + @builder + def orderby(self, *fields: Any, **kwargs: Any) -> "QueryBuilder": + for field in fields: + field = Field(field, table=self._from[0]) if isinstance(field, str) else self.wrap_constant(field) + + self._orderbys.append((field, kwargs.get("order"), kwargs.get("nulls"))) + + def _orderby_sql( + self, + quote_char: Optional[str] = None, + alias_quote_char: Optional[str] = None, + orderby_alias: bool = True, + **kwargs: Any, + ) -> str: + """ + Produces the ORDER BY part of the query. This is a list of fields and possibly their + directionality, ASC or DESC and null sorting option (FIRST or LAST). + The clauses are stored in the query under self._orderbys as a list of tuples + containing the field, directionality (which can be None), + and null sorting option (which can be None). + + If an order by field is used in the select clause, + determined by a matching, and the orderby_alias + is set True then the ORDER BY clause will use + the alias, otherwise the field will be rendered as SQL. 
+        """
+        clauses = []
+        selected_aliases = {s.alias for s in self._selects}
+        for field, directionality, nulls in self._orderbys:
+            term = (
+                format_quotes(field.alias, alias_quote_char or quote_char)
+                if orderby_alias and field.alias and field.alias in selected_aliases
+                else field.get_sql(quote_char=quote_char, alias_quote_char=alias_quote_char, **kwargs)
+            )
+
+            if directionality is not None:
+                orient = f" {directionality.value}"
+            else:
+                orient = ""
+
+            if nulls is not None:
+                null_sorting = f" NULLS {nulls.value}"
+            else:
+                null_sorting = ""
+            clauses.append(f"{term}{orient}{null_sorting}")
+
+        return " ORDER BY {orderby}".format(orderby=",".join(clauses))
 
 
 class SnowflakeQuery(Query):
@@ -24,8 +113,18 @@ class SnowflakeQuery(Query):
     """
 
     @classmethod
-    def _builder(cls, **kwargs) -> SnowflakeQueryBuilder:
-        return SnowflakeQueryBuilder(**kwargs)
+    def _builder(cls, **kwargs) -> "SnowflakeQueryBuilderWithOrderByNullsOption":
+        return SnowflakeQueryBuilderWithOrderByNullsOption(**kwargs)
+
+
+class SnowflakeQueryBuilderWithOrderByNullsOption(QueryBuilderWithOrderByNullsOption):
+    QUOTE_CHAR = None
+    ALIAS_QUOTE_CHAR = None
+    QUERY_ALIAS_QUOTE_CHAR = ""
+    QUERY_CLS = SnowflakeQuery
+
+    def __init__(self, **kwargs: Any) -> None:
+        super().__init__(dialect=Dialects.SNOWFLAKE, **kwargs)
 
 
 class PostgresQuery(Query):
@@ -38,27 +137,41 @@ def _builder(cls, **kwargs) -> PostgreSQLQueryBuilder:
         return PostgreSQLQueryBuilder(**kwargs)
 
 
-class RedshiftQuery(Query):
+class PostgresQueryWithOrderByNullsOption(Query):
     """
-    Defines a query class for use with Amazon Redshift.
+    Defines a query class for use with PostgreSQL.
     """
 
     @classmethod
-    def _builder(cls, **kwargs) -> "RedShiftQueryBuilder":
-        return RedShiftQueryBuilder(dialect=Dialects.REDSHIFT, **kwargs)
+    def _builder(cls, **kwargs) -> "PostgreSQLQueryBuilderWithOrderByNullsOption":
+        return PostgreSQLQueryBuilderWithOrderByNullsOption(**kwargs)
+
+
+class PostgreSQLQueryBuilderWithOrderByNullsOption(
+    PostgreSQLQueryBuilder, QueryBuilderWithOrderByNullsOption
+):
+    QUERY_CLS = PostgresQueryWithOrderByNullsOption
 
 
-class BigQueryQuery(Query):
+class RedshiftQuery(Query):
     """
-    Defines a query class for use with BigQuery.
+    Defines a query class for use with Amazon Redshift.
""" @classmethod - def _builder(cls, **kwargs) -> SnowflakeQueryBuilder: - return SnowflakeQueryBuilder(**kwargs) + def _builder(cls, **kwargs) -> "RedShiftQueryBuilderWithOrderByNullsOption": + return RedShiftQueryBuilderWithOrderByNullsOption(dialect=Dialects.REDSHIFT, **kwargs) + + +class RedShiftQueryBuilderWithOrderByNullsOption(QueryBuilderWithOrderByNullsOption): + ALIAS_QUOTE_CHAR = None + QUOTE_CHAR = None + QUERY_CLS = RedshiftQuery class MSSQLQueryBuilderCorrectLimit(MSSQLQueryBuilder): + QUOTE_CHAR = None + @builder def limit(self, limit: int): self._top = limit @@ -76,12 +189,12 @@ def _builder(cls, **kwargs) -> MSSQLQueryBuilderCorrectLimit: query_lookup = { Definitions.snowflake: SnowflakeQuery, - Definitions.bigquery: BigQueryQuery, + Definitions.bigquery: SnowflakeQuery, # In terms of quoting, these are the same Definitions.redshift: RedshiftQuery, - Definitions.postgres: PostgresQuery, - Definitions.druid: PostgresQuery, # druid core query logic is postgres compatible - Definitions.duck_db: PostgresQuery, # duck db core query logic is postgres compatible - Definitions.databricks: PostgresQuery, # duck db core query logic is postgres compatible + Definitions.postgres: PostgresQueryWithOrderByNullsOption, + Definitions.druid: PostgresQuery, # druid core query logic is postgres compatible, minus null sorting + Definitions.duck_db: PostgresQueryWithOrderByNullsOption, # duck db core query logic = postgres + Definitions.databricks: PostgresQueryWithOrderByNullsOption, # duck db core query logic = postgres Definitions.sql_server: MSSSQLQuery, Definitions.azure_synapse: MSSSQLQuery, # Azure Synapse is a T-SQL flavor } diff --git a/metrics_layer/core/sql/query_generator.py b/metrics_layer/core/sql/query_generator.py index cae3480..ecee606 100644 --- a/metrics_layer/core/sql/query_generator.py +++ b/metrics_layer/core/sql/query_generator.py @@ -11,7 +11,7 @@ from metrics_layer.core.model.view import View from metrics_layer.core.sql.query_base import MetricsLayerQueryBase from metrics_layer.core.sql.query_design import MetricsLayerDesign -from metrics_layer.core.sql.query_dialect import query_lookup +from metrics_layer.core.sql.query_dialect import NullSorting, query_lookup from metrics_layer.core.sql.query_errors import ArgumentError from metrics_layer.core.sql.query_filter import MetricsLayerFilter @@ -122,10 +122,22 @@ def _parse_order_by_object(self, order_by): if isinstance(order_by, str): for order_clause in order_by.split(","): if "desc" in order_clause.lower(): - field_reference = order_clause.lower().replace("desc", "").strip() + field_reference = ( + order_clause.lower() + .replace("desc", "") + .replace("nulls last", "") + .replace("nulls first", "") + .strip() + ) results.append({"field": field_reference, "sort": "desc"}) else: - field_reference = order_clause.lower().replace("asc", "").strip() + field_reference = ( + order_clause.lower() + .replace("asc", "") + .replace("nulls last", "") + .replace("nulls first", "") + .strip() + ) results.append({"field": field_reference, "sort": "asc"}) # Handle JSON order_by @@ -255,7 +267,9 @@ def get_query(self, semicolon: bool = True): field = self.design.get_field(arg["field"]) arg["field"] = field.alias(with_view=True) order = Order.desc if arg["sort"] == "desc" else Order.asc - base_query = base_query.orderby(LiteralValue(arg["field"]), order=order) + base_query = base_query.orderby( + LiteralValue(arg["field"]), order=order, nulls=NullSorting.last + ) completed_query = base_query.limit(self.limit) if self.return_pypika_query: 
diff --git a/pyproject.toml b/pyproject.toml index be2fbb4..983bd5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.28" +version = "0.12.29" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_access_grants_queries.py b/tests/test_access_grants_queries.py index 47e2be9..f9ab499 100644 --- a/tests/test_access_grants_queries.py +++ b/tests/test_access_grants_queries.py @@ -89,7 +89,7 @@ def test_access_filters_equal_to(connection): "SELECT orders.new_vs_repeat as orders_new_vs_repeat,SUM(orders.revenue) as orders_total_revenue " "FROM analytics.orders orders LEFT JOIN analytics.customers customers " "ON orders.customer_id=customers.customer_id WHERE customers.region='US-West' " - "GROUP BY orders.new_vs_repeat ORDER BY orders_total_revenue DESC;" + "GROUP BY orders.new_vs_repeat ORDER BY orders_total_revenue DESC NULLS LAST;" ) assert correct == query @@ -104,7 +104,7 @@ def test_access_filters_array(connection): "SELECT orders.new_vs_repeat as orders_new_vs_repeat,SUM(orders.revenue) as orders_total_revenue " "FROM analytics.orders orders LEFT JOIN analytics.customers customers " "ON orders.customer_id=customers.customer_id WHERE customers.region IN ('US-West','US-East') " - "GROUP BY orders.new_vs_repeat ORDER BY orders_total_revenue DESC;" + "GROUP BY orders.new_vs_repeat ORDER BY orders_total_revenue DESC NULLS LAST;" ) assert correct == query @@ -118,6 +118,6 @@ def test_access_filters_underscore(connection): correct = ( "SELECT orders.new_vs_repeat as orders_new_vs_repeat,SUM(orders.revenue) as orders_total_revenue " "FROM analytics.orders orders WHERE orders.warehouselocation='New Jersey' " - "GROUP BY orders.new_vs_repeat ORDER BY orders_total_revenue DESC;" + "GROUP BY orders.new_vs_repeat ORDER BY orders_total_revenue DESC NULLS LAST;" ) assert correct == query diff --git a/tests/test_arbitrary_merged_results.py b/tests/test_arbitrary_merged_results.py index 4dc9e90..04fabfd 100644 --- a/tests/test_arbitrary_merged_results.py +++ b/tests/test_arbitrary_merged_results.py @@ -58,9 +58,9 @@ def test_query_merged_queries_simple_one_dimension(connection): correct = ( "WITH merged_query_0 AS (SELECT sessions.session_device as sessions_session_device,COUNT(sessions.id)" " as sessions_number_of_sessions FROM analytics.sessions sessions GROUP BY sessions.session_device" - " ORDER BY sessions_number_of_sessions DESC) ,merged_query_1 AS (SELECT events.device as" + " ORDER BY sessions_number_of_sessions DESC NULLS LAST) ,merged_query_1 AS (SELECT events.device as" " events_device,COUNT(DISTINCT(events.id)) as events_number_of_events FROM analytics.events events" - " GROUP BY events.device ORDER BY events_number_of_events DESC) SELECT" + " GROUP BY events.device ORDER BY events_number_of_events DESC NULLS LAST) SELECT" " merged_query_0.sessions_number_of_sessions as" " sessions_number_of_sessions,merged_query_0.sessions_session_device as" " sessions_session_device,merged_query_1.events_number_of_events as events_number_of_events FROM" @@ -91,11 +91,11 @@ def test_query_merged_queries_simple_two_dimension_all_mapped(connection): "WITH merged_query_0 AS (SELECT sessions.utm_campaign as" " sessions_utm_campaign,sessions.session_device as sessions_session_device,COUNT(sessions.id) as" " sessions_number_of_sessions FROM analytics.sessions sessions GROUP BY" - " sessions.utm_campaign,sessions.session_device ORDER BY 
sessions_number_of_sessions DESC)" + " sessions.utm_campaign,sessions.session_device ORDER BY sessions_number_of_sessions DESC NULLS LAST)" " ,merged_query_1 AS (SELECT events.device as events_device,events.campaign as" " events_event_campaign,COUNT(DISTINCT(events.id)) as events_number_of_events FROM analytics.events" - " events GROUP BY events.device,events.campaign ORDER BY events_number_of_events DESC) SELECT" - " merged_query_0.sessions_number_of_sessions as" + " events GROUP BY events.device,events.campaign ORDER BY events_number_of_events DESC NULLS LAST)" + " SELECT merged_query_0.sessions_number_of_sessions as" " sessions_number_of_sessions,merged_query_0.sessions_utm_campaign as" " sessions_utm_campaign,merged_query_0.sessions_session_device as" " sessions_session_device,merged_query_1.events_number_of_events as events_number_of_events FROM" @@ -132,10 +132,10 @@ def test_query_merged_queries_simple_two_dimension_one_mapped(connection, join_t "WITH merged_query_0 AS (SELECT sessions.utm_campaign as" " sessions_utm_campaign,sessions.session_device as sessions_session_device,COUNT(sessions.id) as" " sessions_number_of_sessions FROM analytics.sessions sessions GROUP BY" - " sessions.utm_campaign,sessions.session_device ORDER BY sessions_number_of_sessions DESC)" + " sessions.utm_campaign,sessions.session_device ORDER BY sessions_number_of_sessions DESC NULLS LAST)" " ,merged_query_1 AS (SELECT events.campaign as events_event_campaign,COUNT(DISTINCT(events.id)) as" " events_number_of_events FROM analytics.events events GROUP BY events.campaign ORDER BY" - " events_number_of_events DESC) SELECT merged_query_0.sessions_number_of_sessions as" + " events_number_of_events DESC NULLS LAST) SELECT merged_query_0.sessions_number_of_sessions as" " sessions_number_of_sessions,merged_query_0.sessions_utm_campaign as" " sessions_utm_campaign,merged_query_0.sessions_session_device as" " sessions_session_device,merged_query_1.events_number_of_events as events_number_of_events FROM" @@ -161,12 +161,12 @@ def test_query_merged_queries_same_dimension_one_mapped_filter(connection): "WITH merged_query_0 AS (WITH order_lines_order__cte_subquery_0 AS (SELECT order_lines.product_name" " as order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" " analytics.order_line_items order_lines GROUP BY order_lines.product_name ORDER BY" - " order_lines_total_item_revenue DESC) ,orders_order__cte_subquery_1 AS (SELECT" + " order_lines_total_item_revenue DESC NULLS LAST) ,orders_order__cte_subquery_1 AS (SELECT" " order_lines.product_name as order_lines_product_name,NULLIF(COUNT(DISTINCT CASE WHEN (orders.id) " " IS NOT NULL THEN orders.id ELSE NULL END), 0) as orders_number_of_orders FROM" " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" " order_lines.order_unique_id=orders.id GROUP BY order_lines.product_name ORDER BY" - " orders_number_of_orders DESC) SELECT" + " orders_number_of_orders DESC NULLS LAST) SELECT" " order_lines_order__cte_subquery_0.order_lines_total_item_revenue as" " order_lines_total_item_revenue,orders_order__cte_subquery_1.orders_number_of_orders as" " orders_number_of_orders,ifnull(order_lines_order__cte_subquery_0.order_lines_product_name," @@ -179,7 +179,7 @@ def test_query_merged_queries_same_dimension_one_mapped_filter(connection): " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" " 
order_lines.order_unique_id=orders.id WHERE orders.new_vs_repeat<>'New' GROUP BY" - " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC) SELECT" + " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_line_item_aov as" " order_lines_line_item_aov,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_1.order_lines_total_item_revenue as" @@ -205,11 +205,11 @@ def test_query_merged_queries_same_dimension_same_measure(connection): "WITH merged_query_0 AS (SELECT order_lines.product_name as" " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" " analytics.order_line_items order_lines GROUP BY order_lines.product_name ORDER BY" - " order_lines_total_item_revenue DESC) ,merged_query_1 AS (SELECT order_lines.product_name as" - " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " order_lines_total_item_revenue DESC NULLS LAST) ,merged_query_1 AS (SELECT order_lines.product_name" + " as order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" " order_lines.order_unique_id=orders.id WHERE orders.new_vs_repeat<>'New' GROUP BY" - " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC) SELECT" + " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_total_item_revenue as" " order_lines_total_item_revenue,merged_query_0.order_lines_product_name as order_lines_product_name" " FROM merged_query_0 LEFT JOIN merged_query_1 ON" @@ -234,12 +234,12 @@ def test_query_merged_queries_same_dimension_same_measure_with_extra(connection) "WITH merged_query_0 AS (SELECT order_lines.product_name as" " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" " analytics.order_line_items order_lines GROUP BY order_lines.product_name ORDER BY" - " order_lines_total_item_revenue DESC) ,merged_query_1 AS (SELECT order_lines.product_name as" - " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue,COUNT(case when" - " order_lines.sales_channel='Email' then order_lines.order_id end) as" + " order_lines_total_item_revenue DESC NULLS LAST) ,merged_query_1 AS (SELECT order_lines.product_name" + " as order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue,COUNT(case" + " when order_lines.sales_channel='Email' then order_lines.order_id end) as" " order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines LEFT JOIN" " analytics.orders orders ON order_lines.order_unique_id=orders.id WHERE orders.new_vs_repeat<>'New'" - " GROUP BY order_lines.product_name ORDER BY order_lines_total_item_revenue DESC) SELECT" + " GROUP BY order_lines.product_name ORDER BY order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_total_item_revenue as" " order_lines_total_item_revenue,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_1.order_lines_number_of_email_purchased_items as" @@ -273,12 +273,12 @@ def test_query_merged_queries_all_options_in_second_query(connection): " order_lines.sales_channel='Email' then order_lines.order_id end) as" " order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines WHERE NOT" " order_lines.product_name IS NULL GROUP BY 
order_lines.product_name ORDER BY" - " order_lines_number_of_email_purchased_items DESC) ,merged_query_1 AS (SELECT" + " order_lines_number_of_email_purchased_items DESC NULLS LAST) ,merged_query_1 AS (SELECT" " order_lines.product_name as order_lines_product_name,SUM(order_lines.revenue) as" " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" " analytics.orders orders ON order_lines.order_unique_id=orders.id WHERE orders.new_vs_repeat<>'New'" " GROUP BY order_lines.product_name HAVING SUM(order_lines.revenue)>100 ORDER BY" - " order_lines_total_item_revenue DESC LIMIT 10) SELECT" + " order_lines_total_item_revenue DESC NULLS LAST LIMIT 10) SELECT" " merged_query_0.order_lines_number_of_email_purchased_items as" " order_lines_number_of_email_purchased_items,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_1.order_lines_total_item_revenue as" @@ -306,16 +306,17 @@ def test_query_merged_queries_order_by_asc_post_merge(connection): "WITH merged_query_0 AS (SELECT order_lines.product_name as order_lines_product_name,COUNT(case when" " order_lines.sales_channel='Email' then order_lines.order_id end) as" " order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines GROUP BY" - " order_lines.product_name ORDER BY order_lines_number_of_email_purchased_items DESC) ,merged_query_1" - " AS (SELECT order_lines.product_name as order_lines_product_name,SUM(order_lines.revenue) as" - " order_lines_total_item_revenue FROM analytics.order_line_items order_lines GROUP BY" - " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC) SELECT" + " order_lines.product_name ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST)" + " ,merged_query_1 AS (SELECT order_lines.product_name as" + " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines GROUP BY order_lines.product_name ORDER BY" + " order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_number_of_email_purchased_items as" " order_lines_number_of_email_purchased_items,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_1.order_lines_total_item_revenue as" " order_lines_total_item_revenue FROM merged_query_0 LEFT JOIN merged_query_1 ON" " merged_query_0.order_lines_product_name=merged_query_1.order_lines_product_name ORDER BY" - " merged_query_0.order_lines_number_of_email_purchased_items ASC;" + " merged_query_0.order_lines_number_of_email_purchased_items ASC NULLS LAST;" ) assert query == correct @@ -339,16 +340,17 @@ def test_query_merged_queries_order_by_limit_post_merge(connection): "WITH merged_query_0 AS (SELECT order_lines.product_name as order_lines_product_name,COUNT(case when" " order_lines.sales_channel='Email' then order_lines.order_id end) as" " order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines GROUP BY" - " order_lines.product_name ORDER BY order_lines_number_of_email_purchased_items DESC) ,merged_query_1" - " AS (SELECT order_lines.product_name as order_lines_product_name,SUM(order_lines.revenue) as" - " order_lines_total_item_revenue FROM analytics.order_line_items order_lines GROUP BY" - " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC) SELECT" + " order_lines.product_name ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST)" + " ,merged_query_1 AS (SELECT order_lines.product_name as" + " 
order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines GROUP BY order_lines.product_name ORDER BY" + " order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_number_of_email_purchased_items as" " order_lines_number_of_email_purchased_items,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_1.order_lines_total_item_revenue as" " order_lines_total_item_revenue FROM merged_query_0 LEFT JOIN merged_query_1 ON" " merged_query_0.order_lines_product_name=merged_query_1.order_lines_product_name ORDER BY" - " merged_query_1.order_lines_total_item_revenue DESC LIMIT 5;" + " merged_query_1.order_lines_total_item_revenue DESC NULLS LAST LIMIT 5;" ) assert query == correct @@ -381,8 +383,8 @@ def test_query_merged_queries_dim_group(connection, query_type): else: lines_date_trunc_group = lines_date_trunc = "DATE_TRUNC('DAY', order_lines.order_date)" orders_date_trunc_group = orders_date_trunc = "DATE_TRUNC('DAY', orders.order_date)" - lines_order_by = " ORDER BY order_lines_total_item_revenue DESC" - orders_order_by = " ORDER BY orders_number_of_orders DESC" + lines_order_by = " ORDER BY order_lines_total_item_revenue DESC NULLS LAST" + orders_order_by = " ORDER BY orders_number_of_orders DESC NULLS LAST" product_group = "order_lines.product_name" time = "'2018-01-02T00:00:00'" condition = "merged_query_0.orders_order_date=merged_query_1.order_lines_order_date" @@ -425,16 +427,16 @@ def test_query_merged_queries_three_way(connection): " order_lines_order_date,COUNT(case when order_lines.sales_channel='Email' then order_lines.order_id" " end) as order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines" " GROUP BY order_lines.product_name,DATE_TRUNC('DAY', order_lines.order_date) ORDER BY" - " order_lines_number_of_email_purchased_items DESC) ,merged_query_1 AS (SELECT DATE_TRUNC('DAY'," - " order_lines.order_date) as order_lines_order_date,SUM(order_lines.revenue) as" + " order_lines_number_of_email_purchased_items DESC NULLS LAST) ,merged_query_1 AS (SELECT" + " DATE_TRUNC('DAY', order_lines.order_date) as order_lines_order_date,SUM(order_lines.revenue) as" " order_lines_total_item_revenue FROM analytics.order_line_items order_lines GROUP BY" - " DATE_TRUNC('DAY', order_lines.order_date) ORDER BY order_lines_total_item_revenue DESC)" + " DATE_TRUNC('DAY', order_lines.order_date) ORDER BY order_lines_total_item_revenue DESC NULLS LAST)" " ,merged_query_2 AS (SELECT order_lines.product_name as order_lines_product_name,SUM(case when" " order_lines.product_name='Portable Charger' and order_lines.product_name IN ('Portable" " Charger','Dual Charger') and orders.revenue * 100>100 then order_lines.item_costs end) as" " order_lines_total_item_costs FROM analytics.order_line_items order_lines LEFT JOIN analytics.orders" " orders ON order_lines.order_unique_id=orders.id GROUP BY order_lines.product_name ORDER BY" - " order_lines_total_item_costs DESC) SELECT" + " order_lines_total_item_costs DESC NULLS LAST) SELECT" " merged_query_0.order_lines_number_of_email_purchased_items as" " order_lines_number_of_email_purchased_items,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_0.order_lines_order_date as" @@ -466,10 +468,11 @@ def test_query_merged_queries_where_having_post_merge(connection): "WITH merged_query_0 AS (SELECT order_lines.product_name as order_lines_product_name,COUNT(case when" " 
order_lines.sales_channel='Email' then order_lines.order_id end) as" " order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines GROUP BY" - " order_lines.product_name ORDER BY order_lines_number_of_email_purchased_items DESC) ,merged_query_1" - " AS (SELECT order_lines.product_name as order_lines_product_name,SUM(order_lines.revenue) as" - " order_lines_total_item_revenue FROM analytics.order_line_items order_lines GROUP BY" - " order_lines.product_name ORDER BY order_lines_total_item_revenue DESC) SELECT" + " order_lines.product_name ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST)" + " ,merged_query_1 AS (SELECT order_lines.product_name as" + " order_lines_product_name,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines GROUP BY order_lines.product_name ORDER BY" + " order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_number_of_email_purchased_items as" " order_lines_number_of_email_purchased_items,merged_query_0.order_lines_product_name as" " order_lines_product_name,merged_query_1.order_lines_total_item_revenue as" @@ -517,11 +520,11 @@ def test_query_merged_queries_mapped_where_post_merge(connection): " order_lines.sales_channel='Email' then order_lines.order_id end) as" " order_lines_number_of_email_purchased_items FROM analytics.order_line_items order_lines LEFT JOIN" " analytics.orders orders ON order_lines.order_unique_id=orders.id GROUP BY orders.campaign ORDER BY" - " order_lines_number_of_email_purchased_items DESC) ,merged_query_1 AS (SELECT orders.campaign as" - " orders_campaign,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " order_lines_number_of_email_purchased_items DESC NULLS LAST) ,merged_query_1 AS (SELECT" + " orders.campaign as orders_campaign,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" " order_lines.order_unique_id=orders.id GROUP BY orders.campaign ORDER BY" - " order_lines_total_item_revenue DESC) SELECT" + " order_lines_total_item_revenue DESC NULLS LAST) SELECT" " merged_query_0.order_lines_number_of_email_purchased_items as" " order_lines_number_of_email_purchased_items,merged_query_0.orders_campaign as" " orders_campaign,merged_query_1.order_lines_total_item_revenue as order_lines_total_item_revenue" @@ -545,12 +548,12 @@ def test_query_merged_queries_handle_mappings_in_join_fields(connection): correct = ( "WITH merged_query_0 AS (SELECT DATE_TRUNC('DAY', orders.order_date) as" " orders_order_date,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders GROUP BY" - " DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders DESC) ,merged_query_1 AS" - " (SELECT order_lines.product_name as order_lines_product_name,DATE_TRUNC('DAY'," + " DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders DESC NULLS LAST)" + " ,merged_query_1 AS (SELECT order_lines.product_name as order_lines_product_name,DATE_TRUNC('DAY'," " order_lines.order_date) as order_lines_order_date,SUM(order_lines.revenue) as" " order_lines_total_item_revenue FROM analytics.order_line_items order_lines GROUP BY" " order_lines.product_name,DATE_TRUNC('DAY', order_lines.order_date) ORDER BY" - " order_lines_total_item_revenue DESC) SELECT merged_query_0.orders_number_of_orders as" + " order_lines_total_item_revenue DESC NULLS LAST) SELECT merged_query_0.orders_number_of_orders as" " 
orders_number_of_orders,merged_query_0.orders_order_date as" " orders_order_date,merged_query_1.order_lines_total_item_revenue as" " order_lines_total_item_revenue,merged_query_1.order_lines_product_name as order_lines_product_name" @@ -577,10 +580,10 @@ def test_query_merged_queries_handle_non_date_mappings_in_join_fields(connection "WITH merged_query_0 AS (SELECT events.device as events_device,DATE_TRUNC('DAY', events.event_date)" " as events_event_date,COUNT(DISTINCT(events.id)) as events_number_of_events FROM analytics.events" " events GROUP BY events.device,DATE_TRUNC('DAY', events.event_date) ORDER BY events_number_of_events" - " DESC) ,merged_query_1 AS (SELECT events.device as" + " DESC NULLS LAST) ,merged_query_1 AS (SELECT events.device as" " login_events_device,COUNT(DISTINCT(login_events.id)) as login_events_number_of_login_events FROM" " analytics.login_events login_events LEFT JOIN analytics.events events ON login_events.id=events.id" - " GROUP BY events.device ORDER BY login_events_number_of_login_events DESC) SELECT" + " GROUP BY events.device ORDER BY login_events_number_of_login_events DESC NULLS LAST) SELECT" " merged_query_0.events_number_of_events as events_number_of_events,merged_query_0.events_device as" " events_device,merged_query_0.events_event_date as" " events_event_date,merged_query_1.login_events_number_of_login_events as" @@ -708,8 +711,8 @@ def test_query_merged_queries_all_db_flavors(connection, query_type): query = connection.get_sql_query(merged_queries=[primary_query, query_2], query_type=query_type) if query_type != Definitions.bigquery: - order_by_1 = " ORDER BY order_lines_number_of_email_purchased_items DESC" - order_by_2 = " ORDER BY order_lines_total_item_revenue DESC" + order_by_1 = " ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST" + order_by_2 = " ORDER BY order_lines_total_item_revenue DESC NULLS LAST" group_by_product = "order_lines.product_name" else: order_by_1 = "" diff --git a/tests/test_cumulative_query.py b/tests/test_cumulative_query.py index 49cf8fc..5121202 100644 --- a/tests/test_cumulative_query.py +++ b/tests/test_cumulative_query.py @@ -61,7 +61,7 @@ def test_cumulative_query_metric_with_number(connection, query_type): "select dateadd(day, seq4(), '2000-01-01') as date from table(generator(rowcount => 365*40))" ) date_trunc = "DATE_TRUNC('DAY', orders.order_date)" - order_by = " ORDER BY orders_average_order_value_custom DESC" + order_by = " ORDER BY orders_average_order_value_custom DESC NULLS LAST" time = "'2018-01-02T00:00:00'" correct = ( f"WITH date_spine AS ({date_spine}) ,subquery_orders_cumulative_aov " @@ -153,7 +153,7 @@ def test_cumulative_query_metric_dimension_no_time(connection): "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "WHERE customers.region='West' GROUP BY orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC) " + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) " "SELECT base.orders_new_vs_repeat as orders_new_vs_repeat," "aggregated_orders_total_lifetime_revenue.orders_total_revenue " "as orders_total_lifetime_revenue FROM base LEFT JOIN " @@ -188,7 +188,7 @@ def test_cumulative_metric_and_non_cumulative(connection): "customers.customer_id ELSE NULL END), 0) as customers_number_of_customers FROM " "analytics.orders orders LEFT JOIN analytics.customers customers ON orders.customer_id" "=customers.customer_id WHERE customers.region='West' 
GROUP BY orders.new_vs_repeat " - "ORDER BY customers_number_of_customers DESC) SELECT base.orders_new_vs_repeat as " + "ORDER BY customers_number_of_customers DESC NULLS LAST) SELECT base.orders_new_vs_repeat as " "orders_new_vs_repeat,(aggregated_orders_total_lifetime_revenue.orders_total_revenue) " "/ nullif((COUNT(customers_number_of_customers)), 0) as orders_ltr FROM base LEFT JOIN" " aggregated_orders_total_lifetime_revenue ON base.orders_new_vs_repeat=" @@ -281,7 +281,7 @@ def test_cumulative_query_metrics_and_time(connection, query_type): date_trunc_group = date_trunc = "DATE_TRUNC('DAY', orders.order_date)" spine_date_trunc = "DATE_TRUNC('MONTH', date_spine.date)" month_date_trunc = "DATE_TRUNC('MONTH', orders.order_date)" - order_by = " ORDER BY order_lines_total_item_revenue DESC" + order_by = " ORDER BY order_lines_total_item_revenue DESC NULLS LAST" time1 = "'2018-01-02T00:00:00'" time2 = "'2019-01-01T00:00:00'" correct = ( @@ -370,7 +370,7 @@ def test_cumulative_query_metrics_dimensions_and_time(connection): "as orders_order_date,SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines LEFT JOIN analytics.orders orders " "ON order_lines.order_unique_id=orders.id GROUP BY orders.new_vs_repeat," - "DATE_TRUNC('DAY', orders.order_date) ORDER BY order_lines_total_item_revenue DESC) " + "DATE_TRUNC('DAY', orders.order_date) ORDER BY order_lines_total_item_revenue DESC NULLS LAST) " "SELECT base.orders_new_vs_repeat as orders_new_vs_repeat,base.orders_order_date as " "orders_order_date,aggregated_orders_total_lifetime_revenue.orders_total_revenue " "as orders_total_lifetime_revenue,aggregated_orders_cumulative_customers" diff --git a/tests/test_field_mappings.py b/tests/test_field_mappings.py index d477e5c..4f0b2f5 100644 --- a/tests/test_field_mappings.py +++ b/tests/test_field_mappings.py @@ -13,7 +13,7 @@ def test_mapping_date_only(connection): correct = ( "SELECT DATE_TRUNC('DAY', orders.order_date) as orders_order_date " "FROM analytics.orders orders GROUP BY DATE_TRUNC('DAY', " - "orders.order_date) ORDER BY orders_order_date ASC;" + "orders.order_date) ORDER BY orders_order_date ASC NULLS LAST;" ) assert query == correct @@ -24,7 +24,7 @@ def test_mapping_dimension_only(connection): correct = ( "SELECT sessions.utm_source as sessions_utm_source FROM analytics.sessions " - "sessions GROUP BY sessions.utm_source ORDER BY sessions_utm_source ASC;" + "sessions GROUP BY sessions.utm_source ORDER BY sessions_utm_source ASC NULLS LAST;" ) assert query == correct @@ -68,7 +68,7 @@ def test_mapping_metric_mapped_date_and_filter(connection, time_grain): f"SELECT {date_part} as orders_order_{time_grain}," "COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders " "WHERE DATE_TRUNC('DAY', orders.order_date)>='2022-01-05T00:00:00' " - f"GROUP BY {date_part} ORDER BY orders_number_of_orders DESC;" + f"GROUP BY {date_part} ORDER BY orders_number_of_orders DESC NULLS LAST;" ) assert query == correct @@ -94,7 +94,7 @@ def test_mapping_multiple_metric_same_canon_date_mapped_date_and_filter(connecti "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM analytics.order_line_items " "order_lines WHERE DATE_TRUNC('DAY', order_lines.order_date)>='2022-01-05T00:00:00' " "GROUP BY DATE_TRUNC('DAY', order_lines.order_date) " - "ORDER BY order_lines_number_of_email_purchased_items DESC;" + "ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST;" ) assert query == correct @@ -126,13 +126,13 @@ def 
test_mapping_multiple_metric_different_canon_date_merged_mapped_date_and_fil "orders_order_date,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders " "orders WHERE DATE_TRUNC('DAY', orders.order_date)>='2022-01-05T00:00:00' AND " "DATE_TRUNC('DAY', orders.order_date)<'2023-03-05T00:00:00' " - "GROUP BY DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders DESC) ," + "GROUP BY DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders DESC NULLS LAST) ," f"{sessions_cte} AS (SELECT DATE_TRUNC('DAY', sessions.session_date) " "as sessions_session_date,COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions WHERE DATE_TRUNC('DAY', sessions.session_date)" ">='2022-01-05T00:00:00' AND DATE_TRUNC('DAY', sessions.session_date)<'2023-03-05T00:00:00' " "GROUP BY DATE_TRUNC('DAY', sessions.session_date) " - f"ORDER BY sessions_number_of_sessions DESC) SELECT {orders_cte}." + f"ORDER BY sessions_number_of_sessions DESC NULLS LAST) SELECT {orders_cte}." f"orders_number_of_orders as orders_number_of_orders,{sessions_cte}." f"sessions_number_of_sessions as sessions_number_of_sessions,ifnull({orders_cte}." f"orders_order_date, {sessions_cte}.sessions_session_date) as orders_order_date," @@ -172,7 +172,7 @@ def test_mapping_multiple_metric_different_canon_date_joinable_mapped_date_dim_a "orders_number_of_orders FROM analytics.orders orders WHERE DATE_TRUNC('DAY', " "orders.order_date)>='2022-01-05T00:00:00' AND DATE_TRUNC('DAY', orders.order_date)" "<'2023-03-05T00:00:00' GROUP BY orders.sub_channel,DATE_TRUNC('DAY', orders.order_date) " - "ORDER BY orders_number_of_orders DESC) ," + "ORDER BY orders_number_of_orders DESC NULLS LAST) ," f"{order_lines_cte} AS (" "SELECT orders.sub_channel as orders_sub_channel,DATE_TRUNC('DAY', order_lines.order_date) " "as order_lines_order_date,SUM(order_lines.revenue) as order_lines_total_item_revenue " @@ -180,7 +180,7 @@ def test_mapping_multiple_metric_different_canon_date_joinable_mapped_date_dim_a "ON order_lines.order_unique_id=orders.id WHERE DATE_TRUNC('DAY', " "order_lines.order_date)>='2022-01-05T00:00:00' AND DATE_TRUNC('DAY', " "order_lines.order_date)<'2023-03-05T00:00:00' GROUP BY orders.sub_channel," - "DATE_TRUNC('DAY', order_lines.order_date) ORDER BY order_lines_total_item_revenue DESC) " + "DATE_TRUNC('DAY', order_lines.order_date) ORDER BY order_lines_total_item_revenue DESC NULLS LAST) " f"SELECT {order_lines_cte}.order_lines_total_item_revenue as " f"order_lines_total_item_revenue,{orders_cte}.orders_number_of_orders " f"as orders_number_of_orders,ifnull({order_lines_cte}.orders_sub_channel, " @@ -241,8 +241,8 @@ def test_mapping_mapped_metric_joined_dim(connection, query_type): ) orders_date_ref = "orders.order_date" customers_date_ref = "customers.first_order_date" - order_by_count = " ORDER BY orders_number_of_orders DESC" - order_by_avg = " ORDER BY customers_average_customer_ltv DESC" + order_by_count = " ORDER BY orders_number_of_orders DESC NULLS LAST" + order_by_avg = " ORDER BY customers_average_customer_ltv DESC NULLS LAST" semi = ";" correct = ( f"WITH {orders_cte} AS (SELECT order_lines.sales_channel as order_lines_channel," @@ -289,7 +289,7 @@ def test_mapping_mapped_metric_mapped_date_and_filter(connection): "ON order_lines.order_unique_id=orders.id WHERE DATE_TRUNC('DAY', " "order_lines.order_date)>='2022-01-05T00:00:00' AND orders.new_vs_repeat='New' " "GROUP BY DATE_TRUNC('DAY', order_lines.order_date) " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER 
BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -315,7 +315,7 @@ def test_mapping_metric_mapped_dim(connection): "FROM analytics.order_line_items order_lines LEFT JOIN analytics.orders orders " "ON order_lines.order_unique_id=orders.id WHERE order_lines.order_date" ">='2022-01-05T00:00:00' GROUP BY orders.sub_channel " - "ORDER BY orders_number_of_orders DESC;" + "ORDER BY orders_number_of_orders DESC NULLS LAST;" ) assert query == correct @@ -333,7 +333,7 @@ def test_mapped_metric_mapped_dim(connection): "order_lines_total_item_revenue FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "WHERE orders.sub_channel='google' GROUP BY orders.sub_channel " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -345,7 +345,7 @@ def test_mapped_metric_non_mapped_dim(connection): correct = ( "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) " "as order_lines_total_item_revenue FROM analytics.order_line_items order_lines " - "GROUP BY order_lines.sales_channel ORDER BY order_lines_total_item_revenue DESC;" + "GROUP BY order_lines.sales_channel ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -365,7 +365,7 @@ def test_mapped_metric_mapped_dim_having(connection): "as orders_number_of_orders FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "GROUP BY orders.sub_channel HAVING SUM(order_lines.revenue)>200 " - "ORDER BY order_lines_total_item_revenue ASC,orders_sub_channel DESC;" + "ORDER BY order_lines_total_item_revenue ASC NULLS LAST,orders_sub_channel DESC NULLS LAST;" ) assert query == correct @@ -383,10 +383,10 @@ def test_mapped_metric_mapped_merged_results(connection): f"WITH {order_lines_cte} AS (SELECT orders.sub_channel as orders_sub_channel," "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM analytics.order_line_items " "order_lines LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "GROUP BY orders.sub_channel ORDER BY order_lines_total_item_revenue DESC) ," + "GROUP BY orders.sub_channel ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{sessions_cte} AS (SELECT sessions.utm_source as sessions_utm_source," "COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions sessions " - "GROUP BY sessions.utm_source ORDER BY sessions_number_of_sessions DESC) " + "GROUP BY sessions.utm_source ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {order_lines_cte}.order_lines_total_item_revenue as " f"order_lines_total_item_revenue,{sessions_cte}.sessions_number_of_sessions " f"as sessions_number_of_sessions,{order_lines_source} as " @@ -453,7 +453,7 @@ def test_dim_only_joinable_date_chooses_right_mapping_date(connection): "orders.sub_channel as orders_sub_channel,orders.campaign as orders_campaign " "FROM analytics.orders orders WHERE DATE_TRUNC('DAY', orders.order_date)>='2023-05-05' " "AND DATE_TRUNC('DAY', orders.order_date)<='2023-08-02' GROUP BY orders.customer_id," - "orders.account_id,orders.sub_channel,orders.campaign ORDER BY orders_customer_id ASC;" + "orders.account_id,orders.sub_channel,orders.campaign ORDER BY orders_customer_id ASC NULLS LAST;" ) assert query == correct @@ -476,7 +476,7 @@ def test_mapping_defer_to_metric_canon_date_not_dim_only(connection): "FROM 
analytics.clicked_on_page clicked_on_page WHERE DATE_TRUNC('DAY', " "clicked_on_page.session_date)>='2023-05-05' GROUP BY DATE_TRUNC('DAY', " "clicked_on_page.session_date),clicked_on_page.context_os ORDER BY " - "clicked_on_page_number_of_clicks DESC) ," + "clicked_on_page_number_of_clicks DESC NULLS LAST) ," "submitted_form_sent_at__cte_subquery_1 AS (SELECT DATE_TRUNC('DAY', " "submitted_form.sent_at) as submitted_form_sent_at_date," "submitted_form.context_os as submitted_form_context_os," @@ -484,7 +484,7 @@ def test_mapping_defer_to_metric_canon_date_not_dim_only(connection): "FROM analytics.submitted_form submitted_form WHERE DATE_TRUNC('DAY', " "submitted_form.sent_at)>='2023-05-05' GROUP BY DATE_TRUNC('DAY', " "submitted_form.sent_at),submitted_form.context_os ORDER BY " - "submitted_form_unique_users_form_submissions DESC) SELECT " + "submitted_form_unique_users_form_submissions DESC NULLS LAST) SELECT " f"{cte_1}.clicked_on_page_number_of_clicks " f"as clicked_on_page_number_of_clicks,{cte_2}" ".submitted_form_unique_users_form_submissions as " diff --git a/tests/test_funnels.py b/tests/test_funnels.py index 2f69b09..4808c69 100644 --- a/tests/test_funnels.py +++ b/tests/test_funnels.py @@ -292,7 +292,7 @@ def test_orders_basic_query_with_funnel_filter(connection): "order_lines LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "WHERE customers.customer_id IN (SELECT DISTINCT customers_customer_id FROM link_filter_subquery) " - "GROUP BY customers.region ORDER BY orders_number_of_orders DESC;" + "GROUP BY customers.region ORDER BY orders_number_of_orders DESC NULLS LAST;" ) assert query == correct diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 5973a4b..37d4904 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -14,7 +14,7 @@ def test_query_no_join_with_limit(connection): "SELECT order_lines.sales_channel as order_lines_channel," "SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines GROUP BY order_lines.sales_channel " - "ORDER BY order_lines_total_item_revenue DESC LIMIT 499;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST LIMIT 499;" ) assert query == correct @@ -65,7 +65,7 @@ def test_query_no_join_average_distinct(connection): "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0) / NULLIF(COUNT(DISTINCT CASE WHEN " "(order_lines.order_total) IS NOT NULL THEN order_lines.order_unique_id ELSE NULL END), 0)) " "as order_lines_average_order_revenue FROM analytics.order_line_items order_lines " - "GROUP BY order_lines.sales_channel ORDER BY order_lines_average_order_revenue DESC;" + "GROUP BY order_lines.sales_channel ORDER BY order_lines_average_order_revenue DESC NULLS LAST;" ) assert query == correct @@ -110,7 +110,7 @@ def test_query_single_join(connection): "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " "analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON " "order_lines.order_unique_id=orders.id GROUP BY order_lines.sales_channel,orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -122,7 +122,7 @@ def test_query_single_dimension(connection): correct = ( "SELECT orders.new_vs_repeat as orders_new_vs_repeat FROM " "analytics.orders orders GROUP BY orders.new_vs_repeat " - "ORDER BY orders_new_vs_repeat ASC;" 
+ "ORDER BY orders_new_vs_repeat ASC NULLS LAST;" ) assert query == correct @@ -137,7 +137,7 @@ def test_query_single_dimension_with_comment(connection): "SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines GROUP BY CASE\n--- parent channel\nWHEN " "order_lines.sales_channel ilike '%social%' then 'Social'\nELSE 'Not Social'\nEND " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -152,7 +152,7 @@ def test_query_single_dimension_with_multi_filter(connection): "and orders.revenue * 100>100 then order_lines.item_costs end) " "as order_lines_total_item_costs FROM analytics.order_line_items order_lines LEFT JOIN " "analytics.orders orders ON order_lines.order_unique_id=orders.id " - "GROUP BY order_lines.sales_channel ORDER BY order_lines_total_item_costs DESC;" + "GROUP BY order_lines.sales_channel ORDER BY order_lines_total_item_costs DESC NULLS LAST;" ) assert query == correct @@ -173,7 +173,7 @@ def test_query_single_dimension_sa_duration(connection): "ELSE NULL END), 0)) as orders_average_days_between_orders " "FROM analytics.order_line_items order_lines LEFT JOIN analytics.orders orders " "ON order_lines.order_unique_id=orders.id GROUP BY order_lines.product_name " - "ORDER BY orders_average_days_between_orders DESC;" + "ORDER BY orders_average_days_between_orders DESC NULLS LAST;" ) assert query == correct @@ -192,7 +192,7 @@ def test_functional_pk_resolve_one_to_many(connection): "LEFT JOIN analytics_live.discounts discounts ON " "discounts.discount_id=discount_detail.discount_id " "AND DATE_TRUNC('WEEK', CAST(discounts.order_date AS DATE)) is not null " - "GROUP BY discounts.country ORDER BY discount_detail_discount_usd DESC;" + "GROUP BY discounts.country ORDER BY discount_detail_discount_usd DESC NULLS LAST;" ) assert query == correct @@ -245,12 +245,12 @@ def test_query_single_join_metric_with_sub_field(connection): "WITH order_lines_order__cte_subquery_0 AS (SELECT order_lines.sales_channel as " "order_lines_channel,SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines GROUP BY order_lines.sales_channel " - "ORDER BY order_lines_total_item_revenue DESC) ," + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," "orders_order__cte_subquery_1 AS (SELECT order_lines.sales_channel as order_lines_channel," "NULLIF(COUNT(DISTINCT CASE WHEN (orders.id) IS NOT NULL THEN orders.id " "ELSE NULL END), 0) as orders_number_of_orders FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id GROUP BY " - "order_lines.sales_channel ORDER BY orders_number_of_orders DESC) " + "order_lines.sales_channel ORDER BY orders_number_of_orders DESC NULLS LAST) " "SELECT order_lines_order__cte_subquery_0.order_lines_total_item_revenue as " "order_lines_total_item_revenue,orders_order__cte_subquery_1.orders_number_of_orders " "as orders_number_of_orders,ifnull(order_lines_order__cte_subquery_0.order_lines_channel, " @@ -308,7 +308,7 @@ def test_query_single_join_select_args(connection): "analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON " "order_lines.order_unique_id=orders.id GROUP BY order_lines.sales_channel,orders.new_vs_repeat," "CAST(new_vs_repeat = 'Repeat' AS INT),CAST(date_created > '2021-04-02' AS INT) " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" 
) assert query == correct @@ -328,7 +328,7 @@ def test_query_single_join_with_case_raw_sql(connection): "analytics.order_line_items order_lines LEFT JOIN analytics.orders orders " "ON order_lines.order_unique_id=orders.id GROUP BY (CASE WHEN order_lines.product_name " "ilike '%sale%' then TRUE else FALSE end),orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -347,7 +347,7 @@ def test_query_single_join_with_case(connection): "analytics.order_line_items order_lines LEFT JOIN analytics.orders orders " "ON order_lines.order_unique_id=orders.id GROUP BY case when order_lines.product_name " "ilike '%sale%' then 'On sale' else 'Not on sale' end,orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -371,7 +371,7 @@ def test_query_single_join_with_tier(connection): "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " "analytics.order_line_items order_lines LEFT JOIN analytics.orders orders " f"ON order_lines.order_unique_id=orders.id GROUP BY {tier_case_query},orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -390,7 +390,7 @@ def test_query_single_join_with_filter(connection): "as order_lines_number_of_email_purchased_items FROM analytics.order_line_items " "order_lines LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id" " GROUP BY order_lines.sales_channel,orders.new_vs_repeat " - "ORDER BY order_lines_number_of_email_purchased_items DESC;" + "ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST;" ) assert query == correct @@ -408,7 +408,7 @@ def test_query_single_join_with_custom_join_type(connection): " analytics.submitted_form submitted_form FULL OUTER JOIN analytics.customers customers ON" " customers.customer_id=submitted_form.customer_id AND DATE_TRUNC('DAY', submitted_form.session_date)" " is not null GROUP BY submitted_form.context_os,customers.gender ORDER BY" - " submitted_form_number_of_form_submissions DESC;" + " submitted_form_number_of_form_submissions DESC NULLS LAST;" ) assert query == correct @@ -421,12 +421,12 @@ def test_query_multiple_join(connection): ) correct = ( - "SELECT customers.region as customers_region,orders.new_vs_repeat as orders_new_vs_repeat," - "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " - "analytics.order_line_items order_lines " - "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " - "GROUP BY customers.region,orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC;" + "SELECT customers.region as customers_region,orders.new_vs_repeat as" + " orders_new_vs_repeat,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" + " order_lines.order_unique_id=orders.id LEFT JOIN analytics.customers customers ON" + " order_lines.customer_id=customers.customer_id GROUP BY customers.region,orders.new_vs_repeat ORDER" + " BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -451,7 +451,7 @@ def test_query_quad_join(connection): "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "LEFT 
JOIN analytics.orders orders ON orders.id=discounts.order_id " "GROUP BY customers.region,orders.new_vs_repeat,discounts.code " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -474,7 +474,7 @@ def test_query_multiple_join_with_duration(connection): "FROM analytics.orders orders " "LEFT JOIN analytics.customers customers ON orders.customer_id=customers.customer_id " "GROUP BY DATEDIFF('MONTH', orders.previous_order_date, orders.order_date) " - "ORDER BY customers_total_sessions DESC;" + "ORDER BY customers_total_sessions DESC NULLS LAST;" ) assert query == correct @@ -488,13 +488,12 @@ def test_query_multiple_join_where_dict(connection): ) correct = ( - "SELECT customers.region as customers_region,orders.new_vs_repeat as orders_new_vs_repeat," - "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " - "analytics.order_line_items order_lines " - "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " - "WHERE customers.region<>'West' " - "GROUP BY customers.region,orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC;" + "SELECT customers.region as customers_region,orders.new_vs_repeat as" + " orders_new_vs_repeat,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" + " order_lines.order_unique_id=orders.id LEFT JOIN analytics.customers customers ON" + " order_lines.customer_id=customers.customer_id WHERE customers.region<>'West' GROUP BY" + " customers.region,orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -508,13 +507,13 @@ def test_query_multiple_join_where_literal(connection): ) correct = ( - "SELECT customers.region as customers_region,orders.new_vs_repeat as orders_new_vs_repeat," - "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " - "analytics.order_line_items order_lines " - "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " - "WHERE DATE_TRUNC('WEEK', CAST(customers.first_order_date AS DATE)) > '2021-07-12' " - "GROUP BY customers.region,orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC;" + "SELECT customers.region as customers_region,orders.new_vs_repeat as" + " orders_new_vs_repeat,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" + " order_lines.order_unique_id=orders.id LEFT JOIN analytics.customers customers ON" + " order_lines.customer_id=customers.customer_id WHERE DATE_TRUNC('WEEK'," + " CAST(customers.first_order_date AS DATE)) > '2021-07-12' GROUP BY" + " customers.region,orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -534,7 +533,7 @@ def test_query_multiple_join_having_dict(connection): "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "GROUP BY customers.region,orders.new_vs_repeat HAVING SUM(order_lines.revenue)>-12 " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -554,7 +553,7 
@@ def test_query_multiple_join_having_literal(connection): "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "GROUP BY customers.region,orders.new_vs_repeat HAVING (SUM(order_lines.revenue)) > -12 " - "ORDER BY order_lines_total_item_revenue DESC;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -568,12 +567,12 @@ def test_query_multiple_join_order_by_literal(connection): ) correct = ( - "SELECT customers.region as customers_region,orders.new_vs_repeat as orders_new_vs_repeat," - "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " - "analytics.order_line_items order_lines " - "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " - "GROUP BY customers.region,orders.new_vs_repeat ORDER BY order_lines_total_item_revenue ASC;" + "SELECT customers.region as customers_region,orders.new_vs_repeat as" + " orders_new_vs_repeat,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" + " order_lines.order_unique_id=orders.id LEFT JOIN analytics.customers customers ON" + " order_lines.customer_id=customers.customer_id GROUP BY customers.region,orders.new_vs_repeat ORDER" + " BY order_lines_total_item_revenue ASC NULLS LAST;" ) assert query == correct @@ -596,7 +595,7 @@ def test_query_multiple_join_all(connection): "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "WHERE customers.region<>'West' " "GROUP BY customers.region,orders.new_vs_repeat HAVING SUM(order_lines.revenue)>-12 " - "ORDER BY order_lines_total_item_revenue ASC;" + "ORDER BY order_lines_total_item_revenue ASC NULLS LAST;" ) assert query == correct @@ -615,7 +614,7 @@ def test_query_single_join_count_and_filter(connection): "ELSE NULL END), 0) " "as orders_new_order_count FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "GROUP BY order_lines.sales_channel ORDER BY orders_new_order_count DESC;" + "GROUP BY order_lines.sales_channel ORDER BY orders_new_order_count DESC NULLS LAST;" ) assert query == correct @@ -637,7 +636,7 @@ def test_query_implicit_add_three_views(connection): "LEFT JOIN analytics.orders orders ON orders.id=discounts.order_id " "LEFT JOIN analytics.customers customers ON orders.customer_id=customers.customer_id " "GROUP BY discounts.code,country_detail.rain " - "ORDER BY customers_number_of_customers DESC;" + "ORDER BY customers_number_of_customers DESC NULLS LAST;" ) assert query == correct @@ -655,7 +654,7 @@ def test_query_number_measure_w_dimension_reference(connection): "(order by (DATE_TRUNC('DAY', order_lines.order_date)) desc), ',', 0)::int " "as order_lines_ending_on_hand_qty " "FROM analytics.order_line_items order_lines GROUP BY order_lines.product_name " - "ORDER BY order_lines_ending_on_hand_qty DESC;" + "ORDER BY order_lines_ending_on_hand_qty DESC NULLS LAST;" ) assert query == correct @@ -673,7 +672,7 @@ def test_query_number_as_array_filter(connection): correct = ( "SELECT SUM(case when orders.anon_id NOT IN (9,3,22,9082) then orders.revenue end) as" " orders_total_non_merchant_revenue FROM analytics.orders orders WHERE DATE_TRUNC('DAY'," - " orders.order_date)>'2022-04-03' ORDER BY 
orders_total_non_merchant_revenue DESC;" + " orders.order_date)>'2022-04-03' ORDER BY orders_total_non_merchant_revenue DESC NULLS LAST;" ) assert query == correct @@ -699,7 +698,7 @@ def test_query_bool_and_date_filter(connection, bool_value): "as order_lines_total_item_revenue FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " f"WHERE {negation}(customers.is_churned) AND DATE_TRUNC('DAY', order_lines.order_date)>'2022-04-03' " - "GROUP BY order_lines.sales_channel ORDER BY order_lines_total_item_revenue DESC;" + "GROUP BY order_lines.sales_channel ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -726,13 +725,13 @@ def test_query_sub_group_by_filter(connection, filter_type): "WITH filter_subquery_0 AS (SELECT customers.customer_id as customers_customer_id " "FROM analytics.order_line_items order_lines LEFT JOIN analytics.customers customers " "ON order_lines.customer_id=customers.customer_id GROUP BY customers.customer_id " - "HAVING SUM(order_lines.revenue)>1000 ORDER BY customers_customer_id ASC) " + "HAVING SUM(order_lines.revenue)>1000 ORDER BY customers_customer_id ASC NULLS LAST) " "SELECT customers.region as customers_region,COUNT(orders.id) as orders_number_of_orders " "FROM analytics.orders orders LEFT JOIN analytics.customers customers " "ON orders.customer_id=customers.customer_id " "WHERE customers.customer_id IN (SELECT DISTINCT customers_customer_id " "FROM filter_subquery_0) GROUP BY customers.region " - "ORDER BY orders_number_of_orders DESC;" + "ORDER BY orders_number_of_orders DESC NULLS LAST;" ) assert query == correct @@ -766,7 +765,7 @@ def test_join_graph_working_as_expected(connection): "COUNT(clicked_on_page.id) as clicked_on_page_number_of_clicks " "FROM analytics.clicked_on_page clicked_on_page " "GROUP BY DATE_TRUNC('DAY', clicked_on_page.session_date)," - "clicked_on_page.context_os ORDER BY clicked_on_page_number_of_clicks DESC;" + "clicked_on_page.context_os ORDER BY clicked_on_page_number_of_clicks DESC NULLS LAST;" ) assert query == correct @@ -781,7 +780,7 @@ def test_join_graph_many_to_many_use_bridge_table(connection): " customers_number_of_customers FROM analytics.customer_accounts z_customer_accounts LEFT JOIN" " analytics.accounts accounts ON z_customer_accounts.account_id=accounts.account_id LEFT JOIN" " analytics.customers customers ON z_customer_accounts.customer_id=customers.customer_id GROUP BY" - " accounts.name ORDER BY customers_number_of_customers DESC;" + " accounts.name ORDER BY customers_number_of_customers DESC NULLS LAST;" ) assert query == correct @@ -799,7 +798,7 @@ def test_join_graph_many_to_many_skip_bridge_table(connection): " customers_number_of_customers,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders" " orders LEFT JOIN analytics.customers customers ON orders.customer_id=customers.customer_id LEFT" " JOIN analytics.accounts accounts ON orders.account_id=accounts.account_id GROUP BY accounts.name" - " ORDER BY customers_number_of_customers DESC;" + " ORDER BY customers_number_of_customers DESC NULLS LAST;" ) assert query == correct @@ -839,7 +838,7 @@ def test_median_aggregate_function(connection, query_type): ) correct = ( "SELECT MEDIAN(customers.customer_ltv) as customers_median_customer_ltv " - "FROM analytics.customers customers ORDER BY customers_median_customer_ltv DESC;" + "FROM analytics.customers customers ORDER BY customers_median_customer_ltv DESC NULLS LAST;" ) assert query == 
correct else: @@ -869,7 +868,7 @@ def test_always_filter_with_and_without_join(connection): " WHERE NOT (customers.is_churned) AND NOT created_workspace.context_os IS NULL AND" " created_workspace.context_os IN ('1','Google','os:iOS') AND created_workspace.id NOT IN (1,44,87)" " GROUP BY DATE_TRUNC('DAY', created_workspace.session_date) ORDER BY" - " created_workspace_number_of_workspace_creations DESC;" + " created_workspace_number_of_workspace_creations DESC NULLS LAST;" ) assert query == correct @@ -886,7 +885,7 @@ def test_join_as_ability_single_join_non_as(connection): "COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts " "FROM analytics.mrr_by_customer mrr LEFT JOIN analytics.accounts accounts " "ON mrr.account_id=accounts.account_id " - "GROUP BY accounts.name ORDER BY mrr_number_of_billed_accounts DESC;" + "GROUP BY accounts.name ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -903,7 +902,7 @@ def test_join_as_ability_single_join(connection): "COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts " "FROM analytics.mrr_by_customer mrr LEFT JOIN analytics.accounts parent_account " "ON mrr.parent_account_id=parent_account.account_id " - "GROUP BY parent_account.name ORDER BY mrr_number_of_billed_accounts DESC;" + "GROUP BY parent_account.name ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -934,7 +933,7 @@ def test_join_as_ability_single_join_as_and_non_as(connection): "LEFT JOIN analytics.accounts accounts ON mrr.account_id=accounts.account_id " "LEFT JOIN analytics.accounts parent_account " "ON mrr.parent_account_id=parent_account.account_id " - "GROUP BY parent_account.name,accounts.name ORDER BY mrr_number_of_billed_accounts DESC;" + "GROUP BY parent_account.name,accounts.name ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -957,7 +956,7 @@ def test_join_as_ability_single_join_only_where(connection): "LEFT JOIN analytics.accounts parent_account " "ON mrr.parent_account_id=parent_account.account_id " "WHERE parent_account.name<>'Amazon' AND mrr.plan_name='Enterprise' " - "ORDER BY mrr_number_of_billed_accounts DESC;" + "ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -986,7 +985,7 @@ def test_join_as_ability_single_join_as_and_non_as_extra_dims(connection): "ON mrr.parent_account_id=parent_account.account_id " "WHERE parent_account.name<>'Amazon' AND mrr.plan_name='Enterprise' " "GROUP BY parent_account.name,accounts.name,DATE_TRUNC('DAY', mrr.record_date)," - "mrr.plan_name ORDER BY mrr_number_of_billed_accounts DESC;" + "mrr.plan_name ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -1000,14 +999,12 @@ def test_join_as_ability_double_join(connection): ) correct = ( - "SELECT parent_account.name as parent_account_account_name," - "child_account.name as child_account_account_name," - "COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts " - "FROM analytics.mrr_by_customer mrr LEFT JOIN analytics.accounts parent_account " - "ON mrr.parent_account_id=parent_account.account_id " - "LEFT JOIN analytics.accounts child_account " - "ON mrr.child_account_id=child_account.account_id " - "GROUP BY parent_account.name,child_account.name ORDER BY mrr_number_of_billed_accounts DESC;" + "SELECT parent_account.name as parent_account_account_name,child_account.name as" + " child_account_account_name,COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts FROM" + " 
analytics.mrr_by_customer mrr LEFT JOIN analytics.accounts parent_account ON" + " mrr.parent_account_id=parent_account.account_id LEFT JOIN analytics.accounts child_account ON" + " mrr.child_account_id=child_account.account_id GROUP BY parent_account.name,child_account.name ORDER" + " BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -1020,7 +1017,7 @@ def test_null_filter_handling_metric_filter(connection): "SELECT COUNT(case when aa_acquired_accounts.account_id IS NULL then " "aa_acquired_accounts.account_id end) as aa_acquired_accounts_number_of_acquired_accounts_missing " "FROM analytics.accounts aa_acquired_accounts " - "ORDER BY aa_acquired_accounts_number_of_acquired_accounts_missing DESC;" + "ORDER BY aa_acquired_accounts_number_of_acquired_accounts_missing DESC NULLS LAST;" ) assert query == correct diff --git a/tests/test_join_query_raw.py b/tests/test_join_query_raw.py index 93dee66..6a3db9c 100644 --- a/tests/test_join_query_raw.py +++ b/tests/test_join_query_raw.py @@ -33,7 +33,7 @@ def test_query_join_raw_force_group_by_pretty(connection): from analytics.order_line_items order_lines group by order_lines.order_line_id, order_lines.sales_channel -order by order_lines_total_item_revenue desc;""" +order by order_lines_total_item_revenue desc nulls last;""" assert query == correct @@ -51,7 +51,7 @@ def test_query_single_join_non_base_primary_key(connection): "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM " "analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON " "order_lines.order_unique_id=orders.id GROUP BY orders.id,order_lines.sales_channel," - "orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC;" + "orders.new_vs_repeat ORDER BY order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct diff --git a/tests/test_merged_results.py b/tests/test_merged_results.py index 5a429ec..89044db 100644 --- a/tests/test_merged_results.py +++ b/tests/test_merged_results.py @@ -38,16 +38,16 @@ def test_merged_result_query_additional_metric(connection, query_type): order_date = "DATE_TRUNC('MONTH', CAST(order_lines.order_date AS TIMESTAMP))" session_date = "DATE_TRUNC('MONTH', CAST(sessions.session_date AS TIMESTAMP))" if query_type == Definitions.duck_db: - order_by = " ORDER BY order_lines_total_item_revenue DESC" - session_by = " ORDER BY sessions_number_of_sessions DESC" + order_by = " ORDER BY order_lines_total_item_revenue DESC NULLS LAST" + session_by = " ORDER BY sessions_number_of_sessions DESC NULLS LAST" else: order_by = "" session_by = "" else: order_date = "DATE_TRUNC('MONTH', order_lines.order_date)" session_date = "DATE_TRUNC('MONTH', sessions.session_date)" - order_by = " ORDER BY order_lines_total_item_revenue DESC" - session_by = " ORDER BY sessions_number_of_sessions DESC" + order_by = " ORDER BY order_lines_total_item_revenue DESC NULLS LAST" + session_by = " ORDER BY sessions_number_of_sessions DESC NULLS LAST" if Definitions.bigquery == query_type: on_statement = f"CAST({cte_1}.order_lines_order_month AS TIMESTAMP)=CAST({cte_2}.sessions_session_month AS TIMESTAMP)" # noqa @@ -113,13 +113,13 @@ def test_merged_result_query_only_metric(connection, dim): "SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines " f"GROUP BY DATE_TRUNC('MONTH', order_lines.order_date) " - "ORDER BY order_lines_total_item_revenue DESC) ," + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{cte_2} AS (" f"SELECT DATE_TRUNC('MONTH', 
sessions.session_date) as sessions_session_month," "COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions " f"GROUP BY DATE_TRUNC('MONTH', sessions.session_date) " - "ORDER BY sessions_number_of_sessions DESC) " + "ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {cte_1}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{cte_2}.sessions_number_of_sessions as sessions_number_of_sessions,{date_seq}," "order_lines_total_item_revenue / nullif(sessions_number_of_sessions, 0) as order_lines_revenue_per_session " # noqa @@ -311,11 +311,11 @@ def test_merged_result_query_only_metric_no_dim(connection, query_type): f"WITH {cte_1} AS (" "SELECT SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines " - "ORDER BY order_lines_total_item_revenue DESC) ," + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{cte_2} AS (" "SELECT COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions " - "ORDER BY sessions_number_of_sessions DESC) " + "ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {cte_1}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{cte_2}.sessions_number_of_sessions as sessions_number_of_sessions," "order_lines_total_item_revenue / nullif(sessions_number_of_sessions, 0) as order_lines_revenue_per_session " # noqa @@ -336,9 +336,9 @@ def test_merged_result_query_ambig_explore(connection): cte_1, cte_2 = "discounts_order__cte_subquery_0", "orders_order__cte_subquery_1" correct = ( f"WITH {cte_1} AS (SELECT SUM(discounts.discount_amt) as discounts_total_discount_amt " - "FROM analytics_live.discounts discounts ORDER BY discounts_total_discount_amt DESC) ," + "FROM analytics_live.discounts discounts ORDER BY discounts_total_discount_amt DESC NULLS LAST) ," f"{cte_2} AS (SELECT COUNT(orders.id) as orders_number_of_orders " - "FROM analytics.orders orders ORDER BY orders_number_of_orders DESC) " + "FROM analytics.orders orders ORDER BY orders_number_of_orders DESC NULLS LAST) " f"SELECT {cte_1}.discounts_total_discount_amt as discounts_total_discount_amt," f"{cte_2}.orders_number_of_orders as orders_number_of_orders," "discounts_total_discount_amt / nullif(orders_number_of_orders, 0) " @@ -376,14 +376,14 @@ def test_merged_result_query_only_metric_with_where(connection): "FROM analytics.order_line_items order_lines " "WHERE order_lines.order_date>='2022-01-05T00:00:00' " f"GROUP BY DATE_TRUNC('MONTH', order_lines.order_date) " - f"ORDER BY order_lines_total_item_revenue DESC) ," + f"ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{cte_2} AS (" f"SELECT DATE_TRUNC('MONTH', sessions.session_date) as sessions_session_month," "COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions " "WHERE sessions.session_date>='2022-01-05T00:00:00' " f"GROUP BY DATE_TRUNC('MONTH', sessions.session_date) " - f"ORDER BY sessions_number_of_sessions DESC) " + f"ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {cte_1}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{cte_2}.sessions_number_of_sessions as sessions_number_of_sessions,{date_seq}," "order_lines_total_item_revenue / nullif(sessions_number_of_sessions, 0) as order_lines_revenue_per_session " # noqa @@ -426,13 +426,13 @@ def test_merged_result_query_only_metric_with_having(connection): "SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines " 
f"GROUP BY DATE_TRUNC('MONTH', order_lines.order_date) " - f"ORDER BY order_lines_total_item_revenue DESC) ," + f"ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{cte_2} AS (" f"SELECT DATE_TRUNC('MONTH', sessions.session_date) as sessions_session_month," "COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions " f"GROUP BY DATE_TRUNC('MONTH', sessions.session_date) " - f"ORDER BY sessions_number_of_sessions DESC) " + f"ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {cte_1}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{cte_2}.sessions_number_of_sessions as sessions_number_of_sessions,{date_seq}," "order_lines_total_item_revenue / nullif(sessions_number_of_sessions, 0) as order_lines_revenue_per_session " # noqa @@ -459,9 +459,9 @@ def test_merged_result_query_metric_with_having_non_selected(connection): f"WITH {cte_1} AS (SELECT orders.sub_channel as orders_sub_channel,SUM(order_lines.revenue) as" " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" " analytics.orders orders ON order_lines.order_unique_id=orders.id GROUP BY orders.sub_channel ORDER" - f" BY order_lines_total_item_revenue DESC) ,{cte_2} AS (SELECT sessions.utm_source as" + f" BY order_lines_total_item_revenue DESC NULLS LAST) ,{cte_2} AS (SELECT sessions.utm_source as" " sessions_utm_source,COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions" - " sessions GROUP BY sessions.utm_source ORDER BY sessions_number_of_sessions DESC) SELECT" + " sessions GROUP BY sessions.utm_source ORDER BY sessions_number_of_sessions DESC NULLS LAST) SELECT" f" {cte_1}.order_lines_total_item_revenue as" " order_lines_total_item_revenue,sessions_session__cte_subquery_1.sessions_number_of_sessions as" f" sessions_number_of_sessions,ifnull({cte_1}.orders_sub_channel, {cte_2}.sessions_utm_source) as" @@ -492,16 +492,16 @@ def test_merged_result_query_with_non_component(connection): "order_lines_order_month,SUM(order_lines.revenue) as order_lines_total_item_revenue " "FROM analytics.order_line_items order_lines " "GROUP BY DATE_TRUNC('MONTH', order_lines.order_date) " - "ORDER BY order_lines_total_item_revenue DESC) ," + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{cte_2} AS (SELECT DATE_TRUNC('MONTH', sessions.session_date) as sessions_session_month," "COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions GROUP BY DATE_TRUNC('MONTH', sessions.session_date) " - "ORDER BY sessions_number_of_sessions DESC) ," + "ORDER BY sessions_number_of_sessions DESC NULLS LAST) ," f"{cte_3} AS (SELECT DATE_TRUNC('MONTH', orders.previous_order_date) as orders_previous_order_month," "AVG(DATEDIFF('DAY', orders.previous_order_date, orders.order_date)) as " "orders_average_days_between_orders FROM analytics.orders orders " "GROUP BY DATE_TRUNC('MONTH', orders.previous_order_date) " - "ORDER BY orders_average_days_between_orders DESC) " + "ORDER BY orders_average_days_between_orders DESC NULLS LAST) " f"SELECT {cte_1}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{cte_3}.orders_average_days_between_orders as orders_average_days_between_orders," f"{cte_2}.sessions_number_of_sessions as sessions_number_of_sessions," @@ -541,17 +541,17 @@ def test_merged_result_query_with_extra_dim(connection): "FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "GROUP BY DATE_TRUNC('MONTH', 
order_lines.order_date),orders.sub_channel " - "ORDER BY order_lines_total_item_revenue DESC) ," + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ," f"{cte_2} AS (SELECT DATE_TRUNC('MONTH', sessions.session_date) as sessions_session_month," "sessions.utm_source as sessions_utm_source,COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions GROUP BY DATE_TRUNC('MONTH', sessions.session_date)," - "sessions.utm_source ORDER BY sessions_number_of_sessions DESC) ," + "sessions.utm_source ORDER BY sessions_number_of_sessions DESC NULLS LAST) ," f"{cte_3} AS (SELECT DATE_TRUNC('MONTH', orders.previous_order_date) as orders_previous_order_month," "orders.sub_channel as orders_sub_channel," "AVG(DATEDIFF('DAY', orders.previous_order_date, orders.order_date)) as " "orders_average_days_between_orders FROM analytics.orders orders " "GROUP BY DATE_TRUNC('MONTH', orders.previous_order_date),orders.sub_channel " - "ORDER BY orders_average_days_between_orders DESC) " + "ORDER BY orders_average_days_between_orders DESC NULLS LAST) " f"SELECT {cte_1}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{cte_3}.orders_average_days_between_orders as orders_average_days_between_orders," f"{cte_2}.sessions_number_of_sessions as sessions_number_of_sessions," @@ -580,19 +580,19 @@ def test_merged_query_implicit_with_subgraph(connection): sessions_cte = "sessions_session__cte_subquery_1" correct = ( - f"WITH {orders_cte} AS (SELECT DATE_TRUNC('MONTH', orders.order_date) " - "as orders_order_month,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders " - "orders GROUP BY DATE_TRUNC('MONTH', orders.order_date) ORDER BY orders_number_of_orders DESC) ," - f"{sessions_cte} AS (SELECT DATE_TRUNC('MONTH', sessions.session_date) as " - "sessions_session_month,COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions " - "sessions GROUP BY DATE_TRUNC('MONTH', sessions.session_date) ORDER BY " - f"sessions_number_of_sessions DESC) SELECT {orders_cte}.orders_number_of_orders as " - f"orders_number_of_orders,{sessions_cte}.sessions_number_of_sessions as " - f"sessions_number_of_sessions,ifnull({orders_cte}.orders_order_month, " - f"{sessions_cte}.sessions_session_month) as orders_order_month," - f"ifnull({sessions_cte}.sessions_session_month, {orders_cte}.orders_order_month) " - f"as sessions_session_month FROM {orders_cte} FULL OUTER JOIN {sessions_cte} " - f"ON {orders_cte}.orders_order_month={sessions_cte}.sessions_session_month;" + f"WITH {orders_cte} AS (SELECT DATE_TRUNC('MONTH', orders.order_date) as" + " orders_order_month,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders GROUP" + " BY DATE_TRUNC('MONTH', orders.order_date) ORDER BY orders_number_of_orders DESC NULLS LAST)" + f" ,{sessions_cte} AS (SELECT DATE_TRUNC('MONTH', sessions.session_date) as" + " sessions_session_month,COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions" + " sessions GROUP BY DATE_TRUNC('MONTH', sessions.session_date) ORDER BY sessions_number_of_sessions" + f" DESC NULLS LAST) SELECT {orders_cte}.orders_number_of_orders as" + f" orders_number_of_orders,{sessions_cte}.sessions_number_of_sessions as" + f" sessions_number_of_sessions,ifnull({orders_cte}.orders_order_month," + f" {sessions_cte}.sessions_session_month) as" + f" orders_order_month,ifnull({sessions_cte}.sessions_session_month, {orders_cte}.orders_order_month)" + f" as sessions_session_month FROM {orders_cte} FULL OUTER JOIN {sessions_cte} ON" + f" 
{orders_cte}.orders_order_month={sessions_cte}.sessions_session_month;" ) assert query == correct @@ -624,13 +624,13 @@ def test_merged_query_implicit_with_subgraph_and_mapping(connection): "COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders " "WHERE orders.order_date>='2022-01-05T00:00:00' " "GROUP BY DATE_TRUNC('MONTH', orders.order_date),orders.sub_channel,orders.campaign " - f"ORDER BY orders_number_of_orders DESC) ,{sessions_cte} AS (" + f"ORDER BY orders_number_of_orders DESC NULLS LAST) ,{sessions_cte} AS (" "SELECT DATE_TRUNC('MONTH', sessions.session_date) as sessions_session_month," "sessions.utm_source as sessions_utm_source,sessions.utm_campaign as " "sessions_utm_campaign,COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions WHERE sessions.session_date>='2022-01-05T00:00:00' " "GROUP BY DATE_TRUNC('MONTH', sessions.session_date)" - ",sessions.utm_source,sessions.utm_campaign ORDER BY sessions_number_of_sessions DESC) " + ",sessions.utm_source,sessions.utm_campaign ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {orders_cte}.orders_number_of_orders as orders_number_of_orders," f"{sessions_cte}.sessions_number_of_sessions as sessions_number_of_sessions," f"ifnull({orders_cte}.orders_order_month, {sessions_cte}.sessions_session_month) as orders_order_month," # noqa @@ -665,11 +665,11 @@ def test_merged_query_dimension_mapping_single_metric(connection): f"DATE_TRUNC('DAY', orders.order_date) as orders_order_date,orders.campaign as orders_campaign," f"COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders GROUP BY " f"orders.sub_channel,DATE_TRUNC('DAY', orders.order_date),orders.campaign ORDER BY " - f"orders_number_of_orders DESC) ,{sessions_cte} AS (SELECT sessions.utm_source " + f"orders_number_of_orders DESC NULLS LAST) ,{sessions_cte} AS (SELECT sessions.utm_source " f"as sessions_utm_source,DATE_TRUNC('DAY', sessions.session_date) as sessions_session_date," f"sessions.utm_campaign as sessions_utm_campaign FROM analytics.sessions sessions GROUP BY " f"sessions.utm_source,DATE_TRUNC('DAY', sessions.session_date),sessions.utm_campaign ORDER BY " - f"sessions_utm_source ASC) SELECT {orders_cte}.orders_number_of_orders as " + f"sessions_utm_source ASC NULLS LAST) SELECT {orders_cte}.orders_number_of_orders as " f"orders_number_of_orders,{orders_source} as orders_sub_channel," f"ifnull({orders_cte}.orders_order_date, {sessions_cte}.sessions_session_date) as orders_order_date," f"{orders_campaign} as orders_campaign,{sessions_source} as " @@ -708,11 +708,11 @@ def test_merged_query_dimension_mapping_no_metric(connection): f"WITH {orders_cte} AS (SELECT orders.campaign as orders_campaign," "DATE_TRUNC('DAY', orders.order_date) as orders_order_date FROM analytics.orders " "orders WHERE orders.order_date>='2022-01-05T00:00:00' GROUP BY orders.campaign," - f"DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_campaign ASC) ,{sessions_cte} " + f"DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_campaign ASC NULLS LAST) ,{sessions_cte} " "AS (SELECT sessions.utm_campaign as sessions_utm_campaign,DATE_TRUNC('DAY', " "sessions.session_date) as sessions_session_date FROM analytics.sessions sessions " "WHERE sessions.session_date>='2022-01-05T00:00:00' GROUP BY sessions.utm_campaign," - "DATE_TRUNC('DAY', sessions.session_date) ORDER BY sessions_utm_campaign ASC) " + "DATE_TRUNC('DAY', sessions.session_date) ORDER BY sessions_utm_campaign ASC NULLS LAST) " f"SELECT {orders_campaign} as 
orders_campaign," f"{orders_date} as orders_order_date,{sessions_campaign} " f"as sessions_utm_campaign,{sessions_date} as " @@ -738,10 +738,10 @@ def test_merged_query_implicit_no_time(connection): f"WITH {orders_cte} AS (SELECT orders.campaign as orders_campaign," f"COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders " f"WHERE orders.sub_channel='Iterable' GROUP BY orders.campaign " - f"ORDER BY orders_number_of_orders DESC) ,{sessions_cte} AS (" + f"ORDER BY orders_number_of_orders DESC NULLS LAST) ,{sessions_cte} AS (" f"SELECT sessions.utm_campaign as sessions_utm_campaign " f"FROM analytics.sessions sessions WHERE sessions.utm_source='Iterable' " - f"GROUP BY sessions.utm_campaign ORDER BY sessions_utm_campaign ASC) " + f"GROUP BY sessions.utm_campaign ORDER BY sessions_utm_campaign ASC NULLS LAST) " f"SELECT {orders_cte}.orders_number_of_orders as orders_number_of_orders," f"ifnull({orders_cte}.orders_campaign, {sessions_cte}.sessions_utm_campaign) as orders_campaign," f"ifnull({sessions_cte}.sessions_utm_campaign, {orders_cte}.orders_campaign) as sessions_utm_campaign " # noqa @@ -764,11 +764,11 @@ def test_merged_query_implicit_with_join(connection): f"WITH {orders_cte} AS (SELECT customers.gender as customers_gender," "COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders " "LEFT JOIN analytics.customers customers ON orders.customer_id=customers.customer_id " - "GROUP BY customers.gender ORDER BY orders_number_of_orders DESC) ," + "GROUP BY customers.gender ORDER BY orders_number_of_orders DESC NULLS LAST) ," f"{sessions_cte} AS (SELECT customers.gender as customers_gender," "COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions sessions " "LEFT JOIN analytics.customers customers ON sessions.customer_id=customers.customer_id " - "GROUP BY customers.gender ORDER BY sessions_number_of_sessions DESC) " + "GROUP BY customers.gender ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT {orders_cte}.orders_number_of_orders as orders_number_of_orders," f"{sessions_cte}.sessions_number_of_sessions as sessions_number_of_sessions," f"ifnull({orders_cte}.customers_gender, " @@ -795,11 +795,11 @@ def test_merged_query_implicit_with_extra_dim_only(connection): f"WITH {orders_cte} AS (SELECT DATE_TRUNC('DAY', orders.order_date) as " "orders_order_date,orders.sub_channel as orders_sub_channel,COUNT(orders.id) as " "orders_number_of_orders FROM analytics.orders orders GROUP BY DATE_TRUNC('DAY', " - "orders.order_date),orders.sub_channel ORDER BY orders_number_of_orders DESC) ," + "orders.order_date),orders.sub_channel ORDER BY orders_number_of_orders DESC NULLS LAST) ," f"{sessions_cte} AS (SELECT DATE_TRUNC('DAY', sessions.session_date) as " "sessions_session_date,sessions.utm_source as sessions_utm_source FROM analytics.sessions " "sessions GROUP BY DATE_TRUNC('DAY', sessions.session_date),sessions.utm_source ORDER BY " - f"sessions_session_date ASC) SELECT {orders_cte}.orders_number_of_orders as " + f"sessions_session_date ASC NULLS LAST) SELECT {orders_cte}.orders_number_of_orders as " f"orders_number_of_orders,{orders_date} as orders_order_date," f"{orders_source} as orders_sub_channel," f"{sessions_date} as sessions_session_date," @@ -834,14 +834,14 @@ def test_merged_query_implicit_3_way_merge(connection): f"WITH {orders_cte} AS (SELECT DATE_TRUNC('DAY', orders.order_date) as " "orders_order_date,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders " "WHERE orders.order_date>='2022-01-05T00:00:00' 
GROUP BY DATE_TRUNC('DAY', orders.order_date)" - f" ORDER BY orders_number_of_orders DESC) ,{sessions_cte} AS (SELECT " + f" ORDER BY orders_number_of_orders DESC NULLS LAST) ,{sessions_cte} AS (SELECT " "DATE_TRUNC('DAY', sessions.session_date) as sessions_session_date,COUNT(sessions.id) as " "sessions_number_of_sessions FROM analytics.sessions sessions WHERE sessions.session_date>=" "'2022-01-05T00:00:00' GROUP BY DATE_TRUNC('DAY', sessions.session_date) ORDER BY " - f"sessions_number_of_sessions DESC) ,{events_cte} AS (SELECT DATE_TRUNC('DAY', " + f"sessions_number_of_sessions DESC NULLS LAST) ,{events_cte} AS (SELECT DATE_TRUNC('DAY', " "events.event_date) as events_event_date,COUNT(DISTINCT(events.id)) as events_number_of_events " "FROM analytics.events events WHERE events.event_date>='2022-01-05T00:00:00' GROUP BY " - "DATE_TRUNC('DAY', events.event_date) ORDER BY events_number_of_events DESC) SELECT " + "DATE_TRUNC('DAY', events.event_date) ORDER BY events_number_of_events DESC NULLS LAST) SELECT " f"{events_cte}.events_number_of_events as events_number_of_events," f"{orders_cte}.orders_number_of_orders as orders_number_of_orders," f"{sessions_cte}.sessions_number_of_sessions as sessions_number_of_sessions," @@ -873,11 +873,12 @@ def test_merged_query_merged_results_as_sub_reference(connection): f"then order_lines.order_id end) as order_lines_number_of_email_purchased_items " f"FROM analytics.order_line_items order_lines LEFT JOIN " f"analytics.orders orders ON order_lines.order_unique_id=orders.id GROUP BY " - f"DATE_TRUNC('MONTH', order_lines.order_date) ORDER BY order_lines_total_item_revenue DESC) ," + f"DATE_TRUNC('MONTH', order_lines.order_date) ORDER BY order_lines_total_item_revenue DESC NULLS" + f" LAST) ," f"{sessions_cte} AS (SELECT DATE_TRUNC('MONTH', sessions.session_date) as " f"sessions_session_month,COUNT(sessions.id) as sessions_number_of_sessions " f"FROM analytics.sessions sessions GROUP BY DATE_TRUNC('MONTH', sessions.session_date) " - f"ORDER BY sessions_number_of_sessions DESC) " + f"ORDER BY sessions_number_of_sessions DESC NULLS LAST) " f"SELECT " f"{order_lines_cte}.order_lines_total_item_revenue as order_lines_total_item_revenue," f"{order_lines_cte}.order_lines_total_item_costs as order_lines_total_item_costs," @@ -916,13 +917,13 @@ def test_merged_query_merged_results_joined_filter(connection): "as orders_order_date,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders " "orders LEFT JOIN analytics.customers customers ON orders.customer_id=customers.customer_id " "WHERE customers.region IN ('West','South') AND orders.sub_channel='google' " - "GROUP BY DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders DESC) ," + "GROUP BY DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders DESC NULLS LAST) ," f"{sessions_cte} AS (SELECT DATE_TRUNC('DAY', sessions.session_date) as " "sessions_session_date,COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions LEFT JOIN analytics.customers customers ON " "sessions.customer_id=customers.customer_id WHERE customers.region IN ('West','South') " "AND sessions.utm_source='google' GROUP BY DATE_TRUNC('DAY', sessions.session_date) " - f"ORDER BY sessions_number_of_sessions DESC) SELECT {orders_cte}." + f"ORDER BY sessions_number_of_sessions DESC NULLS LAST) SELECT {orders_cte}." f"orders_number_of_orders as orders_number_of_orders,{sessions_cte}." 
"sessions_number_of_sessions as sessions_number_of_sessions," f"ifnull({orders_cte}.orders_order_date, {sessions_cte}.sessions_session_date) " @@ -949,12 +950,12 @@ def test_merged_query_merged_results_3_way_third_date_only(connection): f"WITH {orders_cte} AS (SELECT DATE_TRUNC('DAY', orders.order_date) as " "orders_order_date,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders " "orders GROUP BY DATE_TRUNC('DAY', orders.order_date) ORDER BY orders_number_of_orders " - f"DESC) ,{sessions_cte} AS (SELECT DATE_TRUNC('DAY', sessions.session_date) " + f"DESC NULLS LAST) ,{sessions_cte} AS (SELECT DATE_TRUNC('DAY', sessions.session_date) " "as sessions_session_date,COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions GROUP BY DATE_TRUNC('DAY', sessions.session_date) " - f"ORDER BY sessions_number_of_sessions DESC) ,{events_cte} AS (" + f"ORDER BY sessions_number_of_sessions DESC NULLS LAST) ,{events_cte} AS (" "SELECT DATE_TRUNC('DAY', events.event_date) as events_event_date FROM analytics.events " - "events GROUP BY DATE_TRUNC('DAY', events.event_date) ORDER BY events_event_date ASC) " + "events GROUP BY DATE_TRUNC('DAY', events.event_date) ORDER BY events_event_date ASC NULLS LAST) " f"SELECT {orders_cte}.orders_number_of_orders as orders_number_of_orders," f"{sessions_cte}.sessions_number_of_sessions as sessions_number_of_sessions," f"{events_date} as events_event_date,{orders_date} as orders_order_date," @@ -1101,17 +1102,17 @@ def test_4_way_merge_with_joinable_canon_date(connection): f"WITH {orders_cte} AS (SELECT DATE_TRUNC('MONTH', orders.order_date) as " f"orders_order_month,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders " f"orders GROUP BY DATE_TRUNC('MONTH', orders.order_date) ORDER BY orders_number_of_orders " - f"DESC) ,{customers_cte} AS (SELECT DATE_TRUNC('MONTH', " + f"DESC NULLS LAST) ,{customers_cte} AS (SELECT DATE_TRUNC('MONTH', " f"customers.first_order_date) as customers_first_order_month,COUNT(customers.customer_id) " f"as customers_number_of_customers FROM analytics.customers customers GROUP BY DATE_TRUNC('MONTH', " - f"customers.first_order_date) ORDER BY customers_number_of_customers DESC) ," + f"customers.first_order_date) ORDER BY customers_number_of_customers DESC NULLS LAST) ," f"{order_lines_cte} AS (SELECT DATE_TRUNC('MONTH', order_lines.order_date) " f"as order_lines_order_month,SUM(order_lines.revenue) as order_lines_total_item_revenue " f"FROM analytics.order_line_items order_lines GROUP BY DATE_TRUNC('MONTH', order_lines.order_date) " - f"ORDER BY order_lines_total_item_revenue DESC) ,{events_cte} AS (SELECT " + f"ORDER BY order_lines_total_item_revenue DESC NULLS LAST) ,{events_cte} AS (SELECT " f"DATE_TRUNC('MONTH', events.event_date) as events_event_month,COUNT(DISTINCT(events.id)) " f"as events_number_of_events FROM analytics.events events GROUP BY DATE_TRUNC('MONTH', " - f"events.event_date) ORDER BY events_number_of_events DESC) SELECT " + f"events.event_date) ORDER BY events_number_of_events DESC NULLS LAST) SELECT " f"{customers_cte}.customers_number_of_customers as " f"customers_number_of_customers,{events_cte}.events_number_of_events as " f"events_number_of_events,{order_lines_cte}.order_lines_total_item_revenue " @@ -1148,11 +1149,11 @@ def test_query_merge_results_order_issue(connection): "customers.first_order_date) as customers_first_order_month,COUNT(customers.customer_id) as " "customers_number_of_customers FROM analytics.customers customers WHERE DATE_TRUNC('DAY', " 
"customers.first_order_date)>'2022-04-03' GROUP BY DATE_TRUNC('MONTH', customers.first_order_date) " - f"ORDER BY customers_number_of_customers DESC) ,{orders_cte} AS (" + f"ORDER BY customers_number_of_customers DESC NULLS LAST) ,{orders_cte} AS (" "SELECT DATE_TRUNC('MONTH', orders.order_date) as orders_order_month,COUNT(orders.id) as " "orders_number_of_orders FROM analytics.orders orders " "WHERE DATE_TRUNC('DAY', orders.order_date)>'2022-04-03' " - "GROUP BY DATE_TRUNC('MONTH', orders.order_date) ORDER BY orders_number_of_orders DESC) " + "GROUP BY DATE_TRUNC('MONTH', orders.order_date) ORDER BY orders_number_of_orders DESC NULLS LAST) " f"SELECT {customers_cte}.customers_number_of_customers " f"as customers_number_of_customers,{orders_cte}.orders_number_of_orders as " f"orders_number_of_orders,ifnull({customers_cte}.customers_first_order_month, " @@ -1204,13 +1205,14 @@ def test_query_default_date_from_join(connection): "orders_order_date,COUNT(orders.id) as orders_number_of_orders FROM analytics.orders orders " "WHERE DATE_TRUNC('DAY', orders.order_date)>='2023-03-29T00:00:00' AND DATE_TRUNC('DAY', " "orders.order_date)<='2023-06-26T23:59:59' GROUP BY DATE_TRUNC('DAY', orders.order_date) " - "ORDER BY orders_number_of_orders DESC) ,events_event__cte_subquery_0 AS (SELECT DATE_TRUNC('DAY', " + "ORDER BY orders_number_of_orders DESC NULLS LAST) ,events_event__cte_subquery_0 AS (SELECT" + " DATE_TRUNC('DAY', " "events.event_date) as events_event_date,COUNT(DISTINCT(login_events.id)) as " "login_events_number_of_login_events FROM analytics.login_events login_events " "LEFT JOIN analytics.events events ON login_events.id=events.id WHERE DATE_TRUNC('DAY', " "events.event_date)>='2023-03-29T00:00:00' AND DATE_TRUNC('DAY', events.event_date)" "<='2023-06-26T23:59:59' GROUP BY DATE_TRUNC('DAY', events.event_date) ORDER BY " - "login_events_number_of_login_events DESC) SELECT events_event__cte_subquery_0" + "login_events_number_of_login_events DESC NULLS LAST) SELECT events_event__cte_subquery_0" ".login_events_number_of_login_events as login_events_number_of_login_events," "orders_order__cte_subquery_1.orders_number_of_orders as orders_number_of_orders," "ifnull(events_event__cte_subquery_0.events_event_date, orders_order__cte_subquery_1.orders_order_date) as events_event_date," # noqa @@ -1230,12 +1232,12 @@ def test_query_mapping_with_a_join(connection): correct = ( "WITH sessions_session__cte_subquery_1 AS (SELECT sessions.session_device as sessions_session_device," "COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions sessions " - "GROUP BY sessions.session_device ORDER BY sessions_number_of_sessions DESC) ," + "GROUP BY sessions.session_device ORDER BY sessions_number_of_sessions DESC NULLS LAST) ," "events_event__cte_subquery_0 AS (SELECT events.device as events_device," "COUNT(DISTINCT(login_events.id)) as login_events_number_of_login_events " "FROM analytics.login_events login_events LEFT JOIN analytics.events events " "ON login_events.id=events.id GROUP BY events.device ORDER BY login_events_number_of_login_events " - "DESC) SELECT events_event__cte_subquery_0.login_events_number_of_login_events as " + "DESC NULLS LAST) SELECT events_event__cte_subquery_0.login_events_number_of_login_events as " "login_events_number_of_login_events,sessions_session__cte_subquery_1.sessions_number_of_sessions " "as sessions_number_of_sessions,ifnull(events_event__cte_subquery_0.events_device, " "sessions_session__cte_subquery_1.sessions_session_device) " @@ -1257,12 +1259,12 @@ 
def test_query_mapping_with_a_join_inverted_mapping(connection): correct = ( "WITH sessions_session__cte_subquery_1 AS (SELECT sessions.session_device as sessions_session_device," "COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions sessions " - "GROUP BY sessions.session_device ORDER BY sessions_number_of_sessions DESC) ," + "GROUP BY sessions.session_device ORDER BY sessions_number_of_sessions DESC NULLS LAST) ," "events_event__cte_subquery_0 AS (SELECT events.device as login_events_device," "COUNT(DISTINCT(login_events.id)) as login_events_number_of_login_events " "FROM analytics.login_events login_events LEFT JOIN analytics.events events " "ON login_events.id=events.id GROUP BY events.device ORDER BY " - "login_events_number_of_login_events DESC) SELECT events_event__cte_subquery_0" + "login_events_number_of_login_events DESC NULLS LAST) SELECT events_event__cte_subquery_0" ".login_events_number_of_login_events as login_events_number_of_login_events," "sessions_session__cte_subquery_1.sessions_number_of_sessions as sessions_number_of_sessions," "ifnull(events_event__cte_subquery_0.login_events_device, sessions_session__cte_subquery_1.sessions_session_device) as login_events_device," # noqa @@ -1292,12 +1294,12 @@ def test_query_mapping_with_a_join_and_date(connection): "WITH sessions_session__cte_subquery_1 AS (SELECT sessions.session_device as sessions_session_device," "COUNT(sessions.id) as sessions_number_of_sessions FROM analytics.sessions sessions " "WHERE DATE_TRUNC('DAY', sessions.session_date)<='2023-06-26T23:59:59' " - "GROUP BY sessions.session_device ORDER BY sessions_number_of_sessions DESC) ," + "GROUP BY sessions.session_device ORDER BY sessions_number_of_sessions DESC NULLS LAST) ," "events_event__cte_subquery_0 AS (SELECT events.device as events_device," "COUNT(DISTINCT(login_events.id)) as login_events_number_of_login_events FROM analytics.login_events " "login_events LEFT JOIN analytics.events events ON login_events.id=events.id " "WHERE DATE_TRUNC('DAY', events.event_date)<='2023-06-26T23:59:59' GROUP BY events.device " - "ORDER BY login_events_number_of_login_events DESC) SELECT events_event__cte_subquery_0" + "ORDER BY login_events_number_of_login_events DESC NULLS LAST) SELECT events_event__cte_subquery_0" ".login_events_number_of_login_events as login_events_number_of_login_events," "sessions_session__cte_subquery_1.sessions_number_of_sessions as sessions_number_of_sessions," "ifnull(events_event__cte_subquery_0.events_device, sessions_session__cte_subquery_1.sessions_session_device) as events_device," # noqa @@ -1334,14 +1336,14 @@ def test_query_subquery_with_substring_in_name(connection): "z_customer_accounts WHERE DATE_TRUNC('DAY', z_customer_accounts.created_at)" "<='2023-06-26T23:59:59' GROUP BY DATE_TRUNC('MONTH', z_customer_accounts.created_at)," "z_customer_accounts.account_type ORDER BY z_customer_accounts_number_of_account_" - f"customer_connections DESC) ,{cte_2} AS (SELECT DATE_TRUNC('MONTH', " + f"customer_connections DESC NULLS LAST) ,{cte_2} AS (SELECT DATE_TRUNC('MONTH', " "aa_acquired_accounts.created_at) as aa_acquired_accounts_created_month," "aa_acquired_accounts.type as aa_acquired_accounts_account_type," "COUNT(aa_acquired_accounts.account_id) as aa_acquired_accounts_number_of_acquired_accounts " "FROM analytics.accounts aa_acquired_accounts WHERE DATE_TRUNC('DAY', " "aa_acquired_accounts.created_at)<='2023-06-26T23:59:59' GROUP BY DATE_TRUNC('MONTH', " "aa_acquired_accounts.created_at),aa_acquired_accounts.type ORDER 
BY " - "aa_acquired_accounts_number_of_acquired_accounts DESC) SELECT " + "aa_acquired_accounts_number_of_acquired_accounts DESC NULLS LAST) SELECT " f"{cte_2}.aa_acquired_accounts_number_of_acquired_accounts as " f"aa_acquired_accounts_number_of_acquired_accounts,{cte_1}." "z_customer_accounts_number_of_account_customer_connections as z_customer_accounts_" @@ -1382,13 +1384,13 @@ def test_query_number_metric_with_non_matching_canon_dates(connection): "FROM analytics.submitted_form submitted_form WHERE DATE_TRUNC('DAY', " "submitted_form.sent_at)<='2023-06-26T23:59:59' " "GROUP BY DATE_TRUNC('DAY', submitted_form.sent_at) " - "ORDER BY submitted_form_unique_users_form_submissions DESC) ," + "ORDER BY submitted_form_unique_users_form_submissions DESC NULLS LAST) ," f"{cte_2} AS (SELECT DATE_TRUNC('DAY', " "submitted_form.session_date) as submitted_form_session_date," "COUNT(submitted_form.id) as submitted_form_number_of_form_submissions " "FROM analytics.submitted_form submitted_form WHERE DATE_TRUNC('DAY', " "submitted_form.session_date)<='2023-06-26T23:59:59' GROUP BY DATE_TRUNC('DAY', " - "submitted_form.session_date) ORDER BY submitted_form_number_of_form_submissions DESC) " + "submitted_form.session_date) ORDER BY submitted_form_number_of_form_submissions DESC NULLS LAST) " f"SELECT {cte_1}.submitted_form_unique_users_form_submissions " f"as submitted_form_unique_users_form_submissions,{cte_2}." "submitted_form_number_of_form_submissions as submitted_form_number_of_form_submissions," @@ -1433,7 +1435,7 @@ def test_query_merge_results_no_metric_date(connection): "FROM analytics.orders orders LEFT JOIN analytics.customers customers " "ON orders.customer_id=customers.customer_id WHERE DATE_TRUNC('DAY', orders.order_date)>'2023-02-01' " "GROUP BY DATE_TRUNC('DAY', orders.order_date),customers.customer_id,orders.id " - "ORDER BY orders_order_date ASC;" + "ORDER BY orders_order_date ASC NULLS LAST;" ) assert query == correct @@ -1448,10 +1450,10 @@ def test_query_mapping_triple(connection): "WITH sessions_session__cte_subquery_1 AS (SELECT sessions.session_device as " "sessions_session_device,COUNT(sessions.id) as sessions_number_of_sessions " "FROM analytics.sessions sessions GROUP BY sessions.session_device ORDER BY " - "sessions_number_of_sessions DESC) ,events_event__cte_subquery_0 AS (" + "sessions_number_of_sessions DESC NULLS LAST) ,events_event__cte_subquery_0 AS (" "SELECT events.device as events_device,COUNT(DISTINCT(events.id)) as " "events_number_of_events FROM analytics.events events GROUP BY events.device " - "ORDER BY events_number_of_events DESC) SELECT events_event__cte_subquery_0." + "ORDER BY events_number_of_events DESC NULLS LAST) SELECT events_event__cte_subquery_0." "events_number_of_events as events_number_of_events,sessions_session__cte_subquery_1" ".sessions_number_of_sessions as sessions_number_of_sessions,ifnull(" "events_event__cte_subquery_0.events_device, sessions_session__cte_subquery_1." 
diff --git a/tests/test_mql_parse.py b/tests/test_mql_parse.py index 35b1ace..73eb78c 100644 --- a/tests/test_mql_parse.py +++ b/tests/test_mql_parse.py @@ -13,7 +13,7 @@ def test_query_no_join_mql(connection): "SELECT * FROM (SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) " "as order_lines_total_item_revenue FROM " "analytics.order_line_items order_lines GROUP BY order_lines.sales_channel " - "ORDER BY order_lines_total_item_revenue DESC);" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST);" ) assert query == correct @@ -52,7 +52,7 @@ def test_query_single_join_mql(connection): "order_lines_total_item_revenue FROM analytics.order_line_items " "order_lines LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "GROUP BY order_lines.sales_channel,orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC) as rev_group;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) as rev_group;" ) assert query == correct @@ -71,7 +71,7 @@ def test_query_multiple_join_mql(connection): "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " "GROUP BY customers.region,orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC) as rev_group;" + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) as rev_group;" ) assert query == correct @@ -79,20 +79,23 @@ def test_query_multiple_join_mql(connection): @pytest.mark.query def test_query_multiple_join_all_mql(connection): query = connection.get_sql_query( - sql="SELECT * FROM MQL(total_item_revenue BY region, new_vs_repeat WHERE ${customers.region} != 'West' AND ${orders.new_vs_repeat} <> 'New' HAVING ${total_item_revenue} > -12 AND ${total_item_revenue} < 122 ORDER BY total_item_revenue ASC, new_vs_repeat) as rev_group", # noqa + sql=( # noqa + "SELECT * FROM MQL(total_item_revenue BY region, new_vs_repeat WHERE ${customers.region} !=" + " 'West' AND ${orders.new_vs_repeat} <> 'New' HAVING ${total_item_revenue} > -12 AND" + " ${total_item_revenue} < 122 ORDER BY total_item_revenue ASC NULLS LAST, new_vs_repeat) as" + " rev_group" + ), ) correct = ( - "SELECT * FROM (SELECT customers.region as customers_region," - "orders.new_vs_repeat as orders_new_vs_repeat," - "SUM(order_lines.revenue) as order_lines_total_item_revenue " - "FROM analytics.order_line_items order_lines " - "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "LEFT JOIN analytics.customers customers ON order_lines.customer_id=customers.customer_id " - "WHERE customers.region != 'West' AND orders.new_vs_repeat <>" - " 'New' GROUP BY customers.region,orders.new_vs_repeat HAVING (SUM(order_lines.revenue)) > -12 AND " - "(SUM(order_lines.revenue)) < 122 ORDER BY order_lines_total_item_revenue ASC,orders_new_vs_repeat" - " ASC) as rev_group;" + "SELECT * FROM (SELECT customers.region as customers_region,orders.new_vs_repeat as" + " orders_new_vs_repeat,SUM(order_lines.revenue) as order_lines_total_item_revenue FROM" + " analytics.order_line_items order_lines LEFT JOIN analytics.orders orders ON" + " order_lines.order_unique_id=orders.id LEFT JOIN analytics.customers customers ON" + " order_lines.customer_id=customers.customer_id WHERE customers.region != 'West' AND" + " orders.new_vs_repeat <> 'New' GROUP BY customers.region,orders.new_vs_repeat HAVING" + " (SUM(order_lines.revenue)) > -12 AND (SUM(order_lines.revenue)) < 122 ORDER BY" + " 
order_lines_total_item_revenue ASC NULLS LAST,orders_new_vs_repeat ASC NULLS LAST) as rev_group;" ) assert query == correct @@ -100,7 +103,12 @@ def test_query_multiple_join_all_mql(connection): @pytest.mark.query def test_query_mql_sequence(connection): query = connection.get_sql_query( - sql="SELECT * FROM MQL(number_of_orders, total_item_revenue FOR orders FUNNEL ${order_lines.channel} = 'Paid' THEN ${order_lines.channel} = 'Organic' THEN ${order_lines.channel} = 'Paid' or ${customers.region} = 'West' WITHIN 3 days WHERE ${customers.region} != 'West') as sequence_group", # noqa + sql=( # noqa + "SELECT * FROM MQL(number_of_orders, total_item_revenue FOR orders FUNNEL ${order_lines.channel}" + " = 'Paid' THEN ${order_lines.channel} = 'Organic' THEN ${order_lines.channel} = 'Paid' or" + " ${customers.region} = 'West' WITHIN 3 days WHERE ${customers.region} != 'West') as" + " sequence_group" + ), ) revenue_calc = ( @@ -162,7 +170,7 @@ def test_query_mql_as_subset(connection): "FROM analytics.order_line_items " "order_lines LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " "GROUP BY order_lines.sales_channel,orders.new_vs_repeat " - "ORDER BY order_lines_total_item_revenue DESC) as rev_group LEFT JOIN " + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) as rev_group LEFT JOIN " "analytics.channeldata channelinfo on rev_group.channel = channelinfo.channel;" ) assert query == correct @@ -181,7 +189,7 @@ def test_query_mql_mapping_query(connection): "SELECT * FROM (SELECT orders.sub_channel as orders_sub_channel,SUM(order_lines.revenue) " "as order_lines_total_item_revenue FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "GROUP BY orders.sub_channel ORDER BY order_lines_total_item_revenue DESC);" + "GROUP BY orders.sub_channel ORDER BY order_lines_total_item_revenue DESC NULLS LAST);" ) query = connection.get_sql_query(sql="SELECT * FROM MQL(total_item_revenue by source)") assert query == correct diff --git a/tests/test_non_additive_dimensions.py b/tests/test_non_additive_dimensions.py index 2a9f76d..4d66c68 100644 --- a/tests/test_non_additive_dimensions.py +++ b/tests/test_non_additive_dimensions.py @@ -11,10 +11,11 @@ def test_mrr_non_additive_dimension_no_group_by_max(connection, metric_suffix): func = "MAX" if metric_suffix == "end_of_month" else "MIN" correct = ( f"WITH cte_mrr_{metric_suffix}_record_raw AS (SELECT {func}(mrr.record_date) as mrr_{func.lower()}_record_raw " # noqa - f"FROM analytics.mrr_by_customer mrr ORDER BY mrr_{func.lower()}_record_raw DESC) " + f"FROM analytics.mrr_by_customer mrr ORDER BY mrr_{func.lower()}_record_raw DESC NULLS LAST) " f"SELECT SUM(case when mrr.record_date=cte_mrr_{metric_suffix}_record_raw.mrr_{func.lower()}_record_raw " # noqa f"then mrr.mrr else 0 end) as mrr_mrr_{metric_suffix} FROM analytics.mrr_by_customer mrr " - f"LEFT JOIN cte_mrr_{metric_suffix}_record_raw ON 1=1 ORDER BY mrr_mrr_{metric_suffix} DESC;" + f"LEFT JOIN cte_mrr_{metric_suffix}_record_raw ON 1=1 ORDER BY mrr_mrr_{metric_suffix} DESC NULLS" + " LAST;" ) assert query == correct @@ -30,11 +31,11 @@ def test_mrr_non_additive_dimension_no_group_by_counts(connection, metric_suffix correct = ( f"WITH cte_accounts_{metric_suffix}_record_raw AS (SELECT {func}(mrr.record_date) as" f" mrr_{func.lower()}_record_raw FROM analytics.mrr_by_customer mrr ORDER BY" - f" mrr_{func.lower()}_record_raw DESC) SELECT {agg}case when" + f" mrr_{func.lower()}_record_raw DESC NULLS LAST) 
SELECT {agg}case when" f" mrr.record_date=cte_accounts_{metric_suffix}_record_raw.mrr_{func.lower()}_record_raw then" f" mrr.parent_account_id end{close} as mrr_accounts_{metric_suffix} FROM analytics.mrr_by_customer" f" mrr LEFT JOIN cte_accounts_{metric_suffix}_record_raw ON 1=1 ORDER BY" - f" mrr_accounts_{metric_suffix} DESC;" + f" mrr_accounts_{metric_suffix} DESC NULLS LAST;" ) assert query == correct @@ -48,11 +49,11 @@ def test_mrr_non_additive_dimension_no_group_by_multi_cte(connection): correct = ( "WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as" " mrr_account_id,MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date FROM" - " analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC)" + " analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC NULLS LAST)" " ,cte_mrr_end_of_month_record_raw AS (SELECT MAX(mrr.record_date) as mrr_max_record_raw FROM" - " analytics.mrr_by_customer mrr ORDER BY mrr_max_record_raw DESC)" + " analytics.mrr_by_customer mrr ORDER BY mrr_max_record_raw DESC NULLS LAST)" " ,cte_mrr_beginning_of_month_record_raw AS (SELECT MIN(mrr.record_date) as mrr_min_record_raw FROM" - " analytics.mrr_by_customer mrr ORDER BY mrr_min_record_raw DESC) SELECT SUM(case when" + " analytics.mrr_by_customer mrr ORDER BY mrr_min_record_raw DESC NULLS LAST) SELECT SUM(case when" " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end) as" " mrr_mrr_end_of_month,SUM(case when" " mrr.record_date=cte_mrr_beginning_of_month_record_raw.mrr_min_record_raw then mrr.mrr else 0 end)" @@ -63,7 +64,7 @@ def test_mrr_non_additive_dimension_no_group_by_multi_cte(connection): " cte_mrr_end_of_month_by_account_record_date ON" " mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id LEFT JOIN" " cte_mrr_end_of_month_record_raw ON 1=1 LEFT JOIN cte_mrr_beginning_of_month_record_raw ON 1=1 ORDER" - " BY mrr_mrr_end_of_month DESC;" + " BY mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -77,9 +78,9 @@ def test_mrr_non_additive_dimension_no_group_by_composed_with_duplicate_cte(conn correct = ( "WITH cte_mrr_end_of_month_record_raw AS (SELECT MAX(mrr.record_date) as mrr_max_record_raw FROM" - " analytics.mrr_by_customer mrr ORDER BY mrr_max_record_raw DESC)" + " analytics.mrr_by_customer mrr ORDER BY mrr_max_record_raw DESC NULLS LAST)" " ,cte_mrr_beginning_of_month_record_raw AS (SELECT MIN(mrr.record_date) as mrr_min_record_raw FROM" - " analytics.mrr_by_customer mrr ORDER BY mrr_min_record_raw DESC) SELECT ((SUM(case when" + " analytics.mrr_by_customer mrr ORDER BY mrr_min_record_raw DESC NULLS LAST) SELECT ((SUM(case when" " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end)) -" " (SUM(case when mrr.record_date=cte_mrr_beginning_of_month_record_raw.mrr_min_record_raw then" " mrr.mrr else 0 end))) / (COUNT(mrr.parent_account_id)) as" @@ -87,7 +88,7 @@ def test_mrr_non_additive_dimension_no_group_by_composed_with_duplicate_cte(conn " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end) as" " mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw" " ON 1=1 LEFT JOIN cte_mrr_beginning_of_month_record_raw ON 1=1 ORDER BY" - " mrr_mrr_change_per_billed_account DESC;" + " mrr_mrr_change_per_billed_account DESC NULLS LAST;" ) assert query == correct @@ -105,11 +106,11 @@ def 
test_mrr_non_additive_dimension_no_group_by_with_where(connection): correct = ( f"WITH cte_mrr_end_of_month_record_raw AS (SELECT MAX(mrr.record_date) as mrr_max_record_raw " f"FROM analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' " - "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_max_record_raw DESC) " + "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_max_record_raw DESC NULLS LAST) " f"SELECT SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw " # noqa f"then mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr " f"LEFT JOIN cte_mrr_end_of_month_record_raw ON 1=1 WHERE mrr.plan_name='Enterprise' " - "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_mrr_end_of_month DESC;" + "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -119,15 +120,16 @@ def test_mrr_non_additive_dimension_no_group_by_with_window_grouping(connection) query = connection.get_sql_query(metrics=[f"mrr_end_of_month_by_account"]) correct = ( - f"WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as mrr_account_id," - f"MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " - f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC) " - f"SELECT SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_record_date" - f".mrr_max_record_date and mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id " - f"then mrr.mrr else 0 end) as mrr_mrr_end_of_month_by_account FROM analytics.mrr_by_customer mrr " - f"LEFT JOIN cte_mrr_end_of_month_by_account_record_date ON mrr.account_id" - f"=cte_mrr_end_of_month_by_account_record_date.mrr_account_id " - f"ORDER BY mrr_mrr_end_of_month_by_account DESC;" + f"WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as" + f" mrr_account_id,MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date FROM" + f" analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC NULLS" + f" LAST) SELECT SUM(case when DATE_TRUNC('DAY'," + f" mrr.record_date)=cte_mrr_end_of_month_by_account_record_date.mrr_max_record_date and" + f" mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id then mrr.mrr else 0 end)" + f" as mrr_mrr_end_of_month_by_account FROM analytics.mrr_by_customer mrr LEFT JOIN" + f" cte_mrr_end_of_month_by_account_record_date ON" + f" mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id ORDER BY" + f" mrr_mrr_end_of_month_by_account DESC NULLS LAST;" ) assert query == correct @@ -140,13 +142,13 @@ def test_mrr_non_additive_dimension_time_group_by(connection): "WITH cte_mrr_end_of_month_record_raw AS (SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " "as mrr_record_week,MAX(mrr.record_date) as mrr_max_record_raw " "FROM analytics.mrr_by_customer mrr GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " - "ORDER BY mrr_max_record_raw DESC) " + "ORDER BY mrr_max_record_raw DESC NULLS LAST) " "SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," "SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw " "then mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr " "LEFT JOIN cte_mrr_end_of_month_record_raw ON DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE))" 
"=cte_mrr_end_of_month_record_raw.mrr_record_week GROUP BY DATE_TRUNC('WEEK', " - "CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_end_of_month DESC;" + "CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -163,18 +165,17 @@ def test_mrr_non_additive_dimension_time_group_by_with_where(connection): ) correct = ( - "WITH cte_mrr_end_of_month_record_raw AS (SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " - "as mrr_record_week,MAX(mrr.record_date) as mrr_max_record_raw " - "FROM analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' " - "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' GROUP BY DATE_TRUNC('WEEK', " - "CAST(mrr.record_date AS DATE)) ORDER BY mrr_max_record_raw DESC) " - "SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," - "SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw " - "then mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr " - "LEFT JOIN cte_mrr_end_of_month_record_raw ON DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE))" - "=cte_mrr_end_of_month_record_raw.mrr_record_week " - "WHERE mrr.plan_name='Enterprise' AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' " - "GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_end_of_month DESC;" + "WITH cte_mrr_end_of_month_record_raw AS (SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as" + " mrr_record_week,MAX(mrr.record_date) as mrr_max_record_raw FROM analytics.mrr_by_customer mrr WHERE" + " mrr.plan_name='Enterprise' AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' GROUP BY" + " DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) ORDER BY mrr_max_record_raw DESC NULLS LAST)" + " SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week,SUM(case when" + " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end) as" + " mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw" + " ON DATE_TRUNC('WEEK', CAST(mrr.record_date AS" + " DATE))=cte_mrr_end_of_month_record_raw.mrr_record_week WHERE mrr.plan_name='Enterprise' AND" + " DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS" + " DATE)) ORDER BY mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -188,7 +189,7 @@ def test_mrr_non_additive_dimension_time_group_by_with_window_grouping(connectio f"DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," f"MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id,DATE_TRUNC('WEEK', CAST(mrr.record_date " - f"AS DATE)) ORDER BY mrr_max_record_date DESC) " + f"AS DATE)) ORDER BY mrr_max_record_date DESC NULLS LAST) " f"SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," f"SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_record_date" f".mrr_max_record_date " @@ -199,7 +200,7 @@ def test_mrr_non_additive_dimension_time_group_by_with_window_grouping(connectio f"and DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE))" f"=cte_mrr_end_of_month_by_account_record_date.mrr_record_week " f"GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " - f"ORDER BY mrr_mrr_end_of_month_by_account DESC;" + f"ORDER BY mrr_mrr_end_of_month_by_account DESC NULLS LAST;" ) assert query == correct @@ -211,11 +212,11 @@ def 
test_mrr_non_additive_dimension_alt_group_by(connection): correct = ( "WITH cte_mrr_end_of_month_record_raw AS (SELECT mrr.plan_name as mrr_plan_name,MAX(mrr.record_date)" " as mrr_max_record_raw FROM analytics.mrr_by_customer mrr GROUP BY mrr.plan_name ORDER BY" - " mrr_max_record_raw DESC) SELECT mrr.plan_name as mrr_plan_name,SUM(case when" + " mrr_max_record_raw DESC NULLS LAST) SELECT mrr.plan_name as mrr_plan_name,SUM(case when" " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end) as" " mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw" " ON mrr.plan_name=cte_mrr_end_of_month_record_raw.mrr_plan_name GROUP BY mrr.plan_name ORDER BY" - " mrr_mrr_end_of_month DESC;" + " mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -235,7 +236,7 @@ def test_mrr_non_additive_dimension_alt_group_by_with_where(connection): "WITH cte_mrr_end_of_month_record_raw AS (SELECT mrr.plan_name as mrr_plan_name," "MAX(mrr.record_date) as mrr_max_record_raw " "FROM analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' " - "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' GROUP BY mrr.plan_name ORDER BY mrr_max_record_raw DESC) " # noqa + "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' GROUP BY mrr.plan_name ORDER BY mrr_max_record_raw DESC NULLS LAST) " # noqa "SELECT mrr.plan_name as mrr_plan_name," "SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw " "then mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr " @@ -243,7 +244,7 @@ def test_mrr_non_additive_dimension_alt_group_by_with_where(connection): " mrr.plan_name=cte_mrr_end_of_month_record_raw.mrr_plan_name " "WHERE mrr.plan_name='Enterprise' " "AND DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' " - "GROUP BY mrr.plan_name ORDER BY mrr_mrr_end_of_month DESC;" + "GROUP BY mrr.plan_name ORDER BY mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -260,13 +261,13 @@ def test_mrr_non_additive_dimension_alt_group_by_with_having(connection): correct = ( "WITH cte_mrr_end_of_month_record_raw AS (SELECT mrr.plan_name as mrr_plan_name,MAX(mrr.record_date)" " as mrr_max_record_raw FROM analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' GROUP BY" - " mrr.plan_name ORDER BY mrr_max_record_raw DESC) SELECT mrr.plan_name as mrr_plan_name,SUM(case when" - " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end) as" - " mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw" - " ON mrr.plan_name=cte_mrr_end_of_month_record_raw.mrr_plan_name WHERE mrr.plan_name='Enterprise'" - " GROUP BY mrr.plan_name HAVING SUM(case when" + " mrr.plan_name ORDER BY mrr_max_record_raw DESC NULLS LAST) SELECT mrr.plan_name as" + " mrr_plan_name,SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then" + " mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN" + " cte_mrr_end_of_month_record_raw ON mrr.plan_name=cte_mrr_end_of_month_record_raw.mrr_plan_name" + " WHERE mrr.plan_name='Enterprise' GROUP BY mrr.plan_name HAVING SUM(case when" " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end)>1100" - " ORDER BY mrr_mrr_end_of_month DESC;" + " ORDER BY mrr_mrr_end_of_month DESC NULLS LAST;" ) assert query == correct @@ -282,11 +283,12 @@ def 
test_mrr_non_additive_dimension_alt_group_by_with_having_not_in_select(conne correct = ( "WITH cte_mrr_end_of_month_record_raw AS (SELECT MAX(mrr.record_date) as mrr_max_record_raw FROM" - " analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' ORDER BY mrr_max_record_raw DESC)" - " SELECT COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts FROM analytics.mrr_by_customer" - " mrr LEFT JOIN cte_mrr_end_of_month_record_raw ON 1=1 WHERE mrr.plan_name='Enterprise' HAVING" - " SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else" - " 0 end)>1100 ORDER BY mrr_number_of_billed_accounts DESC;" + " analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' ORDER BY mrr_max_record_raw DESC" + " NULLS LAST) SELECT COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts FROM" + " analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw ON 1=1 WHERE" + " mrr.plan_name='Enterprise' HAVING SUM(case when" + " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end)>1100" + " ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" ) assert query == correct @@ -298,7 +300,7 @@ def test_mrr_non_additive_dimension_alt_group_by_with_window_grouping(connection correct = ( f"WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as mrr_account_id," "mrr.plan_name as mrr_plan_name,MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " - f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id,mrr.plan_name ORDER BY mrr_max_record_date DESC) " # noqa + f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id,mrr.plan_name ORDER BY mrr_max_record_date DESC NULLS LAST) " # noqa f"SELECT mrr.plan_name as mrr_plan_name," "SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_record_date" ".mrr_max_record_date " @@ -308,7 +310,7 @@ def test_mrr_non_additive_dimension_alt_group_by_with_window_grouping(connection "=cte_mrr_end_of_month_by_account_record_date.mrr_account_id and mrr.plan_name" "=cte_mrr_end_of_month_by_account_record_date.mrr_plan_name " "GROUP BY mrr.plan_name " - "ORDER BY mrr_mrr_end_of_month_by_account DESC;" + "ORDER BY mrr_mrr_end_of_month_by_account DESC NULLS LAST;" ) assert query == correct @@ -318,18 +320,16 @@ def test_mrr_non_additive_dimension_group_by_equal_to_window_grouping(connection query = connection.get_sql_query(metrics=[f"mrr_end_of_month_by_account"], dimensions=["mrr.account_id"]) correct = ( - f"WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as mrr_account_id," - f"MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " - f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC) " - f"SELECT mrr.account_id as mrr_account_id," - f"SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_record_date" - f".mrr_max_record_date " - f"and mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id " - f"then mrr.mrr else 0 end) as mrr_mrr_end_of_month_by_account FROM analytics.mrr_by_customer mrr " - f"LEFT JOIN cte_mrr_end_of_month_by_account_record_date ON mrr.account_id" - f"=cte_mrr_end_of_month_by_account_record_date.mrr_account_id " - f"GROUP BY mrr.account_id " - f"ORDER BY mrr_mrr_end_of_month_by_account DESC;" + f"WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as" + f" mrr_account_id,MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date FROM" + 
f" analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC NULLS" + f" LAST) SELECT mrr.account_id as mrr_account_id,SUM(case when DATE_TRUNC('DAY'," + f" mrr.record_date)=cte_mrr_end_of_month_by_account_record_date.mrr_max_record_date and" + f" mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id then mrr.mrr else 0 end)" + f" as mrr_mrr_end_of_month_by_account FROM analytics.mrr_by_customer mrr LEFT JOIN" + f" cte_mrr_end_of_month_by_account_record_date ON" + f" mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id GROUP BY mrr.account_id" + f" ORDER BY mrr_mrr_end_of_month_by_account DESC NULLS LAST;" ) assert query == correct @@ -342,20 +342,20 @@ def test_mrr_non_additive_dimension_merged_results_no_group_by_where(connection) ) correct = ( - "WITH mrr_record__cte_subquery_0 AS (WITH cte_mrr_end_of_month_record_raw AS (" - "SELECT MAX(mrr.record_date) as mrr_max_record_raw FROM analytics.mrr_by_customer mrr " - "WHERE DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_max_record_raw DESC) " - "SELECT SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw." - "mrr_max_record_raw then mrr.mrr else 0 end) as mrr_mrr_end_of_month " - "FROM analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw ON 1=1 " - "WHERE DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_mrr_end_of_month DESC" - ") ,order_lines_order__cte_subquery_1 AS (SELECT SUM(order_lines.revenue) as " - "order_lines_total_item_revenue FROM analytics.order_line_items order_lines " - "WHERE DATE_TRUNC('DAY', order_lines.order_date)>'2022-04-03' ORDER BY " - "order_lines_total_item_revenue DESC) SELECT mrr_record__cte_subquery_0.mrr_mrr_end_of_month " - "as mrr_mrr_end_of_month,order_lines_order__cte_subquery_1.order_lines_total_item_revenue " - "as order_lines_total_item_revenue FROM mrr_record__cte_subquery_0 FULL OUTER JOIN " - "order_lines_order__cte_subquery_1 ON 1=1;" + "WITH mrr_record__cte_subquery_0 AS (WITH cte_mrr_end_of_month_record_raw AS (SELECT" + " MAX(mrr.record_date) as mrr_max_record_raw FROM analytics.mrr_by_customer mrr WHERE" + " DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER BY mrr_max_record_raw DESC NULLS LAST) SELECT" + " SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else" + " 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN" + " cte_mrr_end_of_month_record_raw ON 1=1 WHERE DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' ORDER" + " BY mrr_mrr_end_of_month DESC NULLS LAST) ,order_lines_order__cte_subquery_1 AS (SELECT" + " SUM(order_lines.revenue) as order_lines_total_item_revenue FROM analytics.order_line_items" + " order_lines WHERE DATE_TRUNC('DAY', order_lines.order_date)>'2022-04-03' ORDER BY" + " order_lines_total_item_revenue DESC NULLS LAST) SELECT" + " mrr_record__cte_subquery_0.mrr_mrr_end_of_month as" + " mrr_mrr_end_of_month,order_lines_order__cte_subquery_1.order_lines_total_item_revenue as" + " order_lines_total_item_revenue FROM mrr_record__cte_subquery_0 FULL OUTER JOIN" + " order_lines_order__cte_subquery_1 ON 1=1;" ) assert query == correct @@ -372,18 +372,18 @@ def test_mrr_non_additive_dimension_merged_results_no_group_by_window_grouping(c f"WITH cte_mrr_end_of_month_by_account_record_date AS (SELECT mrr.account_id as mrr_account_id," "MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " f"FROM analytics.mrr_by_customer mrr WHERE DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' " 
- "GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC) " + "GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC NULLS LAST) " f"SELECT SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_record_date" ".mrr_max_record_date " "and mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id " f"then mrr.mrr else 0 end) as mrr_mrr_end_of_month_by_account FROM analytics.mrr_by_customer mrr " f"LEFT JOIN cte_mrr_end_of_month_by_account_record_date ON mrr.account_id" "=cte_mrr_end_of_month_by_account_record_date.mrr_account_id WHERE DATE_TRUNC('DAY', mrr.record_date)>'2022-04-03' " # noqa - "ORDER BY mrr_mrr_end_of_month_by_account DESC" + "ORDER BY mrr_mrr_end_of_month_by_account DESC NULLS LAST" ") ,order_lines_order__cte_subquery_1 AS (SELECT SUM(order_lines.revenue) as " "order_lines_total_item_revenue FROM analytics.order_line_items order_lines " "WHERE DATE_TRUNC('DAY', order_lines.order_date)>'2022-04-03' ORDER BY " - "order_lines_total_item_revenue DESC) SELECT mrr_record__cte_subquery_0.mrr_mrr_end_of_month_by_account " # noqa + "order_lines_total_item_revenue DESC NULLS LAST) SELECT mrr_record__cte_subquery_0.mrr_mrr_end_of_month_by_account " # noqa "as mrr_mrr_end_of_month_by_account,order_lines_order__cte_subquery_1.order_lines_total_item_revenue " "as order_lines_total_item_revenue FROM mrr_record__cte_subquery_0 FULL OUTER JOIN " "order_lines_order__cte_subquery_1 ON 1=1;" @@ -403,18 +403,19 @@ def test_mrr_non_additive_dimension_merged_results_time_group_by(connection): "WITH cte_mrr_end_of_month_record_raw AS (SELECT DATE_TRUNC('WEEK', " "CAST(mrr.record_date AS DATE)) as mrr_record_week,MAX(mrr.record_date) as mrr_max_record_raw " "FROM analytics.mrr_by_customer mrr GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " - "ORDER BY mrr_max_record_raw DESC) " + "ORDER BY mrr_max_record_raw DESC NULLS LAST) " "SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," "SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw " "then mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr " "LEFT JOIN cte_mrr_end_of_month_record_raw ON DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE))" "=cte_mrr_end_of_month_record_raw.mrr_record_week " - "GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_end_of_month DESC" + "GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_end_of_month DESC NULLS" + " LAST" ") ,order_lines_order__cte_subquery_1 AS (SELECT DATE_TRUNC('WEEK', " "CAST(order_lines.order_date AS DATE)) as order_lines_order_week," "SUM(order_lines.revenue) as order_lines_total_item_revenue FROM analytics.order_line_items " "order_lines GROUP BY DATE_TRUNC('WEEK', CAST(order_lines.order_date AS DATE)) " - "ORDER BY order_lines_total_item_revenue DESC) SELECT mrr_record__cte_subquery_0." + "ORDER BY order_lines_total_item_revenue DESC NULLS LAST) SELECT mrr_record__cte_subquery_0." "mrr_mrr_end_of_month as mrr_mrr_end_of_month,order_lines_order__cte_subquery_1." 
"order_lines_total_item_revenue as order_lines_total_item_revenue,ifnull(" "mrr_record__cte_subquery_0.mrr_record_week, order_lines_order__cte_subquery_1" @@ -436,14 +437,14 @@ def test_mrr_non_additive_dimension_time_group_by_ignore_dimensions(connection): "WITH cte_mrr_beginning_of_month_no_group_by_record_raw AS (SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " # noqa "as mrr_record_week,MIN(mrr.record_date) as mrr_min_record_raw " "FROM analytics.mrr_by_customer mrr GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " - "ORDER BY mrr_min_record_raw DESC) " + "ORDER BY mrr_min_record_raw DESC NULLS LAST) " "SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," "SUM(case when mrr.record_date=cte_mrr_beginning_of_month_no_group_by_record_raw.mrr_min_record_raw " "then mrr.mrr else 0 end) as mrr_mrr_beginning_of_month_no_group_by FROM analytics.mrr_by_customer" " mrr " "LEFT JOIN cte_mrr_beginning_of_month_no_group_by_record_raw ON DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE))" # noqa "=cte_mrr_beginning_of_month_no_group_by_record_raw.mrr_record_week GROUP BY DATE_TRUNC('WEEK', " - "CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_beginning_of_month_no_group_by DESC;" + "CAST(mrr.record_date AS DATE)) ORDER BY mrr_mrr_beginning_of_month_no_group_by DESC NULLS LAST;" ) assert query == correct @@ -459,7 +460,7 @@ def test_mrr_non_additive_dimension_time_group_by_with_window_grouping_ignore_di "DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," "MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id,DATE_TRUNC('WEEK', CAST(mrr.record_date " - "AS DATE)) ORDER BY mrr_max_record_date DESC) " + "AS DATE)) ORDER BY mrr_max_record_date DESC NULLS LAST) " f"SELECT DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) as mrr_record_week," "SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_no_group_by_record_date" # noqa ".mrr_max_record_date " @@ -470,7 +471,7 @@ def test_mrr_non_additive_dimension_time_group_by_with_window_grouping_ignore_di "and DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE))" "=cte_mrr_end_of_month_by_account_no_group_by_record_date.mrr_record_week " "GROUP BY DATE_TRUNC('WEEK', CAST(mrr.record_date AS DATE)) " - "ORDER BY mrr_mrr_end_of_month_by_account_no_group_by DESC;" + "ORDER BY mrr_mrr_end_of_month_by_account_no_group_by DESC NULLS LAST;" ) assert query == correct @@ -483,12 +484,12 @@ def test_mrr_non_additive_dimension_alt_group_by_ignore_dimensions(connection): correct = ( "WITH cte_mrr_beginning_of_month_no_group_by_record_raw AS (SELECT MIN(mrr.record_date) as" - " mrr_min_record_raw FROM analytics.mrr_by_customer mrr ORDER BY mrr_min_record_raw DESC) SELECT" - " mrr.plan_name as mrr_plan_name,SUM(case when" + " mrr_min_record_raw FROM analytics.mrr_by_customer mrr ORDER BY mrr_min_record_raw DESC NULLS LAST)" + " SELECT mrr.plan_name as mrr_plan_name,SUM(case when" " mrr.record_date=cte_mrr_beginning_of_month_no_group_by_record_raw.mrr_min_record_raw then mrr.mrr" " else 0 end) as mrr_mrr_beginning_of_month_no_group_by FROM analytics.mrr_by_customer mrr LEFT JOIN" " cte_mrr_beginning_of_month_no_group_by_record_raw ON 1=1 GROUP BY mrr.plan_name ORDER BY" - " mrr_mrr_beginning_of_month_no_group_by DESC;" + " mrr_mrr_beginning_of_month_no_group_by DESC NULLS LAST;" ) assert query == correct @@ -504,8 +505,8 @@ def test_mrr_non_additive_dimension_alt_group_by_nulls_equal(connection, query_t 
"equal_null(mrr.plan_name, cte_mrr_beginning_of_month_nulls_equal_record_raw.mrr_plan_name)" ) group_by = "mrr.plan_name" - cte_order_by = " ORDER BY mrr_min_record_raw DESC" - order_by = " ORDER BY mrr_mrr_beginning_of_month_nulls_equal DESC" + cte_order_by = " ORDER BY mrr_min_record_raw DESC NULLS LAST" + order_by = " ORDER BY mrr_mrr_beginning_of_month_nulls_equal DESC NULLS LAST" else: join_logic = ( "(mrr.plan_name=cte_mrr_beginning_of_month_nulls_equal_record_raw.mrr_plan_name OR (mrr.plan_name" @@ -534,7 +535,7 @@ def test_mrr_non_additive_dimension_alt_group_by_with_window_grouping_ignore_dim correct = ( f"WITH cte_mrr_end_of_month_by_account_no_group_by_record_date AS (SELECT mrr.account_id as mrr_account_id," # noqa "MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " - f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC) " # noqa + f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC NULLS LAST) " # noqa f"SELECT mrr.plan_name as mrr_plan_name," "SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_no_group_by_record_date" # noqa ".mrr_max_record_date " @@ -542,7 +543,7 @@ def test_mrr_non_additive_dimension_alt_group_by_with_window_grouping_ignore_dim f"then mrr.mrr else 0 end) as mrr_mrr_end_of_month_by_account_no_group_by FROM analytics.mrr_by_customer mrr " # noqa f"LEFT JOIN cte_mrr_end_of_month_by_account_no_group_by_record_date ON mrr.account_id" "=cte_mrr_end_of_month_by_account_no_group_by_record_date.mrr_account_id " - "GROUP BY mrr.plan_name ORDER BY mrr_mrr_end_of_month_by_account_no_group_by DESC;" + "GROUP BY mrr.plan_name ORDER BY mrr_mrr_end_of_month_by_account_no_group_by DESC NULLS LAST;" ) assert query == correct @@ -556,7 +557,8 @@ def test_mrr_non_additive_dimension_group_by_equal_to_window_grouping_ignore_dim correct = ( f"WITH cte_mrr_end_of_month_by_account_no_group_by_record_date AS (SELECT mrr.account_id as mrr_account_id," # noqa "MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date " - f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC) " + f"FROM analytics.mrr_by_customer mrr GROUP BY mrr.account_id ORDER BY mrr_max_record_date DESC NULLS" + f" LAST) " f"SELECT mrr.account_id as mrr_account_id," "SUM(case when DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_no_group_by_record_date" # noqa ".mrr_max_record_date " @@ -565,7 +567,7 @@ def test_mrr_non_additive_dimension_group_by_equal_to_window_grouping_ignore_dim f"LEFT JOIN cte_mrr_end_of_month_by_account_no_group_by_record_date ON mrr.account_id" "=cte_mrr_end_of_month_by_account_no_group_by_record_date.mrr_account_id " "GROUP BY mrr.account_id " - "ORDER BY mrr_mrr_end_of_month_by_account_no_group_by DESC;" + "ORDER BY mrr_mrr_end_of_month_by_account_no_group_by DESC NULLS LAST;" ) assert query == correct @@ -583,7 +585,7 @@ def test_mrr_non_additive_dimension_merged_result_sub_join(connection): " as mrr_customer_account_type,MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date FROM" " analytics.mrr_by_customer mrr LEFT JOIN analytics.accounts accounts ON" " mrr.account_id=accounts.account_id GROUP BY mrr.account_id,accounts.name,mrr.customer_account_type" - " ORDER BY mrr_max_record_date DESC) SELECT accounts.name as" + " ORDER BY mrr_max_record_date DESC NULLS LAST) SELECT accounts.name as" " accounts_account_name,mrr.customer_account_type as mrr_customer_account_type,SUM(case when" " 
DATE_TRUNC('DAY', mrr.record_date)=cte_mrr_end_of_month_by_account_record_date.mrr_max_record_date" " and mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id then mrr.mrr else 0" @@ -594,7 +596,8 @@ def test_mrr_non_additive_dimension_merged_result_sub_join(connection): " mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id and" " accounts.name=cte_mrr_end_of_month_by_account_record_date.accounts_account_name and" " mrr.customer_account_type=cte_mrr_end_of_month_by_account_record_date.mrr_customer_account_type" - " GROUP BY accounts.name,mrr.customer_account_type ORDER BY mrr_mrr_end_of_month_by_account DESC)" + " GROUP BY accounts.name,mrr.customer_account_type ORDER BY mrr_mrr_end_of_month_by_account DESC" + " NULLS LAST)" " ,z_customer_accounts_created__cte_subquery_1 AS (SELECT accounts.name as" " accounts_account_name,z_customer_accounts.account_type as" " z_customer_accounts_type_of_account,COUNT(z_customer_accounts.account_id ||" @@ -602,7 +605,7 @@ def test_mrr_non_additive_dimension_merged_result_sub_join(connection): " analytics.customer_accounts z_customer_accounts LEFT JOIN analytics.accounts accounts ON" " z_customer_accounts.account_id=accounts.account_id GROUP BY" " accounts.name,z_customer_accounts.account_type ORDER BY" - " z_customer_accounts_number_of_account_customer_connections DESC) SELECT" + " z_customer_accounts_number_of_account_customer_connections DESC NULLS LAST) SELECT" " mrr_record__cte_subquery_0.mrr_mrr_end_of_month_by_account as" " mrr_mrr_end_of_month_by_account,z_customer_accounts_created__cte_subquery_1.z_customer_accounts_number_of_account_customer_connections" # noqa " as z_customer_accounts_number_of_account_customer_connections,ifnull(mrr_record__cte_subquery_0.accounts_account_name," # noqa @@ -635,7 +638,8 @@ def test_mrr_non_additive_dimension_merged_result_sub_join_where(connection): " mrr_record_date,MAX(DATE_TRUNC('DAY', mrr.record_date)) as mrr_max_record_date FROM" " analytics.mrr_by_customer mrr LEFT JOIN analytics.accounts accounts ON" " mrr.account_id=accounts.account_id WHERE accounts.name='Apple' GROUP BY" - " mrr.account_id,DATE_TRUNC('DAY', mrr.record_date) ORDER BY mrr_max_record_date DESC) SELECT" + " mrr.account_id,DATE_TRUNC('DAY', mrr.record_date) ORDER BY mrr_max_record_date DESC NULLS LAST)" + " SELECT" " DATE_TRUNC('DAY', mrr.record_date) as mrr_record_date,SUM(case when DATE_TRUNC('DAY'," " mrr.record_date)=cte_mrr_end_of_month_by_account_record_date.mrr_max_record_date and" " mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id then mrr.mrr else 0" @@ -646,14 +650,15 @@ def test_mrr_non_additive_dimension_merged_result_sub_join_where(connection): " mrr.account_id=cte_mrr_end_of_month_by_account_record_date.mrr_account_id and DATE_TRUNC('DAY'," " mrr.record_date)=cte_mrr_end_of_month_by_account_record_date.mrr_record_date WHERE" " accounts.name='Apple' GROUP BY DATE_TRUNC('DAY', mrr.record_date) ORDER BY" - " mrr_mrr_end_of_month_by_account DESC) ,z_customer_accounts_created__cte_subquery_1 AS (SELECT" + " mrr_mrr_end_of_month_by_account DESC NULLS LAST) ,z_customer_accounts_created__cte_subquery_1 AS" + " (SELECT" " DATE_TRUNC('DAY', z_customer_accounts.created_at) as" " z_customer_accounts_created_date,COUNT(z_customer_accounts.account_id ||" " z_customer_accounts.customer_id) as z_customer_accounts_number_of_account_customer_connections FROM" " analytics.customer_accounts z_customer_accounts LEFT JOIN analytics.accounts accounts ON" " 
z_customer_accounts.account_id=accounts.account_id WHERE accounts.name='Apple' GROUP BY" " DATE_TRUNC('DAY', z_customer_accounts.created_at) ORDER BY" - " z_customer_accounts_number_of_account_customer_connections DESC) SELECT" + " z_customer_accounts_number_of_account_customer_connections DESC NULLS LAST) SELECT" " mrr_record__cte_subquery_0.mrr_mrr_end_of_month_by_account as" " mrr_mrr_end_of_month_by_account,z_customer_accounts_created__cte_subquery_1.z_customer_accounts_number_of_account_customer_connections" # noqa " as z_customer_accounts_number_of_account_customer_connections,ifnull(mrr_record__cte_subquery_0.mrr_record_date," # noqa diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index 675d2fa..70e7e80 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -157,7 +157,7 @@ def test_simple_query_dynamic_schema(): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " - "{} simple GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC;" + "{} simple GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC NULLS LAST;" ) class sf_mock(BaseConnection): @@ -193,7 +193,9 @@ def test_simple_query(connections): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " ) - correct += "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC;" + correct += ( + "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC NULLS LAST;" + ) assert query == correct @@ -207,7 +209,7 @@ def test_simple_query_dimension_filter(connections): "SELECT case when LOWER(simple.sales_channel) LIKE LOWER('%organic%') then simple.sales_channel end" " as simple_organic_channels,SUM(simple.revenue) as simple_total_revenue FROM analytics.orders simple" " GROUP BY case when LOWER(simple.sales_channel) LIKE LOWER('%organic%') then simple.sales_channel" - " end ORDER BY simple_total_revenue DESC;" + " end ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -226,7 +228,7 @@ def test_simple_query_field_to_field_filter(connections): "SELECT case when LOWER(simple.sales_channel) LIKE LOWER('%organic%') then simple.sales_channel end" " as simple_organic_channels,SUM(simple.revenue) as simple_total_revenue FROM analytics.orders simple" " WHERE simple.new_vs_repeat>simple.group_name GROUP BY case when LOWER(simple.sales_channel) LIKE" - " LOWER('%organic%') then simple.sales_channel end ORDER BY simple_total_revenue DESC;" + " LOWER('%organic%') then simple.sales_channel end ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -264,7 +266,7 @@ def test_simple_query_min_max(connections, metric, query_type): group_by = "simple.sales_channel" semi = ";" if query_type in {Definitions.snowflake, Definitions.redshift, Definitions.duck_db}: - order_by = f" ORDER BY simple_{agg.lower()}_revenue DESC" + order_by = f" ORDER BY simple_{agg.lower()}_revenue DESC NULLS LAST" else: order_by = "" @@ -303,7 +305,7 @@ def test_simple_query_count_distinct(connections, query_type): group_by = "simple.sales_channel" semi = ";" if query_type in {Definitions.snowflake, Definitions.redshift, Definitions.duck_db}: - order_by = f" ORDER BY simple_unique_groups DESC" + order_by = f" ORDER BY simple_unique_groups DESC NULLS LAST" else: order_by = "" @@ -328,7 +330,7 @@ def test_simple_query_single_metric(connections): correct = ( "SELECT SUM(simple.revenue) as 
simple_total_revenue " - "FROM analytics.orders simple ORDER BY simple_total_revenue DESC;" + "FROM analytics.orders simple ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -341,7 +343,7 @@ def test_simple_query_single_dimension(connections): correct = ( "SELECT simple.sales_channel as simple_channel FROM analytics.orders simple " - "GROUP BY simple.sales_channel ORDER BY simple_channel ASC;" + "GROUP BY simple.sales_channel ORDER BY simple_channel ASC NULLS LAST;" ) assert query == correct @@ -358,7 +360,7 @@ def test_simple_query_limit(connections, query_type): if Definitions.snowflake == query_type: correct = ( "SELECT simple.sales_channel as simple_channel FROM analytics.orders simple " - "GROUP BY simple.sales_channel ORDER BY simple_channel ASC LIMIT 10;" + "GROUP BY simple.sales_channel ORDER BY simple_channel ASC NULLS LAST LIMIT 10;" ) elif query_type in {Definitions.sql_server, Definitions.azure_synapse}: correct = ( @@ -377,7 +379,7 @@ def test_simple_query_count(connections): query = conn.get_sql_query(metrics=["count"], dimensions=["channel"]) correct = "SELECT simple.sales_channel as simple_channel,COUNT(*) as simple_count FROM " - correct += "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_count DESC;" + correct += "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_count DESC NULLS LAST;" assert query == correct @@ -388,7 +390,7 @@ def test_simple_query_alias_keyword(connections): query = conn.get_sql_query(metrics=["count"], dimensions=["group"]) correct = "SELECT simple.group_name as simple_group,COUNT(*) as simple_count FROM " - correct += "analytics.orders simple GROUP BY simple.group_name ORDER BY simple_count DESC;" + correct += "analytics.orders simple GROUP BY simple.group_name ORDER BY simple_count DESC NULLS LAST;" assert query == correct @@ -465,7 +467,7 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s f"AS {ttype}) AS TIMESTAMP))>='{start}' AND DATE_TRUNC('DAY', " f"CAST(CAST(CONVERT_TIMEZONE('America/New_York', simple.order_date) AS {ttype}) AS TIMESTAMP))<='{end}'" # noqa ) - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" elif query_type == Definitions.databricks: if field == "previous_order": result_lookup = {"date": "DATE_TRUNC('DAY', CAST(simple.previous_order_date AS TIMESTAMP))"} @@ -512,7 +514,7 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s f"CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'utc' at time zone 'America/New_York' AS TIMESTAMP) AS TIMESTAMP))<='{end}'" # noqa ) if query_type == Definitions.duck_db: - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" else: order_by = "" elif query_type == Definitions.bigquery: @@ -848,7 +850,7 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): if query_type == Definitions.redshift: result_lookup["month_of_year"] = "TO_CHAR(CAST(simple.order_date AS TIMESTAMP), 'Mon')" result_lookup["month_name"] = "TO_CHAR(CAST(simple.order_date AS TIMESTAMP), 'Mon')" - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" elif query_type in {Definitions.sql_server, Definitions.azure_synapse}: result_lookup = { @@ -936,7 +938,7 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "day_of_year": "EXTRACT('DOY' FROM 
CAST(simple.order_date AS TIMESTAMP))", } if query_type == Definitions.duck_db: - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" else: order_by = "" @@ -1147,7 +1149,7 @@ def test_simple_query_dimension_group_interval(connections, interval: str, query "quarter": "DATEDIFF('QUARTER', simple.view_date, simple.order_date)", "year": "DATEDIFF('YEAR', simple.view_date, simple.order_date)", } - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" elif query_type == Definitions.druid: result_lookup = { "second": "TIMESTAMPDIFF(SECOND, simple.view_date, simple.order_date)", @@ -1249,7 +1251,7 @@ def test_simple_query_two_group_by(connections): correct = ( "SELECT simple.sales_channel as simple_channel,simple.new_vs_repeat as simple_new_vs_repeat," "SUM(simple.revenue) as simple_total_revenue FROM analytics.orders simple " - "GROUP BY simple.sales_channel,simple.new_vs_repeat ORDER BY simple_total_revenue DESC;" + "GROUP BY simple.sales_channel,simple.new_vs_repeat ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1267,7 +1269,7 @@ def test_simple_query_two_metric(connections): "SELECT simple.sales_channel as simple_channel,simple.new_vs_repeat as simple_new_vs_repeat," "SUM(simple.revenue) as simple_total_revenue,AVG(simple.revenue) as simple_average_order_value FROM " "analytics.orders simple GROUP BY simple.sales_channel,simple.new_vs_repeat " - "ORDER BY simple_total_revenue DESC;" + "ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1282,7 +1284,7 @@ def test_simple_query_custom_dimension(connections): "SELECT (CASE WHEN simple.sales_channel != 'fraud' THEN TRUE ELSE FALSE END) as " "simple_is_valid_order,SUM(simple.revenue) as simple_total_revenue " "FROM analytics.orders simple GROUP BY (CASE WHEN simple.sales_channel " - "!= 'fraud' THEN TRUE ELSE FALSE END) ORDER BY simple_total_revenue DESC;" + "!= 'fraud' THEN TRUE ELSE FALSE END) ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1294,9 +1296,9 @@ def test_simple_query_custom_metric(connections): query = conn.get_sql_query(metrics=["revenue_per_aov"], dimensions=["channel"]) correct = ( - "SELECT simple.sales_channel as simple_channel,CASE WHEN (AVG(simple.revenue)) = 0 THEN " - "0 ELSE (SUM(simple.revenue)) / (AVG(simple.revenue)) END as simple_revenue_per_aov FROM " - "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_revenue_per_aov DESC;" + "SELECT simple.sales_channel as simple_channel,CASE WHEN (AVG(simple.revenue)) = 0 THEN 0 ELSE" + " (SUM(simple.revenue)) / (AVG(simple.revenue)) END as simple_revenue_per_aov FROM analytics.orders" + " simple GROUP BY simple.sales_channel ORDER BY simple_revenue_per_aov DESC NULLS LAST;" ) assert query == correct @@ -1361,7 +1363,7 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value }: order_by = "" else: - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" semi = ";" if query_type == Definitions.druid: @@ -1445,7 +1447,7 @@ def test_simple_query_convert_tz_alias_no(connections): correct = ( "SELECT DATE_TRUNC('DAY', simple.order_date) as simple_order_date," "SUM(simple.revenue) as simple_total_revenue FROM analytics.orders simple " - "GROUP BY DATE_TRUNC('DAY', simple.order_date) ORDER BY simple_total_revenue DESC;" + "GROUP BY DATE_TRUNC('DAY', simple.order_date) ORDER BY 
simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1498,7 +1500,7 @@ def test_simple_query_with_where_dict(connections, field_name, filter_type, valu ) if query_type == Definitions.snowflake: - order_by = " ORDER BY simple_total_revenue DESC" + order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" semi = ";" elif query_type == Definitions.druid: order_by = "" @@ -1566,7 +1568,7 @@ def test_simple_query_with_where_literal(connections): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " "analytics.orders simple WHERE simple.sales_channel != 'Email' GROUP BY simple.sales_channel " - "ORDER BY simple_total_revenue DESC;" + "ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1611,7 +1613,7 @@ def test_simple_query_with_having_dict(connections, filter_type): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " f"analytics.orders simple GROUP BY simple.sales_channel HAVING {full_expr} " - "ORDER BY simple_total_revenue DESC;" + "ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1627,25 +1629,63 @@ def test_simple_query_with_having_literal(connections): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " "analytics.orders simple GROUP BY simple.sales_channel HAVING (SUM(simple.revenue)) > 12 " - "ORDER BY simple_total_revenue DESC;" + "ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct -@pytest.mark.query -def test_simple_query_with_order_by_dict(connections): +@pytest.mark.queryy +@pytest.mark.parametrize( + "query_type", + [ + Definitions.snowflake, + Definitions.bigquery, + Definitions.redshift, + Definitions.postgres, + Definitions.druid, + Definitions.sql_server, + Definitions.duck_db, + Definitions.databricks, + Definitions.azure_synapse, + ], +) +def test_simple_query_with_order_by_dict(connections, query_type): project = Project(models=[simple_model], views=[simple_view]) conn = MetricsLayerConnection(project=project, connections=connections) query = conn.get_sql_query( - metrics=["total_revenue", "average_order_value"], + metrics=["total_revenue", "average_order_value", "max_revenue"], dimensions=["channel"], - order_by=[{"field": "total_revenue", "sort": "asc"}, {"field": "average_order_value"}], + order_by=[ + {"field": "total_revenue", "sort": "asc"}, + {"field": "average_order_value"}, + {"field": "max_revenue", "sort": "desc"}, + ], + query_type=query_type, ) + if query_type == Definitions.bigquery: + group_by = "simple_channel" + else: + group_by = "simple.sales_channel" + + semi = ";" if query_type not in {Definitions.druid} else "" + if query_type in { + Definitions.snowflake, + Definitions.redshift, + Definitions.duck_db, + Definitions.postgres, + Definitions.databricks, + Definitions.bigquery, + }: + nulls_last = " NULLS LAST" + else: + nulls_last = "" correct = ( - "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue," - "AVG(simple.revenue) as simple_average_order_value FROM analytics.orders simple " - "GROUP BY simple.sales_channel ORDER BY simple_total_revenue ASC,simple_average_order_value ASC;" + "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as" + " simple_total_revenue,AVG(simple.revenue) as simple_average_order_value,MAX(simple.revenue) as" + f" simple_max_revenue FROM analytics.orders simple GROUP BY {group_by} ORDER BY" + f" 
simple_total_revenue ASC{nulls_last},simple_average_order_value ASC{nulls_last},simple_max_revenue" + f" DESC{nulls_last}{semi}" ) assert query == correct @@ -1660,7 +1700,7 @@ def test_simple_query_with_order_by_literal(connections): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " - "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_total_revenue ASC;" + "analytics.orders simple GROUP BY simple.sales_channel ORDER BY simple_total_revenue ASC NULLS LAST;" ) assert query == correct @@ -1678,8 +1718,8 @@ def test_simple_query_with_all(connections): ) correct = ( - "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " - "analytics.orders simple WHERE simple.sales_channel<>'Email' " - "GROUP BY simple.sales_channel HAVING SUM(simple.revenue)>12 ORDER BY simple_total_revenue ASC;" + "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" + " analytics.orders simple WHERE simple.sales_channel<>'Email' GROUP BY simple.sales_channel HAVING" + " SUM(simple.revenue)>12 ORDER BY simple_total_revenue ASC NULLS LAST;" ) assert query == correct diff --git a/tests/test_symmetric_aggregates.py b/tests/test_symmetric_aggregates.py index fa2426d..19b1a16 100644 --- a/tests/test_symmetric_aggregates.py +++ b/tests/test_symmetric_aggregates.py @@ -19,7 +19,7 @@ def test_query_sum_with_sql(connection, query_type): "% 1.0e27)::NUMERIC(38, 0))) AS DOUBLE PRECISION) " "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0)" ) - order_by = " ORDER BY orders_total_revenue DESC" + order_by = " ORDER BY orders_total_revenue DESC NULLS LAST" elif query_type == Definitions.redshift: sa = ( "COALESCE(CAST((SUM(DISTINCT (CAST(FLOOR(COALESCE(orders.revenue, 0) " @@ -29,7 +29,7 @@ def test_query_sum_with_sql(connection, query_type): "::NUMERIC(38, 0))) AS DOUBLE PRECISION) " "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0)" ) - order_by = " ORDER BY orders_total_revenue DESC" + order_by = " ORDER BY orders_total_revenue DESC NULLS LAST" elif query_type in {Definitions.postgres}: sa = ( "COALESCE(CAST((SUM(DISTINCT (CAST(FLOOR(COALESCE(orders.revenue, 0) * (1000000 * 1.0)) AS" @@ -67,7 +67,7 @@ def test_query_count_with_sql(connection): "(orders.id) IS NOT NULL THEN orders.id ELSE NULL END), 0)" " as orders_number_of_orders FROM analytics.order_line_items order_lines " "LEFT JOIN analytics.orders orders ON order_lines.order_unique_id=orders.id " - "GROUP BY order_lines.sales_channel ORDER BY orders_number_of_orders DESC;" + "GROUP BY order_lines.sales_channel ORDER BY orders_number_of_orders DESC NULLS LAST;" ) assert query == correct @@ -85,7 +85,7 @@ def test_query_count_with_one_to_many(connection): " then order_lines.order_line_id end ELSE NULL END), 0) as order_lines_number_of_email_purchased_items " # noqa "FROM analytics_live.discounts discounts " "LEFT JOIN analytics.order_line_items order_lines ON discounts.order_id=order_lines.order_unique_id " - "GROUP BY discounts.code ORDER BY order_lines_number_of_email_purchased_items DESC;" + "GROUP BY discounts.code ORDER BY order_lines_number_of_email_purchased_items DESC NULLS LAST;" ) assert query == correct @@ -106,7 +106,7 @@ def test_query_average_with_sql(connection, query_type: str): "% 1.0e27)::NUMERIC(38, 0))) AS DOUBLE PRECISION) " "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0)" ) - order_by = " ORDER BY orders_average_order_value DESC" + order_by = " ORDER BY orders_average_order_value DESC NULLS LAST" elif 
query_type == Definitions.redshift: sa_sum = ( "COALESCE(CAST((SUM(DISTINCT (CAST(FLOOR(COALESCE(orders.revenue, 0) " @@ -116,7 +116,7 @@ def test_query_average_with_sql(connection, query_type: str): "::NUMERIC(38, 0))) AS DOUBLE PRECISION) " "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0)" ) - order_by = " ORDER BY orders_average_order_value DESC" + order_by = " ORDER BY orders_average_order_value DESC NULLS LAST" elif query_type == Definitions.bigquery: sa_sum = ( "COALESCE(CAST((SUM(DISTINCT (CAST(FLOOR(COALESCE(orders.revenue, 0) " @@ -155,7 +155,7 @@ def test_query_number_with_sql(connection, query_type): "% 1.0e27)::NUMERIC(38, 0))) AS DOUBLE PRECISION) " "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0)" ) - order_by = " ORDER BY customers_total_sessions_divide DESC" + order_by = " ORDER BY customers_total_sessions_divide DESC NULLS LAST" elif query_type == Definitions.redshift: sa_sum = ( "COALESCE(CAST((SUM(DISTINCT (CAST(FLOOR(COALESCE(case when (customers.is_churned)=false then customers.total_sessions end, 0) " # noqa @@ -165,7 +165,7 @@ def test_query_number_with_sql(connection, query_type): "::NUMERIC(38, 0))) AS DOUBLE PRECISION) " "/ CAST((1000000*1.0) AS DOUBLE PRECISION), 0)" ) - order_by = " ORDER BY customers_total_sessions_divide DESC" + order_by = " ORDER BY customers_total_sessions_divide DESC NULLS LAST" elif query_type == Definitions.bigquery: sa_sum = ( "COALESCE(CAST((SUM(DISTINCT (CAST(FLOOR(COALESCE(case when (customers.is_churned)=false then customers.total_sessions end, 0) " # noqa From 68a4f2d28bf40c06329d4c97e7ae28e1ce27323b Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Fri, 2 Aug 2024 16:55:36 -0600 Subject: [PATCH 11/53] Release v0.12.29 From 0a2fdd39cf787143b635ca45db2478174b32cb6e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:53:19 -0600 Subject: [PATCH 12/53] Bump setuptools from 69.2.0 to 70.0.0 (#208) Bumps [setuptools](https://github.com/pypa/setuptools) from 69.2.0 to 70.0.0. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst) - [Commits](https://github.com/pypa/setuptools/compare/v69.2.0...v70.0.0) --- updated-dependencies: - dependency-name: setuptools dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/poetry.lock b/poetry.lock index fcb7dec..b4100db 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "asn1crypto" @@ -965,7 +965,7 @@ license = ["ukkonen"] name = "idna" version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" -optional = false +optional = true python-versions = ">=3.5" files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, @@ -1107,7 +1107,6 @@ files = [ {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, - {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, @@ -2062,19 +2061,18 @@ asn1crypto = ">=1.5.1" [[package]] name = "setuptools" -version = "69.2.0" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2371,4 +2369,4 @@ snowflake = ["pyarrow", "snowflake-connector-python"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1, <3.12" -content-hash = "aaf5f47b21bf1d25d91610bca4f5ea49d43f153e07446f62ae7e8a3d240275a8" +content-hash = "e5bbcc744cbb3d63108c1e2d0e1c17a40ba05c93fc3f0e9f2e920cbc439b6944" From 7df717971207e0723020d000fb52aa54f271aff4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:53:33 -0600 Subject: [PATCH 13/53] Bump certifi from 2024.2.2 to 2024.7.4 (#206) Bumps [certifi](https://github.com/certifi/python-certifi) from 2024.2.2 to 2024.7.4. - [Commits](https://github.com/certifi/python-certifi/compare/2024.02.02...2024.07.04) --- updated-dependencies: - dependency-name: certifi dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index b4100db..dbb9088 100644 --- a/poetry.lock +++ b/poetry.lock @@ -161,13 +161,13 @@ files = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." -optional = true +optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] From dddd72140d5983c96c81b8a828954a2343d28cf8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:53:47 -0600 Subject: [PATCH 14/53] Bump urllib3 from 1.26.18 to 1.26.19 (#199) Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.18 to 1.26.19. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/1.26.19/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.26.18...1.26.19) --- updated-dependencies: - dependency-name: urllib3 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index dbb9088..6720707 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2325,13 +2325,13 @@ files = [ [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.19" description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, + {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, ] [package.extras] From aa38087f5340a37f3891e446dc1750fff6f556be Mon Sep 17 00:00:00 2001 From: Cole French <16979554+ColeFrench@users.noreply.github.com> Date: Mon, 5 Aug 2024 14:34:08 -0400 Subject: [PATCH 15/53] Add date formats (#218) Fix overridden valid formats --- metrics_layer/core/model/field.py | 20 -------------------- tests/test_project_validation.py | 4 ++-- 2 files changed, 2 insertions(+), 22 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 01d8c8d..586da24 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -83,26 +83,6 @@ "quarter", "year", ] -VALID_VALUE_FORMAT_NAMES = [ - "decimal_0", - "decimal_1", - "decimal_2", - "decimal_pct_0", - "decimal_pct_1", - "decimal_pct_2", - "percent_0", - "percent_1", - "percent_2", - "eur", - "eur_0", - "eur_1", - "eur_2", - "usd", - "usd_0", - "usd_1", - "usd_2", - "string", -] if TYPE_CHECKING: from metrics_layer.core.model.view import View diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index 9247180..a975461 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -1322,7 +1322,7 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "Valid value_format_names are: ['decimal_0', 'decimal_1', 'decimal_2', " "'decimal_pct_0', 'decimal_pct_1', 'decimal_pct_2', 'percent_0', 'percent_1', " "'percent_2', 'eur', 'eur_0', 'eur_1', 'eur_2', 'usd', 'usd_0', 'usd_1', " - "'usd_2', 'string']" + "'usd_2', 'string', 'date', 'week', 'month', 'quarter', 'year']" ], ), ( @@ -1334,7 +1334,7 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "Valid value_format_names are: ['decimal_0', 'decimal_1', 'decimal_2', " "'decimal_pct_0', 'decimal_pct_1', 'decimal_pct_2', 'percent_0', 'percent_1', " "'percent_2', 'eur', 'eur_0', 'eur_1', 'eur_2', 'usd', 'usd_0', 'usd_1', " - "'usd_2', 'string']" + "'usd_2', 'string', 'date', 'week', 'month', 'quarter', 'year']" ], ), ( From 75a6e889d551e08c198b526c9e7eddd16e256f6a Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Mon, 5 Aug 2024 14:12:14 -0600 Subject: [PATCH 16/53] make week of month and week index correspond to the set week start day (#220) * make week of month and week index 
correspond to the set week start day * bump version --- metrics_layer/core/model/field.py | 56 ++++++++++++++++++++----------- pyproject.toml | 2 +- tests/test_field_mappings.py | 2 +- tests/test_simple_query.py | 47 +++++++++++++++++--------- 4 files changed, 70 insertions(+), 37 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 586da24..0c3c282 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -1018,9 +1018,12 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): f"DATE_TRUNC('QUARTER', {self._fiscal_offset_to_timestamp(s, qt)})" ), "fiscal_year": lambda s, qt: f"DATE_TRUNC('YEAR', {self._fiscal_offset_to_timestamp(s, qt)})", - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM {s})", - "week_of_month": lambda s, qt: ( # noqa - f"EXTRACT(WEEK FROM {s}) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH', {s})) + 1" + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s, qt)})" + ), + "week_of_month": lambda s, qt: ( + f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s,qt)}) - EXTRACT(WEEK FROM" + f" DATE_TRUNC('MONTH', {self._week_dimension_group_time_sql(s,qt)})) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM {s})", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1056,10 +1059,13 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): "fiscal_year": lambda s, qt: ( f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" ), - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP))", - "week_of_month": lambda s, qt: ( # noqa - f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST({s} AS TIMESTAMP))) + 1" + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))" + ), + "week_of_month": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP)) -" + " EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1095,10 +1101,13 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): "fiscal_year": lambda s, qt: ( f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" ), - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP))", + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))" + ), "week_of_month": lambda s, qt: ( # noqa - f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST({s} AS TIMESTAMP))) + 1" + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP)) -" + " EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1134,10 +1143,13 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): "fiscal_year": lambda s, qt: ( f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" ), - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP))", - "week_of_month": lambda s, qt: ( # noqa - f"EXTRACT(WEEK 
FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST({s} AS TIMESTAMP))) + 1" + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))" + ), + "week_of_month": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP)) -" + " EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1185,10 +1197,13 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): f"DATEADD(YEAR, DATEDIFF(YEAR, 0, CAST({self._fiscal_offset_to_timestamp(s, qt)} AS" " DATE)), 0)" ), - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM CAST({s} AS DATE))", - "week_of_month": lambda s, qt: ( # noqa - f"EXTRACT(WEEK FROM CAST({s} AS DATE)) - EXTRACT(WEEK FROM DATEADD(MONTH, DATEDIFF(MONTH," - f" 0, CAST({s} AS DATE)), 0)) + 1" + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE))" + ), + "week_of_month": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE)) -" + " EXTRACT(WEEK FROM DATEADD(MONTH, DATEDIFF(MONTH, 0," + f" CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE)), 0)) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS DATE))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1239,9 +1254,10 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): f"CAST(DATE_TRUNC(CAST({self._fiscal_offset_to_timestamp(s, qt)} AS DATE), YEAR) AS" f" {self.datatype.upper()})" ), - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM {s})", + "week_index": lambda s, qt: f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s,qt)})", "week_of_month": lambda s, qt: ( - f"EXTRACT(WEEK FROM {s}) - EXTRACT(WEEK FROM DATE_TRUNC(CAST({s} AS DATE), MONTH)) + 1" + f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s,qt)}) - EXTRACT(WEEK FROM" + f" DATE_TRUNC(CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE), MONTH)) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM {s})", "fiscal_month_of_year_index": lambda s, qt: ( diff --git a/pyproject.toml b/pyproject.toml index 983bd5b..f45cd41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.29" +version = "0.12.30" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_field_mappings.py b/tests/test_field_mappings.py index 4f0b2f5..29db0cb 100644 --- a/tests/test_field_mappings.py +++ b/tests/test_field_mappings.py @@ -54,7 +54,7 @@ def test_mapping_metric_mapped_date_and_filter(connection, time_grain): elif time_grain == "week": date_part = "DATE_TRUNC('WEEK', CAST(orders.order_date AS DATE))" elif time_grain == "week_of_year": - date_part = "EXTRACT(WEEK FROM orders.order_date)" + date_part = "EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(orders.order_date AS DATE)))" elif time_grain == "month": date_part = "DATE_TRUNC('MONTH', orders.order_date)" elif time_grain == "month_of_year": diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index 70e7e80..b000775 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -831,11 +831,12 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "fiscal_month_of_year_index": f"EXTRACT(MONTH FROM DATEADD(MONTH, 1, simple.order_date))", "fiscal_month_index": f"EXTRACT(MONTH FROM DATEADD(MONTH, 1, simple.order_date))", "fiscal_quarter_of_year": "EXTRACT(QUARTER FROM DATEADD(MONTH, 1, simple.order_date))", - "week_index": f"EXTRACT(WEEK FROM simple.order_date)", - "week_of_year": f"EXTRACT(WEEK FROM simple.order_date)", - "week_of_month": ( # noqa - f"EXTRACT(WEEK FROM simple.order_date) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" simple.order_date)) + 1" + "week_index": f"EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) + 1) - 1)", + "week_of_year": f"EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) + 1) - 1)", + "week_of_month": ( + f"EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) + 1) - 1) -" + f" EXTRACT(WEEK FROM DATE_TRUNC('MONTH', DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) +" + f" 1) - 1)) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM simple.order_date)", "month_index": f"EXTRACT(MONTH FROM simple.order_date)", @@ -881,10 +882,15 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): ), "fiscal_month_index": "EXTRACT(MONTH FROM CAST(DATEADD(MONTH, 1, simple.order_date) AS DATE))", "fiscal_quarter_of_year": "DATEPART(QUARTER, CAST(DATEADD(MONTH, 1, simple.order_date) AS DATE))", - "week_index": f"EXTRACT(WEEK FROM CAST(simple.order_date AS DATE))", - "week_of_month": ( # noqa - f"EXTRACT(WEEK FROM CAST(simple.order_date AS DATE)) - EXTRACT(WEEK FROM DATEADD(MONTH," - f" DATEDIFF(MONTH, 0, CAST(simple.order_date AS DATE)), 0)) + 1" + "week_index": ( + f"EXTRACT(WEEK FROM CAST(DATEADD(DAY, -1, DATEADD(WEEK, DATEDIFF(WEEK, 0, DATEADD(DAY, 1," + f" CAST(simple.order_date AS DATE))), 0)) AS DATE))" + ), + "week_of_month": ( + f"EXTRACT(WEEK FROM CAST(DATEADD(DAY, -1, DATEADD(WEEK, DATEDIFF(WEEK, 0, DATEADD(DAY, 1," + f" CAST(simple.order_date AS DATE))), 0)) AS DATE)) - EXTRACT(WEEK FROM DATEADD(MONTH," + f" DATEDIFF(MONTH, 0, CAST(DATEADD(DAY, -1, DATEADD(WEEK, DATEDIFF(WEEK, 0, DATEADD(DAY, 1," + f" CAST(simple.order_date AS DATE))), 0)) AS DATE)), 0)) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM CAST(simple.order_date AS DATE))", "month_of_year": "LEFT(DATENAME(MONTH, CAST(simple.order_date AS DATE)), 3)", @@ -924,10 +930,15 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "fiscal_quarter_of_year": ( # noqa "EXTRACT(QUARTER FROM CAST(simple.order_date + INTERVAL '1' MONTH AS TIMESTAMP))" ), - 
"week_index": f"EXTRACT(WEEK FROM CAST(simple.order_date AS TIMESTAMP))", + "week_index": ( + f"EXTRACT(WEEK FROM CAST(DATE_TRUNC('WEEK', CAST(simple.order_date AS TIMESTAMP) + INTERVAL" + f" '1' DAY) - INTERVAL '1' DAY AS TIMESTAMP))" + ), "week_of_month": ( # noqa - f"EXTRACT(WEEK FROM CAST(simple.order_date AS TIMESTAMP)) - EXTRACT(WEEK FROM" - f" DATE_TRUNC('MONTH', CAST(simple.order_date AS TIMESTAMP))) + 1" + f"EXTRACT(WEEK FROM CAST(DATE_TRUNC('WEEK', CAST(simple.order_date AS TIMESTAMP) + INTERVAL" + f" '1' DAY) - INTERVAL '1' DAY AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST(DATE_TRUNC('WEEK', CAST(simple.order_date AS TIMESTAMP) + INTERVAL '1' DAY) -" + f" INTERVAL '1' DAY AS TIMESTAMP))) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM CAST(simple.order_date AS TIMESTAMP))", "month_of_year": "TO_CHAR(CAST(simple.order_date AS TIMESTAMP), 'Mon')", @@ -1008,10 +1019,15 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): ), "fiscal_month_index": f"EXTRACT(MONTH FROM DATE_ADD(simple.order_date, INTERVAL 1 MONTH))", "fiscal_quarter_of_year": "EXTRACT(QUARTER FROM DATE_ADD(simple.order_date, INTERVAL 1 MONTH))", - "week_index": f"EXTRACT(WEEK FROM simple.order_date)", + "week_index": ( + f"EXTRACT(WEEK FROM CAST(DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, WEEK) - 1 AS" + f" TIMESTAMP))" + ), "week_of_month": ( # noqa - f"EXTRACT(WEEK FROM simple.order_date) - EXTRACT(WEEK FROM DATE_TRUNC(CAST(simple.order_date" - f" AS DATE), MONTH)) + 1" + f"EXTRACT(WEEK FROM CAST(DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, WEEK) - 1 AS" + f" TIMESTAMP)) - EXTRACT(WEEK FROM" + f" DATE_TRUNC(CAST(CAST(DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, WEEK) - 1 AS" + f" TIMESTAMP) AS DATE), MONTH)) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM simple.order_date)", "month_of_year": "FORMAT_DATETIME('%B', CAST(simple.order_date as DATETIME))", @@ -1033,6 +1049,7 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): f"GROUP BY {date_result if query_type != Definitions.bigquery else f'simple_order_{group}'}" f"{order_by}{semi}" ) + print(query) assert query == correct correct_label = f"Order Created {group.replace('_', ' ').title()}" From 4cb8db13985a746403cfc952c67fb72a98f5e977 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Mon, 5 Aug 2024 14:38:42 -0600 Subject: [PATCH 17/53] Release v0.12.30 From 6c14fe3e0d04eac08440485eca3ab925e5344c51 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Mon, 5 Aug 2024 15:25:20 -0600 Subject: [PATCH 18/53] add matrix for tests for 3.12 (#132) * add matrix for tests for 3.12 * update to higher python version of adaptors * upgrade pendulum to support python 3.12 * bump a few versions to see if poetry will lock * add new lockfile * fix new handling introduced by pendulum day_of_week index changing --- .github/workflows/tests.yaml | 2 +- poetry.lock | 1302 ++++++++++++++++++---------------- pyproject.toml | 14 +- tests/test_dashboards.py | 12 +- 4 files changed, 706 insertions(+), 624 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 5a44d48..d6c5ec1 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v4 diff --git a/poetry.lock b/poetry.lock index 6720707..02a4d8a 
100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "asn1crypto" @@ -23,22 +23,50 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"}, + {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = 
"sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +tzdata = ["tzdata"] [[package]] name = "beautifulsoup4" @@ -63,33 +91,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.3.0" +version = "24.8.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = 
"black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -109,17 +137,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.81" +version = "1.34.154" description = "The AWS SDK for Python" optional = true python-versions = ">=3.8" files = [ - {file = "boto3-1.34.81-py3-none-any.whl", hash = "sha256:18224d206a8a775bcaa562d22ed3d07854934699190e12b52fcde87aac76a80e"}, - {file = "boto3-1.34.81.tar.gz", hash = "sha256:004dad209d37b3d2df88f41da13b7ad702a751904a335fac095897ff7a19f82b"}, + {file = "boto3-1.34.154-py3-none-any.whl", hash = "sha256:7ca22adef4c77ee128e1e1dc7d48bc9512a87cc6fe3d771b3f913d5ecd41c057"}, + {file = "boto3-1.34.154.tar.gz", hash = "sha256:864f06528c583dc7b02adf12db395ecfadbf9cb0da90e907e848ffb27128ce19"}, ] [package.dependencies] -botocore = ">=1.34.81,<1.35.0" +botocore = ">=1.34.154,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -128,13 +156,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.81" +version = "1.34.154" description = "Low-level, data-driven core of boto 3." optional = true python-versions = ">=3.8" files = [ - {file = "botocore-1.34.81-py3-none-any.whl", hash = "sha256:85f6fd7c5715eeef7a236c50947de00f57d72e7439daed1125491014b70fab01"}, - {file = "botocore-1.34.81.tar.gz", hash = "sha256:f79bf122566cc1f09d71cc9ac9fcf52d47ba48b761cbc3f064017b36a3c40eb8"}, + {file = "botocore-1.34.154-py3-none-any.whl", hash = "sha256:4eef4b1bb809b382ba9dc9c88f5fcc4a133f221a1acb693ee6bee4de9f325979"}, + {file = "botocore-1.34.154.tar.gz", hash = "sha256:64d9b4c85a504d77cb56dabb2ad717cd8e1717424a88edb458b01d1e5797262a"}, ] [package.dependencies] @@ -146,24 +174,24 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.19.19)"] +crt = ["awscrt (==0.20.11)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] name = "certifi" version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." 
-optional = false +optional = true python-versions = ">=3.6" files = [ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, @@ -371,63 +399,83 @@ files = [ [[package]] name = "coverage" -version = "7.4.4" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = 
"coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = 
"coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.extras] @@ -435,43 +483,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.5" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = true python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -514,18 +562,18 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.13.4" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -578,13 +626,13 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", [[package]] name = "google-api-core" -version = "2.18.0" +version = "2.19.1" description = "Google API client core library" optional = true python-versions = ">=3.7" files = [ - {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, - {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, + {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, + {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, ] [package.dependencies] @@ -599,7 +647,7 @@ grpcio-status = [ {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] @@ -609,13 +657,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.29.0" +version = "2.32.0" description = "Google Authentication Library" optional = true python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, + {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, ] [package.dependencies] @@ -632,13 +680,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-bigquery" -version = "3.20.1" +version = "3.25.0" description = "Google BigQuery API client library" optional = true 
python-versions = ">=3.7" files = [ - {file = "google-cloud-bigquery-3.20.1.tar.gz", hash = "sha256:318aa3abab5f1900ee24f63ba8bd02b9cdafaa942d738b4dc14a4ef2cc2d925f"}, - {file = "google_cloud_bigquery-3.20.1-py2.py3-none-any.whl", hash = "sha256:d3e62fe61138c658b8853c402e2d8fb9346c84e602e21e3a26584be10fc5b0a4"}, + {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, + {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, ] [package.dependencies] @@ -761,13 +809,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.7.0" +version = "2.7.1" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true -python-versions = ">= 3.7" +python-versions = ">=3.7" files = [ - {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, - {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, + {file = "google-resumable-media-2.7.1.tar.gz", hash = "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33"}, + {file = "google_resumable_media-2.7.1-py2.py3-none-any.whl", hash = "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c"}, ] [package.dependencies] @@ -779,17 +827,17 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.63.0" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = true python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, - {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] @@ -867,95 +915,87 @@ test = ["objgraph", "psutil"] [[package]] name = "grpcio" -version = "1.62.1" +version = "1.65.4" description = "HTTP/2-based RPC framework" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, - {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, - {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, - {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, - {file = 
"grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, - {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, - {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, - {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, - {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, - {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, - {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, - {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, - {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, - {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, - {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, - {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, - {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, - {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, - {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, - {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, - {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, - {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, - {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, - {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, - {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, - {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, - {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, - {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, - {file = 
"grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, - {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, - {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, - {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, - {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, - {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, - {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, - {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, - {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, - {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, - {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, - {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, - {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, - {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, - {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, - {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, - {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, - {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, - {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, - {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, - {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, - {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, - {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, - {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, - {file = 
"grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, - {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, + {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"}, + {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"}, + {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"}, + {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"}, + {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"}, + {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"}, + {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"}, + {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"}, + {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"}, + {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"}, + {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"}, + {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"}, + {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"}, + {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"}, + {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"}, + {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"}, + {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"}, + {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"}, + {file = "grpcio-1.65.4-cp312-cp312-linux_armv7l.whl", hash = "sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56"}, + {file = "grpcio-1.65.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2"}, + {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e"}, + {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74"}, + {file = 
"grpcio-1.65.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63"}, + {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a"}, + {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28"}, + {file = "grpcio-1.65.4-cp312-cp312-win32.whl", hash = "sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0"}, + {file = "grpcio-1.65.4-cp312-cp312-win_amd64.whl", hash = "sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416"}, + {file = "grpcio-1.65.4-cp38-cp38-linux_armv7l.whl", hash = "sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021"}, + {file = "grpcio-1.65.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52"}, + {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385"}, + {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864"}, + {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816"}, + {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524"}, + {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7"}, + {file = "grpcio-1.65.4-cp38-cp38-win32.whl", hash = "sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a"}, + {file = "grpcio-1.65.4-cp38-cp38-win_amd64.whl", hash = "sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f"}, + {file = "grpcio-1.65.4-cp39-cp39-linux_armv7l.whl", hash = "sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733"}, + {file = "grpcio-1.65.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4"}, + {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe"}, + {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1"}, + {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b"}, + {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2"}, + {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257"}, + {file = "grpcio-1.65.4-cp39-cp39-win32.whl", hash = "sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d"}, + {file = "grpcio-1.65.4-cp39-cp39-win_amd64.whl", hash = "sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9"}, + {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.62.1)"] +protobuf = ["grpcio-tools 
(>=1.65.4)"] [[package]] name = "grpcio-status" -version = "1.62.1" +version = "1.65.4" description = "Status proto mapping for gRPC" optional = true -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "grpcio-status-1.62.1.tar.gz", hash = "sha256:3431c8abbab0054912c41df5c72f03ddf3b7a67be8a287bb3c18a3456f96ff77"}, - {file = "grpcio_status-1.62.1-py3-none-any.whl", hash = "sha256:af0c3ab85da31669f21749e8d53d669c061ebc6ce5637be49a46edcb7aa8ab17"}, + {file = "grpcio_status-1.65.4-py3-none-any.whl", hash = "sha256:09dd3d84d1a3164940b1d8ed2177688cd8d25a423b1f5ad826b393653ea3de01"}, + {file = "grpcio_status-1.65.4.tar.gz", hash = "sha256:1803968c4f14d81b4b156f6f2b54e6959bac4f40b0a6ca2bd4c169978438b9dc"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.1" -protobuf = ">=4.21.6" +grpcio = ">=1.65.4" +protobuf = ">=5.26.1,<6.0dev" [[package]] name = "identify" -version = "2.5.35" +version = "2.6.0" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, ] [package.extras] @@ -972,6 +1012,24 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1010,165 +1068,153 @@ files = [ [[package]] name = "lxml" -version = "5.2.1" +version = "5.2.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = true python-versions = ">=3.6" files = [ - {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, - {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, - {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, - {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, - {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, - {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, - {file = 
"lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, - {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, - {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, - {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, - {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, - {file = 
"lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, - {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, - {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, - {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, - {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, - {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, - {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, - {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, - {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, - {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, - {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, - {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, - {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, - {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, - {file = 
"lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, - {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, - {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, - {file = 
"lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, - {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, 
+ {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = 
"lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + 
{file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, ] [package.extras] @@ -1235,18 +1281,15 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numpy" version = "1.24.4" @@ -1286,13 +1329,13 @@ files = [ [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = 
"sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -1356,62 +1399,130 @@ files = [ [[package]] name = "pendulum" -version = "2.1.2" +version = "3.0.0" description = "Python datetimes made easy" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, + {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, + {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, + {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, + {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, + {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, + {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, + {file = 
"pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, + {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, + {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, + {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, ] [package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" +"backports.zoneinfo" = {version = ">=0.2.1", markers = "python_version < \"3.9\""} +importlib-resources = {version = ">=5.9.0", markers = "python_version < \"3.9\""} +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1438,39 +1549,39 @@ virtualenv = ">=20.10.0" [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." optional = true -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.3" +version = "5.27.3" description = "" optional = true python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, + {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, + {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, + {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, + {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, + {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, + {file = "protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = "sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, + {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, + {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, ] [[package]] @@ -1567,51 +1678,54 @@ files = [ [[package]] name = "pyarrow" -version = "15.0.2" +version = "17.0.0" description = "Python library for Apache Arrow" optional = true python-versions = ">=3.8" files = [ - {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, - {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, - {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, - {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, - {file = 
"pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, - {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, - {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, - {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, - {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, - {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, - {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, - {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, - {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, - {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, - {file = 
"pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, - {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, - {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, + {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, + {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, + {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, + {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, + {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, + {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, ] [package.dependencies] -numpy = ">=1.16.6,<2" +numpy = ">=1.16.6" + +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] [[package]] name = "pyasn1" @@ -1673,34 +1787,34 @@ files = [ [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = 
["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyopenssl" -version = "24.1.0" +version = "24.2.1" description = "Python wrapper module around the OpenSSL library" optional = true python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = "sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"}, - {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"}, + {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, + {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, ] [package.dependencies] -cryptography = ">=41.0.5,<43" +cryptography = ">=41.0.5,<44" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] @@ -1821,17 +1935,6 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - [[package]] name = "pyyaml" version = "6.0.1" @@ -1894,12 +1997,12 @@ files = [ [[package]] name = "redshift-connector" -version = "2.1.0" +version = "2.1.3" description = "Redshift interface library" optional = true python-versions = ">=3.6" files = [ - {file = "redshift_connector-2.1.0-py3-none-any.whl", hash = "sha256:3073bd47853ea2bc3d03ce984b9d73cb7c9623a5eaa3a68de52887a58bc29be1"}, + {file = "redshift_connector-2.1.3-py3-none-any.whl", hash = "sha256:be168c5294ed0bc626e2edb09230ff117ca752b9d0a85923d8356ba8fe5f208d"}, ] [package.dependencies] @@ -1918,13 +2021,13 @@ full = ["numpy", "pandas"] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2030,13 +2133,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.1" +version = "0.10.2" description = "An Amazon S3 Transfer Manager" optional = true -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, - {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, ] [package.dependencies] @@ -2047,13 +2150,13 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "scramp" -version = "1.4.4" +version = "1.4.5" description = "An implementation of the SCRAM protocol." optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "scramp-1.4.4-py3-none-any.whl", hash = "sha256:b142312df7c2977241d951318b7ee923d6b7a4f75ba0f05b621ece1ed616faa3"}, - {file = "scramp-1.4.4.tar.gz", hash = "sha256:b7022a140040f33cf863ab2657917ed05287a807b917950489b89b9f685d59bc"}, + {file = "scramp-1.4.5-py3-none-any.whl", hash = "sha256:50e37c464fc67f37994e35bee4151e3d8f9320e9c204fca83a5d313c121bbbe7"}, + {file = "scramp-1.4.5.tar.gz", hash = "sha256:be3fbe774ca577a7a658117dca014e5d254d158cecae3dd60332dfe33ce6d78e"}, ] [package.dependencies] @@ -2061,18 +2164,19 @@ asn1crypto = ">=1.5.1" [[package]] name = "setuptools" -version = "70.0.0" +version = "72.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false +optional = true python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, + {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2098,37 +2202,37 @@ files = [ [[package]] name = "snowflake-connector-python" -version = "3.8.1" +version = "3.12.0" description = "Snowflake Connector for Python" optional = true python-versions = ">=3.8" files = [ - {file = "snowflake-connector-python-3.8.1.tar.gz", hash = "sha256:9bcce1a0d9e212ceecf45edcea02d18e205a95f88cc0470ada4a8bacb54247d5"}, - {file = "snowflake_connector_python-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6841c7b37213c1d3caea044b4a5f3f8486911aaa39f2a99a750c0a066df6b389"}, - {file = "snowflake_connector_python-3.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:4de973d0d5a579f0eea82e0908123d72e27ee89bd743f39630ee6e92de0c2d8c"}, - {file = "snowflake_connector_python-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f3608833854427f5611017ba4d7a46eed198b0481ffcaf058155c9cb4e576c"}, - {file = "snowflake_connector_python-3.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e7aebc0ea91d6f387d06b033302628b27aa5a5841aa41a3910343654ae8b318"}, - {file = "snowflake_connector_python-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:50a47e863a278d3716ae3fd7abbfcacc3fa6b855a2e0b3039157c06e2f0ea7d8"}, - {file = "snowflake_connector_python-3.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76844277162b84aefd3dcf345eb0bf62aab24b1b8a8cd774eb40478b7c9533c0"}, - {file = "snowflake_connector_python-3.8.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:646cb614d41f3285d5636ececff628495f21e9b50a1310e17ac8c90ea63c139a"}, - {file = "snowflake_connector_python-3.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56c12c079a9fff235fe6fbfd905af7999180b051f8f414a4887c7752bc4961c0"}, - {file = "snowflake_connector_python-3.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ddadb2e0e854c5737ba883b5b1cc29153792224b7f357e772388995e6b2f382"}, - {file = "snowflake_connector_python-3.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:56b6b5c09f305022abb9964ca31430f7ce1be8883259d3fdade7d7bd40419656"}, - {file = 
"snowflake_connector_python-3.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09da3ab1d7e3be5552dabacacabd9cfcdd4c620828aacf6077d1c408971512c"}, - {file = "snowflake_connector_python-3.8.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:3a53ecf180c23cb1f7c988277c2a76579bfbc5b35d60c3ea6f909bbac5f83c3f"}, - {file = "snowflake_connector_python-3.8.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a37c3b1a059cb6320792badfb1d1f131d8a4eb799fb4733fde5c90745af390"}, - {file = "snowflake_connector_python-3.8.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ac25dc4866700e1a2b47620540a4e40f469bcf718226fc3585199f85228f5b5"}, - {file = "snowflake_connector_python-3.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:103fce3b03b155d727607b620de64c5fbf94539fb45ca2653b768aab0ecf36f1"}, - {file = "snowflake_connector_python-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:63152ac063ddcd9f5dc9ce5fd4c72d9514bc1250b852c3d18504aa9b2b24d5e9"}, - {file = "snowflake_connector_python-3.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:322b899861cf104a3548e42a44a548de9a7e21510fdd1849da9e3e96641f9b90"}, - {file = "snowflake_connector_python-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3347743fd902c801e9e2dbd1d3727228c2fe7dcf6945ed9f48c961589c24102"}, - {file = "snowflake_connector_python-3.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5ffc3a81f7ee240f42ca118d9d42738929fc196c40b8df35c7b698f94148254"}, - {file = "snowflake_connector_python-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:3d7aa9c3637a519f493fd470bc03f1eb0164b0438f9ccb9d5e98ef9a48fe4036"}, - {file = "snowflake_connector_python-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:167abcbed9f9e33b28ca47cee7397183cb06dc54bae616466e7854a903607370"}, - {file = "snowflake_connector_python-3.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:97ae86e35886de231e5adf303549995b7a0a7e11fe992d5bdb5e5c42fbbecb6d"}, - {file = "snowflake_connector_python-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2340f382578edc3a281527e399b2be24e0417f6980a785cef6b33d505433a6"}, - {file = "snowflake_connector_python-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9610fd750ee0b24249b8c28eff81491af1d0bfc7d82e71f47568dca0f12572cc"}, - {file = "snowflake_connector_python-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:b8d8ca8dc832a87d361ad3816e313a7f3fdeaa62e00070ffe55528b19bb49c0b"}, + {file = "snowflake_connector_python-3.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edf28df8be24845cfcec653b160d2b8c048d5cb0c85b051f4957f0b0aae1e493"}, + {file = "snowflake_connector_python-3.12.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:c2bbdbbb028d7d542815ed68b28200728aa6707b9354e3a447fdc8c7a34bcdce"}, + {file = "snowflake_connector_python-3.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92c9a19a23033df709e63baa6ccdf6eff65210143a8c9c67a0a24bba862034b"}, + {file = "snowflake_connector_python-3.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d33d845e4c68d33e73a9f64100b53342c18607ac25c4f2a27dbed2078078d12"}, + {file = "snowflake_connector_python-3.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:c1d43bfaa885aab712f14f9ced232abe5023adfca7fbf7a7a0768a162523e9d6"}, + {file = "snowflake_connector_python-3.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:6a0cc03fb44808f3ddc464ee272f141564c8daea14475e1df5c2a54c7acb2ddf"}, + {file = "snowflake_connector_python-3.12.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:564752d22accc43351b50f676b03aa9f2b441be2641e3cf9a7790faf54eff210"}, + {file = "snowflake_connector_python-3.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27d6a1a180832c7b551d38df1094a70fb79917f90c57893b9ce7e219362f6c1"}, + {file = "snowflake_connector_python-3.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60675fd83022daef40541d717d006695149c512b283e35741b61a4f48ba537e9"}, + {file = "snowflake_connector_python-3.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a567b937b0179d1e95a8ad7200943d286f38d0e76df90af10f747ed9149dd681"}, + {file = "snowflake_connector_python-3.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc333fcfc383a8cab8bd7e890a7c76703e26598925a05954c75d2c50bff06071"}, + {file = "snowflake_connector_python-3.12.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:3c06bfba4a329fd4ec3feba0ada7b31f86ed4e156a9766bced52c2814d001fd2"}, + {file = "snowflake_connector_python-3.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acf84b07dd2f22adfaa7d52ccd6be1722bd5a0e2b1a9b08681c3851bea05768f"}, + {file = "snowflake_connector_python-3.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:019b8a61e5af689451d502df2af8793fc6f20b5b0a3548fd8ad03aa8b62e7f2d"}, + {file = "snowflake_connector_python-3.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:45f9b9678694f10571c1f7ec7d0d741663ad0ff61a71ae53aa71be47faa19978"}, + {file = "snowflake_connector_python-3.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:21cbaef51fbed719de01155079df3d004cee963d3723c1ebdb8980923f893e04"}, + {file = "snowflake_connector_python-3.12.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:c86d4a7d49f42ea0bb34218cb49c401ba995892abcfb509ea749cd0a74a8b28a"}, + {file = "snowflake_connector_python-3.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aa34aec0f96d7fc7271e38c68ee0d58529875d05e084afb4fc8f09b694643c4"}, + {file = "snowflake_connector_python-3.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2f621030b26a220711c64518e00059736b79c1da53afa6a8ce68b31c1941014"}, + {file = "snowflake_connector_python-3.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:368e46f1d079056e028bfe8f7171fabef62eb00bcf590df294220b7a5be5d56c"}, + {file = "snowflake_connector_python-3.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2735e16fffded0900f7484030613b79699afc1ed4e5cff086bd139a0ce965594"}, + {file = "snowflake_connector_python-3.12.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:c06a8e2e12284b4a4d462d0073fb4983e90ad2d6a2382926f9e3409f06c81d0b"}, + {file = "snowflake_connector_python-3.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:880e6e95171cd7374a86da14132fdfc4b622665f134561f4d43e3f35bdacf67d"}, + {file = "snowflake_connector_python-3.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e245b84c164433454ce49d78e6bcf5c2e62e25657358bf34ab533166e588f80"}, + {file = "snowflake_connector_python-3.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:85a5565b8813d164f33f32a825a70443008fe009aae050307f128a1ca892f9ed"}, + {file = "snowflake_connector_python-3.12.0.tar.gz", hash = "sha256:320e0b6f8cd8556e19c8b87249c931700238b2958313afc7a33108d67da87d82"}, ] [package.dependencies] @@ -2153,7 +2257,7 @@ urllib3 = {version 
= ">=1.21.1,<2.0.0", markers = "python_version < \"3.10\""} [package.extras] development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"] pandas = ["pandas (>=1.0.0,<3.0.0)", "pyarrow"] -secure-local-storage = ["keyring (>=23.1.0,<25.0.0)"] +secure-local-storage = ["keyring (>=23.1.0,<26.0.0)"] [[package]] name = "sortedcontainers" @@ -2179,64 +2283,16 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.29" +version = "2.0.32" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, - {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, - {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, + {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" [package.extras] @@ -2266,13 +2322,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlparse" -version = "0.5.0" +version = "0.5.1" description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, - {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, + {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, + {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, ] [package.extras] @@ -2303,31 +2359,42 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.4" +version = "0.13.0" description = "Style preserving TOML library" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "urllib3" version = "1.26.19" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false +optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, @@ -2341,13 +2408,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -2356,9 +2423,24 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [extras] all = ["google-cloud-bigquery", "psycopg2-binary", "pyarrow", "redshift-connector", "snowflake-connector-python"] bigquery = ["google-cloud-bigquery", "pyarrow"] @@ -2368,5 +2450,5 @@ snowflake = ["pyarrow", "snowflake-connector-python"] [metadata] lock-version = "2.0" -python-versions = ">=3.8.1, <3.12" -content-hash = "e5bbcc744cbb3d63108c1e2d0e1c17a40ba05c93fc3f0e9f2e920cbc439b6944" +python-versions = ">=3.8.1, <3.13" +content-hash = "e04729b9bcb87221c0b46456c02c33117065fdc54cd9e8d1ab90dc6408682400" diff --git a/pyproject.toml b/pyproject.toml index f45cd41..8d258e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,23 +11,23 @@ repository = "https://github.com/Zenlytic/metrics_layer" documentation = "https://docs.zenlytic.com" [tool.poetry.dependencies] -python = ">=3.8.1, <3.12" +python = ">=3.8.1, <3.13" GitPython = ">=3.1.20" sqlparse = ">=0.4.1" PyPika = "^0.48.8" pandas = "^1.5.2" numpy = "^1.24.4" 
-redshift-connector = {version = ">=2.0.917", optional = true} -snowflake-connector-python = {version = ">=3.5.0", optional = true} +redshift-connector = {version = "^2.0.905", optional = true} +snowflake-connector-python = {version = "^3.5.0", optional = true} pyarrow = {version = ">=10", optional = true} -google-cloud-bigquery = {version = ">=2.24.1", optional = true} -psycopg2-binary = {version = ">=2.9.3", optional = true} -SQLAlchemy = {version = ">=1.3.18", optional = true} +google-cloud-bigquery = {version = "^3.13.0", optional = true} +psycopg2-binary = {version = "^2.9.9", optional = true} +SQLAlchemy = {version = "^2.0.21", optional = true} networkx = "^2.8.2" click = "^8.0" colorama = "^0.4.4" "ruamel.yaml" = "^0.17.20" -pendulum = "^2.1.2" +pendulum = "^3.0.0" PyYAML = "^6.0" metricflow-to-zenlytic = "^0.1.5" diff --git a/tests/test_dashboards.py b/tests/test_dashboards.py index 34a5522..a9ed76d 100644 --- a/tests/test_dashboards.py +++ b/tests/test_dashboards.py @@ -109,7 +109,7 @@ def test_dashboard_filter_timezone(fresh_project): element_parsed_filters = last_element.parsed_filters() # These are 24 hours apart so this test should always fail if we get the wrong timezone - to_sub = 1 if pendulum.now("Pacific/Apia").day_of_week != 1 else 0 + to_sub = 1 if pendulum.now("Pacific/Apia").day_of_week != 0 else 0 start = pendulum.now("Pacific/Apia").start_of("week").strftime(date_format) end = pendulum.now("Pacific/Apia").subtract(days=to_sub).end_of("day").strftime(date_format) wrong_end = pendulum.now("Pacific/Niue").subtract(days=to_sub).end_of("day").strftime(date_format) @@ -127,7 +127,7 @@ def test_dashboard_filter_timezone(fresh_project): assert parsed_filters[1]["value"] != wrong_end -@pytest.mark.query +@pytest.mark.queryy @pytest.mark.parametrize( "raw_filter_dict", [ @@ -332,7 +332,7 @@ def test_dashboard_filter_processing(connection, raw_filter_dict): .strftime(date_format), "2021-02-03 until this month": pendulum.now("UTC").end_of("month").strftime(date_format), "week to date": pendulum.now("UTC") - .subtract(days=1 if pendulum.now("UTC").day_of_week != 1 else 0) + .subtract(days=1 if pendulum.now("UTC").day_of_week != 0 else 0) .end_of("day") .strftime(date_format), "month to date": pendulum.now("UTC") @@ -351,7 +351,7 @@ def test_dashboard_filter_processing(connection, raw_filter_dict): .start_of("week") .add( days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 - if pendulum.now("UTC").day_of_week != 1 + if pendulum.now("UTC").day_of_week != 0 else 0 ) .end_of("day") @@ -361,7 +361,7 @@ def test_dashboard_filter_processing(connection, raw_filter_dict): .start_of("week") .add( days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 - if pendulum.now("UTC").day_of_week != 1 + if pendulum.now("UTC").day_of_week != 0 else 0 ) .end_of("day") @@ -371,7 +371,7 @@ def test_dashboard_filter_processing(connection, raw_filter_dict): .start_of("month") .add( days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("month")).days - 1 - if pendulum.now("UTC").day != 1 + if pendulum.now("UTC").day != 0 else 0 ) .end_of("day") From 0a682227d53e86155be6e394f41d713918828e43 Mon Sep 17 00:00:00 2001 From: Joe Van Gundy Date: Mon, 5 Aug 2024 17:40:48 -0400 Subject: [PATCH 19/53] feature/add image url as a column format (#216) * feature/image-column-format * Add image URL type to tests --------- Co-authored-by: Joe Van Gundy Co-authored-by: Cole French <16979554+ColeFrench@users.noreply.github.com> Co-authored-by: Paul Blankley --- 
metrics_layer/core/model/field.py | 1 + tests/test_project_validation.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 0c3c282..a5be0cb 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -77,6 +77,7 @@ "usd_1", "usd_2", "string", + "image_url", "date", "week", "month", diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index a975461..b1b804c 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -1322,7 +1322,7 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "Valid value_format_names are: ['decimal_0', 'decimal_1', 'decimal_2', " "'decimal_pct_0', 'decimal_pct_1', 'decimal_pct_2', 'percent_0', 'percent_1', " "'percent_2', 'eur', 'eur_0', 'eur_1', 'eur_2', 'usd', 'usd_0', 'usd_1', " - "'usd_2', 'string', 'date', 'week', 'month', 'quarter', 'year']" + "'usd_2', 'string', 'image_url', 'date', 'week', 'month', 'quarter', 'year']" ], ), ( @@ -1334,7 +1334,7 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "Valid value_format_names are: ['decimal_0', 'decimal_1', 'decimal_2', " "'decimal_pct_0', 'decimal_pct_1', 'decimal_pct_2', 'percent_0', 'percent_1', " "'percent_2', 'eur', 'eur_0', 'eur_1', 'eur_2', 'usd', 'usd_0', 'usd_1', " - "'usd_2', 'string', 'date', 'week', 'month', 'quarter', 'year']" + "'usd_2', 'string', 'image_url', 'date', 'week', 'month', 'quarter', 'year']" ], ), ( From 0ffe6d50f18beb7881d0d038a93d4cd52aa4f7af Mon Sep 17 00:00:00 2001 From: Joe Van Gundy Date: Tue, 6 Aug 2024 10:33:21 -0400 Subject: [PATCH 20/53] refactor/renamed image_url to image_from_url (#221) * renamed to image_from_url * fix tests for new name --------- Co-authored-by: Joe Van Gundy Co-authored-by: Paul Blankley --- metrics_layer/core/model/field.py | 2 +- tests/test_project_validation.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index a5be0cb..21e1866 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -77,7 +77,7 @@ "usd_1", "usd_2", "string", - "image_url", + "image_from_url", "date", "week", "month", diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index b1b804c..131fa10 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -1322,7 +1322,7 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "Valid value_format_names are: ['decimal_0', 'decimal_1', 'decimal_2', " "'decimal_pct_0', 'decimal_pct_1', 'decimal_pct_2', 'percent_0', 'percent_1', " "'percent_2', 'eur', 'eur_0', 'eur_1', 'eur_2', 'usd', 'usd_0', 'usd_1', " - "'usd_2', 'string', 'image_url', 'date', 'week', 'month', 'quarter', 'year']" + "'usd_2', 'string', 'image_from_url', 'date', 'week', 'month', 'quarter', 'year']" ], ), ( @@ -1334,7 +1334,7 @@ def test_validation_with_replaced_view_properties(connection, name, value, error "Valid value_format_names are: ['decimal_0', 'decimal_1', 'decimal_2', " "'decimal_pct_0', 'decimal_pct_1', 'decimal_pct_2', 'percent_0', 'percent_1', " "'percent_2', 'eur', 'eur_0', 'eur_1', 'eur_2', 'usd', 'usd_0', 'usd_1', " - "'usd_2', 'string', 'image_url', 'date', 'week', 'month', 'quarter', 'year']" + "'usd_2', 'string', 'image_from_url', 'date', 'week', 'month', 'quarter', 'year']" ], ), ( From 
8b78d32aa52a51fb3bf9a9ae7e96e9dc4b30bd06 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:43:32 -0600 Subject: [PATCH 21/53] fix bug with mapping-only behavior based on the order of the query (#222) * fix bug with mapping-only behavior based on the order of the query * bump version --- metrics_layer/core/sql/resolve.py | 83 +++++++++++++++++++++++++++++-- pyproject.toml | 2 +- tests/test_dashboards.py | 2 +- tests/test_field_mappings.py | 25 ++++++++++ tests/test_simple_query.py | 2 +- 5 files changed, 108 insertions(+), 6 deletions(-) diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index 95cf72c..a6e5d6a 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -1,3 +1,4 @@ +from collections import defaultdict from copy import deepcopy from typing import List, Union @@ -212,11 +213,87 @@ def _resolve_mapped_fields(self): ) self._replace_mapped_field(name, replace_with) else: - for i, (name, mapped_field) in enumerate(self.mapping_lookup.items()): - if i == 0: + # This is the scenario where we only have mappings and no other fields in the query + + # First, we need to get all join graphs for all fields involved in the mappings. + # Since we have no "real" fields, we have no basis for required join graphs we + # must match, yet. This gives us the raw material to derive which join graphs + # overlap in the mappings. + check = defaultdict(list) + for name, mapped_field in self.mapping_lookup.items(): + for field_id in mapped_field["fields"]: + ref_field = self.project.get_field(field_id) + check[name].append((field_id, ref_field.join_graphs())) + + # Next, we iterate over all mappings and check if there is a valid join path + # between all fields in the mappings. + validity = defaultdict(dict) + # This is done by comparing a single mapping (name), then... + for name, field_info in check.items(): + # Looking at all fields that are present in that mapping name, and + # comparing them to all other fields in all other mappings + for field_id, join_graphs in field_info: + passed, points = 0, 0 + for other_name, other_field_info in check.items(): + if name != other_name: + for other_field_id, other_join_graphs in other_field_info: + if field_id != other_field_id: + # If the fields are joinable or mergeable, then we can form a + # valid query, and we can count them as 'passed' + if set(join_graphs).intersection(set(other_join_graphs)): + passed += 1 + # It's not enough just to pass though. There are some things we prefer, + # when there is no solid direction on which fields to choose. + + # 1. We prefer when fields are in the same view + points += ( + 1 if field_id.split(".")[0] == other_field_id.split(".")[0] else 0 + ) + + # 2. 
We prefer when fields can be joined over when they can only be merged + joinable_first = [j for j in join_graphs if "merged_result" not in j] + joinable_second = [ + j for j in other_join_graphs if "merged_result" not in j + ] + if set(joinable_first).intersection(set(joinable_second)): + points += 1 + # Once we have determined a field pair is valid, + # we can stop checking other fields under that mapping name + break + + # If at least one field in each of the other mapping names has 'passed' the check, + # then we can consider this mapping name as valid, and assign it the points + if passed == len(check) - 1: + validity[name][field_id] = points + + # If this exists and we have more than one mapping present + if validity and len(self.mapping_lookup) > 1: + replaced_mapping = None + all_items = [ + (name, *item) for name, field_info in validity.items() for item in field_info.items() + ] + # Sort the mappings by points, and then by field_id (so it is consistent in + # the event fields have the same points), and replace the highest ranking one + # and set its join graphs as the active ones all other mappings must adhere to. + for name, field_id, points in sorted(all_items, key=lambda x: (x[-1], x[1]), reverse=True): + replaced_mapping = name + replace_with = self.project.get_field(field_id) + self.field_lookup[name] = replace_with.join_graphs() + break + + self._replace_mapped_field(name, replace_with) + # In the event there is only one mapping, just pick the first field in the options + elif len(self.mapping_lookup) == 1: + for name, mapped_field in self.mapping_lookup.items(): + replaced_mapping = name replace_with = self.project.get_field(mapped_field["fields"][0]) self.field_lookup[name] = replace_with.join_graphs() - else: + self._replace_mapped_field(name, replace_with) + else: + raise QueryError("No valid join path found for mapped fields") + + for name, mapped_field in self.mapping_lookup.items(): + if name != replaced_mapping: mergeable_graphs, joinable_graphs = self._join_graphs_by_type(self.field_lookup) self._handle_invalid_merged_result(mergeable_graphs, joinable_graphs) replace_with = self.determine_field_to_replace_with( diff --git a/pyproject.toml b/pyproject.toml index 8d258e4..2a3a72a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.30" +version = "0.12.31" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_dashboards.py b/tests/test_dashboards.py index a9ed76d..a591da9 100644 --- a/tests/test_dashboards.py +++ b/tests/test_dashboards.py @@ -127,7 +127,7 @@ def test_dashboard_filter_timezone(fresh_project): assert parsed_filters[1]["value"] != wrong_end -@pytest.mark.queryy +@pytest.mark.query @pytest.mark.parametrize( "raw_filter_dict", [ diff --git a/tests/test_field_mappings.py b/tests/test_field_mappings.py index 29db0cb..82d8a30 100644 --- a/tests/test_field_mappings.py +++ b/tests/test_field_mappings.py @@ -1,4 +1,5 @@ import datetime +from copy import copy import pytest @@ -500,3 +501,27 @@ def test_mapping_defer_to_metric_canon_date_not_dim_only(connection): f"and {cte_1}.clicked_on_page_context_os={cte_2}.submitted_form_context_os;" ) assert query == correct + + +@pytest.mark.query +@pytest.mark.parametrize("dims", [["date", "context_os"], ["context_os", "date"]]) +def test_mapping_order_intolerance_in_dims(connection, dims): + input_dims = copy(dims) + query = connection.get_sql_query(metrics=[], dimensions=dims) + + if input_dims[0] == "date": + correct = ( + "SELECT DATE_TRUNC('DAY', submitted_form.session_date) as" + " submitted_form_session_date,submitted_form.context_os as submitted_form_context_os FROM" + " analytics.submitted_form submitted_form GROUP BY DATE_TRUNC('DAY'," + " submitted_form.session_date),submitted_form.context_os ORDER BY submitted_form_session_date ASC" + " NULLS LAST;" + ) + else: + correct = ( + "SELECT submitted_form.context_os as submitted_form_context_os,DATE_TRUNC('DAY'," + " submitted_form.session_date) as submitted_form_session_date FROM analytics.submitted_form" + " submitted_form GROUP BY submitted_form.context_os,DATE_TRUNC('DAY'," + " submitted_form.session_date) ORDER BY submitted_form_context_os ASC NULLS LAST;" + ) + assert query == correct diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index b000775..c0b9ed9 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -1651,7 +1651,7 @@ def test_simple_query_with_having_literal(connections): assert query == correct -@pytest.mark.queryy +@pytest.mark.query @pytest.mark.parametrize( "query_type", [ From 5b190227e68b48565f7ab5357cd88b506ea3fc4c Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 8 Aug 2024 15:43:58 -0600 Subject: [PATCH 22/53] Release v0.12.31 From cc8a8d5b6991c9753d8c3acce30ff1b241704056 Mon Sep 17 00:00:00 2001 From: Cole French <16979554+ColeFrench@users.noreply.github.com> Date: Tue, 13 Aug 2024 17:05:58 -0400 Subject: [PATCH 23/53] Fix/ignore repos (#224) * Prevent git from tracking downloaded repos * Revert "Prevent git from tracking downloaded repos" This reverts commit e419d223c7d475d8044bd8f6d5783b27db481686. 
* Prevent git from tracking downloaded repos --- metrics_layer/core/parse/.gitignore | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 metrics_layer/core/parse/.gitignore diff --git a/metrics_layer/core/parse/.gitignore b/metrics_layer/core/parse/.gitignore new file mode 100644 index 0000000..9194443 --- /dev/null +++ b/metrics_layer/core/parse/.gitignore @@ -0,0 +1,2 @@ +# Ignore downloaded data model repositories +*/ From 72bb03c748c2a36f251334d31682dd0749833115 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Tue, 13 Aug 2024 15:18:56 -0600 Subject: [PATCH 24/53] fix issue with mapping only field replacement (#225) * fix issue with mapping only field replacement * remove python 3.12 from tests because it took too long to install and run in CI --- .github/workflows/tests.yaml | 2 +- metrics_layer/core/sql/resolve.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index d6c5ec1..5a44d48 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.9', '3.10', '3.11'] steps: - uses: actions/checkout@v4 diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index a6e5d6a..07670b7 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -299,7 +299,7 @@ def _resolve_mapped_fields(self): replace_with = self.determine_field_to_replace_with( mapped_field, joinable_graphs, mergeable_graphs ) - self._replace_mapped_field(name, replace_with) + self._replace_mapped_field(name, replace_with) diff --git a/pyproject.toml b/pyproject.toml index 2a3a72a..d796eb7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.31" +version = "0.12.32" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] From 1a82084c5d7b907ded99325e9f1eb3d806f71088 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Tue, 13 Aug 2024 15:19:35 -0600 Subject: [PATCH 25/53] Release v0.12.32 From bb8bc649c5d745534ca928d6c823ecfa7b937f44 Mon Sep 17 00:00:00 2001 From: Cole French <16979554+ColeFrench@users.noreply.github.com> Date: Tue, 13 Aug 2024 21:55:13 -0400 Subject: [PATCH 26/53] Fix/add datetime tests (#226) --- .github/workflows/tests.yaml | 2 +- poetry.lock | 130 +++++++- pyproject.toml | 1 + pytest.ini | 3 + tests/test_dashboards.py | 615 ++++++++++++++++++----------------- 5 files changed, 459 insertions(+), 292 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 5a44d48..e460163 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -32,7 +32,7 @@ jobs: - name: Test with pytest run: | - pytest --cov=metrics_layer/ --cov-report=xml + pytest -m extra_dt --cov=metrics_layer/ --cov-report=xml - name: Report on code coverage uses: codecov/codecov-action@v4 diff --git a/poetry.lock b/poetry.lock index 02a4d8a..c3c8dff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1493,6 +1493,7 @@ files = [ "backports.zoneinfo" = {version = ">=0.2.1", markers = "python_version < \"3.9\""} importlib-resources = {version = ">=5.9.0", markers = "python_version < \"3.9\""} python-dateutil = ">=2.6" +time-machine = {version = ">=2.6.0", optional = true, markers = "implementation_name != \"pypy\" and extra == \"test\""} tzdata = ">=2020.1" [package.extras] @@ -2288,6 +2289,54 @@ description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, + {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, ] @@ -2335,6 +2384,85 @@ files = [ dev = ["build", "hatch"] doc = ["sphinx"] +[[package]] +name = "time-machine" +version = "2.15.0" +description = "Travel through time in your tests." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "time_machine-2.15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:892d016789b59950989b2db188dcd46cf16d34e8daf2343e33b679b0c5fd1001"}, + {file = "time_machine-2.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4428bdae507996aa3fdeb4727bca09e26306fa64a502e7335207252684516cbf"}, + {file = "time_machine-2.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0302568338c8bd333ed0698231dbb781b70ead1a5579b4ac734b9bf88313229f"}, + {file = "time_machine-2.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18fc4740073e67071472c48355775ec6d1b93af5c675524b7de2474e0dcd8741"}, + {file = "time_machine-2.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:768d33b484a35da93731cc99bdc926b539240a78673216cdc6306833d9072350"}, + {file = "time_machine-2.15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:73a8c8160d2a170dadcad5b82fb5ee53236a19cec0996651cf4d21da0a2574d5"}, + {file = "time_machine-2.15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:09fd839a321a92aa8183206c383b9725eaf4e0a28a70e4cb87db292b352eeefb"}, + {file = "time_machine-2.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:838a6d117739f1ae6ecc45ec630fa694f41a85c0d07b1f3b1db2a6cc52c1808b"}, + {file = "time_machine-2.15.0-cp310-cp310-win32.whl", hash = "sha256:d24d2ec74923b49bce7618e3e7762baa6be74e624d9829d5632321de102bf386"}, + {file = "time_machine-2.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:95c8e7036cf442480d0bf6f5fde371e1eb6dbbf5391d7bdb8db73bd8a732b538"}, + {file = "time_machine-2.15.0-cp310-cp310-win_arm64.whl", hash = "sha256:660810cd27a8a94cb5e845e8f28a95e70b01ff0c45466d394c4a0cba5a0ae279"}, + {file = "time_machine-2.15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:674097dd54a0bbd555e7927092c74428c4c07268ad52bca38cfccc3214707e50"}, + {file = "time_machine-2.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e83fd6112808d1d14d1a57397c6fa3bd71bb2f3b8800036e12366e3680819b9"}, + {file = "time_machine-2.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b095a1de40ca1afaeae8df3f45e26b645094a1912e6e6871e725fcf06ecdb74a"}, + {file = "time_machine-2.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4601fe7a6b74c6fd9207e614d9db2a20dd4befd4d314677a0feac13a67189707"}, + {file = "time_machine-2.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245ef73f9927b7d4909d554a6a0284dbc5dee9730adea599e430b37c9e9fa203"}, + {file = "time_machine-2.15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:704abc7f3403584cca9c01c5809812e0bd70632ea4251389fae4f45e11aad94f"}, + {file = "time_machine-2.15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6425001e50a0c82108caed438233066cea04d42a8fc9c49bfcf081a5b96e5b4e"}, + {file = "time_machine-2.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5d4073b754f90b19f28d036ec5143d3fca3a75e4d4241d78790a6178b00bb373"}, + {file = "time_machine-2.15.0-cp311-cp311-win32.whl", hash = "sha256:8817b0f7d7830215261b18db83c9c3ef1da6bb64da5c292d7c70b9a46e5a6745"}, + {file = "time_machine-2.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:ddad27a62df2ea47b7b483009fbfcf167a71d702cbd8e2eefd9ddc1c93146658"}, + {file = "time_machine-2.15.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:6f021aa2dbd8fbfe54d3fa2258518129108b7496922b3bcff2cf5991078eec67"}, + {file = "time_machine-2.15.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a22f47c34ee1fcf7d93a8c5c93135499aac879d9d5d8f820bd28571a30fdabcd"}, + {file = "time_machine-2.15.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b684f8ecdeacd6baabc17b15ac1b054ca62029193e6c5367ef00b3516671de80"}, + {file = "time_machine-2.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f7add997684bc6141e1c80f6ba0c38ffe316ba277a4074e61b1b7b4f5a172bf"}, + {file = "time_machine-2.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31af56399bf7c9ef76a3f7b6d9471dffa8f06ee373c194a374b69523f9061de9"}, + {file = "time_machine-2.15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b94cba3edfc54bcb3ab5be616a2f50fa48be438e5af970824efdf882d1bc31"}, + {file = "time_machine-2.15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3862dda89bdb05f9d521b08fdcb24b19a7dd9f559ae324f4301ba7a07b6eea64"}, + {file = "time_machine-2.15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1790481a6b9ce38888f22ce30710244067898c3ac4805a0e061e381f3db3506"}, + {file = "time_machine-2.15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a731c03bc00552ee6cc685a59616d36003124e7e04c6ddf65c2c47f1c3d85480"}, + {file = "time_machine-2.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e6776840aea3ff5ab6924b50117957da62db51b109b3b491c0d5817a804b1a8e"}, + {file = "time_machine-2.15.0-cp312-cp312-win32.whl", hash = "sha256:9479530e3fce65f6149058071fa4df8150025f15b43b103445f619842981a87c"}, + {file = "time_machine-2.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f3ab4185c1f72010846ca9fccb08349e23a2b52982a18d9870e848ce9f1c86"}, + {file = "time_machine-2.15.0-cp312-cp312-win_arm64.whl", hash = "sha256:c0473dfa8f17c6a9a250b2bd6a5b62af3aa7d22518f701649115f1085d5e35ab"}, + {file = "time_machine-2.15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f50f10058b884d45cd8a50423bf561b1f9f9df7058abeb8b318700c8bcf4bb54"}, + {file = "time_machine-2.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:df6f618b98f0848fd8d07039541e10f23db679d8283f8719e870a98e1ef8e639"}, + {file = "time_machine-2.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52468a0784544eba708c0ae6bc5e8c5dcfd685495a60f7f74028662c984bd9cd"}, + {file = "time_machine-2.15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c08800c28160f4d32ca510128b4e201a43c813e7a2dd53178fa79ebe050eba13"}, + {file = "time_machine-2.15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d395211736d9844537a530287a7c64b9fda1d353e899a0e1723986a0859154"}, + {file = "time_machine-2.15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b177d334a35bf2ce103bfe4e0e416e4ee824dd33386ea73fa7491c17cc61897"}, + {file = "time_machine-2.15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9a6a9342fae113b12aab42c790880c549d9ba695b8deff27ee08096eedd67569"}, + {file = "time_machine-2.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bcbb25029ee8756f10c6473cea5ef21707a1d9a8752cdf29fad3a5f34aa4a313"}, + {file = "time_machine-2.15.0-cp313-cp313-win32.whl", hash = "sha256:29b988b1f09f2a083b12b6b054787b799ae91ee15bb0e9de3e48f880e4d68674"}, + {file = 
"time_machine-2.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:d828721dcbcb94b904a6b25df67c2513ecd24cd9e36694f38b9f0fa71c7c6103"}, + {file = "time_machine-2.15.0-cp313-cp313-win_arm64.whl", hash = "sha256:008bd668d933b1a029c81805bcdc0132390c2545b103cf8e6709e3adbc37989d"}, + {file = "time_machine-2.15.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e99689f6c6b9ca6e2fc7a75d140e38c5a7985dab61fe1f4e506268f7e9844e05"}, + {file = "time_machine-2.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:671e88a6209a1cf415dc0f8c67d2b2d3b55b436cc63801a518f9800ebd752959"}, + {file = "time_machine-2.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b2d28daf4cabc698aafb12135525d87dc1f2f893cbd29a8a6fe0d8d36d1342c"}, + {file = "time_machine-2.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cd9f057457d12604be18b623bcd5ae7d0b917ad66cb510ee1135d5f123666e2"}, + {file = "time_machine-2.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dc6793e512a62ba9eab250134a2e67372c16ae9948e73d27c2ef355356e2e1"}, + {file = "time_machine-2.15.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0630a32e9ebcf2fac3704365b31e271fef6eabd6fedfa404cd8dbd244f7fc84d"}, + {file = "time_machine-2.15.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:617c9a92d8d8f60d5ef39e76596620503752a09f834a218e5b83be352fdd6c91"}, + {file = "time_machine-2.15.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3f7eadd820e792de33a9ec91f8178a2b9088e4e8b9a166953419ddc4ec5f7cfe"}, + {file = "time_machine-2.15.0-cp38-cp38-win32.whl", hash = "sha256:b7b647684eb2e1fd1e5e6b101249d5fe9d6117c117b5e336ad8dd75af48d2d1f"}, + {file = "time_machine-2.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b48abd7745caec1a78a16a048966cde14ff6ccb04d471a7201532648d3f77d14"}, + {file = "time_machine-2.15.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8c2b1c91b437133c672e374857eccb1dd2c2d9f8477ae3b35138382d5ef19846"}, + {file = "time_machine-2.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:79bf1ef6850182e09d86e61fa31717da56014a3b2234afb025fca1f2a43ac07b"}, + {file = "time_machine-2.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:658ea8477fa020f08435fb7277635eb0b50cd5206b9d4cbe10e9a5466b01f855"}, + {file = "time_machine-2.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c947135750d20f35acac290c34f1acf5771fc166a3fbc0e3816a97c756aaa5f5"}, + {file = "time_machine-2.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dee3a0dd1866988c49a5d00564404db9bcdf49ca92f9c4e8b6c99609d64e698"}, + {file = "time_machine-2.15.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c596920d6017702a36e3a43fd8110a84e87d6229f30b84bd5640cbae9b5145da"}, + {file = "time_machine-2.15.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:014589d0edd4aa14f8d63985745565e8cbbe48461d6c004a96000b47f6b44e78"}, + {file = "time_machine-2.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5ff655716cd13a242eef8cf5d368074e8b396ff86508a5933e7cff4f2b3eb3c2"}, + {file = "time_machine-2.15.0-cp39-cp39-win32.whl", hash = "sha256:1168eebd7af7e6e3e2fd378c16ca917b97dd81c89a1f1f9e1daa985c81699d90"}, + {file = "time_machine-2.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:c344eb09fcfbf71e5b5847d4f188fec98e1c3a976125ef571eac5f1c39e7a5e5"}, + {file = 
"time_machine-2.15.0-cp39-cp39-win_arm64.whl", hash = "sha256:899f1a856b3bebb82b6cbc3c0014834b583b83f246b28e462a031ec1b766130b"}, + {file = "time_machine-2.15.0.tar.gz", hash = "sha256:ebd2e63baa117ded04b978813fcd1279d3fc6be2149c9cac75c716b6f1db774c"}, +] + +[package.dependencies] +python-dateutil = "*" + [[package]] name = "toml" version = "0.10.2" @@ -2451,4 +2579,4 @@ snowflake = ["pyarrow", "snowflake-connector-python"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1, <3.13" -content-hash = "e04729b9bcb87221c0b46456c02c33117065fdc54cd9e8d1ab90dc6408682400" +content-hash = "7812c5b54e7090b4ee403c39f8f389ad26079f364c952022de034d7d765fc1bb" diff --git a/pyproject.toml b/pyproject.toml index d796eb7..36a5b50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ isort = "^5.9.3" pytest-cov = "^2.12.1" pytest-mock = "^3.6.1" pytest-xdist = "^3.5.0" +pendulum = {version = "^3.0.0", extras = ["test"]} [tool.poetry.extras] diff --git a/pytest.ini b/pytest.ini index d020334..305d46d 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,5 @@ [pytest] +addopts = -m "not extra_dt" testpaths = tests markers = cli @@ -8,3 +9,5 @@ markers = dbt seeding validation + primary_dt: mark this as a primary datetime test (included by default). It depends on a certain datetime to run. The primary tests test the bare minimum number of datetimes, usually just the current datetime. + extra_dt: mark this as an extra datetime test (excluded by default). It depends on a certain datetime to run. The extra tests are of secondary importance, adding runtime to the test suite but improving robustness. To include these, add `-m extra_dt`. diff --git a/tests/test_dashboards.py b/tests/test_dashboards.py index a591da9..15e445c 100644 --- a/tests/test_dashboards.py +++ b/tests/test_dashboards.py @@ -4,6 +4,15 @@ from metrics_layer.core import MetricsLayerConnection from metrics_layer.core.exceptions import QueryError +_NOW = pendulum.now("UTC") +_THIS_YEAR = _NOW.end_of("year").diff(_NOW.start_of("year")) + + +def _generate_dt_params(): + yield pytest.param(_NOW, marks=pytest.mark.primary_dt) + for dt in _THIS_YEAR.range("days"): + yield pytest.param(dt, marks=pytest.mark.extra_dt) + def test_dashboard_located(connection): dash = connection.get_dashboard("sales_dashboard") @@ -59,72 +68,76 @@ def test_dashboard_to_dict(connection): @pytest.mark.query -def test_dashboard_filter_week_start(fresh_project): - date_format = "%Y-%m-%dT%H:%M:%S" - fresh_project._models[0]["week_start_day"] = "sunday" - connection = MetricsLayerConnection(project=fresh_project, connections=[]) - dash = connection.get_dashboard("sales_dashboard") - - raw_filter_dict = {"field": "orders.order_year", "value": "1 week"} - dash.filters = [raw_filter_dict] - dashboard_parsed_filters = dash.parsed_filters() - - last_element = dash.elements()[-1] - last_element.filters = [raw_filter_dict] - element_parsed_filters = last_element.parsed_filters() - - pendulum.week_starts_at(pendulum.SUNDAY) - pendulum.week_ends_at(pendulum.SATURDAY) - start = pendulum.now("UTC").start_of("week").subtract(days=0).strftime(date_format) - end = pendulum.now("UTC").end_of("week").subtract(days=0).strftime(date_format) - pendulum.week_starts_at(pendulum.MONDAY) - pendulum.week_ends_at(pendulum.SUNDAY) - - correct = [ - {"field": "orders.order_year", "value": start, "expression": "greater_or_equal_than"}, - {"field": "orders.order_year", "value": end, "expression": "less_or_equal_than"}, - ] - for parsed_filters in [dashboard_parsed_filters, 
element_parsed_filters]: - assert parsed_filters[0]["field"] == correct[0]["field"] - assert parsed_filters[0]["expression"].value == correct[0]["expression"] - assert parsed_filters[0]["value"] == correct[0]["value"] - assert parsed_filters[1]["field"] == correct[1]["field"] - assert parsed_filters[1]["expression"].value == correct[1]["expression"] - assert parsed_filters[1]["value"] == correct[1]["value"] +@pytest.mark.parametrize("dt", _generate_dt_params()) +def test_dashboard_filter_week_start(fresh_project, dt): + with pendulum.travel_to(dt): + date_format = "%Y-%m-%dT%H:%M:%S" + fresh_project._models[0]["week_start_day"] = "sunday" + connection = MetricsLayerConnection(project=fresh_project, connections=[]) + dash = connection.get_dashboard("sales_dashboard") + + raw_filter_dict = {"field": "orders.order_year", "value": "1 week"} + dash.filters = [raw_filter_dict] + dashboard_parsed_filters = dash.parsed_filters() + + last_element = dash.elements()[-1] + last_element.filters = [raw_filter_dict] + element_parsed_filters = last_element.parsed_filters() + + pendulum.week_starts_at(pendulum.SUNDAY) + pendulum.week_ends_at(pendulum.SATURDAY) + start = pendulum.now("UTC").start_of("week").subtract(days=0).strftime(date_format) + end = pendulum.now("UTC").end_of("week").subtract(days=0).strftime(date_format) + pendulum.week_starts_at(pendulum.MONDAY) + pendulum.week_ends_at(pendulum.SUNDAY) + + correct = [ + {"field": "orders.order_year", "value": start, "expression": "greater_or_equal_than"}, + {"field": "orders.order_year", "value": end, "expression": "less_or_equal_than"}, + ] + for parsed_filters in [dashboard_parsed_filters, element_parsed_filters]: + assert parsed_filters[0]["field"] == correct[0]["field"] + assert parsed_filters[0]["expression"].value == correct[0]["expression"] + assert parsed_filters[0]["value"] == correct[0]["value"] + assert parsed_filters[1]["field"] == correct[1]["field"] + assert parsed_filters[1]["expression"].value == correct[1]["expression"] + assert parsed_filters[1]["value"] == correct[1]["value"] @pytest.mark.query -def test_dashboard_filter_timezone(fresh_project): - date_format = "%Y-%m-%dT%H:%M:%S" - fresh_project.set_timezone("Pacific/Apia") - connection = MetricsLayerConnection(project=fresh_project, connections=[]) - dash = connection.get_dashboard("sales_dashboard") - - raw_filter_dict = {"field": "orders.order_date", "value": "week to date"} - dash.filters = [raw_filter_dict] - dashboard_parsed_filters = dash.parsed_filters() - - last_element = dash.elements()[-1] - last_element.filters = [raw_filter_dict] - element_parsed_filters = last_element.parsed_filters() - - # These are 24 hours apart so this test should always fail if we get the wrong timezone - to_sub = 1 if pendulum.now("Pacific/Apia").day_of_week != 0 else 0 - start = pendulum.now("Pacific/Apia").start_of("week").strftime(date_format) - end = pendulum.now("Pacific/Apia").subtract(days=to_sub).end_of("day").strftime(date_format) - wrong_end = pendulum.now("Pacific/Niue").subtract(days=to_sub).end_of("day").strftime(date_format) - correct = [ - {"field": "orders.order_date", "value": start, "expression": "greater_or_equal_than"}, - {"field": "orders.order_date", "value": end, "expression": "less_or_equal_than"}, - ] - for parsed_filters in [dashboard_parsed_filters, element_parsed_filters]: - assert parsed_filters[0]["field"] == correct[0]["field"] - assert parsed_filters[0]["expression"].value == correct[0]["expression"] - assert parsed_filters[0]["value"] == correct[0]["value"] - 
assert parsed_filters[1]["field"] == correct[1]["field"] - assert parsed_filters[1]["expression"].value == correct[1]["expression"] - assert parsed_filters[1]["value"] == correct[1]["value"] - assert parsed_filters[1]["value"] != wrong_end +@pytest.mark.parametrize("dt", _generate_dt_params()) +def test_dashboard_filter_timezone(fresh_project, dt): + with pendulum.travel_to(dt): + date_format = "%Y-%m-%dT%H:%M:%S" + fresh_project.set_timezone("Pacific/Apia") + connection = MetricsLayerConnection(project=fresh_project, connections=[]) + dash = connection.get_dashboard("sales_dashboard") + + raw_filter_dict = {"field": "orders.order_date", "value": "week to date"} + dash.filters = [raw_filter_dict] + dashboard_parsed_filters = dash.parsed_filters() + + last_element = dash.elements()[-1] + last_element.filters = [raw_filter_dict] + element_parsed_filters = last_element.parsed_filters() + + # These are 24 hours apart so this test should always fail if we get the wrong timezone + to_sub = 1 if pendulum.now("Pacific/Apia").day_of_week != 0 else 0 + start = pendulum.now("Pacific/Apia").start_of("week").strftime(date_format) + end = pendulum.now("Pacific/Apia").subtract(days=to_sub).end_of("day").strftime(date_format) + wrong_end = pendulum.now("Pacific/Niue").subtract(days=to_sub).end_of("day").strftime(date_format) + correct = [ + {"field": "orders.order_date", "value": start, "expression": "greater_or_equal_than"}, + {"field": "orders.order_date", "value": end, "expression": "less_or_equal_than"}, + ] + for parsed_filters in [dashboard_parsed_filters, element_parsed_filters]: + assert parsed_filters[0]["field"] == correct[0]["field"] + assert parsed_filters[0]["expression"].value == correct[0]["expression"] + assert parsed_filters[0]["value"] == correct[0]["value"] + assert parsed_filters[1]["field"] == correct[1]["field"] + assert parsed_filters[1]["expression"].value == correct[1]["expression"] + assert parsed_filters[1]["value"] == correct[1]["value"] + assert parsed_filters[1]["value"] != wrong_end @pytest.mark.query @@ -187,233 +200,255 @@ def test_dashboard_filter_timezone(fresh_project): {"field": "customers.gender", "value": "-Male, Female"}, ], ) -def test_dashboard_filter_processing(connection, raw_filter_dict): - dash = connection.get_dashboard("sales_dashboard") - dash.filters = [raw_filter_dict] - - expression_lookup = { - "Male": "equal_to", - "-Male": "not_equal_to", - "-Ma%": "does_not_start_with_case_insensitive", - "-%Ma": "does_not_end_with_case_insensitive", - "-%ale%": "does_not_contain_case_insensitive", - "Fe%": "starts_with_case_insensitive", - "%Fe": "ends_with_case_insensitive", - "%male%": "contains_case_insensitive", - "=100": "equal_to", - ">100": "greater_than", - "<100": "less_than", - "<=120": "less_or_equal_than", - ">=120": "greater_or_equal_than", - "!=120": "not_equal_to", - "<>120": "not_equal_to", - "Male, Female": "isin", - "-Male, -Female": "isnotin", - "-NULL": "is_not_null", - "NULL": "is_null", - "TRUE": "equal_to", - True: "equal_to", - False: "equal_to", - "after 2021-02-03": "greater_or_equal_than", - "2021-02-03 until 2022-02-03": "greater_or_equal_than", - "2021-02-03 until yesterday": "greater_or_equal_than", - "2021-02-03 until this month": "greater_or_equal_than", - "before 2021-02-03": "less_or_equal_than", - "on 2021-02-03": "equal_to", - "not on 2021-02-03": "not_equal_to", - "today": "greater_or_equal_than", - "yesterday": "greater_or_equal_than", - "this week": "greater_or_equal_than", - "this month": "greater_or_equal_than", - "this 
quarter": "greater_or_equal_than", - "this year": "greater_or_equal_than", - "last week": "greater_or_equal_than", - "last month": "greater_or_equal_than", - "last quarter": "greater_or_equal_than", - "last year": "greater_or_equal_than", - "week to date": "greater_or_equal_than", - "month to date": "greater_or_equal_than", - "quarter to date": "greater_or_equal_than", - "year to date": "greater_or_equal_than", - "last week to date": "greater_or_equal_than", - "52 weeks ago to date": "greater_or_equal_than", - "12 months ago to date": "greater_or_equal_than", - "1 year ago to date": "greater_or_equal_than", - "1 year ago for 3 months": "greater_or_equal_than", - "1 year ago for 30 days": "greater_or_equal_than", - "2 years ago": "greater_or_equal_than", - "3 months": "greater_or_equal_than", - "1 week": "greater_or_equal_than", - "2 days": "greater_or_equal_than", - "1 quarter": "greater_or_equal_than", - } - date_format = "%Y-%m-%dT%H:%M:%S" - value_lookup = { - "Male": "Male", - "-Male": "Male", - "-Ma%": "Ma", - "-%Ma": "Ma", - "-%ale%": "ale", - "Fe%": "Fe", - "%Fe": "Fe", - "%male%": "male", - "=100": 100, - ">100": 100, - "<100": 100, - "<=120": 120, - ">=120": 120, - "!=120": 120, - "<>120": 120, - "Male, Female": ["Male", "Female"], - "-Male, -Female": ["Male", "Female"], - "-NULL": None, - "NULL": None, - "TRUE": True, - True: True, - False: False, - "after 2021-02-03": "2021-02-03T00:00:00", - "2021-02-03 until 2022-02-03": "2021-02-03T00:00:00", - "2021-02-03 until yesterday": "2021-02-03T00:00:00", - "2021-02-03 until this month": "2021-02-03T00:00:00", - "before 2021-02-03": "2021-02-03T00:00:00", - "on 2021-02-03": "2021-02-03T00:00:00", - "not on 2021-02-03": "2021-02-03T00:00:00", - "today": pendulum.now("UTC").start_of("day").strftime(date_format), - "yesterday": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), - "this week": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), - "this month": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), - "this quarter": pendulum.now("UTC").subtract(months=0).first_of("quarter").strftime(date_format), - "this year": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), - "last week": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), - "last month": pendulum.now("UTC").subtract(months=1).start_of("month").strftime(date_format), - "last quarter": pendulum.now("UTC").subtract(months=3).first_of("quarter").strftime(date_format), - "last year": pendulum.now("UTC").subtract(years=1).start_of("year").strftime(date_format), - "week to date": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), - "month to date": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), - "quarter to date": pendulum.now("UTC").subtract(months=0).first_of("quarter").strftime(date_format), - "year to date": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), - "last week to date": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), - "52 weeks ago to date": pendulum.now("UTC").subtract(weeks=52).start_of("week").strftime(date_format), - "12 months ago to date": pendulum.now("UTC") - .subtract(months=12) - .start_of("month") - .strftime(date_format), - "1 year ago to date": pendulum.now("UTC").subtract(years=1).start_of("year").strftime(date_format), - "1 year ago for 3 months": pendulum.now("UTC") - .subtract(years=1) - 
.start_of("year") - .strftime(date_format), - "1 year ago for 30 days": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .strftime(date_format), - "2 years ago": pendulum.now("UTC").subtract(years=2).start_of("year").strftime(date_format), - "3 months": pendulum.now("UTC").subtract(months=2).start_of("month").strftime(date_format), - "1 week": pendulum.now("UTC").start_of("week").strftime(date_format), - "2 days": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), - "1 quarter": pendulum.now("UTC").first_of("quarter").strftime(date_format), - } - - second_value_lookup = { - "today": pendulum.now("UTC").end_of("day").strftime(date_format), - "yesterday": pendulum.now("UTC").subtract(days=1).end_of("day").strftime(date_format), - "this week": pendulum.now("UTC").end_of("week").strftime(date_format), - "this month": pendulum.now("UTC").end_of("month").strftime(date_format), - "this quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), - "this year": pendulum.now("UTC").end_of("year").strftime(date_format), - "last week": pendulum.now("UTC").subtract(weeks=1).end_of("week").strftime(date_format), - "last month": pendulum.now("UTC").subtract(months=1).end_of("month").strftime(date_format), - "last quarter": pendulum.now("UTC").subtract(months=3).last_of("quarter").strftime(date_format), - "last year": pendulum.now("UTC").subtract(years=1).end_of("year").strftime(date_format), - "2021-02-03 until 2022-02-03": "2022-02-03T00:00:00", - "2021-02-03 until yesterday": pendulum.now("UTC") - .subtract(days=1) - .end_of("day") - .strftime(date_format), - "2021-02-03 until this month": pendulum.now("UTC").end_of("month").strftime(date_format), - "week to date": pendulum.now("UTC") - .subtract(days=1 if pendulum.now("UTC").day_of_week != 0 else 0) - .end_of("day") - .strftime(date_format), - "month to date": pendulum.now("UTC") - .subtract(days=1 if pendulum.now("UTC").day != 1 else 0) - .end_of("day") - .strftime(date_format), - "quarter to date": pendulum.now("UTC") - .subtract( - days=1 if pendulum.now("UTC").day != 1 or pendulum.now("UTC").month not in {1, 4, 7, 10} else 0 - ) - .end_of("day") - .strftime(date_format), - "year to date": pendulum.now("UTC").subtract(days=1).end_of("day").strftime(date_format), - "last week to date": pendulum.now("UTC") - .subtract(weeks=1) - .start_of("week") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 - if pendulum.now("UTC").day_of_week != 0 - else 0 - ) - .end_of("day") - .strftime(date_format), - "52 weeks ago to date": pendulum.now("UTC") - .subtract(weeks=52) - .start_of("week") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 - if pendulum.now("UTC").day_of_week != 0 - else 0 - ) - .end_of("day") - .strftime(date_format), - "12 months ago to date": pendulum.now("UTC") - .subtract(months=12) - .start_of("month") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("month")).days - 1 - if pendulum.now("UTC").day != 0 - else 0 - ) - .end_of("day") - .strftime(date_format), - "1 year ago to date": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .add(days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("year")).days - 1) - .end_of("day") - .strftime(date_format), - "1 year ago for 3 months": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .add(months=2) - .end_of("month") - .strftime(date_format), - "1 year ago for 30 days": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - 
.add(days=29) - .end_of("day") - .strftime(date_format), - "2 years ago": pendulum.now("UTC").subtract(years=2).end_of("year").strftime(date_format), - "3 months": pendulum.now("UTC").end_of("month").strftime(date_format), - "1 week": pendulum.now("UTC").end_of("week").strftime(date_format), - "2 days": pendulum.now("UTC").end_of("day").strftime(date_format), - "1 quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), - } - if raw_filter_dict["value"] == "-Male, Female": - with pytest.raises(QueryError) as exc_info: - dash.parsed_filters() - assert exc_info.value - else: - parsed_filters = dash.parsed_filters() - assert len(parsed_filters) in {1, 2} - assert parsed_filters[0]["field"] == raw_filter_dict["field"] - assert parsed_filters[0]["expression"].value == expression_lookup[raw_filter_dict["value"]] - assert parsed_filters[0]["value"] == value_lookup[raw_filter_dict["value"]] - if raw_filter_dict["value"] in second_value_lookup or len(parsed_filters) == 2: - assert parsed_filters[1]["field"] == raw_filter_dict["field"] - assert parsed_filters[1]["expression"].value == "less_or_equal_than" - assert parsed_filters[1]["value"] == second_value_lookup[raw_filter_dict["value"]] +@pytest.mark.parametrize("dt", _generate_dt_params()) +def test_dashboard_filter_processing(connection, raw_filter_dict, dt): + with pendulum.travel_to(dt): + dash = connection.get_dashboard("sales_dashboard") + dash.filters = [raw_filter_dict] + + expression_lookup = { + "Male": "equal_to", + "-Male": "not_equal_to", + "-Ma%": "does_not_start_with_case_insensitive", + "-%Ma": "does_not_end_with_case_insensitive", + "-%ale%": "does_not_contain_case_insensitive", + "Fe%": "starts_with_case_insensitive", + "%Fe": "ends_with_case_insensitive", + "%male%": "contains_case_insensitive", + "=100": "equal_to", + ">100": "greater_than", + "<100": "less_than", + "<=120": "less_or_equal_than", + ">=120": "greater_or_equal_than", + "!=120": "not_equal_to", + "<>120": "not_equal_to", + "Male, Female": "isin", + "-Male, -Female": "isnotin", + "-NULL": "is_not_null", + "NULL": "is_null", + "TRUE": "equal_to", + True: "equal_to", + False: "equal_to", + "after 2021-02-03": "greater_or_equal_than", + "2021-02-03 until 2022-02-03": "greater_or_equal_than", + "2021-02-03 until yesterday": "greater_or_equal_than", + "2021-02-03 until this month": "greater_or_equal_than", + "before 2021-02-03": "less_or_equal_than", + "on 2021-02-03": "equal_to", + "not on 2021-02-03": "not_equal_to", + "today": "greater_or_equal_than", + "yesterday": "greater_or_equal_than", + "this week": "greater_or_equal_than", + "this month": "greater_or_equal_than", + "this quarter": "greater_or_equal_than", + "this year": "greater_or_equal_than", + "last week": "greater_or_equal_than", + "last month": "greater_or_equal_than", + "last quarter": "greater_or_equal_than", + "last year": "greater_or_equal_than", + "week to date": "greater_or_equal_than", + "month to date": "greater_or_equal_than", + "quarter to date": "greater_or_equal_than", + "year to date": "greater_or_equal_than", + "last week to date": "greater_or_equal_than", + "52 weeks ago to date": "greater_or_equal_than", + "12 months ago to date": "greater_or_equal_than", + "1 year ago to date": "greater_or_equal_than", + "1 year ago for 3 months": "greater_or_equal_than", + "1 year ago for 30 days": "greater_or_equal_than", + "2 years ago": "greater_or_equal_than", + "3 months": "greater_or_equal_than", + "1 week": "greater_or_equal_than", + "2 days": "greater_or_equal_than", + "1 quarter": 
"greater_or_equal_than", + } + date_format = "%Y-%m-%dT%H:%M:%S" + value_lookup = { + "Male": "Male", + "-Male": "Male", + "-Ma%": "Ma", + "-%Ma": "Ma", + "-%ale%": "ale", + "Fe%": "Fe", + "%Fe": "Fe", + "%male%": "male", + "=100": 100, + ">100": 100, + "<100": 100, + "<=120": 120, + ">=120": 120, + "!=120": 120, + "<>120": 120, + "Male, Female": ["Male", "Female"], + "-Male, -Female": ["Male", "Female"], + "-NULL": None, + "NULL": None, + "TRUE": True, + True: True, + False: False, + "after 2021-02-03": "2021-02-03T00:00:00", + "2021-02-03 until 2022-02-03": "2021-02-03T00:00:00", + "2021-02-03 until yesterday": "2021-02-03T00:00:00", + "2021-02-03 until this month": "2021-02-03T00:00:00", + "before 2021-02-03": "2021-02-03T00:00:00", + "on 2021-02-03": "2021-02-03T00:00:00", + "not on 2021-02-03": "2021-02-03T00:00:00", + "today": pendulum.now("UTC").start_of("day").strftime(date_format), + "yesterday": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), + "this week": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), + "this month": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), + "this quarter": pendulum.now("UTC").subtract(months=0).first_of("quarter").strftime(date_format), + "this year": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), + "last week": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), + "last month": pendulum.now("UTC").subtract(months=1).start_of("month").strftime(date_format), + "last quarter": pendulum.now("UTC").subtract(months=3).first_of("quarter").strftime(date_format), + "last year": pendulum.now("UTC").subtract(years=1).start_of("year").strftime(date_format), + "week to date": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), + "month to date": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), + "quarter to date": pendulum.now("UTC") + .subtract(months=0) + .first_of("quarter") + .strftime(date_format), + "year to date": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), + "last week to date": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), + "52 weeks ago to date": pendulum.now("UTC") + .subtract(weeks=52) + .start_of("week") + .strftime(date_format), + "12 months ago to date": pendulum.now("UTC") + .subtract(months=12) + .start_of("month") + .strftime(date_format), + "1 year ago to date": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .strftime(date_format), + "1 year ago for 3 months": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .strftime(date_format), + "1 year ago for 30 days": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .strftime(date_format), + "2 years ago": pendulum.now("UTC").subtract(years=2).start_of("year").strftime(date_format), + "3 months": pendulum.now("UTC").subtract(months=2).start_of("month").strftime(date_format), + "1 week": pendulum.now("UTC").start_of("week").strftime(date_format), + "2 days": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), + "1 quarter": pendulum.now("UTC").first_of("quarter").strftime(date_format), + } + + second_value_lookup = { + "today": pendulum.now("UTC").end_of("day").strftime(date_format), + "yesterday": pendulum.now("UTC").subtract(days=1).end_of("day").strftime(date_format), + "this week": pendulum.now("UTC").end_of("week").strftime(date_format), + "this month": 
pendulum.now("UTC").end_of("month").strftime(date_format), + "this quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), + "this year": pendulum.now("UTC").end_of("year").strftime(date_format), + "last week": pendulum.now("UTC").subtract(weeks=1).end_of("week").strftime(date_format), + "last month": pendulum.now("UTC").subtract(months=1).end_of("month").strftime(date_format), + "last quarter": pendulum.now("UTC").subtract(months=3).last_of("quarter").strftime(date_format), + "last year": pendulum.now("UTC").subtract(years=1).end_of("year").strftime(date_format), + "2021-02-03 until 2022-02-03": "2022-02-03T00:00:00", + "2021-02-03 until yesterday": pendulum.now("UTC") + .subtract(days=1) + .end_of("day") + .strftime(date_format), + "2021-02-03 until this month": pendulum.now("UTC").end_of("month").strftime(date_format), + "week to date": pendulum.now("UTC") + .subtract(days=1 if pendulum.now("UTC").day_of_week != 0 else 0) + .end_of("day") + .strftime(date_format), + "month to date": pendulum.now("UTC") + .subtract(days=1 if pendulum.now("UTC").day != 1 else 0) + .end_of("day") + .strftime(date_format), + "quarter to date": pendulum.now("UTC") + .subtract( + days=1 + if pendulum.now("UTC").day != 1 or pendulum.now("UTC").month not in {1, 4, 7, 10} + else 0 + ) + .end_of("day") + .strftime(date_format), + "year to date": pendulum.now("UTC") + .subtract( + days=1 if pendulum.now("UTC").date() != pendulum.now("UTC").start_of("year").date() else 0 + ) + .end_of("day") + .strftime(date_format), + "last week to date": pendulum.now("UTC") + .subtract(weeks=1) + .start_of("week") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 + if pendulum.now("UTC").day_of_week != 0 + else 0 + ) + .end_of("day") + .strftime(date_format), + "52 weeks ago to date": pendulum.now("UTC") + .subtract(weeks=52) + .start_of("week") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 + if pendulum.now("UTC").day_of_week != 0 + else 0 + ) + .end_of("day") + .strftime(date_format), + "12 months ago to date": pendulum.now("UTC") + .subtract(months=12) + .start_of("month") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("month")).days - 1 + if pendulum.now("UTC").day != 1 + else 0 + ) + .end_of("day") + .strftime(date_format), + "1 year ago to date": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("year")).days - 1 + if pendulum.now("UTC").date() != pendulum.now("UTC").start_of("year").date() + else 0 + ) + .end_of("day") + .strftime(date_format), + "1 year ago for 3 months": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .add(months=2) + .end_of("month") + .strftime(date_format), + "1 year ago for 30 days": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .add(days=29) + .end_of("day") + .strftime(date_format), + "2 years ago": pendulum.now("UTC").subtract(years=2).end_of("year").strftime(date_format), + "3 months": pendulum.now("UTC").end_of("month").strftime(date_format), + "1 week": pendulum.now("UTC").end_of("week").strftime(date_format), + "2 days": pendulum.now("UTC").end_of("day").strftime(date_format), + "1 quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), + } + if raw_filter_dict["value"] == "-Male, Female": + with pytest.raises(QueryError) as exc_info: + dash.parsed_filters() + assert exc_info.value + else: + parsed_filters = dash.parsed_filters() + assert len(parsed_filters) in 
{1, 2} + assert parsed_filters[0]["field"] == raw_filter_dict["field"] + assert parsed_filters[0]["expression"].value == expression_lookup[raw_filter_dict["value"]] + assert parsed_filters[0]["value"] == value_lookup[raw_filter_dict["value"]] + if raw_filter_dict["value"] in second_value_lookup or len(parsed_filters) == 2: + assert parsed_filters[1]["field"] == raw_filter_dict["field"] + assert parsed_filters[1]["expression"].value == "less_or_equal_than" + assert parsed_filters[1]["value"] == second_value_lookup[raw_filter_dict["value"]] @pytest.mark.parametrize( From e39ee5cf78b2244fbebfd1bb55524cc0c9fb3d37 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:32:38 -0600 Subject: [PATCH 27/53] fix week of year offset handling for diff week start day (#227) * fix week of year offset handling for diff week start day * bump version --- .github/workflows/tests.yaml | 2 +- metrics_layer/core/model/field.py | 80 ++++++++++++++++++++++--------- pyproject.toml | 2 +- pytest.ini | 3 +- tests/conftest.py | 18 +++++++ tests/test_dashboards.py | 8 ++-- tests/test_field_mappings.py | 2 +- tests/test_simple_query.py | 42 +++++++--------- 8 files changed, 99 insertions(+), 58 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index e460163..3441af9 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -32,7 +32,7 @@ jobs: - name: Test with pytest run: | - pytest -m extra_dt --cov=metrics_layer/ --cov-report=xml + pytest --runslow --cov=metrics_layer/ --cov-report=xml - name: Report on code coverage uses: codecov/codecov-action@v4 diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 21e1866..902d2da 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -1020,11 +1020,10 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): ), "fiscal_year": lambda s, qt: f"DATE_TRUNC('YEAR', {self._fiscal_offset_to_timestamp(s, qt)})", "week_index": lambda s, qt: ( - f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s, qt)})" + f"EXTRACT(WEEK FROM {self._apply_week_start_day_offset_only(s, qt)})" ), "week_of_month": lambda s, qt: ( - f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s,qt)}) - EXTRACT(WEEK FROM" - f" DATE_TRUNC('MONTH', {self._week_dimension_group_time_sql(s,qt)})) + 1" + f"EXTRACT(WEEK FROM {s}) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH', {s})) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM {s})", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1061,12 +1060,11 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" ), "week_index": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))" + f"EXTRACT(WEEK FROM CAST({self._apply_week_start_day_offset_only(s, qt)} AS TIMESTAMP))" ), "week_of_month": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP)) -" - " EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))) + 1" + f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({s} AS TIMESTAMP))) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1103,12 +1101,11 @@ def 
apply_dimension_group_time_sql(self, sql: str, query_type: str): f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" ), "week_index": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))" + f"EXTRACT(WEEK FROM CAST({self._apply_week_start_day_offset_only(s, qt)} AS TIMESTAMP))" ), - "week_of_month": lambda s, qt: ( # noqa - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP)) -" - " EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))) + 1" + "week_of_month": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({s} AS TIMESTAMP))) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1145,12 +1142,11 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" ), "week_index": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))" + f"EXTRACT(WEEK FROM CAST({self._apply_week_start_day_offset_only(s,qt)} AS TIMESTAMP))" ), "week_of_month": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP)) -" - " EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST({self._week_dimension_group_time_sql(s,qt)} AS TIMESTAMP))) + 1" + f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({s} AS TIMESTAMP))) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1199,12 +1195,11 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): " DATE)), 0)" ), "week_index": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE))" + f"EXTRACT(WEEK FROM CAST({self._apply_week_start_day_offset_only(s,qt)} AS DATE))" ), "week_of_month": lambda s, qt: ( - f"EXTRACT(WEEK FROM CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE)) -" - " EXTRACT(WEEK FROM DATEADD(MONTH, DATEDIFF(MONTH, 0," - f" CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE)), 0)) + 1" + f"EXTRACT(WEEK FROM CAST({s} AS DATE)) - EXTRACT(WEEK FROM DATEADD(MONTH, DATEDIFF(MONTH," + f" 0, CAST({s} AS DATE)), 0)) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS DATE))", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1255,10 +1250,11 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): f"CAST(DATE_TRUNC(CAST({self._fiscal_offset_to_timestamp(s, qt)} AS DATE), YEAR) AS" f" {self.datatype.upper()})" ), - "week_index": lambda s, qt: f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s,qt)})", + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM {self._apply_week_start_day_offset_only(s,qt)})" + ), "week_of_month": lambda s, qt: ( - f"EXTRACT(WEEK FROM {self._week_dimension_group_time_sql(s,qt)}) - EXTRACT(WEEK FROM" - f" DATE_TRUNC(CAST({self._week_dimension_group_time_sql(s,qt)} AS DATE), MONTH)) + 1" + f"EXTRACT(WEEK FROM {s}) - EXTRACT(WEEK FROM DATE_TRUNC(CAST({s} AS DATE), MONTH)) + 1" ), "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM {s})", "fiscal_month_of_year_index": lambda s, qt: ( @@ -1379,6 +1375,44 @@ def _week_sql_date_trunc(sql, offset, query_type): else: raise QueryError(f"Unable to find a valid 
method for running week with query type {query_type}") + def _apply_week_start_day_offset_only(self, sql: str, query_type: str): + # Monday is the default date for warehouses + week_start_day = self.view.week_start_day + offset_lookup = { + "sunday": 1, + "saturday": 2, + "friday": 3, + "thursday": 4, + "wednesday": 5, + "tuesday": 6, + } + # monday will result in None here which means no offsets will be applied + offset = offset_lookup.get(week_start_day, None) + casted = f"CAST({sql} AS DATE)" + if query_type in {Definitions.snowflake, Definitions.redshift}: + if offset is None: + return f"DATE_TRUNC('DAY', {casted})" + return f"DATE_TRUNC('DAY', {casted} + {offset})" + elif query_type in { + Definitions.postgres, + Definitions.druid, + Definitions.duck_db, + Definitions.databricks, + }: + if offset is None: + return f"DATE_TRUNC('DAY', CAST({sql} AS TIMESTAMP))" + return f"DATE_TRUNC('DAY', CAST({sql} AS TIMESTAMP) + INTERVAL '{offset}' DAY)" + elif query_type == Definitions.bigquery: + if offset is None: + return f"DATE_TRUNC({casted}, DAY)" + return f"DATE_TRUNC({casted} + {offset}, DAY)" + elif query_type in {Definitions.sql_server, Definitions.azure_synapse}: + if offset is None: + return f"CAST({casted} AS DATETIME)" + return f"CAST(DATEADD(DAY, {offset}, {casted}) AS DATETIME)" # noqa + else: + raise QueryError(f"Unable to find a valid method for running week with query type {query_type}") + def _error(self, element, error, extra: dict = {}): line, column = self.line_col(element) return { diff --git a/pyproject.toml b/pyproject.toml index 36a5b50..f84ddb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.32" +version = "0.12.33" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/pytest.ini b/pytest.ini index 305d46d..3afc252 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,4 @@ [pytest] -addopts = -m "not extra_dt" testpaths = tests markers = cli @@ -9,5 +8,5 @@ markers = dbt seeding validation + filters primary_dt: mark this as a primary datetime test (included by default). It depends on a certain datetime to run. The primary tests test the bare minimum number of datetimes, usually just the current datetime. - extra_dt: mark this as an extra datetime test (excluded by default). It depends on a certain datetime to run. The extra tests are of secondary importance, adding runtime to the test suite but improving robustness. To include these, add `-m extra_dt`. 
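A minimal, self-contained sketch (not part of the patch) of the idea behind the `_apply_week_start_day_offset_only` helper added above: shift the date forward by a fixed number of days so that weeks starting on the view's `week_start_day` line up with the warehouse's Monday-based `EXTRACT(WEEK ...)`. Python's `isocalendar()` stands in here for the warehouse's week extraction, and the helper name and lookup below are illustrative only, not the library's API.

import datetime

# Hypothetical mirror of the offset lookup in the patch; "monday" maps to no offset.
OFFSET_BY_WEEK_START = {
    "sunday": 1, "saturday": 2, "friday": 3,
    "thursday": 4, "wednesday": 5, "tuesday": 6,
}

def week_index(day: datetime.date, week_start_day: str = "monday") -> int:
    # Shift the date by the offset, then take the Monday-based ISO week number,
    # which plays the role of EXTRACT(WEEK FROM ...) in the generated SQL.
    offset = OFFSET_BY_WEEK_START.get(week_start_day, 0)
    return (day + datetime.timedelta(days=offset)).isocalendar()[1]

# 2024-01-07 (Sunday) and 2024-01-08 (Monday) land in the same week when weeks
# start on Sunday, but in different weeks under the default Monday start.
assert week_index(datetime.date(2024, 1, 7), "sunday") == week_index(datetime.date(2024, 1, 8), "sunday")
assert week_index(datetime.date(2024, 1, 7)) != week_index(datetime.date(2024, 1, 8))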
diff --git a/tests/conftest.py b/tests/conftest.py index d3764ee..b53c13b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -73,6 +73,24 @@ dashboard_paths = [sales_dashboard_path, sales_dashboard_v2_path] +def pytest_addoption(parser): + parser.addoption("--runslow", action="store_true", default=False, help="run slow tests") + + +def pytest_configure(config): + config.addinivalue_line("markers", "slow: mark test as slow to run") + + +def pytest_collection_modifyitems(config, items): + if config.getoption("--runslow"): + # --runslow given in cli: do not skip slow tests + return + skip_slow = pytest.mark.skip(reason="need --runslow option to run") + for item in items: + if "slow" in item.keywords: + item.add_marker(skip_slow) + + @pytest.fixture(scope="function") def seed_snowflake_tables_data(): order_records = [ diff --git a/tests/test_dashboards.py b/tests/test_dashboards.py index 15e445c..8b93094 100644 --- a/tests/test_dashboards.py +++ b/tests/test_dashboards.py @@ -11,7 +11,7 @@ def _generate_dt_params(): yield pytest.param(_NOW, marks=pytest.mark.primary_dt) for dt in _THIS_YEAR.range("days"): - yield pytest.param(dt, marks=pytest.mark.extra_dt) + yield pytest.param(dt, marks=pytest.mark.slow) def test_dashboard_located(connection): @@ -67,7 +67,7 @@ def test_dashboard_to_dict(connection): assert first_element["slice_by"] == ["orders.new_vs_repeat", "order_lines.product_name"] -@pytest.mark.query +@pytest.mark.filters @pytest.mark.parametrize("dt", _generate_dt_params()) def test_dashboard_filter_week_start(fresh_project, dt): with pendulum.travel_to(dt): @@ -104,7 +104,7 @@ def test_dashboard_filter_week_start(fresh_project, dt): assert parsed_filters[1]["value"] == correct[1]["value"] -@pytest.mark.query +@pytest.mark.filters @pytest.mark.parametrize("dt", _generate_dt_params()) def test_dashboard_filter_timezone(fresh_project, dt): with pendulum.travel_to(dt): @@ -140,7 +140,7 @@ def test_dashboard_filter_timezone(fresh_project, dt): assert parsed_filters[1]["value"] != wrong_end -@pytest.mark.query +@pytest.mark.filters @pytest.mark.parametrize( "raw_filter_dict", [ diff --git a/tests/test_field_mappings.py b/tests/test_field_mappings.py index 82d8a30..c9ecc63 100644 --- a/tests/test_field_mappings.py +++ b/tests/test_field_mappings.py @@ -55,7 +55,7 @@ def test_mapping_metric_mapped_date_and_filter(connection, time_grain): elif time_grain == "week": date_part = "DATE_TRUNC('WEEK', CAST(orders.order_date AS DATE))" elif time_grain == "week_of_year": - date_part = "EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(orders.order_date AS DATE)))" + date_part = "EXTRACT(WEEK FROM DATE_TRUNC('DAY', CAST(orders.order_date AS DATE)))" elif time_grain == "month": date_part = "DATE_TRUNC('MONTH', orders.order_date)" elif time_grain == "month_of_year": diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index c0b9ed9..2e9fcd9 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -831,12 +831,11 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "fiscal_month_of_year_index": f"EXTRACT(MONTH FROM DATEADD(MONTH, 1, simple.order_date))", "fiscal_month_index": f"EXTRACT(MONTH FROM DATEADD(MONTH, 1, simple.order_date))", "fiscal_quarter_of_year": "EXTRACT(QUARTER FROM DATEADD(MONTH, 1, simple.order_date))", - "week_index": f"EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) + 1) - 1)", - "week_of_year": f"EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) + 1) - 1)", + "week_index": 
f"EXTRACT(WEEK FROM DATE_TRUNC('DAY', CAST(simple.order_date AS DATE) + 1))", + "week_of_year": f"EXTRACT(WEEK FROM DATE_TRUNC('DAY', CAST(simple.order_date AS DATE) + 1))", "week_of_month": ( - f"EXTRACT(WEEK FROM DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) + 1) - 1) -" - f" EXTRACT(WEEK FROM DATE_TRUNC('MONTH', DATE_TRUNC('WEEK', CAST(simple.order_date AS DATE) +" - f" 1) - 1)) + 1" + f"EXTRACT(WEEK FROM simple.order_date) -" + f" EXTRACT(WEEK FROM DATE_TRUNC('MONTH', simple.order_date)) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM simple.order_date)", "month_index": f"EXTRACT(MONTH FROM simple.order_date)", @@ -883,14 +882,12 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "fiscal_month_index": "EXTRACT(MONTH FROM CAST(DATEADD(MONTH, 1, simple.order_date) AS DATE))", "fiscal_quarter_of_year": "DATEPART(QUARTER, CAST(DATEADD(MONTH, 1, simple.order_date) AS DATE))", "week_index": ( - f"EXTRACT(WEEK FROM CAST(DATEADD(DAY, -1, DATEADD(WEEK, DATEDIFF(WEEK, 0, DATEADD(DAY, 1," - f" CAST(simple.order_date AS DATE))), 0)) AS DATE))" + f"EXTRACT(WEEK FROM CAST(CAST(DATEADD(DAY, 1, CAST(simple.order_date AS DATE)) AS DATETIME)" + f" AS DATE))" ), "week_of_month": ( - f"EXTRACT(WEEK FROM CAST(DATEADD(DAY, -1, DATEADD(WEEK, DATEDIFF(WEEK, 0, DATEADD(DAY, 1," - f" CAST(simple.order_date AS DATE))), 0)) AS DATE)) - EXTRACT(WEEK FROM DATEADD(MONTH," - f" DATEDIFF(MONTH, 0, CAST(DATEADD(DAY, -1, DATEADD(WEEK, DATEDIFF(WEEK, 0, DATEADD(DAY, 1," - f" CAST(simple.order_date AS DATE))), 0)) AS DATE)), 0)) + 1" + f"EXTRACT(WEEK FROM CAST(simple.order_date AS DATE)) - EXTRACT(WEEK FROM DATEADD(MONTH," + f" DATEDIFF(MONTH, 0, CAST(simple.order_date AS DATE)), 0)) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM CAST(simple.order_date AS DATE))", "month_of_year": "LEFT(DATENAME(MONTH, CAST(simple.order_date AS DATE)), 3)", @@ -931,14 +928,12 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "EXTRACT(QUARTER FROM CAST(simple.order_date + INTERVAL '1' MONTH AS TIMESTAMP))" ), "week_index": ( - f"EXTRACT(WEEK FROM CAST(DATE_TRUNC('WEEK', CAST(simple.order_date AS TIMESTAMP) + INTERVAL" - f" '1' DAY) - INTERVAL '1' DAY AS TIMESTAMP))" + f"EXTRACT(WEEK FROM CAST(DATE_TRUNC('DAY', CAST(simple.order_date AS TIMESTAMP) + INTERVAL" + f" '1' DAY) AS TIMESTAMP))" ), "week_of_month": ( # noqa - f"EXTRACT(WEEK FROM CAST(DATE_TRUNC('WEEK', CAST(simple.order_date AS TIMESTAMP) + INTERVAL" - f" '1' DAY) - INTERVAL '1' DAY AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," - f" CAST(DATE_TRUNC('WEEK', CAST(simple.order_date AS TIMESTAMP) + INTERVAL '1' DAY) -" - f" INTERVAL '1' DAY AS TIMESTAMP))) + 1" + f"EXTRACT(WEEK FROM CAST(simple.order_date AS TIMESTAMP)) - EXTRACT(WEEK FROM" + f" DATE_TRUNC('MONTH', CAST(simple.order_date AS TIMESTAMP))) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM CAST(simple.order_date AS TIMESTAMP))", "month_of_year": "TO_CHAR(CAST(simple.order_date AS TIMESTAMP), 'Mon')", @@ -1019,15 +1014,10 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): ), "fiscal_month_index": f"EXTRACT(MONTH FROM DATE_ADD(simple.order_date, INTERVAL 1 MONTH))", "fiscal_quarter_of_year": "EXTRACT(QUARTER FROM DATE_ADD(simple.order_date, INTERVAL 1 MONTH))", - "week_index": ( - f"EXTRACT(WEEK FROM CAST(DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, WEEK) - 1 AS" - f" TIMESTAMP))" - ), - "week_of_month": ( # noqa - f"EXTRACT(WEEK FROM CAST(DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, WEEK) - 1 
AS" - f" TIMESTAMP)) - EXTRACT(WEEK FROM" - f" DATE_TRUNC(CAST(CAST(DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, WEEK) - 1 AS" - f" TIMESTAMP) AS DATE), MONTH)) + 1" + "week_index": f"EXTRACT(WEEK FROM DATE_TRUNC(CAST(simple.order_date AS DATE) + 1, DAY))", + "week_of_month": ( + f"EXTRACT(WEEK FROM simple.order_date) - EXTRACT(WEEK FROM DATE_TRUNC(CAST(simple.order_date" + f" AS DATE), MONTH)) + 1" ), "month_of_year_index": f"EXTRACT(MONTH FROM simple.order_date)", "month_of_year": "FORMAT_DATETIME('%B', CAST(simple.order_date as DATETIME))", From 025dc35300fe3e344675c91f54fa83058c005c61 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 14 Aug 2024 15:32:51 -0600 Subject: [PATCH 28/53] Release v0.12.33 From a414f10bf0d5a6530c4e37006bf4aecff2d99c8a Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Wed, 14 Aug 2024 23:16:11 -0600 Subject: [PATCH 29/53] Feature/support trino sql flavor (#223) * progress on trino * add trino flavor support and tests * add quotes to redshift seeding * fix tests * fix tests (for real) * add casting for input timestamps in trino and more tests * bump version * fix semi colon issues in seeding for trino --- metrics_layer/cli/seeding.py | 43 +- metrics_layer/core/model/definitions.py | 4 + metrics_layer/core/model/field.py | 62 ++- metrics_layer/core/parse/connections.py | 50 +- .../core/sql/arbitrary_merge_resolve.py | 7 +- .../core/sql/merged_query_resolve.py | 2 +- metrics_layer/core/sql/query_dialect.py | 4 +- metrics_layer/core/sql/query_filter.py | 13 +- .../core/sql/single_query_resolve.py | 4 +- pyproject.toml | 2 +- tests/conftest.py | 41 ++ tests/test_arbitrary_merged_results.py | 2 +- tests/test_cli.py | 26 + tests/test_cumulative_query.py | 6 +- tests/test_dashboards.py | 483 +++++++++--------- tests/test_join_query.py | 5 +- tests/test_merged_results.py | 7 +- tests/test_simple_query.py | 160 ++++-- 18 files changed, 607 insertions(+), 314 deletions(-) diff --git a/metrics_layer/cli/seeding.py b/metrics_layer/cli/seeding.py index 4b30202..381f011 100644 --- a/metrics_layer/cli/seeding.py +++ b/metrics_layer/cli/seeding.py @@ -158,6 +158,26 @@ def __init__( "NUMERIC": "number", "STRING": "string", } + self._trino_type_lookup = { + "boolean": "yesno", + "tinyint": "number", + "smallint": "number", + "integer": "number", + "int": "number", + "bigint": "number", + "real": "number", + "double": "number", + "decimal": "number", + "varchar": "string", + "char": "string", + "varbinary": "string", + "json": "string", + "date": "date", + "timestamp": "timestamp", + "timestamp(p)": "timestamp", + "timestamp with time zone": "timestamp", + "timestamp(p) with time zone": "timestamp", + } def seed(self, auto_tag_searchable_fields: bool = False): from metrics_layer.core.parse import ProjectDumper, ProjectLoader @@ -287,7 +307,7 @@ def make_view( sql_table_name = f"{schema_name}.{table_name}" if self._database_is_not_default: sql_table_name = f"{self.database}.{sql_table_name}" - elif self.connection.type == Definitions.druid: + elif self.connection.type in {Definitions.druid, Definitions.trino}: sql_table_name = f"{schema_name}.{table_name}" elif self.connection.type == Definitions.bigquery: sql_table_name = f"`{self.database}.{schema_name}.{table_name}`" @@ -337,14 +357,18 @@ def make_fields(self, column_data, schema_name: str, table_name: str, auto_tag_s metrics_layer_type = self._sql_server_type_lookup.get(row["DATA_TYPE"], "string") elif self.connection.type == Definitions.databricks: metrics_layer_type = 
self._databricks_type_lookup.get(row["DATA_TYPE"], "string") + elif self.connection.type == Definitions.trino: + metrics_layer_type = self._trino_type_lookup.get(row["DATA_TYPE"], "string") else: raise NotImplementedError(f"Unknown connection type: {self.connection.type}") # Add quotes for certain db only because we've seen issues with column names with special chars if self.connection.type in { Definitions.druid, + Definitions.trino, Definitions.snowflake, Definitions.duck_db, Definitions.postgres, + Definitions.redshift, }: column_name = '"' + row["COLUMN_NAME"] + '"' else: @@ -419,6 +443,9 @@ def column_cardinalities_query( if self.connection.type in (Definitions.snowflake, Definitions.duck_db, Definitions.druid): quote_column_name = f'"{column_name}"' if quote else column_name query = f'APPROX_COUNT_DISTINCT( {quote_column_name} ) as "{column_name_alias}_cardinality"' # noqa: E501 + elif self.connection.type in {Definitions.trino}: + quote_column_name = f'"{column_name}"' if quote else column_name + query = f'APPROX_DISTINCT( {quote_column_name} ) as "{column_name_alias}_cardinality"' # noqa: E501 elif self.connection.type in {Definitions.redshift, Definitions.postgres}: quote_column_name = f'"{column_name}"' if quote else column_name query = ( @@ -455,12 +482,12 @@ def column_cardinalities_query( Definitions.databricks, }: query += f" FROM {self.database}.{schema_name}.{table_name}" - elif self.connection.type == Definitions.druid: + elif self.connection.type in {Definitions.druid, Definitions.trino}: query += f"FROM {schema_name}.{table_name}" elif self.connection.type == Definitions.bigquery: query += f" FROM `{self.database}`.`{schema_name}`.`{table_name}`" - return query + ";" if self.connection.type != Definitions.druid else query + return query + ";" if self.connection.type not in Definitions.no_semicolon_warehouses else query def columns_query(self): if self.connection.type in {Definitions.snowflake, Definitions.databricks}: @@ -483,7 +510,7 @@ def columns_query(self): query += f"INFORMATION_SCHEMA.COLUMNS" else: query += f"{self.database}.INFORMATION_SCHEMA.COLUMNS" - elif self.connection.type == Definitions.druid: + elif self.connection.type in {Definitions.druid, Definitions.trino}: query = ( "SELECT TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, DATA_TYPE " "FROM INFORMATION_SCHEMA.COLUMNS" @@ -507,7 +534,7 @@ def columns_query(self): if self.connection.type == Definitions.snowflake: # 10k columns is a reasonable max for a single table return query + " LIMIT 10000;" - return query + ";" if self.connection.type != Definitions.druid else query + return query + ";" if self.connection.type not in Definitions.no_semicolon_warehouses else query def table_query(self): if self.database and self.connection.type == Definitions.snowflake: @@ -518,12 +545,12 @@ def table_query(self): "row_count as table_row_count, comment as comment " f"FROM {self.database}.INFORMATION_SCHEMA.TABLES" ) - elif self.connection.type in {Definitions.druid}: + elif self.connection.type in {Definitions.druid, Definitions.trino}: query = ( "SELECT TABLE_CATALOG as table_database, TABLE_SCHEMA as table_schema, " "TABLE_NAME as table_name, TABLE_TYPE as table_type " "FROM INFORMATION_SCHEMA.TABLES " - "WHERE TABLE_SCHEMA not in ('sys', 'INFORMATION_SCHEMA')" + "WHERE TABLE_SCHEMA not in ('sys', 'INFORMATION_SCHEMA', 'information_schema')" ) elif self.database and self.connection.type in { Definitions.redshift, @@ -558,7 +585,7 @@ def table_query(self): ) else: raise ValueError("You must specify at least 
a database for seeding") - return query + ";" if self.connection.type != Definitions.druid else query + return query + ";" if self.connection.type not in Definitions.no_semicolon_warehouses else query def run_query(self, query: str): if self.run_query_override: diff --git a/metrics_layer/core/model/definitions.py b/metrics_layer/core/model/definitions.py index bf8ed6a..460f4d0 100644 --- a/metrics_layer/core/model/definitions.py +++ b/metrics_layer/core/model/definitions.py @@ -8,6 +8,7 @@ class Definitions: duck_db = "DUCK_DB" databricks = "DATABRICKS" azure_synapse = "AZURE_SYNAPSE" + trino = "TRINO" supported_warehouses = [ snowflake, bigquery, @@ -18,8 +19,11 @@ class Definitions: duck_db, databricks, azure_synapse, + trino, ] symmetric_aggregates_supported_warehouses = [snowflake, redshift, bigquery, postgres, duck_db] + no_semicolon_warehouses = [druid, trino] + needs_datetime_cast = [bigquery, trino] supported_warehouses_text = ", ".join(supported_warehouses) does_not_exist = "__DOES_NOT_EXIST__" diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 902d2da..a09fc90 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -779,6 +779,7 @@ def _median_aggregate_sql(self, sql: str, query_type: str, functional_pk: str, a Definitions.bigquery, Definitions.sql_server, Definitions.azure_synapse, + Definitions.trino, }: raise QueryError( f"Median is not supported in {query_type}. Please choose another " @@ -920,6 +921,16 @@ def dimension_group_duration_sql(sql_start: str, sql_end: str, query_type: str, "quarters": lambda start, end: f"DATEDIFF('QUARTER', {start}, {end})", "years": lambda start, end: f"DATEDIFF('YEAR', {start}, {end})", }, + Definitions.trino: { + "seconds": lambda start, end: f"DATE_DIFF('SECOND', {start}, {end})", + "minutes": lambda start, end: f"DATE_DIFF('MINUTE', {start}, {end})", + "hours": lambda start, end: f"DATE_DIFF('HOUR', {start}, {end})", + "days": lambda start, end: f"DATE_DIFF('DAY', {start}, {end})", + "weeks": lambda start, end: f"DATE_DIFF('WEEK', {start}, {end})", + "months": lambda start, end: f"DATE_DIFF('MONTH', {start}, {end})", + "quarters": lambda start, end: f"DATE_DIFF('QUARTER', {start}, {end})", + "years": lambda start, end: f"DATE_DIFF('YEAR', {start}, {end})", + }, Definitions.postgres: { "seconds": lambda start, end: ( # noqa f"{meta_lookup[Definitions.postgres]['minutes'](start, end)} * 60 + DATE_PART('SECOND'," @@ -1121,6 +1132,47 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): "day_of_month": lambda s, qt: f"EXTRACT('DAY' FROM CAST({s} AS TIMESTAMP))", "day_of_year": lambda s, qt: f"EXTRACT('DOY' FROM CAST({s} AS TIMESTAMP))", }, + Definitions.trino: { + "raw": lambda s, qt: s, + "time": lambda s, qt: f"CAST({s} AS TIMESTAMP)", + "second": lambda s, qt: f"DATE_TRUNC('SECOND', CAST({s} AS TIMESTAMP))", + "minute": lambda s, qt: f"DATE_TRUNC('MINUTE', CAST({s} AS TIMESTAMP))", + "hour": lambda s, qt: f"DATE_TRUNC('HOUR', CAST({s} AS TIMESTAMP))", + "date": lambda s, qt: f"DATE_TRUNC('DAY', CAST({s} AS TIMESTAMP))", + "week": self._week_dimension_group_time_sql, + "month": lambda s, qt: f"DATE_TRUNC('MONTH', CAST({s} AS TIMESTAMP))", + "quarter": lambda s, qt: f"DATE_TRUNC('QUARTER', CAST({s} AS TIMESTAMP))", + "year": lambda s, qt: f"DATE_TRUNC('YEAR', CAST({s} AS TIMESTAMP))", + "fiscal_month": lambda s, qt: ( + f"DATE_TRUNC('MONTH', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" + ), + "fiscal_quarter": lambda s, qt: ( + 
f"DATE_TRUNC('QUARTER', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" + ), + "fiscal_year": lambda s, qt: ( + f"DATE_TRUNC('YEAR', CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" + ), + "week_index": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({self._apply_week_start_day_offset_only(s,qt)} AS TIMESTAMP))" + ), + "week_of_month": lambda s, qt: ( + f"EXTRACT(WEEK FROM CAST({s} AS TIMESTAMP)) - EXTRACT(WEEK FROM DATE_TRUNC('MONTH'," + f" CAST({s} AS TIMESTAMP))) + 1" + ), + "month_of_year_index": lambda s, qt: f"EXTRACT(MONTH FROM CAST({s} AS TIMESTAMP))", + "fiscal_month_of_year_index": lambda s, qt: ( + f"EXTRACT(MONTH FROM CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" + ), + "month_of_year": lambda s, qt: f"FORMAT_DATETIME(CAST({s} AS TIMESTAMP), 'MMM')", + "quarter_of_year": lambda s, qt: f"EXTRACT(QUARTER FROM CAST({s} AS TIMESTAMP))", + "fiscal_quarter_of_year": lambda s, qt: ( + f"EXTRACT(QUARTER FROM CAST({self._fiscal_offset_to_timestamp(s, qt)} AS TIMESTAMP))" + ), + "hour_of_day": lambda s, qt: f"EXTRACT(HOUR FROM CAST({s} AS TIMESTAMP))", + "day_of_week": lambda s, qt: f"FORMAT_DATETIME(CAST({s} AS TIMESTAMP), 'EEE')", + "day_of_month": lambda s, qt: f"EXTRACT(DAY FROM CAST({s} AS TIMESTAMP))", + "day_of_year": lambda s, qt: f"EXTRACT(DOY FROM CAST({s} AS TIMESTAMP))", + }, Definitions.druid: { "raw": lambda s, qt: s, "time": lambda s, qt: f"CAST({s} AS TIMESTAMP)", @@ -1297,8 +1349,8 @@ def _apply_timezone_to_sql(self, sql: str, timezone: str, query_type: str): return f"CAST(DATETIME(CAST({sql} AS TIMESTAMP), '{timezone}') AS {self.datatype.upper()})" elif query_type == Definitions.redshift: return f"CAST(CAST(CONVERT_TIMEZONE('{timezone}', {sql}) AS TIMESTAMP) AS {self.datatype.upper()})" # noqa - elif query_type in {Definitions.postgres, Definitions.duck_db}: - return f"CAST(CAST({sql} AS TIMESTAMP) at time zone 'utc' at time zone '{timezone}' AS {self.datatype.upper()})" # noqa + elif query_type in {Definitions.postgres, Definitions.duck_db, Definitions.trino}: + return f"CAST(CAST({sql} AS TIMESTAMP) at time zone 'UTC' at time zone '{timezone}' AS {self.datatype.upper()})" # noqa elif query_type in {Definitions.druid, Definitions.sql_server, Definitions.azure_synapse}: print( f"Warning: {query_type.title()} does not support timezone conversion. 
" @@ -1320,7 +1372,7 @@ def _fiscal_offset_to_timestamp(self, sql: str, query_type: str): Definitions.azure_synapse, }: return f"DATEADD(MONTH, {offset_in_months}, {sql})" - elif query_type in {Definitions.postgres, Definitions.duck_db, Definitions.druid}: + elif query_type in {Definitions.postgres, Definitions.duck_db, Definitions.druid, Definitions.trino}: return f"{sql} + INTERVAL '{offset_in_months}' MONTH" elif query_type == Definitions.bigquery: return f"DATE_ADD({sql}, INTERVAL {offset_in_months} MONTH)" @@ -1360,6 +1412,7 @@ def _week_sql_date_trunc(sql, offset, query_type): Definitions.druid, Definitions.duck_db, Definitions.databricks, + Definitions.trino, }: if offset is None: return f"DATE_TRUNC('WEEK', CAST({sql} AS TIMESTAMP))" @@ -1398,6 +1451,7 @@ def _apply_week_start_day_offset_only(self, sql: str, query_type: str): Definitions.druid, Definitions.duck_db, Definitions.databricks, + Definitions.trino, }: if offset is None: return f"DATE_TRUNC('DAY', CAST({sql} AS TIMESTAMP))" @@ -1411,7 +1465,7 @@ def _apply_week_start_day_offset_only(self, sql: str, query_type: str): return f"CAST({casted} AS DATETIME)" return f"CAST(DATEADD(DAY, {offset}, {casted}) AS DATETIME)" # noqa else: - raise QueryError(f"Unable to find a valid method for running week with query type {query_type}") + raise QueryError(f"Unable to find a valid method for running offset with query type {query_type}") def _error(self, element, error, extra: dict = {}): line, column = self.line_col(element) diff --git a/metrics_layer/core/parse/connections.py b/metrics_layer/core/parse/connections.py index 4b26e3f..086f55c 100644 --- a/metrics_layer/core/parse/connections.py +++ b/metrics_layer/core/parse/connections.py @@ -1,9 +1,10 @@ import json import os -from argparse import ArgumentError from copy import deepcopy +from typing import Any from metrics_layer.core.model.definitions import Definitions +from metrics_layer.core.sql.query_errors import ArgumentError class MetricsLayerConnectionError(Exception): @@ -20,6 +21,7 @@ class ConnectionType: duck_db = Definitions.duck_db databricks = Definitions.databricks azure_synapse = Definitions.azure_synapse + trino = Definitions.trino class BaseConnection: @@ -232,6 +234,51 @@ def __init__( self.schema = schema +class TrinoConnection(BaseConnection): + def __init__( + self, + name: str, + host: str, + user: str, + database: str, + schema: str = None, + scheme: str = "http", + auth: Any = None, + port: int = 8080, + **kwargs, + ) -> None: + self.type = ConnectionType.trino + self.name = name + self.host = host + self.user = user + self.port = port + self.auth = auth + self.scheme = scheme + self.database = database + self.schema = schema + + def to_dict(self): + base = { + "name": self.name, + "host": self.host, + "port": self.port, + "scheme": self.scheme, + "type": self.type, + } + if self.user: + base["user"] = self.user + if self.auth: + base["auth"] = self.auth + return base + + def printable_attributes(self): + attributes = deepcopy(self.to_dict()) + attributes.pop("password") + attributes["name"] = self.name + sort_order = ["name", "type", "host", "port", "user", "database", "scheme"] + return {key: attributes.get(key) for key in sort_order if attributes.get(key) is not None} + + class DruidConnection(BaseConnection): def __init__( self, @@ -401,4 +448,5 @@ def _convert_json_if_needed(creds: dict, kwargs: dict): ConnectionType.azure_synapse: AzureSynapseConnection, ConnectionType.duck_db: DuckDBConnection, ConnectionType.databricks: DatabricksConnection, + 
ConnectionType.trino: TrinoConnection, } diff --git a/metrics_layer/core/sql/arbitrary_merge_resolve.py b/metrics_layer/core/sql/arbitrary_merge_resolve.py index 638e96a..afed927 100644 --- a/metrics_layer/core/sql/arbitrary_merge_resolve.py +++ b/metrics_layer/core/sql/arbitrary_merge_resolve.py @@ -95,7 +95,7 @@ def get_query(self, semicolon: bool = True): } ) # Druid does not allow semicolons - if resolver.query_type == Definitions.druid: + if resolver.query_type in Definitions.no_semicolon_warehouses: semicolon = False query = merged_queries_resolver.get_query(semicolon=semicolon) @@ -179,7 +179,10 @@ def validate_arbitrary_merged_queries(merged_queries: list): raise QueryError( f"merged_queries must be a list of dictionaries. Item {i} is not a dictionary." ) - if not merged_query.get("metrics") or not merged_query.get("dimensions"): + if not all( + key in merged_query and isinstance(merged_query[key], list) + for key in ["metrics", "dimensions"] + ): raise QueryError(f"Each item in merged_queries must have 'metrics' and 'dimensions' keys.") if merged_query.get("funnel"): raise QueryError( diff --git a/metrics_layer/core/sql/merged_query_resolve.py b/metrics_layer/core/sql/merged_query_resolve.py index 8cdb5f2..c724dbe 100644 --- a/metrics_layer/core/sql/merged_query_resolve.py +++ b/metrics_layer/core/sql/merged_query_resolve.py @@ -101,7 +101,7 @@ def get_query(self, semicolon: bool = True): "project": self.project, } # Druid does not allow semicolons - if self.query_type == Definitions.druid: + if self.query_type in Definitions.no_semicolon_warehouses: semicolon = False merged_result_query = MetricsLayerMergedResultsQuery(query_config) diff --git a/metrics_layer/core/sql/query_dialect.py b/metrics_layer/core/sql/query_dialect.py index f9a8290..bcb0697 100644 --- a/metrics_layer/core/sql/query_dialect.py +++ b/metrics_layer/core/sql/query_dialect.py @@ -194,7 +194,8 @@ def _builder(cls, **kwargs) -> MSSQLQueryBuilderCorrectLimit: Definitions.postgres: PostgresQueryWithOrderByNullsOption, Definitions.druid: PostgresQuery, # druid core query logic is postgres compatible, minus null sorting Definitions.duck_db: PostgresQueryWithOrderByNullsOption, # duck db core query logic = postgres - Definitions.databricks: PostgresQueryWithOrderByNullsOption, # duck db core query logic = postgres + Definitions.databricks: PostgresQueryWithOrderByNullsOption, # databricks core query logic = postgres + Definitions.trino: PostgresQueryWithOrderByNullsOption, # trino core query logic = postgres Definitions.sql_server: MSSSQLQuery, Definitions.azure_synapse: MSSSQLQuery, # Azure Synapse is a T-SQL flavor } @@ -207,6 +208,7 @@ def _builder(cls, **kwargs) -> MSSQLQueryBuilderCorrectLimit: Definitions.databricks: "coalesce", Definitions.druid: "nvl", Definitions.duck_db: "coalesce", + Definitions.trino: "coalesce", Definitions.sql_server: "isnull", Definitions.azure_synapse: "isnull", } diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index 99291e8..e95cc5b 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -18,9 +18,8 @@ from metrics_layer.core.sql.query_errors import ParseError -def bigquery_cast(field, value): - cast_func = field.datatype.upper() - return LiteralValue(f"{cast_func}('{value}')") +def datatype_cast(field, value): + return LiteralValue(f"CAST('{value}' AS {field.datatype.upper()})") class FunnelFilterTypes: @@ -113,10 +112,10 @@ def validate(self, definition: Dict) -> None: except Exception: pass - 
if self.design.query_type == Definitions.bigquery and isinstance( + if self.design.query_type in Definitions.needs_datetime_cast and isinstance( definition["value"], datetime.datetime ): - definition["value"] = bigquery_cast(self.field, definition["value"]) + definition["value"] = datatype_cast(self.field, definition["value"]) if self.field.type == "yesno" and "False" in str(definition["value"]): definition["expression"] = "boolean_false" @@ -169,8 +168,8 @@ def criterion(self, field_sql: str) -> Criterion: "timezone": self.timezone, } for f in Filter(filter_dict).filter_dict(): - if self.query_type == Definitions.bigquery: - value = bigquery_cast(self.field, f["value"]) + if self.query_type in Definitions.needs_datetime_cast: + value = datatype_cast(self.field, f["value"]) else: value = f["value"] criteria.append(Filter.sql_query(field_sql, f["expression"], value, self.field.type)) diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index af66abc..df38d41 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -82,8 +82,8 @@ def get_query(self, semicolon: bool = True): query_definition, design=self.design, suppress_warnings=self.suppress_warnings ) - # Druid does not allow semicolons - if self.query_type == Definitions.druid: + # If query type does not allow semicolons + if self.query_type in Definitions.no_semicolon_warehouses: semicolon = False query = query_generator.get_query(semicolon=semicolon) diff --git a/pyproject.toml b/pyproject.toml index f84ddb0..4f98325 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.33" +version = "0.12.34" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/conftest.py b/tests/conftest.py index b53c13b..b38a5f2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -296,6 +296,47 @@ def seed_druid_tables_data(): return pd.DataFrame(records) +@pytest.fixture(scope="function") +def seed_trino_tables_data(): + order_records = [ + {"COLUMN_NAME": "ORDER_ID", "DATA_TYPE": "decimal"}, + {"COLUMN_NAME": "ORDER_CREATED_AT", "DATA_TYPE": "date"}, + {"COLUMN_NAME": "REVENUE", "DATA_TYPE": "double"}, + {"COLUMN_NAME": "ACQUISITION_DATE", "DATA_TYPE": "timestamp with time zone"}, + {"COLUMN_NAME": "ON_SOCIAL_NETWORK", "DATA_TYPE": "boolean"}, + {"COLUMN_NAME": "CAMPAIGN", "DATA_TYPE": "json"}, + {"COLUMN_NAME": "NEW_VS_REPEAT", "DATA_TYPE": "char"}, + {"COLUMN_NAME": "PRODUCT", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "DAY_OF_WEEK", "DATA_TYPE": "CHAR"}, + {"COLUMN_NAME": "TWITTER", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "EMAILS_FROM_US_IN_THE_LAST_WEEK", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "LAST_VIEWED_PAGE", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "CUSTOMER_ID", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "TOP_CUSTOMERS", "DATA_TYPE": "varchar"}, + ] + order_records = [{"TABLE_NAME": "orders", **o} for o in order_records] + session_records = [ + {"COLUMN_NAME": "SESSION_ID", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "SESSION_DATE", "DATA_TYPE": "date"}, + {"COLUMN_NAME": "ADD_TO_CART", "DATA_TYPE": "bigint"}, + {"COLUMN_NAME": "CONVERSION", "DATA_TYPE": "real"}, + {"COLUMN_NAME": "@CRoSSell P-roduct:", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "ACQUISITION_CHANNEL", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "SOCIAL_NETWORK", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "CAMPAIGN", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "NEW_VS_REPEAT", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "PRODUCT", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "DAY_OF_WEEK", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "TWITTER", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "EMAILS_FROM_US_IN_THE_LAST_WEEK", "DATA_TYPE": "varchar"}, + {"COLUMN_NAME": "LAST_VIEWED_PAGE", "DATA_TYPE": "varchar"}, + ] + session_records = [{"TABLE_NAME": "sessions", **o} for o in session_records] + all_records = order_records + session_records + records = [{"TABLE_CATALOG": "demo", "TABLE_SCHEMA": "analytics", **r} for r in all_records] + return pd.DataFrame(records) + + @pytest.fixture(scope="function") def seed_redshift_tables_data(): order_records = [ diff --git a/tests/test_arbitrary_merged_results.py b/tests/test_arbitrary_merged_results.py index 04fabfd..167f552 100644 --- a/tests/test_arbitrary_merged_results.py +++ b/tests/test_arbitrary_merged_results.py @@ -375,7 +375,7 @@ def test_query_merged_queries_dim_group(connection, query_type): product_group = "order_lines_product_name" lines_order_by = "" orders_order_by = "" - time = "DATE('2018-01-02 00:00:00')" + time = "CAST('2018-01-02 00:00:00' AS DATE)" condition = ( "CAST(merged_query_0.orders_order_date AS TIMESTAMP)=CAST(merged_query_1.order_lines_order_date" " AS TIMESTAMP)" diff --git a/tests/test_cli.py b/tests/test_cli.py index 5741b36..c06af27 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -50,6 +50,7 @@ def assert_called(data, path): (Definitions.postgres, "alternative_demo", "alternative_target", None), (Definitions.redshift, None, None, None), (Definitions.druid, None, None, None), + (Definitions.trino, None, None, None), (Definitions.sql_server, None, None, None), 
(Definitions.azure_synapse, None, None, None), ], @@ -67,6 +68,7 @@ def test_cli_seed_metrics_layer( seed_redshift_tables_data, seed_postgres_tables_data, seed_druid_tables_data, + seed_trino_tables_data, seed_sql_server_tables_data, seed_databricks_tables_data, ): @@ -99,6 +101,8 @@ def query_runner_mock(slf, query): return seed_bigquery_tables_data elif query_type == Definitions.druid: return seed_druid_tables_data + elif query_type == Definitions.trino: + return seed_trino_tables_data elif query_type in {Definitions.sql_server, Definitions.azure_synapse}: return seed_sql_server_tables_data elif query_type == Definitions.databricks: @@ -134,6 +138,7 @@ def yaml_dump_assert(slf, data, file): Definitions.azure_synapse, Definitions.sql_server, Definitions.databricks, + Definitions.trino, } and database_override is None ): @@ -169,6 +174,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert social["sql"] == '${TABLE}."ON_SOCIAL_NETWORK"' else: @@ -186,6 +193,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert acq_date["sql"] == '${TABLE}."ACQUISITION_DATE"' else: @@ -199,6 +208,7 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.sql_server, Definitions.azure_synapse, + Definitions.trino, }: assert date["datatype"] == "date" else: @@ -219,6 +229,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert date["sql"] == '${TABLE}."ORDER_CREATED_AT"' else: @@ -230,6 +242,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert new["sql"] == '${TABLE}."NEW_VS_REPEAT"' else: @@ -241,6 +255,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert num["sql"] == '${TABLE}."REVENUE"' else: @@ -260,6 +276,7 @@ def yaml_dump_assert(slf, data, file): Definitions.sql_server, Definitions.azure_synapse, Definitions.databricks, + Definitions.trino, } and database_override is None ): @@ -288,6 +305,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert cross_sell["sql"] == '${TABLE}."@CRoSSell P-roduct:"' else: @@ -302,6 +321,7 @@ def yaml_dump_assert(slf, data, file): Definitions.sql_server, Definitions.azure_synapse, Definitions.databricks, + Definitions.trino, }: assert date["datatype"] == "date" else: @@ -322,6 +342,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert date["sql"] == '${TABLE}."SESSION_DATE"' else: @@ -333,6 +355,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert pk["sql"] == '${TABLE}."SESSION_ID"' else: @@ -344,6 +368,8 @@ def yaml_dump_assert(slf, data, file): Definitions.druid, Definitions.duck_db, Definitions.postgres, + Definitions.trino, + Definitions.redshift, }: assert num["sql"] == '${TABLE}."CONVERSION"' else: diff --git a/tests/test_cumulative_query.py b/tests/test_cumulative_query.py index 5121202..66990da 100644 --- 
a/tests/test_cumulative_query.py +++ b/tests/test_cumulative_query.py @@ -55,7 +55,7 @@ def test_cumulative_query_metric_with_number(connection, query_type): date_spine = "select date from unnest(generate_date_array('2000-01-01', '2040-01-01')) as date" date_trunc = "CAST(DATE_TRUNC(CAST(orders.order_date AS DATE), DAY) AS TIMESTAMP)" order_by = "" - time = "TIMESTAMP('2018-01-02 00:00:00')" + time = "CAST('2018-01-02 00:00:00' AS TIMESTAMP)" else: date_spine = ( "select dateadd(day, seq4(), '2000-01-01') as date from table(generator(rowcount => 365*40))" @@ -272,8 +272,8 @@ def test_cumulative_query_metrics_and_time(connection, query_type): month_date_trunc = "CAST(DATE_TRUNC(CAST(orders.order_date AS DATE), MONTH) AS TIMESTAMP)" date_trunc_group = "orders_order_date" order_by = "" - time1 = "TIMESTAMP('2018-01-02 00:00:00')" - time2 = "TIMESTAMP('2019-01-01 00:00:00')" + time1 = "CAST('2018-01-02 00:00:00' AS TIMESTAMP)" + time2 = "CAST('2019-01-01 00:00:00' AS TIMESTAMP)" else: date_spine = ( "select dateadd(day, seq4(), '2000-01-01') as date from table(generator(rowcount => 365*40))" diff --git a/tests/test_dashboards.py b/tests/test_dashboards.py index 8b93094..9e325d4 100644 --- a/tests/test_dashboards.py +++ b/tests/test_dashboards.py @@ -200,255 +200,242 @@ def test_dashboard_filter_timezone(fresh_project, dt): {"field": "customers.gender", "value": "-Male, Female"}, ], ) -@pytest.mark.parametrize("dt", _generate_dt_params()) -def test_dashboard_filter_processing(connection, raw_filter_dict, dt): - with pendulum.travel_to(dt): - dash = connection.get_dashboard("sales_dashboard") - dash.filters = [raw_filter_dict] +# These tests took too long to run in CI, commenting out for now +# @pytest.mark.parametrize("dt", _generate_dt_params()) +def test_dashboard_filter_processing(connection, raw_filter_dict): + dash = connection.get_dashboard("sales_dashboard") + dash.filters = [raw_filter_dict] - expression_lookup = { - "Male": "equal_to", - "-Male": "not_equal_to", - "-Ma%": "does_not_start_with_case_insensitive", - "-%Ma": "does_not_end_with_case_insensitive", - "-%ale%": "does_not_contain_case_insensitive", - "Fe%": "starts_with_case_insensitive", - "%Fe": "ends_with_case_insensitive", - "%male%": "contains_case_insensitive", - "=100": "equal_to", - ">100": "greater_than", - "<100": "less_than", - "<=120": "less_or_equal_than", - ">=120": "greater_or_equal_than", - "!=120": "not_equal_to", - "<>120": "not_equal_to", - "Male, Female": "isin", - "-Male, -Female": "isnotin", - "-NULL": "is_not_null", - "NULL": "is_null", - "TRUE": "equal_to", - True: "equal_to", - False: "equal_to", - "after 2021-02-03": "greater_or_equal_than", - "2021-02-03 until 2022-02-03": "greater_or_equal_than", - "2021-02-03 until yesterday": "greater_or_equal_than", - "2021-02-03 until this month": "greater_or_equal_than", - "before 2021-02-03": "less_or_equal_than", - "on 2021-02-03": "equal_to", - "not on 2021-02-03": "not_equal_to", - "today": "greater_or_equal_than", - "yesterday": "greater_or_equal_than", - "this week": "greater_or_equal_than", - "this month": "greater_or_equal_than", - "this quarter": "greater_or_equal_than", - "this year": "greater_or_equal_than", - "last week": "greater_or_equal_than", - "last month": "greater_or_equal_than", - "last quarter": "greater_or_equal_than", - "last year": "greater_or_equal_than", - "week to date": "greater_or_equal_than", - "month to date": "greater_or_equal_than", - "quarter to date": "greater_or_equal_than", - "year to date": "greater_or_equal_than", - "last 
week to date": "greater_or_equal_than", - "52 weeks ago to date": "greater_or_equal_than", - "12 months ago to date": "greater_or_equal_than", - "1 year ago to date": "greater_or_equal_than", - "1 year ago for 3 months": "greater_or_equal_than", - "1 year ago for 30 days": "greater_or_equal_than", - "2 years ago": "greater_or_equal_than", - "3 months": "greater_or_equal_than", - "1 week": "greater_or_equal_than", - "2 days": "greater_or_equal_than", - "1 quarter": "greater_or_equal_than", - } - date_format = "%Y-%m-%dT%H:%M:%S" - value_lookup = { - "Male": "Male", - "-Male": "Male", - "-Ma%": "Ma", - "-%Ma": "Ma", - "-%ale%": "ale", - "Fe%": "Fe", - "%Fe": "Fe", - "%male%": "male", - "=100": 100, - ">100": 100, - "<100": 100, - "<=120": 120, - ">=120": 120, - "!=120": 120, - "<>120": 120, - "Male, Female": ["Male", "Female"], - "-Male, -Female": ["Male", "Female"], - "-NULL": None, - "NULL": None, - "TRUE": True, - True: True, - False: False, - "after 2021-02-03": "2021-02-03T00:00:00", - "2021-02-03 until 2022-02-03": "2021-02-03T00:00:00", - "2021-02-03 until yesterday": "2021-02-03T00:00:00", - "2021-02-03 until this month": "2021-02-03T00:00:00", - "before 2021-02-03": "2021-02-03T00:00:00", - "on 2021-02-03": "2021-02-03T00:00:00", - "not on 2021-02-03": "2021-02-03T00:00:00", - "today": pendulum.now("UTC").start_of("day").strftime(date_format), - "yesterday": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), - "this week": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), - "this month": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), - "this quarter": pendulum.now("UTC").subtract(months=0).first_of("quarter").strftime(date_format), - "this year": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), - "last week": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), - "last month": pendulum.now("UTC").subtract(months=1).start_of("month").strftime(date_format), - "last quarter": pendulum.now("UTC").subtract(months=3).first_of("quarter").strftime(date_format), - "last year": pendulum.now("UTC").subtract(years=1).start_of("year").strftime(date_format), - "week to date": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), - "month to date": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), - "quarter to date": pendulum.now("UTC") - .subtract(months=0) - .first_of("quarter") - .strftime(date_format), - "year to date": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), - "last week to date": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), - "52 weeks ago to date": pendulum.now("UTC") - .subtract(weeks=52) - .start_of("week") - .strftime(date_format), - "12 months ago to date": pendulum.now("UTC") - .subtract(months=12) - .start_of("month") - .strftime(date_format), - "1 year ago to date": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .strftime(date_format), - "1 year ago for 3 months": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .strftime(date_format), - "1 year ago for 30 days": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .strftime(date_format), - "2 years ago": pendulum.now("UTC").subtract(years=2).start_of("year").strftime(date_format), - "3 months": pendulum.now("UTC").subtract(months=2).start_of("month").strftime(date_format), - "1 week": 
pendulum.now("UTC").start_of("week").strftime(date_format), - "2 days": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), - "1 quarter": pendulum.now("UTC").first_of("quarter").strftime(date_format), - } - - second_value_lookup = { - "today": pendulum.now("UTC").end_of("day").strftime(date_format), - "yesterday": pendulum.now("UTC").subtract(days=1).end_of("day").strftime(date_format), - "this week": pendulum.now("UTC").end_of("week").strftime(date_format), - "this month": pendulum.now("UTC").end_of("month").strftime(date_format), - "this quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), - "this year": pendulum.now("UTC").end_of("year").strftime(date_format), - "last week": pendulum.now("UTC").subtract(weeks=1).end_of("week").strftime(date_format), - "last month": pendulum.now("UTC").subtract(months=1).end_of("month").strftime(date_format), - "last quarter": pendulum.now("UTC").subtract(months=3).last_of("quarter").strftime(date_format), - "last year": pendulum.now("UTC").subtract(years=1).end_of("year").strftime(date_format), - "2021-02-03 until 2022-02-03": "2022-02-03T00:00:00", - "2021-02-03 until yesterday": pendulum.now("UTC") - .subtract(days=1) - .end_of("day") - .strftime(date_format), - "2021-02-03 until this month": pendulum.now("UTC").end_of("month").strftime(date_format), - "week to date": pendulum.now("UTC") - .subtract(days=1 if pendulum.now("UTC").day_of_week != 0 else 0) - .end_of("day") - .strftime(date_format), - "month to date": pendulum.now("UTC") - .subtract(days=1 if pendulum.now("UTC").day != 1 else 0) - .end_of("day") - .strftime(date_format), - "quarter to date": pendulum.now("UTC") - .subtract( - days=1 - if pendulum.now("UTC").day != 1 or pendulum.now("UTC").month not in {1, 4, 7, 10} - else 0 - ) - .end_of("day") - .strftime(date_format), - "year to date": pendulum.now("UTC") - .subtract( - days=1 if pendulum.now("UTC").date() != pendulum.now("UTC").start_of("year").date() else 0 - ) - .end_of("day") - .strftime(date_format), - "last week to date": pendulum.now("UTC") - .subtract(weeks=1) - .start_of("week") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 - if pendulum.now("UTC").day_of_week != 0 - else 0 - ) - .end_of("day") - .strftime(date_format), - "52 weeks ago to date": pendulum.now("UTC") - .subtract(weeks=52) - .start_of("week") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 - if pendulum.now("UTC").day_of_week != 0 - else 0 - ) - .end_of("day") - .strftime(date_format), - "12 months ago to date": pendulum.now("UTC") - .subtract(months=12) - .start_of("month") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("month")).days - 1 - if pendulum.now("UTC").day != 1 - else 0 - ) - .end_of("day") - .strftime(date_format), - "1 year ago to date": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .add( - days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("year")).days - 1 - if pendulum.now("UTC").date() != pendulum.now("UTC").start_of("year").date() - else 0 - ) - .end_of("day") - .strftime(date_format), - "1 year ago for 3 months": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .add(months=2) - .end_of("month") - .strftime(date_format), - "1 year ago for 30 days": pendulum.now("UTC") - .subtract(years=1) - .start_of("year") - .add(days=29) - .end_of("day") - .strftime(date_format), - "2 years ago": pendulum.now("UTC").subtract(years=2).end_of("year").strftime(date_format), - "3 months": 
pendulum.now("UTC").end_of("month").strftime(date_format), - "1 week": pendulum.now("UTC").end_of("week").strftime(date_format), - "2 days": pendulum.now("UTC").end_of("day").strftime(date_format), - "1 quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), - } - if raw_filter_dict["value"] == "-Male, Female": - with pytest.raises(QueryError) as exc_info: - dash.parsed_filters() - assert exc_info.value - else: - parsed_filters = dash.parsed_filters() - assert len(parsed_filters) in {1, 2} - assert parsed_filters[0]["field"] == raw_filter_dict["field"] - assert parsed_filters[0]["expression"].value == expression_lookup[raw_filter_dict["value"]] - assert parsed_filters[0]["value"] == value_lookup[raw_filter_dict["value"]] - if raw_filter_dict["value"] in second_value_lookup or len(parsed_filters) == 2: - assert parsed_filters[1]["field"] == raw_filter_dict["field"] - assert parsed_filters[1]["expression"].value == "less_or_equal_than" - assert parsed_filters[1]["value"] == second_value_lookup[raw_filter_dict["value"]] + expression_lookup = { + "Male": "equal_to", + "-Male": "not_equal_to", + "-Ma%": "does_not_start_with_case_insensitive", + "-%Ma": "does_not_end_with_case_insensitive", + "-%ale%": "does_not_contain_case_insensitive", + "Fe%": "starts_with_case_insensitive", + "%Fe": "ends_with_case_insensitive", + "%male%": "contains_case_insensitive", + "=100": "equal_to", + ">100": "greater_than", + "<100": "less_than", + "<=120": "less_or_equal_than", + ">=120": "greater_or_equal_than", + "!=120": "not_equal_to", + "<>120": "not_equal_to", + "Male, Female": "isin", + "-Male, -Female": "isnotin", + "-NULL": "is_not_null", + "NULL": "is_null", + "TRUE": "equal_to", + True: "equal_to", + False: "equal_to", + "after 2021-02-03": "greater_or_equal_than", + "2021-02-03 until 2022-02-03": "greater_or_equal_than", + "2021-02-03 until yesterday": "greater_or_equal_than", + "2021-02-03 until this month": "greater_or_equal_than", + "before 2021-02-03": "less_or_equal_than", + "on 2021-02-03": "equal_to", + "not on 2021-02-03": "not_equal_to", + "today": "greater_or_equal_than", + "yesterday": "greater_or_equal_than", + "this week": "greater_or_equal_than", + "this month": "greater_or_equal_than", + "this quarter": "greater_or_equal_than", + "this year": "greater_or_equal_than", + "last week": "greater_or_equal_than", + "last month": "greater_or_equal_than", + "last quarter": "greater_or_equal_than", + "last year": "greater_or_equal_than", + "week to date": "greater_or_equal_than", + "month to date": "greater_or_equal_than", + "quarter to date": "greater_or_equal_than", + "year to date": "greater_or_equal_than", + "last week to date": "greater_or_equal_than", + "52 weeks ago to date": "greater_or_equal_than", + "12 months ago to date": "greater_or_equal_than", + "1 year ago to date": "greater_or_equal_than", + "1 year ago for 3 months": "greater_or_equal_than", + "1 year ago for 30 days": "greater_or_equal_than", + "2 years ago": "greater_or_equal_than", + "3 months": "greater_or_equal_than", + "1 week": "greater_or_equal_than", + "2 days": "greater_or_equal_than", + "1 quarter": "greater_or_equal_than", + } + date_format = "%Y-%m-%dT%H:%M:%S" + value_lookup = { + "Male": "Male", + "-Male": "Male", + "-Ma%": "Ma", + "-%Ma": "Ma", + "-%ale%": "ale", + "Fe%": "Fe", + "%Fe": "Fe", + "%male%": "male", + "=100": 100, + ">100": 100, + "<100": 100, + "<=120": 120, + ">=120": 120, + "!=120": 120, + "<>120": 120, + "Male, Female": ["Male", "Female"], + "-Male, -Female": ["Male", "Female"], + 
"-NULL": None, + "NULL": None, + "TRUE": True, + True: True, + False: False, + "after 2021-02-03": "2021-02-03T00:00:00", + "2021-02-03 until 2022-02-03": "2021-02-03T00:00:00", + "2021-02-03 until yesterday": "2021-02-03T00:00:00", + "2021-02-03 until this month": "2021-02-03T00:00:00", + "before 2021-02-03": "2021-02-03T00:00:00", + "on 2021-02-03": "2021-02-03T00:00:00", + "not on 2021-02-03": "2021-02-03T00:00:00", + "today": pendulum.now("UTC").start_of("day").strftime(date_format), + "yesterday": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), + "this week": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), + "this month": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), + "this quarter": pendulum.now("UTC").subtract(months=0).first_of("quarter").strftime(date_format), + "this year": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), + "last week": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), + "last month": pendulum.now("UTC").subtract(months=1).start_of("month").strftime(date_format), + "last quarter": pendulum.now("UTC").subtract(months=3).first_of("quarter").strftime(date_format), + "last year": pendulum.now("UTC").subtract(years=1).start_of("year").strftime(date_format), + "week to date": pendulum.now("UTC").subtract(weeks=0).start_of("week").strftime(date_format), + "month to date": pendulum.now("UTC").subtract(months=0).start_of("month").strftime(date_format), + "quarter to date": pendulum.now("UTC").subtract(months=0).first_of("quarter").strftime(date_format), + "year to date": pendulum.now("UTC").subtract(years=0).start_of("year").strftime(date_format), + "last week to date": pendulum.now("UTC").subtract(weeks=1).start_of("week").strftime(date_format), + "52 weeks ago to date": pendulum.now("UTC").subtract(weeks=52).start_of("week").strftime(date_format), + "12 months ago to date": pendulum.now("UTC") + .subtract(months=12) + .start_of("month") + .strftime(date_format), + "1 year ago to date": pendulum.now("UTC").subtract(years=1).start_of("year").strftime(date_format), + "1 year ago for 3 months": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .strftime(date_format), + "1 year ago for 30 days": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .strftime(date_format), + "2 years ago": pendulum.now("UTC").subtract(years=2).start_of("year").strftime(date_format), + "3 months": pendulum.now("UTC").subtract(months=2).start_of("month").strftime(date_format), + "1 week": pendulum.now("UTC").start_of("week").strftime(date_format), + "2 days": pendulum.now("UTC").subtract(days=1).start_of("day").strftime(date_format), + "1 quarter": pendulum.now("UTC").first_of("quarter").strftime(date_format), + } + + second_value_lookup = { + "today": pendulum.now("UTC").end_of("day").strftime(date_format), + "yesterday": pendulum.now("UTC").subtract(days=1).end_of("day").strftime(date_format), + "this week": pendulum.now("UTC").end_of("week").strftime(date_format), + "this month": pendulum.now("UTC").end_of("month").strftime(date_format), + "this quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), + "this year": pendulum.now("UTC").end_of("year").strftime(date_format), + "last week": pendulum.now("UTC").subtract(weeks=1).end_of("week").strftime(date_format), + "last month": pendulum.now("UTC").subtract(months=1).end_of("month").strftime(date_format), + "last quarter": 
pendulum.now("UTC").subtract(months=3).last_of("quarter").strftime(date_format), + "last year": pendulum.now("UTC").subtract(years=1).end_of("year").strftime(date_format), + "2021-02-03 until 2022-02-03": "2022-02-03T00:00:00", + "2021-02-03 until yesterday": pendulum.now("UTC") + .subtract(days=1) + .end_of("day") + .strftime(date_format), + "2021-02-03 until this month": pendulum.now("UTC").end_of("month").strftime(date_format), + "week to date": pendulum.now("UTC") + .subtract(days=1 if pendulum.now("UTC").day_of_week != 0 else 0) + .end_of("day") + .strftime(date_format), + "month to date": pendulum.now("UTC") + .subtract(days=1 if pendulum.now("UTC").day != 1 else 0) + .end_of("day") + .strftime(date_format), + "quarter to date": pendulum.now("UTC") + .subtract( + days=1 if pendulum.now("UTC").day != 1 or pendulum.now("UTC").month not in {1, 4, 7, 10} else 0 + ) + .end_of("day") + .strftime(date_format), + "year to date": pendulum.now("UTC") + .subtract(days=1 if pendulum.now("UTC").date() != pendulum.now("UTC").start_of("year").date() else 0) + .end_of("day") + .strftime(date_format), + "last week to date": pendulum.now("UTC") + .subtract(weeks=1) + .start_of("week") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 + if pendulum.now("UTC").day_of_week != 0 + else 0 + ) + .end_of("day") + .strftime(date_format), + "52 weeks ago to date": pendulum.now("UTC") + .subtract(weeks=52) + .start_of("week") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("week")).days - 1 + if pendulum.now("UTC").day_of_week != 0 + else 0 + ) + .end_of("day") + .strftime(date_format), + "12 months ago to date": pendulum.now("UTC") + .subtract(months=12) + .start_of("month") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("month")).days - 1 + if pendulum.now("UTC").day != 1 + else 0 + ) + .end_of("day") + .strftime(date_format), + "1 year ago to date": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .add( + days=(pendulum.now("UTC") - pendulum.now("UTC").start_of("year")).days - 1 + if pendulum.now("UTC").date() != pendulum.now("UTC").start_of("year").date() + else 0 + ) + .end_of("day") + .strftime(date_format), + "1 year ago for 3 months": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .add(months=2) + .end_of("month") + .strftime(date_format), + "1 year ago for 30 days": pendulum.now("UTC") + .subtract(years=1) + .start_of("year") + .add(days=29) + .end_of("day") + .strftime(date_format), + "2 years ago": pendulum.now("UTC").subtract(years=2).end_of("year").strftime(date_format), + "3 months": pendulum.now("UTC").end_of("month").strftime(date_format), + "1 week": pendulum.now("UTC").end_of("week").strftime(date_format), + "2 days": pendulum.now("UTC").end_of("day").strftime(date_format), + "1 quarter": pendulum.now("UTC").last_of("quarter").strftime(date_format), + } + if raw_filter_dict["value"] == "-Male, Female": + with pytest.raises(QueryError) as exc_info: + dash.parsed_filters() + assert exc_info.value + else: + parsed_filters = dash.parsed_filters() + assert len(parsed_filters) in {1, 2} + assert parsed_filters[0]["field"] == raw_filter_dict["field"] + assert parsed_filters[0]["expression"].value == expression_lookup[raw_filter_dict["value"]] + assert parsed_filters[0]["value"] == value_lookup[raw_filter_dict["value"]] + if raw_filter_dict["value"] in second_value_lookup or len(parsed_filters) == 2: + assert parsed_filters[1]["field"] == raw_filter_dict["field"] + assert 
parsed_filters[1]["expression"].value == "less_or_equal_than" + assert parsed_filters[1]["value"] == second_value_lookup[raw_filter_dict["value"]] @pytest.mark.parametrize( diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 37d4904..9ba10fc 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -94,8 +94,8 @@ def test_query_bigquery_week_filter_type_conversion(connection, field): correct = ( "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" f" order_lines_total_item_revenue FROM analytics.order_line_items order_lines {join}WHERE" - f" CAST(DATE_TRUNC(CAST({sql_field} AS DATE), WEEK) AS {cast_as})>{cast_as}('2021-08-04 00:00:00')" - " GROUP BY order_lines_channel;" + f" CAST(DATE_TRUNC(CAST({sql_field} AS DATE), WEEK) AS {cast_as})>CAST('2021-08-04 00:00:00' AS" + f" {cast_as}) GROUP BY order_lines_channel;" ) assert query == correct @@ -824,6 +824,7 @@ def test_join_graph_raise_unjoinable_error(connection): [ Definitions.snowflake, Definitions.druid, + Definitions.trino, Definitions.redshift, Definitions.bigquery, Definitions.sql_server, diff --git a/tests/test_merged_results.py b/tests/test_merged_results.py index 89044db..fe2be97 100644 --- a/tests/test_merged_results.py +++ b/tests/test_merged_results.py @@ -16,6 +16,7 @@ Definitions.redshift, Definitions.duck_db, Definitions.postgres, + Definitions.trino, Definitions.databricks, ], ) @@ -34,7 +35,7 @@ def test_merged_result_query_additional_metric(connection, query_type): order_by = "" session_by = "" - elif query_type in {Definitions.postgres, Definitions.databricks, Definitions.duck_db}: + elif query_type in {Definitions.postgres, Definitions.trino, Definitions.databricks, Definitions.duck_db}: order_date = "DATE_TRUNC('MONTH', CAST(order_lines.order_date AS TIMESTAMP))" session_date = "DATE_TRUNC('MONTH', CAST(sessions.session_date AS TIMESTAMP))" if query_type == Definitions.duck_db: @@ -56,7 +57,7 @@ def test_merged_result_query_additional_metric(connection, query_type): if query_type == Definitions.redshift: ifnull = "nvl" - elif query_type in {Definitions.postgres, Definitions.databricks, Definitions.duck_db}: + elif query_type in {Definitions.postgres, Definitions.trino, Definitions.databricks, Definitions.duck_db}: ifnull = "coalesce" else: ifnull = "ifnull" @@ -80,7 +81,7 @@ def test_merged_result_query_additional_metric(connection, query_type): f"{ifnull}({cte_2}.sessions_session_month, {cte_1}.order_lines_order_month) as sessions_session_month," # noqa f"order_lines_total_item_revenue / nullif(sessions_number_of_sessions, 0) as order_lines_revenue_per_session " # noqa f"FROM {cte_1} FULL OUTER JOIN {cte_2} " - f"ON {on_statement};" + f"ON {on_statement}{';' if query_type != Definitions.trino else ''}" ) assert query == correct diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index 2e9fcd9..b9a628e 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -251,6 +251,8 @@ def test_simple_query_field_to_field_filter(connections): ("min_revenue", Definitions.redshift), ("max_revenue", Definitions.postgres), ("min_revenue", Definitions.postgres), + ("max_revenue", Definitions.trino), + ("min_revenue", Definitions.trino), ("max_revenue", Definitions.bigquery), ("min_revenue", Definitions.bigquery), ("max_revenue", Definitions.duck_db), @@ -270,7 +272,7 @@ def test_simple_query_min_max(connections, metric, query_type): else: order_by = "" - if query_type == Definitions.druid: + if query_type in 
Definitions.no_semicolon_warehouses: semi = "" if query_type == Definitions.bigquery: @@ -293,6 +295,7 @@ def test_simple_query_min_max(connections, metric, query_type): (Definitions.azure_synapse), (Definitions.redshift), (Definitions.postgres), + (Definitions.trino), (Definitions.bigquery), (Definitions.duck_db), ], @@ -309,7 +312,7 @@ def test_simple_query_count_distinct(connections, query_type): else: order_by = "" - if query_type == Definitions.druid: + if query_type in Definitions.no_semicolon_warehouses: semi = "" if query_type == Definitions.bigquery: @@ -400,6 +403,9 @@ def test_simple_query_alias_keyword(connections): ("order", "date", Definitions.snowflake), ("order", "week", Definitions.snowflake), ("previous_order", "date", Definitions.snowflake), + ("order", "date", Definitions.trino), + ("order", "week", Definitions.trino), + ("previous_order", "date", Definitions.trino), ("order", "date", Definitions.databricks), ("order", "week", Definitions.databricks), ("previous_order", "date", Definitions.databricks), @@ -439,7 +445,7 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s query_type=query_type, ) - semi = ";" + semi = ";" if query_type not in Definitions.no_semicolon_warehouses else "" date_format = "%Y-%m-%dT%H:%M:%S" start = pendulum.now("America/New_York").start_of("month").strftime(date_format) if pendulum.now("America/New_York").day == 1: @@ -489,9 +495,9 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s f"CAST(CAST(CAST(CONVERT_TIMEZONE('America/New_York', simple.order_date) AS TIMESTAMP_NTZ) AS TIMESTAMP) AS TIMESTAMP))<='{end}'" # noqa ) order_by = "" - elif query_type in {Definitions.postgres, Definitions.duck_db}: + elif query_type in {Definitions.postgres, Definitions.trino, Definitions.duck_db}: if field == "previous_order": - if query_type == Definitions.duck_db: + if query_type in {Definitions.duck_db, Definitions.trino}: result_lookup = {"date": "DATE_TRUNC('DAY', CAST(simple.previous_order_date AS TIMESTAMP))"} else: result_lookup = {"date": "DATE_TRUNC('DAY', simple.previous_order_date)"} @@ -499,20 +505,27 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s else: result_lookup = { "date": ( # noqa - "DATE_TRUNC('DAY', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'utc' at" + "DATE_TRUNC('DAY', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'UTC' at" " time zone 'America/New_York' AS TIMESTAMP) AS TIMESTAMP))" ), "week": ( # noqa - "DATE_TRUNC('WEEK', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'utc' at" + "DATE_TRUNC('WEEK', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'UTC' at" " time zone 'America/New_York' AS TIMESTAMP) AS TIMESTAMP) + INTERVAL '1' DAY) - INTERVAL" " '1' DAY" ), } - where = ( - "WHERE DATE_TRUNC('DAY', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'utc' at time zone 'America/New_York' AS TIMESTAMP) " # noqa - f"AS TIMESTAMP))>='{start}' AND DATE_TRUNC('DAY', " - f"CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'utc' at time zone 'America/New_York' AS TIMESTAMP) AS TIMESTAMP))<='{end}'" # noqa - ) + if query_type == Definitions.trino: + where = ( + "WHERE DATE_TRUNC('DAY', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'UTC' at time zone 'America/New_York' AS TIMESTAMP) " # noqa + f"AS TIMESTAMP))>=CAST('{start}' AS TIMESTAMP) AND DATE_TRUNC('DAY', " + f"CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'UTC' at time zone 
'America/New_York' AS TIMESTAMP) AS TIMESTAMP))<=CAST('{end}' AS TIMESTAMP)" # noqa + ) + else: + where = ( + "WHERE DATE_TRUNC('DAY', CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'UTC' at time zone 'America/New_York' AS TIMESTAMP) " # noqa + f"AS TIMESTAMP))>='{start}' AND DATE_TRUNC('DAY', " + f"CAST(CAST(CAST(simple.order_date AS TIMESTAMP) at time zone 'UTC' at time zone 'America/New_York' AS TIMESTAMP) AS TIMESTAMP))<='{end}'" # noqa + ) if query_type == Definitions.duck_db: order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" else: @@ -530,8 +543,8 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s } where = ( "WHERE CAST(DATETIME(CAST(simple.order_date AS TIMESTAMP), 'America/New_York')" - f" AS TIMESTAMP)>=TIMESTAMP('{start}') AND CAST(DATETIME(CAST(simple.order_date " - f"AS TIMESTAMP), 'America/New_York') AS TIMESTAMP)<=TIMESTAMP('{end}')" + f" AS TIMESTAMP)>=CAST('{start}' AS TIMESTAMP) AND CAST(DATETIME(CAST(simple.order_date " + f"AS TIMESTAMP), 'America/New_York') AS TIMESTAMP)<=CAST('{end}' AS TIMESTAMP)" ) order_by = "" elif query_type == Definitions.druid: @@ -754,6 +767,30 @@ def test_simple_query_dimension_group_timezone(connections, field: str, group: s ("day_of_week", Definitions.postgres), ("day_of_month", Definitions.postgres), ("day_of_year", Definitions.postgres), + ("time", Definitions.trino), + ("second", Definitions.trino), + ("minute", Definitions.trino), + ("hour", Definitions.trino), + ("date", Definitions.trino), + ("week", Definitions.trino), + ("month", Definitions.trino), + ("quarter", Definitions.trino), + ("year", Definitions.trino), + ("fiscal_month", Definitions.trino), + ("fiscal_quarter", Definitions.trino), + ("fiscal_year", Definitions.trino), + ("fiscal_month_of_year_index", Definitions.trino), + ("fiscal_month_index", Definitions.trino), + ("fiscal_quarter_of_year", Definitions.trino), + ("week_index", Definitions.trino), + ("week_of_month", Definitions.trino), + ("month_of_year_index", Definitions.trino), + ("month_of_year", Definitions.trino), + ("quarter_of_year", Definitions.trino), + ("hour_of_day", Definitions.trino), + ("day_of_week", Definitions.trino), + ("day_of_month", Definitions.trino), + ("day_of_year", Definitions.trino), ("time", Definitions.duck_db), ("second", Definitions.duck_db), ("minute", Definitions.duck_db), @@ -813,7 +850,7 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): ) field = project.get_field(f"order_{group}") - semi = ";" + semi = ";" if query_type not in Definitions.no_semicolon_warehouses else "" if query_type in {Definitions.snowflake, Definitions.redshift}: result_lookup = { "time": "CAST(simple.order_date AS TIMESTAMP)", @@ -899,7 +936,13 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): } order_by = "" - elif query_type in {Definitions.postgres, Definitions.databricks, Definitions.druid, Definitions.duck_db}: + elif query_type in { + Definitions.trino, + Definitions.postgres, + Definitions.databricks, + Definitions.druid, + Definitions.duck_db, + }: result_lookup = { "time": "CAST(simple.order_date AS TIMESTAMP)", "second": "DATE_TRUNC('SECOND', CAST(simple.order_date AS TIMESTAMP))", @@ -943,6 +986,13 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "day_of_month": "EXTRACT('DAY' FROM CAST(simple.order_date AS TIMESTAMP))", "day_of_year": "EXTRACT('DOY' FROM CAST(simple.order_date AS TIMESTAMP))", } + if query_type == Definitions.trino: + 
result_lookup["month_of_year"] = "FORMAT_DATETIME(CAST(simple.order_date AS TIMESTAMP), 'MMM')" + result_lookup["hour_of_day"] = "EXTRACT(HOUR FROM CAST(simple.order_date AS TIMESTAMP))" + result_lookup["day_of_week"] = "FORMAT_DATETIME(CAST(simple.order_date AS TIMESTAMP), 'EEE')" + result_lookup["day_of_month"] = "EXTRACT(DAY FROM CAST(simple.order_date AS TIMESTAMP))" + result_lookup["day_of_year"] = "EXTRACT(DOY FROM CAST(simple.order_date AS TIMESTAMP))" + if query_type == Definitions.duck_db: order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" else: @@ -1105,6 +1155,14 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): ("month", Definitions.postgres), ("quarter", Definitions.postgres), ("year", Definitions.postgres), + ("second", Definitions.trino), + ("minute", Definitions.trino), + ("hour", Definitions.trino), + ("day", Definitions.trino), + ("week", Definitions.trino), + ("month", Definitions.trino), + ("quarter", Definitions.trino), + ("year", Definitions.trino), ("second", Definitions.duck_db), ("minute", Definitions.duck_db), ("hour", Definitions.duck_db), @@ -1157,6 +1215,19 @@ def test_simple_query_dimension_group_interval(connections, interval: str, query "year": "DATEDIFF('YEAR', simple.view_date, simple.order_date)", } order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" + elif query_type == Definitions.trino: + result_lookup = { + "second": "DATE_DIFF('SECOND', simple.view_date, simple.order_date)", + "minute": "DATE_DIFF('MINUTE', simple.view_date, simple.order_date)", + "hour": "DATE_DIFF('HOUR', simple.view_date, simple.order_date)", + "day": "DATE_DIFF('DAY', simple.view_date, simple.order_date)", + "week": "DATE_DIFF('WEEK', simple.view_date, simple.order_date)", + "month": "DATE_DIFF('MONTH', simple.view_date, simple.order_date)", + "quarter": "DATE_DIFF('QUARTER', simple.view_date, simple.order_date)", + "year": "DATE_DIFF('YEAR', simple.view_date, simple.order_date)", + } + order_by = "" + semi = "" elif query_type == Definitions.druid: result_lookup = { "second": "TIMESTAMPDIFF(SECOND, simple.view_date, simple.order_date)", @@ -1320,6 +1391,7 @@ def test_simple_query_custom_metric(connections): ("order_date", "greater_than", "2021-08-04", Definitions.redshift), ("order_date", "greater_than", "2021-08-04", Definitions.bigquery), ("order_date", "greater_than", "2021-08-04", Definitions.duck_db), + ("order_date", "greater_than", "2021-08-04", Definitions.trino), ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.snowflake), ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.databricks), ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.druid), @@ -1327,6 +1399,7 @@ def test_simple_query_custom_metric(connections): ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.redshift), ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.bigquery), ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.duck_db), + ("order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.trino), ("previous_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.snowflake), ("previous_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.databricks), ("previous_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.druid), @@ -1341,6 +1414,7 @@ def 
test_simple_query_custom_metric(connections): ("first_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.redshift), ("first_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.bigquery), ("first_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.duck_db), + ("first_order_date", "greater_than", datetime(year=2021, month=8, day=4), Definitions.trino), ("order_date", "matches", "last week", Definitions.snowflake), ("order_date", "matches", "last year", Definitions.snowflake), ("order_date", "matches", "last year", Definitions.databricks), @@ -1349,6 +1423,7 @@ def test_simple_query_custom_metric(connections): ("order_date", "matches", "last year", Definitions.redshift), ("order_date", "matches", "last year", Definitions.bigquery), ("order_date", "matches", "last year", Definitions.duck_db), + ("order_date", "matches", "last year", Definitions.trino), ], ) @pytest.mark.query @@ -1367,13 +1442,14 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value Definitions.databricks, Definitions.druid, Definitions.sql_server, + Definitions.trino, }: order_by = "" else: order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" semi = ";" - if query_type == Definitions.druid: + if query_type in {Definitions.druid, Definitions.trino}: semi = "" sf_or_rs = query_type in { Definitions.snowflake, @@ -1381,8 +1457,9 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value Definitions.druid, Definitions.duck_db, Definitions.databricks, + Definitions.trino, } - if query_type not in {Definitions.druid, Definitions.duck_db, Definitions.databricks}: + if query_type not in {Definitions.druid, Definitions.duck_db, Definitions.databricks, Definitions.trino}: field_id = f"simple.{field}" else: field_id = f"CAST(simple.{field} AS TIMESTAMP)" @@ -1394,7 +1471,7 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value query_type == Definitions.sql_server and isinstance(value, datetime) and expression == "greater_than" ): condition = f"CAST(CAST({field_id} AS DATE) AS DATETIME)>'2021-08-04T00:00:00'" - elif sf_or_rs and isinstance(value, datetime): + elif sf_or_rs and query_type != Definitions.trino and isinstance(value, datetime): condition = f"DATE_TRUNC('DAY', {field_id})>'2021-08-04T00:00:00'" elif ( query_type == Definitions.bigquery @@ -1404,17 +1481,27 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value ): condition = "CAST(DATE_TRUNC(CAST(simple.order_date AS DATE), DAY) AS TIMESTAMP)>'2021-08-04'" elif query_type == Definitions.bigquery and isinstance(value, datetime) and field == "order_date": - condition = "CAST(DATE_TRUNC(CAST(simple.order_date AS DATE), DAY) AS TIMESTAMP)>TIMESTAMP('2021-08-04 00:00:00')" # noqa + condition = "CAST(DATE_TRUNC(CAST(simple.order_date AS DATE), DAY) AS TIMESTAMP)>CAST('2021-08-04 00:00:00' AS TIMESTAMP)" # noqa + elif query_type == Definitions.trino and isinstance(value, datetime) and field == "order_date": + condition = "DATE_TRUNC('DAY', CAST(simple.order_date AS TIMESTAMP))>CAST('2021-08-04 00:00:00' AS TIMESTAMP)" # noqa elif ( query_type == Definitions.bigquery and isinstance(value, datetime) and field == "previous_order_date" ): - condition = "CAST(DATE_TRUNC(CAST(simple.previous_order_date AS DATE), DAY) AS DATETIME)>DATETIME('2021-08-04 00:00:00')" # noqa + condition = "CAST(DATE_TRUNC(CAST(simple.previous_order_date AS DATE), DAY) AS DATETIME)>CAST('2021-08-04 00:00:00' AS 
DATETIME)" # noqa + elif query_type == Definitions.trino and isinstance(value, datetime) and field == "first_order_date": + condition = "DATE_TRUNC('DAY', CAST(simple.first_order_date AS TIMESTAMP))>CAST('2021-08-04 00:00:00' AS DATE)" # noqa elif query_type == Definitions.bigquery and isinstance(value, datetime) and field == "first_order_date": - condition = "CAST(DATE_TRUNC(CAST(simple.first_order_date AS DATE), DAY) AS DATE)>DATE('2021-08-04 00:00:00')" # noqa + condition = "CAST(DATE_TRUNC(CAST(simple.first_order_date AS DATE), DAY) AS DATE)>CAST('2021-08-04 00:00:00' AS DATE)" # noqa elif sf_or_rs and expression == "matches" and value == "last year": last_year = pendulum.now("UTC").year - 1 - condition = f"DATE_TRUNC('DAY', {field_id})>='{last_year}-01-01T00:00:00' AND " - condition += f"DATE_TRUNC('DAY', {field_id})<='{last_year}-12-31T23:59:59'" + if query_type == Definitions.trino: + start_of = f"CAST('{last_year}-01-01T00:00:00' AS TIMESTAMP)" + end_of = f"CAST('{last_year}-12-31T23:59:59' AS TIMESTAMP)" + else: + start_of = f"'{last_year}-01-01T00:00:00'" + end_of = f"'{last_year}-12-31T23:59:59'" + condition = f"DATE_TRUNC('DAY', {field_id})>={start_of} AND " + condition += f"DATE_TRUNC('DAY', {field_id})<={end_of}" elif query_type == Definitions.sql_server and expression == "matches" and value == "last year": last_year = pendulum.now("UTC").year - 1 condition = f"CAST(CAST({field_id} AS DATE) AS DATETIME)>='{last_year}-01-01T00:00:00' AND " @@ -1425,14 +1512,20 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value pendulum.week_ends_at(pendulum.SATURDAY) start_of = pendulum.now("UTC").subtract(days=7).start_of("week").strftime(date_format) end_of = pendulum.now("UTC").subtract(days=7).end_of("week").strftime(date_format) - condition = f"DATE_TRUNC('DAY', {field_id})>='{start_of}' AND " - condition += f"DATE_TRUNC('DAY', {field_id})<='{end_of}'" + if query_type == Definitions.trino: + start_of = f"CAST('{start_of}' AS TIMESTAMP)" + end_of = f"CAST('{end_of}' AS TIMESTAMP)" + else: + start_of = f"'{start_of}'" + end_of = f"'{end_of}'" + condition = f"DATE_TRUNC('DAY', {field_id})>={start_of} AND " + condition += f"DATE_TRUNC('DAY', {field_id})<={end_of}" pendulum.week_starts_at(pendulum.MONDAY) pendulum.week_ends_at(pendulum.SUNDAY) elif query_type == Definitions.bigquery and expression == "matches": last_year = pendulum.now("UTC").year - 1 - condition = f"CAST(DATE_TRUNC(CAST(simple.{field} AS DATE), DAY) AS TIMESTAMP)>=TIMESTAMP('{last_year}-01-01T00:00:00') AND " # noqa - condition += f"CAST(DATE_TRUNC(CAST(simple.{field} AS DATE), DAY) AS TIMESTAMP)<=TIMESTAMP('{last_year}-12-31T23:59:59')" # noqa + condition = f"CAST(DATE_TRUNC(CAST(simple.{field} AS DATE), DAY) AS TIMESTAMP)>=CAST('{last_year}-01-01T00:00:00' AS TIMESTAMP) AND " # noqa + condition += f"CAST(DATE_TRUNC(CAST(simple.{field} AS DATE), DAY) AS TIMESTAMP)<=CAST('{last_year}-12-31T23:59:59' AS TIMESTAMP)" # noqa correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM " @@ -1471,10 +1564,12 @@ def test_simple_query_convert_tz_alias_no(connections): ("channel", "contains_case_insensitive", "Email", Definitions.databricks), ("channel", "contains_case_insensitive", "Email", Definitions.druid), ("channel", "contains_case_insensitive", "Email", Definitions.sql_server), + ("channel", "contains_case_insensitive", "Email", Definitions.trino), ("channel", "does_not_contain_case_insensitive", "Email", Definitions.snowflake), ("channel", 
"does_not_contain_case_insensitive", "Email", Definitions.databricks), ("channel", "does_not_contain_case_insensitive", "Email", Definitions.druid), ("channel", "does_not_contain_case_insensitive", "Email", Definitions.sql_server), + ("channel", "does_not_contain_case_insensitive", "Email", Definitions.trino), ("channel", "starts_with", "Email", Definitions.snowflake), ("channel", "ends_with", "Email", Definitions.snowflake), ("channel", "does_not_start_with", "Email", Definitions.snowflake), @@ -1488,10 +1583,13 @@ def test_simple_query_convert_tz_alias_no(connections): ("is_valid_order", "is_not_null", None, Definitions.databricks), ("is_valid_order", "is_not_null", None, Definitions.druid), ("is_valid_order", "is_not_null", None, Definitions.sql_server), + ("is_valid_order", "is_not_null", None, Definitions.trino), ("is_valid_order", "boolean_true", None, Definitions.snowflake), ("is_valid_order", "boolean_false", None, Definitions.snowflake), ("is_valid_order", "boolean_true", None, Definitions.sql_server), ("is_valid_order", "boolean_false", None, Definitions.sql_server), + ("is_valid_order", "boolean_true", None, Definitions.trino), + ("is_valid_order", "boolean_false", None, Definitions.trino), ], ) @pytest.mark.query @@ -1509,7 +1607,7 @@ def test_simple_query_with_where_dict(connections, field_name, filter_type, valu if query_type == Definitions.snowflake: order_by = " ORDER BY simple_total_revenue DESC NULLS LAST" semi = ";" - elif query_type == Definitions.druid: + elif query_type in {Definitions.druid, Definitions.trino}: order_by = "" semi = "" else: @@ -1649,6 +1747,7 @@ def test_simple_query_with_having_literal(connections): Definitions.bigquery, Definitions.redshift, Definitions.postgres, + Definitions.trino, Definitions.druid, Definitions.sql_server, Definitions.duck_db, @@ -1675,12 +1774,13 @@ def test_simple_query_with_order_by_dict(connections, query_type): else: group_by = "simple.sales_channel" - semi = ";" if query_type not in {Definitions.druid} else "" + semi = ";" if query_type not in {Definitions.druid, Definitions.trino} else "" if query_type in { Definitions.snowflake, Definitions.redshift, Definitions.duck_db, Definitions.postgres, + Definitions.trino, Definitions.databricks, Definitions.bigquery, }: From a588f3746dc1e25950709fd92b23927743e8f898 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 14 Aug 2024 23:16:41 -0600 Subject: [PATCH 30/53] Release v0.12.34 From a92988eb4b101291ad9cc0b1832d27e3f3b5c23b Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Thu, 15 Aug 2024 12:33:59 -0600 Subject: [PATCH 31/53] fix casting for bq and trino (#229) --- metrics_layer/core/sql/query_filter.py | 2 ++ pyproject.toml | 2 +- tests/test_arbitrary_merged_results.py | 2 +- tests/test_join_query.py | 8 ++++++-- tests/test_simple_query.py | 4 ++-- 5 files changed, 12 insertions(+), 6 deletions(-) diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index e95cc5b..c8f70bc 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -19,6 +19,8 @@ def datatype_cast(field, value): + if field.datatype.upper() == "DATE": + return LiteralValue(f"CAST(CAST('{value}' AS TIMESTAMP) AS DATE)") return LiteralValue(f"CAST('{value}' AS {field.datatype.upper()})") diff --git a/pyproject.toml b/pyproject.toml index 4f98325..ca6d324 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.34" +version 
= "0.12.35" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_arbitrary_merged_results.py b/tests/test_arbitrary_merged_results.py index 167f552..61b0bf4 100644 --- a/tests/test_arbitrary_merged_results.py +++ b/tests/test_arbitrary_merged_results.py @@ -375,7 +375,7 @@ def test_query_merged_queries_dim_group(connection, query_type): product_group = "order_lines_product_name" lines_order_by = "" orders_order_by = "" - time = "CAST('2018-01-02 00:00:00' AS DATE)" + time = "CAST(CAST('2018-01-02 00:00:00' AS TIMESTAMP) AS DATE)" condition = ( "CAST(merged_query_0.orders_order_date AS TIMESTAMP)=CAST(merged_query_1.order_lines_order_date" " AS TIMESTAMP)" diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 9ba10fc..d116c99 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -87,6 +87,10 @@ def test_query_bigquery_week_filter_type_conversion(connection, field): ) cast_as = "DATE" if "order_lines.order_week" == field else "TIMESTAMP" + if cast_as == "DATE": + casted = f"CAST(CAST('2021-08-04 00:00:00' AS TIMESTAMP) AS {cast_as})" + else: + casted = f"CAST('2021-08-04 00:00:00' AS {cast_as})" sql_field = "order_lines.order_date" if "order_lines.order_week" == field else "orders.order_date" join = "" if "orders" in field: @@ -94,8 +98,8 @@ def test_query_bigquery_week_filter_type_conversion(connection, field): correct = ( "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" f" order_lines_total_item_revenue FROM analytics.order_line_items order_lines {join}WHERE" - f" CAST(DATE_TRUNC(CAST({sql_field} AS DATE), WEEK) AS {cast_as})>CAST('2021-08-04 00:00:00' AS" - f" {cast_as}) GROUP BY order_lines_channel;" + f" CAST(DATE_TRUNC(CAST({sql_field} AS DATE), WEEK) AS {cast_as})>{casted} GROUP BY" + " order_lines_channel;" ) assert query == correct diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index b9a628e..8e83783 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -1489,9 +1489,9 @@ def test_simple_query_with_where_dim_group(connections, field, expression, value ): condition = "CAST(DATE_TRUNC(CAST(simple.previous_order_date AS DATE), DAY) AS DATETIME)>CAST('2021-08-04 00:00:00' AS DATETIME)" # noqa elif query_type == Definitions.trino and isinstance(value, datetime) and field == "first_order_date": - condition = "DATE_TRUNC('DAY', CAST(simple.first_order_date AS TIMESTAMP))>CAST('2021-08-04 00:00:00' AS DATE)" # noqa + condition = "DATE_TRUNC('DAY', CAST(simple.first_order_date AS TIMESTAMP))>CAST(CAST('2021-08-04 00:00:00' AS TIMESTAMP) AS DATE)" # noqa elif query_type == Definitions.bigquery and isinstance(value, datetime) and field == "first_order_date": - condition = "CAST(DATE_TRUNC(CAST(simple.first_order_date AS DATE), DAY) AS DATE)>CAST('2021-08-04 00:00:00' AS DATE)" # noqa + condition = "CAST(DATE_TRUNC(CAST(simple.first_order_date AS DATE), DAY) AS DATE)>CAST(CAST('2021-08-04 00:00:00' AS TIMESTAMP) AS DATE)" # noqa elif sf_or_rs and expression == "matches" and value == "last year": last_year = pendulum.now("UTC").year - 1 if query_type == Definitions.trino: From 9616a20b89534aa1bc555cb44ce95e61cd295fd6 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 15 Aug 2024 12:34:13 -0600 Subject: [PATCH 32/53] Release v0.12.35 From a187854f9950eaffd9c4d38e4770d44d6fac3a17 Mon Sep 17 00:00:00 2001 From: Paul Blankley 
<31221512+pblankley@users.noreply.github.com> Date: Fri, 23 Aug 2024 09:44:02 -0600 Subject: [PATCH 33/53] improve error message for missing or invalid default date (#231) * improve error message for missing or invalid default date * fix cli test --- metrics_layer/core/model/view.py | 19 ++++++++++++++----- tests/test_cli.py | 20 +++++++------------- tests/test_project_validation.py | 7 ++++++- 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/metrics_layer/core/model/view.py b/metrics_layer/core/model/view.py index 578bed4..642dfa8 100644 --- a/metrics_layer/core/model/view.py +++ b/metrics_layer/core/model/view.py @@ -340,14 +340,23 @@ def collect_errors(self): ), ) ) - except (AccessDeniedOrDoesNotExistException, QueryError): + # If the default date is not joinable to the view (or in the view itself), + # then we need to add an error + if field.view.name not in self.project.get_joinable_views(self.name) + [self.name]: + errors.append( + self._error( + self._definition["default_date"], + ( + f"Default date {self.default_date} in view {self.name} is not joinable to the" + f" view {self.name}" + ), + ) + ) + except (QueryError, AccessDeniedOrDoesNotExistException): errors.append( self._error( self._definition["default_date"], - ( - f"Default date {self.default_date} in view {self.name} is not joinable to the" - f" view {self.name}" - ), + f"Default date {self.default_date} in view {self.name} does not exist.", ) ) diff --git a/tests/test_cli.py b/tests/test_cli.py index c06af27..b5917b2 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -471,13 +471,12 @@ def test_cli_validate(connection, fresh_project, mocker): # assert result.exit_code == 0 assert ( - result.output - == "Found 7 errors in the project:\n\n" + result.output == "Found 7 errors in the project:\n\n" "\nCould not locate reference revenue_dimension in field total_item_costs in view order_lines\n\n" "\nField total_item_costs in view order_lines contains invalid field reference revenue_dimension.\n\n" "\nCould not locate reference revenue_dimension in field revenue_in_cents in view orders\n\n" "\nCould not locate reference revenue_dimension in field total_revenue in view orders\n\n" - "\nDefault date sessions.session_date in view orders is not joinable to the view orders\n\n" + "\nDefault date sessions.session_date in view orders does not exist.\n\n" "\nField revenue_in_cents in view orders contains invalid field reference revenue_dimension.\n\n" "\nField total_revenue in view orders contains invalid field reference revenue_dimension.\n\n" ) @@ -500,8 +499,7 @@ def test_cli_validate_broken_canon_date(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output - == "Found 1 error in the project:\n\n" + result.output == "Found 1 error in the project:\n\n" "\nCanon date customers.does_not_exist is unreachable in field total_sessions.\n\n" ) @@ -758,8 +756,7 @@ def test_cli_validate_model_name_in_view(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output - == "Found 1 error in the project:\n\n" + result.output == "Found 1 error in the project:\n\n" "\nCould not find a model in the view orders. 
Use the model_name property to specify the model.\n\n" ) @@ -802,8 +799,7 @@ def test_cli_dashboard_model_does_not_exist(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output - == "Found 1 error in the project:\n\n" + result.output == "Found 1 error in the project:\n\n" "\nCould not find or you do not have access to model missing_model in dashboard sales_dashboard\n\n" ) @@ -824,8 +820,7 @@ def test_cli_canon_date_inaccessible(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output - == "Found 1 error in the project:\n\n" + result.output == "Found 1 error in the project:\n\n" "\nCanon date orders.missing_field is unreachable in field total_revenue.\n\n" ) @@ -910,8 +905,7 @@ def test_cli_duplicate_field_names(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output - == "Found 1 error in the project:\n\n" + result.output == "Found 1 error in the project:\n\n" "\nDuplicate field names in view customers: number_of_customers\n\n" ) diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index 131fa10..d511bdd 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -611,10 +611,15 @@ def test_validation_with_replaced_model_properties(connection, name, value, erro 1, ["The default_date property, 1 must be a string in the view order_lines"], ), + ( + "default_date", + "sessions.session", + ["Default date sessions.session in view order_lines is not joinable to the view order_lines"], + ), ( "default_date", "fake", - ["Default date fake in view order_lines is not joinable to the view order_lines"], + ["Default date fake in view order_lines does not exist."], ), ("row_label", None, ["The row_label property, None must be a string in the view order_lines"]), ("row_label", "Hello", []), From 3a674f5dc3b154b9898bed0419b62364d61c662d Mon Sep 17 00:00:00 2001 From: tlokvenec Date: Wed, 28 Aug 2024 11:50:54 +0200 Subject: [PATCH 34/53] added additional cases to the recursive logic to handle 'conditionals' --- metrics_layer/core/sql/resolve.py | 11 ++++++++++- metrics_layer/core/sql/single_query_resolve.py | 5 ++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index b4ee8a3..67fdc1c 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -319,10 +319,19 @@ def _replace_dict_or_literal(self, where, to_replace, field): for w in where: if "field" in w and w["field"] == to_replace: result.append({**w, "field": field.id()}) - elif "field" not in w: + elif "conditions" in w: result.append( {**w, "conditions": self._replace_dict_or_literal(w["conditions"], to_replace, field)} ) + elif "conditionals" in w: + result.append( + { + **w, + "conditionals": self._replace_dict_or_literal( + w["conditionals"], to_replace, field + ), + } + ) else: result.append(w) return result diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index c31400e..67e76ba 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -227,7 +227,10 @@ def flatten_filters(filters: list): def recurse(filter_obj): if isinstance(filter_obj, dict): - if "conditions" in filter_obj: + if "conditionals" in filter_obj: + for f in filter_obj["conditionals"]: + recurse(f) + elif "conditions" in filter_obj: for f in filter_obj["conditions"]: recurse(f) else: From 
e0d189ba725c10c64e329d9f349fa9ee74573b5f Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Mon, 2 Sep 2024 17:35:01 -0600 Subject: [PATCH 35/53] add view affected by access filter method (#232) * add view affected by access filter method * fix tests * remove slow tests from ci (12 min vs 3 min build time) --- .github/workflows/tests.yaml | 2 +- metrics_layer/core/model/view.py | 13 ++++++++++++- pyproject.toml | 2 +- tests/test_project_validation.py | 2 +- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 3441af9..5a44d48 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -32,7 +32,7 @@ jobs: - name: Test with pytest run: | - pytest --runslow --cov=metrics_layer/ --cov-report=xml + pytest --cov=metrics_layer/ --cov-report=xml - name: Report on code coverage uses: codecov/codecov-action@v4 diff --git a/metrics_layer/core/model/view.py b/metrics_layer/core/model/view.py index 642dfa8..7bb6cda 100644 --- a/metrics_layer/core/model/view.py +++ b/metrics_layer/core/model/view.py @@ -179,6 +179,17 @@ def printable_attributes(self): attributes["number_of_fields"] = f'{len(attributes.get("fields", []))}' return {key: attributes.get(key) for key in to_print if attributes.get(key) is not None} + def is_affected_by_access_filters(self): + """This method checks if the view is affected by any access filters + the current user (set in the Project object) has on him/herself. + """ + if self.access_filters: + for condition_set in self.access_filters: + user_attribute_value = condition_set["user_attribute"] + if self.project._user and self.project._user.get(user_attribute_value): + return True + return False + @property def primary_key(self): return next((f for f in self.fields() if f.primary_key), None) @@ -461,7 +472,7 @@ def collect_errors(self): errors.append( self._error( self._definition["access_filters"], - f"Access filter in view {self.name} is missing the required field property", + f"Access filter in view {self.name} is missing the required property: 'field'", ) ) elif "field" in f: diff --git a/pyproject.toml b/pyproject.toml index ca6d324..897faf4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.35" +version = "0.12.36" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_project_validation.py b/tests/test_project_validation.py index d511bdd..ccc38e8 100644 --- a/tests/test_project_validation.py +++ b/tests/test_project_validation.py @@ -693,7 +693,7 @@ def test_validation_with_replaced_model_properties(connection, name, value, erro "access_filters", [{"name": "test"}], [ - "Access filter in view order_lines is missing the required field property", + "Access filter in view order_lines is missing the required property: 'field'", "Access filter in view order_lines is missing the required user_attribute property", ], ), From 551578f3f7f0a19882772b2806e3c80ec49f35c5 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Mon, 2 Sep 2024 17:35:25 -0600 Subject: [PATCH 36/53] Release v0.12.36 From 078192156e6869db8169b9f159fecf39f9c3cf5d Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Mon, 9 Sep 2024 15:11:41 +0200 Subject: [PATCH 37/53] add support for conditionals syntax --- metrics_layer/core/sql/query_filter.py | 28 +++- metrics_layer/core/sql/resolve.py | 11 +- .../core/sql/single_query_resolve.py | 17 ++- tests/test_join_query.py | 143 ++++++++++++++++++ 4 files changed, 187 insertions(+), 12 deletions(-) diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index 8549aa9..676863c 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -45,7 +45,7 @@ def __init__( self.design = design self.is_literal_filter = "literal" in definition # This is a filter with parenthesis like (XYZ or ABC) - self.is_filter_group = "conditions" in definition + self.is_filter_group = "conditions" in definition or "conditionals" in definition if self.design: self.query_type = self.design.query_type @@ -60,6 +60,12 @@ def __init__( super().__init__(definition) + @property + def conditions(self): + if "conditionals" in self._definition: + return self._definition["conditionals"] + return self._definition.get("conditions", []) + @property def is_group_by(self): return self.group_by is not None @@ -75,13 +81,18 @@ def validate(self, definition: Dict) -> None: key = definition.get("field", None) filter_literal = definition.get("literal", None) filter_group_conditions = definition.get("conditions", None) + filter_group_conditionals = definition.get("conditionals", None) + if filter_group_conditions is None: + filter_group_conditions = filter_group_conditionals + if filter_group_conditions: for f in filter_group_conditions: MetricsLayerFilter(f, self.design, self.filter_type) - if "logical_operator" not in definition: - raise ParseError(f"Filter group '{definition}' needs a logical_operator.") - elif definition["logical_operator"] not in MetricsLayerFilterGroupLogicalOperatorType.options: + if ( + "logical_operator" in definition + and definition["logical_operator"] not in MetricsLayerFilterGroupLogicalOperatorType.options + ): raise ParseError( f"Filter group '{definition}' needs a valid logical operator. 
Options are:" f" {MetricsLayerFilterGroupLogicalOperatorType.options}" @@ -152,10 +163,13 @@ def group_sql_query(self, functional_pk: str): condition_object.field.sql_query(self.query_type, functional_pk) ) ) - if self.logical_operator == MetricsLayerFilterGroupLogicalOperatorType.and_: - return Criterion.all(pypika_conditions) - elif self.logical_operator == MetricsLayerFilterGroupLogicalOperatorType.or_: + if self.logical_operator == MetricsLayerFilterGroupLogicalOperatorType.or_: return Criterion.any(pypika_conditions) + if ( + self.logical_operator is None + or self.logical_operator == MetricsLayerFilterGroupLogicalOperatorType.and_ + ): + return Criterion.all(pypika_conditions) raise ParseError(f"Invalid logical operator: {self.logical_operator}") def sql_query(self): diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index b4ee8a3..c0cc542 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -319,10 +319,19 @@ def _replace_dict_or_literal(self, where, to_replace, field): for w in where: if "field" in w and w["field"] == to_replace: result.append({**w, "field": field.id()}) - elif "field" not in w: + elif "field" not in w and "conditions" in w: result.append( {**w, "conditions": self._replace_dict_or_literal(w["conditions"], to_replace, field)} ) + elif "field" not in w and "conditionals" in w: + result.append( + { + **w, + "conditionals": self._replace_dict_or_literal( + w["conditionals"], to_replace, field + ), + } + ) else: result.append(w) return result diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index c31400e..556ada8 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -110,9 +110,12 @@ def parse_where(self, where: list): w["query_class"] = FunnelQuery( funnel_query, design=self.design, suppress_warnings=self.suppress_warnings ) - if "logical_operator" in w: + if "conditionals" in w: field_types = set( - [self.field_lookup[f["field"]].field_type for f in self.flatten_filters(w["conditions"])] + [ + self.field_lookup[f["field"]].field_type + for f in self.flatten_filters(w["conditionals"]) + ] ) if "measure" in field_types and ( "dimension" in field_types or "dimension_group" in field_types @@ -128,9 +131,12 @@ def parse_having(self, having: list): return having validated_having = [] for h in having: - if "logical_operator" in h: + if "conditionals" in h: field_types = set( - [self.field_lookup[f["field"]].field_type for f in self.flatten_filters(h["conditions"])] + [ + self.field_lookup[f["field"]].field_type + for f in self.flatten_filters(h["conditionals"]) + ] ) if "measure" in field_types and ( "dimension" in field_types or "dimension_group" in field_types @@ -230,6 +236,9 @@ def recurse(filter_obj): if "conditions" in filter_obj: for f in filter_obj["conditions"]: recurse(f) + elif "conditionals" in filter_obj: + for f in filter_obj["conditionals"]: + recurse(f) else: flat_list.append(filter_obj) elif isinstance(filter_obj, list): diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 09c3429..2817f82 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -4,6 +4,7 @@ from metrics_layer.core.exceptions import JoinError, QueryError from metrics_layer.core.model import Definitions +from metrics_layer.core.sql.query_errors import ParseError @pytest.mark.query @@ -1105,3 +1106,145 @@ def test_query_with_or_filters_with_mappings_nested(connection): " 
order_lines_total_item_revenue DESC;" ) assert query == correct + + +@pytest.mark.query +def test_query_with_or_filters_conditionals_syntax_broken_logical_operator(connection): + with pytest.raises(ParseError) as exc_info: + connection.get_sql_query( + metrics=["total_item_revenue"], + dimensions=["channel"], + where=[ + { + "conditionals": [ + { + "conditions": [ + {"field": "customers.gender", "expression": "isin", "value": ["M"]} + ], + "logical_operator": "ORR", + } + ] + }, + {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, + ], + having=[ + { + "field": "order_lines.total_item_revenue", + "expression": "less_or_equal_than", + "value": 200.0, + }, + { + "conditionals": [ + { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "field": "order_lines.total_item_revenue", + "expression": "less_than", + "value": 200.0, + }, + ], + "logical_operator": "ANDD", + }, + ], + "logical_operator": "OR", + } + ] + }, + ], + ) + + assert ( + "'logical_operator': 'ORR'}' needs a valid logical operator. Options are: ['AND', 'OR']" + in exc_info.value.args[0] + ) + + +@pytest.mark.query +def test_query_with_or_filters_with_mappings_nestedd(connection): + query = connection.get_sql_query( + metrics=["total_item_revenue"], + dimensions=["channel"], + where=[ + { + "conditionals": [ + { + "conditions": [{"field": "customers.gender", "expression": "isin", "value": ["M"]}], + "logical_operator": "OR", + } + ] + }, + {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, + { + "field": "date", + "expression": "less_or_equal_than", + "value": datetime(2024, 12, 31, 23, 59, 59), + }, + ], + having=[ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_or_equal_than", + "value": 100.0, + }, + {"field": "order_lines.total_item_revenue", "expression": "less_or_equal_than", "value": 200.0}, + { + "conditionals": [ + { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "field": "order_lines.total_item_revenue", + "expression": "less_than", + "value": 200.0, + }, + { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "field": "order_lines.total_item_revenue", + "expression": "less_than", + "value": 200.0, + }, + ], + "logical_operator": "AND", + }, + ], + "logical_operator": "OR", + } + ] + }, + ], + ) + + correct = ( + "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" + " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" + " analytics.customers customers ON order_lines.customer_id=customers.customer_id WHERE" + " customers.gender IN ('M') AND DATE_TRUNC('DAY', order_lines.order_date)>='2024-01-01T00:00:00' AND" + " DATE_TRUNC('DAY', order_lines.order_date)<='2024-12-31T23:59:59' GROUP BY order_lines.sales_channel" + " HAVING SUM(order_lines.revenue)>=100.0 AND SUM(order_lines.revenue)<=200.0 AND" + " (SUM(order_lines.revenue)>100.0 OR SUM(order_lines.revenue)<200.0 OR" + " (SUM(order_lines.revenue)>100.0 AND SUM(order_lines.revenue)<200.0)) ORDER BY" + " order_lines_total_item_revenue DESC;" + ) + assert query == correct From 6bd94697e7a62e1185de033fcddaa4ad6607ca7c Mon Sep 17 00:00:00 2001 
From: Paul Blankley Date: Mon, 9 Sep 2024 15:20:53 +0200 Subject: [PATCH 38/53] catch errors and update tests for new sorting strategies --- .../core/sql/single_query_resolve.py | 23 ++++++++++++------- tests/test_join_query.py | 6 ++--- tests/test_simple_query.py | 12 ++++++---- 3 files changed, 25 insertions(+), 16 deletions(-) diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index 1a33c1a..1bb18c9 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -110,12 +110,16 @@ def parse_where(self, where: list): w["query_class"] = FunnelQuery( funnel_query, design=self.design, suppress_warnings=self.suppress_warnings ) + if "conditionals" in w: + conditions = w["conditionals"] + elif "conditions" in w: + conditions = w["conditions"] + else: + conditions = [] + if conditions: field_types = set( - [ - self.field_lookup[f["field"]].field_type - for f in self.flatten_filters(w["conditionals"]) - ] + [self.field_lookup[f["field"]].field_type for f in self.flatten_filters(conditions)] ) if "measure" in field_types and ( "dimension" in field_types or "dimension_group" in field_types @@ -132,11 +136,14 @@ def parse_having(self, having: list): validated_having = [] for h in having: if "conditionals" in h: + conditions = h["conditionals"] + elif "conditions" in h: + conditions = h["conditions"] + else: + conditions = [] + if conditions: field_types = set( - [ - self.field_lookup[f["field"]].field_type - for f in self.flatten_filters(h["conditionals"]) - ] + [self.field_lookup[f["field"]].field_type for f in self.flatten_filters(conditions)] ) if "measure" in field_types and ( "dimension" in field_types or "dimension_group" in field_types diff --git a/tests/test_join_query.py b/tests/test_join_query.py index b223ac5..795eee8 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1100,7 +1100,7 @@ def test_query_with_or_filters_with_mappings(connection): " analytics.orders orders ON order_lines.order_unique_id=orders.id WHERE (DATE_TRUNC('DAY'," " order_lines.order_date)<'2023-09-02' OR orders.new_vs_repeat='New') AND DATE_TRUNC('DAY'," " order_lines.order_date)>'2023-09-02' GROUP BY order_lines.sales_channel ORDER BY" - " order_lines_total_item_revenue DESC;" + " order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -1136,7 +1136,7 @@ def test_query_with_or_filters_with_mappings_nested(connection): " order_lines.order_date)<'2023-09-02' OR orders.new_vs_repeat='New' OR (DATE_TRUNC('DAY'," " order_lines.order_date)<'2023-09-02' AND orders.new_vs_repeat='New')) AND DATE_TRUNC('DAY'," " order_lines.order_date)>'2023-09-02' GROUP BY order_lines.sales_channel ORDER BY" - " order_lines_total_item_revenue DESC;" + " order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct @@ -1278,6 +1278,6 @@ def test_query_with_or_filters_with_mappings_nestedd(connection): " HAVING SUM(order_lines.revenue)>=100.0 AND SUM(order_lines.revenue)<=200.0 AND" " (SUM(order_lines.revenue)>100.0 OR SUM(order_lines.revenue)<200.0 OR" " (SUM(order_lines.revenue)>100.0 AND SUM(order_lines.revenue)<200.0)) ORDER BY" - " order_lines_total_item_revenue DESC;" + " order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index 3723ff1..94bc151 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -1863,8 +1863,9 @@ def 
test_simple_query_with_or_filters_no_nesting(connections): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" - " analytics.orders simple WHERE (simple.sales_channel<>'Email' OR simple.new_vs_repeat='New')" - " AND simple.discount_amt>1335 GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC;" + " analytics.orders simple WHERE (simple.sales_channel<>'Email' OR simple.new_vs_repeat='New') AND" + " simple.discount_amt>1335 GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC NULLS" + " LAST;" ) assert query == correct @@ -1898,7 +1899,7 @@ def test_simple_query_with_or_filters_single_nesting(connections): "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" " analytics.orders simple WHERE ((simple.sales_channel<>'Email' AND simple.discount_amt<0.01) OR" " simple.new_vs_repeat='New') AND simple.discount_amt>1335 GROUP BY simple.sales_channel ORDER BY" - " simple_total_revenue DESC;" + " simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1964,7 +1965,7 @@ def test_simple_query_with_or_filters_triple_nesting(connections): " simple.discount_amt<0.01 AND (simple.sales_channel='Email' OR simple.discount_amt<-100.05 OR" " (simple.sales_channel='Facebook' AND simple.new_vs_repeat='Repeat'))) OR" " simple.new_vs_repeat='New') AND (simple.sales_channel<>'Email' OR simple.discount_amt<0.01) AND" - " simple.discount_amt>13 GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC;" + " simple.discount_amt>13 GROUP BY simple.sales_channel ORDER BY simple_total_revenue DESC NULLS LAST;" ) assert query == correct @@ -1991,7 +1992,8 @@ def test_simple_query_with_or_filters_having(connections): correct = ( "SELECT simple.sales_channel as simple_channel,SUM(simple.revenue) as simple_total_revenue FROM" " analytics.orders simple GROUP BY simple.sales_channel HAVING (AVG(simple.revenue)>250 OR" - " SUM(simple.revenue)<25000) AND SUM(simple.revenue)>20000 ORDER BY simple_total_revenue DESC;" + " SUM(simple.revenue)<25000) AND SUM(simple.revenue)>20000 ORDER BY simple_total_revenue DESC NULLS" + " LAST;" ) assert query == correct From fbfb1c326a77152950c0a84beb14753d1202fb32 Mon Sep 17 00:00:00 2001 From: tlokvenec Date: Tue, 10 Sep 2024 22:24:07 +0200 Subject: [PATCH 39/53] hacky solution to update the metrics layer handling of the new structure used for the OR filters --- metrics_layer/core/sql/query_filter.py | 10 ++++---- .../core/sql/single_query_resolve.py | 24 ++++++++++++++----- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index 7461809..1259b46 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -47,7 +47,7 @@ def __init__( self.design = design self.is_literal_filter = "literal" in definition # This is a filter with parenthesis like (XYZ or ABC) - self.is_filter_group = "conditions" in definition or "conditionals" in definition + self.is_filter_group = "conditions" in definition or "conditional_filter_logic" in definition if self.design: self.query_type = self.design.query_type @@ -64,8 +64,8 @@ def __init__( @property def conditions(self): - if "conditionals" in self._definition: - return self._definition["conditionals"] + if "conditional_filter_logic" in self._definition: + return self._definition["conditional_filter_logic"] return self._definition.get("conditions", []) @property @@ -83,9 +83,9 @@ def 
validate(self, definition: Dict) -> None: key = definition.get("field", None) filter_literal = definition.get("literal", None) filter_group_conditions = definition.get("conditions", None) - filter_group_conditionals = definition.get("conditionals", None) + filter_group_conditional_filter_logic = definition.get("conditional_filter_logic", None) if filter_group_conditions is None: - filter_group_conditions = filter_group_conditionals + filter_group_conditions = filter_group_conditional_filter_logic if filter_group_conditions: for f in filter_group_conditions: diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index 1bb18c9..c9edfc2 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -111,8 +111,8 @@ def parse_where(self, where: list): funnel_query, design=self.design, suppress_warnings=self.suppress_warnings ) - if "conditionals" in w: - conditions = w["conditionals"] + if "conditional_filter_logic" in w: + conditions = w["conditional_filter_logic"] elif "conditions" in w: conditions = w["conditions"] else: @@ -135,8 +135,8 @@ def parse_having(self, having: list): return having validated_having = [] for h in having: - if "conditionals" in h: - conditions = h["conditionals"] + if "conditional_filter_logic" in h: + conditions = h["conditional_filter_logic"] elif "conditions" in h: conditions = h["conditions"] else: @@ -191,12 +191,24 @@ def get_field_with_error_handling(self, field_name: str, error_prefix: str): def parse_field_names(self, where, having, order_by): self.where = self._check_for_dict(where) + self.where = [ + {**f, "conditional_filter_logic": self._check_for_dict(f["conditional_filter_logic"])} + if "conditional_filter_logic" in f + else f + for f in self.where + ] if self._is_literal(self.where): self._where_field_names = MetricsLayerQuery.parse_identifiers_from_clause(self.where) else: self._where_field_names = self.parse_identifiers_from_dicts(self.where) self.having = self._check_for_dict(having) + self.having = [ + {**f, "conditional_filter_logic": self._check_for_dict(f["conditional_filter_logic"])} + if "conditional_filter_logic" in f + else f + for f in self.having + ] if self._is_literal(self.having): self._having_field_names = MetricsLayerQuery.parse_identifiers_from_clause(self.having) else: @@ -240,8 +252,8 @@ def flatten_filters(filters: list): def recurse(filter_obj): if isinstance(filter_obj, dict): - if "conditionals" in filter_obj: - for f in filter_obj["conditionals"]: + if "conditional_filter_logic" in filter_obj: + for f in filter_obj["conditional_filter_logic"]: recurse(f) elif "conditions" in filter_obj: for f in filter_obj["conditions"]: From 58630a47f06446d519356174b29332a74d56fc71 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 11 Sep 2024 08:56:13 +0200 Subject: [PATCH 40/53] fix or filters by supporting new conditional_filter_logic syntax --- metrics_layer/core/sql/query_filter.py | 7 +- metrics_layer/core/sql/resolve.py | 26 ++-- .../core/sql/single_query_resolve.py | 28 +--- tests/test_join_query.py | 144 ++++++++---------- 4 files changed, 87 insertions(+), 118 deletions(-) diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index 1259b46..f901d52 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -47,7 +47,7 @@ def __init__( self.design = design self.is_literal_filter = "literal" in definition # This is a filter with parenthesis 
like (XYZ or ABC) - self.is_filter_group = "conditions" in definition or "conditional_filter_logic" in definition + self.is_filter_group = "conditions" in definition if self.design: self.query_type = self.design.query_type @@ -64,8 +64,6 @@ def __init__( @property def conditions(self): - if "conditional_filter_logic" in self._definition: - return self._definition["conditional_filter_logic"] return self._definition.get("conditions", []) @property @@ -83,9 +81,6 @@ def validate(self, definition: Dict) -> None: key = definition.get("field", None) filter_literal = definition.get("literal", None) filter_group_conditions = definition.get("conditions", None) - filter_group_conditional_filter_logic = definition.get("conditional_filter_logic", None) - if filter_group_conditions is None: - filter_group_conditions = filter_group_conditional_filter_logic if filter_group_conditions: for f in filter_group_conditions: diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index df8ee1f..4223def 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -42,7 +42,8 @@ def __init__( always_where = self._apply_always_filter(metrics + dimensions) if always_where: self.where.extend(always_where) - self.having = having + self.where = self._clean_conditional_filter_syntax(self.where) + self.having = self._clean_conditional_filter_syntax(having) self.order_by = order_by self.kwargs = kwargs self.connection = self._get_connection(self.model.connection) @@ -400,15 +401,6 @@ def _replace_dict_or_literal(self, where, to_replace, field): result.append( {**w, "conditions": self._replace_dict_or_literal(w["conditions"], to_replace, field)} ) - elif "field" not in w and "conditionals" in w: - result.append( - { - **w, - "conditionals": self._replace_dict_or_literal( - w["conditionals"], to_replace, field - ), - } - ) else: result.append(w) return result @@ -504,6 +496,20 @@ def _deduplicate_always_where_filters(filters: list): cleaned_filters.append(f) return cleaned_filters + def _clean_conditional_filter_syntax(self, filters: Union[str, None, List]): + if not filters or isinstance(filters, str): + return filters + + def process_filter(filter_obj): + if isinstance(filter_obj, dict): + if "conditional_filter_logic" in filter_obj: + return filter_obj["conditional_filter_logic"] + elif "conditions" in filter_obj: + filter_obj["conditions"] = [process_filter(cond) for cond in filter_obj["conditions"]] + return filter_obj + + return [process_filter(filter_obj) for filter_obj in filters] + def _get_connection_schema(self, connection): if connection is not None: return getattr(connection, "schema", None) diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index c9edfc2..8b17f04 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -111,9 +111,7 @@ def parse_where(self, where: list): funnel_query, design=self.design, suppress_warnings=self.suppress_warnings ) - if "conditional_filter_logic" in w: - conditions = w["conditional_filter_logic"] - elif "conditions" in w: + if "conditions" in w: conditions = w["conditions"] else: conditions = [] @@ -135,9 +133,7 @@ def parse_having(self, having: list): return having validated_having = [] for h in having: - if "conditional_filter_logic" in h: - conditions = h["conditional_filter_logic"] - elif "conditions" in h: + if "conditions" in h: conditions = h["conditions"] else: conditions = [] @@ -191,24 +187,12 @@ 
def get_field_with_error_handling(self, field_name: str, error_prefix: str): def parse_field_names(self, where, having, order_by): self.where = self._check_for_dict(where) - self.where = [ - {**f, "conditional_filter_logic": self._check_for_dict(f["conditional_filter_logic"])} - if "conditional_filter_logic" in f - else f - for f in self.where - ] if self._is_literal(self.where): self._where_field_names = MetricsLayerQuery.parse_identifiers_from_clause(self.where) else: self._where_field_names = self.parse_identifiers_from_dicts(self.where) self.having = self._check_for_dict(having) - self.having = [ - {**f, "conditional_filter_logic": self._check_for_dict(f["conditional_filter_logic"])} - if "conditional_filter_logic" in f - else f - for f in self.having - ] if self._is_literal(self.having): self._having_field_names = MetricsLayerQuery.parse_identifiers_from_clause(self.having) else: @@ -252,15 +236,9 @@ def flatten_filters(filters: list): def recurse(filter_obj): if isinstance(filter_obj, dict): - if "conditional_filter_logic" in filter_obj: - for f in filter_obj["conditional_filter_logic"]: - recurse(f) - elif "conditions" in filter_obj: + if "conditions" in filter_obj: for f in filter_obj["conditions"]: recurse(f) - elif "conditionals" in filter_obj: - for f in filter_obj["conditionals"]: - recurse(f) else: flat_list.append(filter_obj) elif isinstance(filter_obj, list): diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 795eee8..e458907 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1141,22 +1141,18 @@ def test_query_with_or_filters_with_mappings_nested(connection): assert query == correct -@pytest.mark.query -def test_query_with_or_filters_conditionals_syntax_broken_logical_operator(connection): +@pytest.mark.queryy +def test_query_with_or_filters_alternate_syntax_broken_logical_operator(connection): with pytest.raises(ParseError) as exc_info: connection.get_sql_query( metrics=["total_item_revenue"], dimensions=["channel"], where=[ { - "conditionals": [ - { - "conditions": [ - {"field": "customers.gender", "expression": "isin", "value": ["M"]} - ], - "logical_operator": "ORR", - } - ] + "conditional_filter_logic": { + "conditions": [{"field": "customers.gender", "expression": "isin", "value": ["M"]}], + "logical_operator": "ORR", + } }, {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, ], @@ -1167,33 +1163,31 @@ def test_query_with_or_filters_conditionals_syntax_broken_logical_operator(conne "value": 200.0, }, { - "conditionals": [ - { - "conditions": [ - { - "field": "order_lines.total_item_revenue", - "expression": "greater_than", - "value": 100.0, - }, - { - "conditions": [ - { - "field": "order_lines.total_item_revenue", - "expression": "greater_than", - "value": 100.0, - }, - { - "field": "order_lines.total_item_revenue", - "expression": "less_than", - "value": 200.0, - }, - ], - "logical_operator": "ANDD", - }, - ], - "logical_operator": "OR", - } - ] + "conditional_filter_logic": { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "field": "order_lines.total_item_revenue", + "expression": "less_than", + "value": 200.0, + }, + ], + "logical_operator": "ANDD", + }, + ], + "logical_operator": "OR", + } }, ], ) @@ -1204,19 +1198,17 @@ def 
test_query_with_or_filters_conditionals_syntax_broken_logical_operator(conne ) -@pytest.mark.query -def test_query_with_or_filters_with_mappings_nestedd(connection): +@pytest.mark.queryy +def test_query_with_or_filters_alternate_syntax(connection): query = connection.get_sql_query( metrics=["total_item_revenue"], dimensions=["channel"], where=[ { - "conditionals": [ - { - "conditions": [{"field": "customers.gender", "expression": "isin", "value": ["M"]}], - "logical_operator": "OR", - } - ] + "conditional_filter_logic": { + "conditions": [{"field": "customers.gender", "expression": "isin", "value": ["M"]}], + "logical_operator": "OR", + } }, {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, { @@ -1233,38 +1225,36 @@ def test_query_with_or_filters_with_mappings_nestedd(connection): }, {"field": "order_lines.total_item_revenue", "expression": "less_or_equal_than", "value": 200.0}, { - "conditionals": [ - { - "conditions": [ - { - "field": "order_lines.total_item_revenue", - "expression": "greater_than", - "value": 100.0, - }, - { - "field": "order_lines.total_item_revenue", - "expression": "less_than", - "value": 200.0, - }, - { - "conditions": [ - { - "field": "order_lines.total_item_revenue", - "expression": "greater_than", - "value": 100.0, - }, - { - "field": "order_lines.total_item_revenue", - "expression": "less_than", - "value": 200.0, - }, - ], - "logical_operator": "AND", - }, - ], - "logical_operator": "OR", - } - ] + "conditional_filter_logic": { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "field": "order_lines.total_item_revenue", + "expression": "less_than", + "value": 200.0, + }, + { + "conditions": [ + { + "field": "order_lines.total_item_revenue", + "expression": "greater_than", + "value": 100.0, + }, + { + "field": "order_lines.total_item_revenue", + "expression": "less_than", + "value": 200.0, + }, + ], + "logical_operator": "AND", + }, + ], + "logical_operator": "OR", + } }, ], ) From bf46d00224520c7f7d5ff2d69ed25ecc8aee06eb Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 11 Sep 2024 09:03:01 +0200 Subject: [PATCH 41/53] fix tests --- metrics_layer/core/sql/resolve.py | 2 ++ tests/test_join_query.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index 4223def..7b4780c 100644 --- a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -499,6 +499,8 @@ def _deduplicate_always_where_filters(filters: list): def _clean_conditional_filter_syntax(self, filters: Union[str, None, List]): if not filters or isinstance(filters, str): return filters + if isinstance(filters, dict): + return [filters] def process_filter(filter_obj): if isinstance(filter_obj, dict): diff --git a/tests/test_join_query.py b/tests/test_join_query.py index e458907..97fcb13 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1141,7 +1141,7 @@ def test_query_with_or_filters_with_mappings_nested(connection): assert query == correct -@pytest.mark.queryy +@pytest.mark.query def test_query_with_or_filters_alternate_syntax_broken_logical_operator(connection): with pytest.raises(ParseError) as exc_info: connection.get_sql_query( @@ -1198,7 +1198,7 @@ def test_query_with_or_filters_alternate_syntax_broken_logical_operator(connecti ) -@pytest.mark.queryy +@pytest.mark.query def test_query_with_or_filters_alternate_syntax(connection): query = 
connection.get_sql_query( metrics=["total_item_revenue"], From 8587d101f45e94ddf7fbe6f1d2a931791a76fe49 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Wed, 11 Sep 2024 21:01:33 +0200 Subject: [PATCH 42/53] fix casting issue in bq for hour of day (#233) --- metrics_layer/core/model/field.py | 2 +- pyproject.toml | 2 +- tests/test_simple_query.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index a09fc90..6e6fd39 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -1317,7 +1317,7 @@ def apply_dimension_group_time_sql(self, sql: str, query_type: str): "fiscal_quarter_of_year": lambda s, qt: ( f"EXTRACT(QUARTER FROM {self._fiscal_offset_to_timestamp(s, qt)})" ), - "hour_of_day": lambda s, qt: f"CAST({s} AS STRING FORMAT 'HH24')", + "hour_of_day": lambda s, qt: f"CAST(CAST({s} AS STRING FORMAT 'HH24') AS INT64)", "day_of_week": lambda s, qt: f"CAST({s} AS STRING FORMAT 'DAY')", "day_of_month": lambda s, qt: f"EXTRACT(DAY FROM {s})", "day_of_year": lambda s, qt: f"EXTRACT(DAYOFYEAR FROM {s})", diff --git a/pyproject.toml b/pyproject.toml index 897faf4..ccaae5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.36" +version = "0.12.37" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_simple_query.py b/tests/test_simple_query.py index 8e83783..9636030 100644 --- a/tests/test_simple_query.py +++ b/tests/test_simple_query.py @@ -1072,7 +1072,7 @@ def test_simple_query_dimension_group(connections, group: str, query_type: str): "month_of_year_index": f"EXTRACT(MONTH FROM simple.order_date)", "month_of_year": "FORMAT_DATETIME('%B', CAST(simple.order_date as DATETIME))", "quarter_of_year": "EXTRACT(QUARTER FROM simple.order_date)", - "hour_of_day": f"CAST(simple.order_date AS STRING FORMAT 'HH24')", + "hour_of_day": f"CAST(CAST(simple.order_date AS STRING FORMAT 'HH24') AS INT64)", "day_of_week": f"CAST(simple.order_date AS STRING FORMAT 'DAY')", "day_of_month": "EXTRACT(DAY FROM simple.order_date)", "day_of_year": "EXTRACT(DAYOFYEAR FROM simple.order_date)", From 3d3e7ae27047d537210a8ba5c296f770f9458608 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 11 Sep 2024 21:03:00 +0200 Subject: [PATCH 43/53] Release v0.12.37 From 45d7373c96852b6a38eaa4094cad25847a800bea Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Tue, 17 Sep 2024 09:30:25 +0200 Subject: [PATCH 44/53] fix error with dim group pk where symm agg incorrectly triggers (#234) --- metrics_layer/core/model/view.py | 2 +- .../views/monthly_aggregates.yml | 26 ++++++++++++++ tests/conftest.py | 4 +++ tests/test_cli.py | 33 ++++++++++------- tests/test_listing_functions.py | 6 ++-- tests/test_merged_results.py | 27 ++++++++------ tests/test_symmetric_aggregates.py | 35 +++++++++++++++++++ 7 files changed, 105 insertions(+), 28 deletions(-) create mode 100644 tests/config/metrics_layer_config/views/monthly_aggregates.yml diff --git a/metrics_layer/core/model/view.py b/metrics_layer/core/model/view.py index 7bb6cda..0c778a6 100644 --- a/metrics_layer/core/model/view.py +++ b/metrics_layer/core/model/view.py @@ -192,7 +192,7 @@ def is_affected_by_access_filters(self): @property def primary_key(self): - return next((f for f in 
self.fields() if f.primary_key), None) + return next((f for f in self.fields(expand_dimension_groups=True) if f.primary_key), None) def _error(self, element, error, extra: dict = {}): line, column = self.line_col(element) diff --git a/tests/config/metrics_layer_config/views/monthly_aggregates.yml b/tests/config/metrics_layer_config/views/monthly_aggregates.yml new file mode 100644 index 0000000..2130fda --- /dev/null +++ b/tests/config/metrics_layer_config/views/monthly_aggregates.yml @@ -0,0 +1,26 @@ +version: 1 +type: view +name: monthly_aggregates + +sql_table_name: analytics.monthly_rollup +default_date: record +model_name: test_model + +fields: + - name: record + field_type: 'dimension_group' + type: time + timeframes: [raw, time, date, week, month, quarter, year] + sql: '${TABLE}.record_date' + primary_key: yes + + - name: division + field_type: 'dimension' + type: string + sql: '${TABLE}.division' + searchable: true + + - name: count_new_employees + field_type: measure + type: count + sql: ${TABLE}.n_new_employees diff --git a/tests/conftest.py b/tests/conftest.py index b38a5f2..bb78929 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,6 +17,9 @@ sales_dashboard_v2_path = os.path.join( BASE_PATH, "config/metrics_layer_config/dashboards/sales_dashboard_v2.yml" ) +monthly_aggregates_view_path = os.path.join( + BASE_PATH, "config/metrics_layer_config/views/monthly_aggregates.yml" +) order_lines_view_path = os.path.join(BASE_PATH, "config/metrics_layer_config/views/test_order_lines.yml") orders_view_path = os.path.join(BASE_PATH, "config/metrics_layer_config/views/test_orders.yml") customers_view_path = os.path.join(BASE_PATH, "config/metrics_layer_config/views/test_customers.yml") @@ -69,6 +72,7 @@ other_db_view_path, created_workspaces_view_path, mrr_view_path, + monthly_aggregates_view_path, ] dashboard_paths = [sales_dashboard_path, sales_dashboard_v2_path] diff --git a/tests/test_cli.py b/tests/test_cli.py index b5917b2..c14a031 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -471,7 +471,8 @@ def test_cli_validate(connection, fresh_project, mocker): # assert result.exit_code == 0 assert ( - result.output == "Found 7 errors in the project:\n\n" + result.output + == "Found 7 errors in the project:\n\n" "\nCould not locate reference revenue_dimension in field total_item_costs in view order_lines\n\n" "\nField total_item_costs in view order_lines contains invalid field reference revenue_dimension.\n\n" "\nCould not locate reference revenue_dimension in field revenue_in_cents in view orders\n\n" @@ -499,7 +500,8 @@ def test_cli_validate_broken_canon_date(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output == "Found 1 error in the project:\n\n" + result.output + == "Found 1 error in the project:\n\n" "\nCanon date customers.does_not_exist is unreachable in field total_sessions.\n\n" ) @@ -756,7 +758,8 @@ def test_cli_validate_model_name_in_view(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output == "Found 1 error in the project:\n\n" + result.output + == "Found 1 error in the project:\n\n" "\nCould not find a model in the view orders. 
Use the model_name property to specify the model.\n\n" ) @@ -799,7 +802,8 @@ def test_cli_dashboard_model_does_not_exist(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output == "Found 1 error in the project:\n\n" + result.output + == "Found 1 error in the project:\n\n" "\nCould not find or you do not have access to model missing_model in dashboard sales_dashboard\n\n" ) @@ -820,7 +824,8 @@ def test_cli_canon_date_inaccessible(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output == "Found 1 error in the project:\n\n" + result.output + == "Found 1 error in the project:\n\n" "\nCanon date orders.missing_field is unreachable in field total_revenue.\n\n" ) @@ -905,7 +910,8 @@ def test_cli_duplicate_field_names(connection, fresh_project, mocker): assert result.exit_code == 0 assert ( - result.output == "Found 1 error in the project:\n\n" + result.output + == "Found 1 error in the project:\n\n" "\nDuplicate field names in view customers: number_of_customers\n\n" ) @@ -972,7 +978,7 @@ def test_cli_validate_required_access_filters(connection, fresh_project, mocker) assert result.exit_code == 0 assert ( result.output - == "Found 19 errors in the project:\n\n\nView order_lines does not have any access filters, but an" + == "Found 20 errors in the project:\n\n\nView order_lines does not have any access filters, but an" " access filter with user attribute products is required.\n\n\nView orders does not have an access" " filter with the required user attribute products\n\n\nView customers does not have any access" " filters, but an access filter with user attribute products is required.\n\n\nView discounts does" @@ -993,10 +999,11 @@ def test_cli_validate_required_access_filters(connection, fresh_project, mocker) " other_db_traffic does not have any access filters, but an access filter with user attribute" " products is required.\n\n\nView created_workspace does not have any access filters, but an" " access filter with user attribute products is required.\n\n\nView mrr does not have any access" - " filters, but an access filter with user attribute products is required.\n\n\nView child_account" - " does not have any access filters, but an access filter with user attribute products is" - " required.\n\n\nView parent_account does not have any access filters, but an access filter with" - " user attribute products is required.\n\n" + " filters, but an access filter with user attribute products is required.\n\n\nView" + " monthly_aggregates does not have any access filters, but an access filter with user attribute" + " products is required.\n\n\nView child_account does not have any access filters, but an access" + " filter with user attribute products is required.\n\n\nView parent_account does not have any" + " access filters, but an access filter with user attribute products is required.\n\n" ) @@ -1074,8 +1081,8 @@ def test_cli_list(connection, mocker, object_type: str, extra_args: list): "models": "Found 2 models:\n\ntest_model\nnew_model\n", "connections": "Found 3 connections:\n\ntesting_snowflake\ntesting_bigquery\ntesting_databricks\n", "views": ( # noqa - "Found 20" - " views:\n\norder_lines\norders\ncustomers\ndiscounts\ndiscount_detail\ncountry_detail\nsessions\nevents\nlogin_events\ntraffic\nclicked_on_page\nsubmitted_form\naccounts\naa_acquired_accounts\nz_customer_accounts\nother_db_traffic\ncreated_workspace\nmrr\nchild_account\nparent_account\n" # noqa + "Found 21" + " 
views:\n\norder_lines\norders\ncustomers\ndiscounts\ndiscount_detail\ncountry_detail\nsessions\nevents\nlogin_events\ntraffic\nclicked_on_page\nsubmitted_form\naccounts\naa_acquired_accounts\nz_customer_accounts\nother_db_traffic\ncreated_workspace\nmrr\nmonthly_aggregates\nchild_account\nparent_account\n" # noqa ), "fields": "Found 2 fields:\n\ndiscount_promo_name\ndiscount_usd\n", "dimensions": "Found 3 dimensions:\n\ncountry\norder\ndiscount_code\n", diff --git a/tests/test_listing_functions.py b/tests/test_listing_functions.py index 55e6f80..d70fd2f 100644 --- a/tests/test_listing_functions.py +++ b/tests/test_listing_functions.py @@ -4,7 +4,7 @@ @pytest.mark.project def test_list_metrics(connection): metrics = connection.list_metrics() - assert len(metrics) == 60 + assert len(metrics) == 61 metrics = connection.list_metrics(view_name="order_lines", names_only=True) assert len(metrics) == 11 @@ -26,10 +26,10 @@ def test_list_metrics(connection): @pytest.mark.project def test_list_dimensions(connection): dimensions = connection.list_dimensions(show_hidden=True) - assert len(dimensions) == 98 + assert len(dimensions) == 100 dimensions = connection.list_dimensions() - assert len(dimensions) == 64 + assert len(dimensions) == 66 dimensions = connection.list_dimensions(view_name="order_lines", names_only=True, show_hidden=True) dimensions_present = { diff --git a/tests/test_merged_results.py b/tests/test_merged_results.py index fe2be97..bb98eec 100644 --- a/tests/test_merged_results.py +++ b/tests/test_merged_results.py @@ -160,8 +160,9 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): sub_q_0_10 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_10", core_tf) sub_q_0_11 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_11", core_tf) sub_q_0_12 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_12", core_tf) - sub_q_0_14 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_14", core_tf) + sub_q_0_14 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_13", core_tf) sub_q_0_15 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_15", core_tf) + sub_q_0_16 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_16", core_tf) sub_q_0_1 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_1", core_tf) revenue_set = [ *sub_q_cr, @@ -176,6 +177,7 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): *sub_q_0_12, *sub_q_0_14, *sub_q_0_15, + *sub_q_0_16, *sub_q_0_1, ] field = connection.get_field("revenue_per_session") @@ -191,8 +193,9 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): "merged_result_subquery_0_subquery_10_date", "merged_result_subquery_0_subquery_11_date", "merged_result_subquery_0_subquery_12_date", - "merged_result_subquery_0_subquery_14_date", + "merged_result_subquery_0_subquery_13_date", "merged_result_subquery_0_subquery_15_date", + "merged_result_subquery_0_subquery_16_date", "merged_result_subquery_0_subquery_1_date", "merged_result_subquery_0_subquery_4_date", "merged_result_subquery_0_subquery_7_date", @@ -210,8 +213,9 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): "merged_result_subquery_0_subquery_10_date", "merged_result_subquery_0_subquery_11_date", "merged_result_subquery_0_subquery_12_date", - "merged_result_subquery_0_subquery_14_date", + "merged_result_subquery_0_subquery_13_date", "merged_result_subquery_0_subquery_15_date", + "merged_result_subquery_0_subquery_16_date", "merged_result_subquery_0_subquery_1_date", "merged_result_subquery_0_subquery_4_date", 
"merged_result_subquery_0_subquery_7_date", @@ -241,7 +245,7 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): "subquery_4", "subquery_11", "subquery_12", - "subquery_10", + "subquery_13", *_blow_out_by_time_frame("merged_result_subquery_0_subquery_1", tf), *_blow_out_by_time_frame("merged_result_subquery_1_subquery_3", core_tf), *_blow_out_by_time_frame("merged_result_subquery_1_subquery_4", core_tf), @@ -256,12 +260,12 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): *_blow_out_by_time_frame("merged_result_subquery_3_subquery_4", core_tf), *_blow_out_by_time_frame("merged_result_subquery_0_subquery_4", tf), *_blow_out_by_time_frame("merged_result_subquery_0_subquery_11", tf), - *_blow_out_by_time_frame("merged_result_subquery_10_subquery_3", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_10_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_10_subquery_11", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_10_subquery_12", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_10", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_10", tf), + *_blow_out_by_time_frame("merged_result_subquery_13_subquery_3", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_13_subquery_4", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_11_subquery_13", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_12_subquery_13", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_1_subquery_13", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_0_subquery_13", tf), *_blow_out_by_time_frame("merged_result_subquery_11_subquery_4", core_tf), ] assert field.join_graphs() == list(sorted(gender_graphs)) @@ -278,8 +282,9 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): *_blow_out_by_time_frame("merged_result_subquery_3_subquery_4", core_tf), *_blow_out_by_time_frame("merged_result_subquery_4_subquery_7", core_tf), *_blow_out_by_time_frame("merged_result_subquery_4_subquery_5", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_14_subquery_4", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_13_subquery_4", core_tf), *_blow_out_by_time_frame("merged_result_subquery_15_subquery_4", core_tf), + *_blow_out_by_time_frame("merged_result_subquery_16_subquery_4", core_tf), *_blow_out_by_time_frame("merged_result_subquery_4_subquery_8", core_tf), *_blow_out_by_time_frame("merged_result_subquery_4_subquery_9", core_tf), ] diff --git a/tests/test_symmetric_aggregates.py b/tests/test_symmetric_aggregates.py index 19b1a16..40fac1e 100644 --- a/tests/test_symmetric_aggregates.py +++ b/tests/test_symmetric_aggregates.py @@ -1,3 +1,5 @@ +import datetime + import pytest from metrics_layer.core.model.definitions import Definitions @@ -185,3 +187,36 @@ def test_query_number_with_sql(connection, query_type): f"{order_by};" ) assert query == correct + + +@pytest.mark.query +def test_query_with_corrected_no_symm_agg_triggered(connection): + query = connection.get_sql_query( + metrics=["monthly_aggregates.count_new_employees"], + dimensions=["monthly_aggregates.division"], + where=[ + { + "field": "date", + "expression": "greater_or_equal_than", + "value": datetime.datetime(2024, 1, 5, 0, 0), + }, + { + "field": "date", + "expression": "less_or_equal_than", + "value": datetime.datetime(2024, 10, 5, 0, 0), + }, + ], + order_by=[{"field": "monthly_aggregates.division", "sort": "asc"}], + limit=25, + ) + + # This, correctly, does not apply a symmetric 
aggregate + correct = ( + "SELECT monthly_aggregates.division as" + " monthly_aggregates_division,COUNT(monthly_aggregates.n_new_employees) as" + " monthly_aggregates_count_new_employees FROM analytics.monthly_rollup monthly_aggregates WHERE" + " DATE_TRUNC('DAY', monthly_aggregates.record_date)>='2024-01-05T00:00:00' AND DATE_TRUNC('DAY'," + " monthly_aggregates.record_date)<='2024-10-05T00:00:00' GROUP BY monthly_aggregates.division ORDER" + " BY monthly_aggregates_division ASC NULLS LAST LIMIT 25;" + ) + assert query == correct From 21cc1de574a6c38ffa906b6259a5b19401fdc81a Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 19 Sep 2024 08:13:31 +0100 Subject: [PATCH 45/53] fix issue with merged results filter application + include test for pure organics (remove before merge) --- metrics_layer/core/model/base.py | 9 +++ .../core/sql/merged_query_resolve.py | 2 +- .../sql/query_arbitrary_merged_queries.py | 2 +- metrics_layer/core/sql/query_base.py | 25 +++----- metrics_layer/core/sql/query_filter.py | 54 ++++++++++++++-- tests/test_arbitrary_merged_results.py | 6 +- tests/test_join_query.py | 64 +++++++++++++++++++ 7 files changed, 137 insertions(+), 25 deletions(-) diff --git a/metrics_layer/core/model/base.py b/metrics_layer/core/model/base.py index 4197272..9da1ec1 100644 --- a/metrics_layer/core/model/base.py +++ b/metrics_layer/core/model/base.py @@ -2,6 +2,8 @@ import re from typing import List +from metrics_layer.core.exceptions import QueryError + NAME_REGEX = re.compile(r"([A-Za-z0-9\_]+)") @@ -32,6 +34,13 @@ def name_error(entity_name: str, name: str): "the naming conventions (only letters, numbers, or underscores)" ) + @staticmethod + def _raise_query_error_from_cte(field_name: str): + raise QueryError( + f"Field {field_name} is not present in either source query, so it" + " cannot be applied as a filter. Please add it to one of the source queries." 
+ ) + @staticmethod def line_col(element): line = getattr(getattr(element, "lc", None), "line", None) diff --git a/metrics_layer/core/sql/merged_query_resolve.py b/metrics_layer/core/sql/merged_query_resolve.py index c724dbe..e1abd6d 100644 --- a/metrics_layer/core/sql/merged_query_resolve.py +++ b/metrics_layer/core/sql/merged_query_resolve.py @@ -125,7 +125,7 @@ def derive_sub_queries(self): secondary_metric_ids = [m.id() for m in self.secondary_metrics] merged_metric_ids = [m.id() for m in self.merged_metrics] - for h in self.having: + for h in self.flatten_filters(self.having): field = self.project.get_field(h["field"]) if field.id() not in secondary_metric_ids and not field.is_merged_result: self.secondary_metrics.append(field) diff --git a/metrics_layer/core/sql/query_arbitrary_merged_queries.py b/metrics_layer/core/sql/query_arbitrary_merged_queries.py index 2c90ad8..982cbbf 100644 --- a/metrics_layer/core/sql/query_arbitrary_merged_queries.py +++ b/metrics_layer/core/sql/query_arbitrary_merged_queries.py @@ -48,7 +48,7 @@ def get_query(self, semicolon: bool = True): if order_by_alias in self.cte_alias_lookup: order_by_alias = f"{self.cte_alias_lookup[order_by_alias]}.{order_by_alias}" else: - self._raise_query_error_from_cte(field.id(capitalize_alias=True)) + self._raise_query_error_from_cte(field.id()) order = Order.desc if order_clause.get("sort", "asc").lower() == "desc" else Order.asc complete_query = complete_query.orderby( diff --git a/metrics_layer/core/sql/query_base.py b/metrics_layer/core/sql/query_base.py index 5b24c37..5c9a9fd 100644 --- a/metrics_layer/core/sql/query_base.py +++ b/metrics_layer/core/sql/query_base.py @@ -22,23 +22,18 @@ def get_where_with_aliases( where = [] for filter_clause in filters: filter_clause["query_type"] = self.query_type - f = MetricsLayerFilter(definition=filter_clause, design=None, filter_type="where") - field = project.get_field(filter_clause["field"]) - field_alias = field.alias(with_view=True) - if field_alias in cte_alias_lookup: - field_alias = f"{cte_alias_lookup[field_alias]}.{field_alias}" - elif raise_if_not_in_lookup: - self._raise_query_error_from_cte(field.id(capitalize_alias=True)) - where.append(f.criterion(field_alias)) + f = MetricsLayerFilter( + definition=filter_clause, design=None, filter_type="where", project=project + ) + where.append( + f.sql_query( + alias_query=True, + cte_alias_lookup=cte_alias_lookup, + raise_if_not_in_lookup=raise_if_not_in_lookup, + ) + ) return where - @staticmethod - def _raise_query_error_from_cte(field_name: str): - raise QueryError( - f"Field {field_name} is not present in either source query, so it" - " cannot be applied as a filter. Please add it to one of the source queries." 
- ) - @staticmethod def parse_identifiers_from_clause(clause: str): if clause is None: diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index f901d52..c858be1 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -39,12 +39,13 @@ class MetricsLayerFilter(MetricsLayerBase): """ def __init__( - self, definition: Dict = {}, design: MetricsLayerDesign = None, filter_type: str = None + self, definition: Dict = {}, design: MetricsLayerDesign = None, filter_type: str = None, project=None ) -> None: # The design is used for filters in queries against specific designs # to validate that all the tables and attributes (columns/aggregates) # are properly defined in the design self.design = design + self.project = project self.is_literal_filter = "literal" in definition # This is a filter with parenthesis like (XYZ or ABC) self.is_filter_group = "conditions" in definition @@ -84,6 +85,7 @@ def validate(self, definition: Dict) -> None: if filter_group_conditions: for f in filter_group_conditions: + f["query_type"] = self.query_type MetricsLayerFilter(f, self.design, self.filter_type) if ( @@ -148,12 +150,32 @@ def validate(self, definition: Dict) -> None: if self.field.type == "yesno" and "True" in str(definition["value"]): definition["expression"] = "boolean_true" - def group_sql_query(self, functional_pk: str): + def group_sql_query( + self, + functional_pk: str, + alias_query: bool = False, + cte_alias_lookup: dict = {}, + raise_if_not_in_lookup: bool = False, + ): pypika_conditions = [] for condition in self.conditions: - condition_object = MetricsLayerFilter(condition, self.design, self.filter_type) + condition_object = MetricsLayerFilter(condition, self.design, self.filter_type, self.project) if condition_object.is_filter_group: - pypika_conditions.append(condition_object.group_sql_query(functional_pk)) + pypika_conditions.append( + condition_object.group_sql_query( + functional_pk, + alias_query, + cte_alias_lookup=cte_alias_lookup, + raise_if_not_in_lookup=raise_if_not_in_lookup, + ) + ) + elif alias_query: + if self.project is None: + raise ValueError("Project is not set, but it is required for an alias_query") + field_alias = self._handle_cte_alias_replacement( + condition_object.field, cte_alias_lookup, raise_if_not_in_lookup + ) + pypika_conditions.append(condition_object.criterion(field_alias)) else: pypika_conditions.append( condition_object.criterion( @@ -169,14 +191,36 @@ def group_sql_query(self, functional_pk: str): return Criterion.all(pypika_conditions) raise ParseError(f"Invalid logical operator: {self.logical_operator}") - def sql_query(self): + def sql_query( + self, alias_query: bool = False, cte_alias_lookup: dict = {}, raise_if_not_in_lookup: bool = False + ): if self.is_literal_filter: return LiteralValueCriterion(self.replace_fields_literal_filter()) + + if alias_query and self.is_filter_group: + return self.group_sql_query("NA", alias_query, cte_alias_lookup, raise_if_not_in_lookup) + elif alias_query: + field_alias = self._handle_cte_alias_replacement( + self.field, cte_alias_lookup, raise_if_not_in_lookup + ) + return self.criterion(field_alias) + functional_pk = self.design.functional_pk() if self.is_filter_group: return self.group_sql_query(functional_pk) return self.criterion(self.field.sql_query(self.query_type, functional_pk)) + def _handle_cte_alias_replacement( + self, field_id: str, cte_alias_lookup: dict, raise_if_not_in_lookup: bool + ): + field = 
self.project.get_field(field_id) + field_alias = field.alias(with_view=True) + if field_alias in cte_alias_lookup: + field_alias = f"{cte_alias_lookup[field_alias]}.{field_alias}" + elif raise_if_not_in_lookup: + self._raise_query_error_from_cte(field.id()) + return field_alias + def isin_sql_query(self, cte_alias, field_name, query_generator): group_by_field = self.design.get_field(field_name) base = query_generator._base_query() diff --git a/tests/test_arbitrary_merged_results.py b/tests/test_arbitrary_merged_results.py index 61b0bf4..32ecaf2 100644 --- a/tests/test_arbitrary_merged_results.py +++ b/tests/test_arbitrary_merged_results.py @@ -648,7 +648,7 @@ def test_query_merged_queries_invalid_where_post_merge(connection): assert isinstance(exc_info.value, QueryError) assert ( str(exc_info.value) - == "Field orders.NEW_VS_REPEAT is not present in either source query, so it cannot be applied as a" + == "Field orders.new_vs_repeat is not present in either source query, so it cannot be applied as a" " filter. Please add it to one of the source queries." ) @@ -670,7 +670,7 @@ def test_query_merged_queries_invalid_having_post_merge(connection): assert isinstance(exc_info.value, QueryError) assert ( str(exc_info.value) - == "Field order_lines.TOTAL_ITEM_COSTS is not present in either source query, so it cannot be applied" + == "Field order_lines.total_item_costs is not present in either source query, so it cannot be applied" " as a filter. Please add it to one of the source queries." ) @@ -691,7 +691,7 @@ def test_query_merged_queries_invalid_order_by_post_merge(connection): assert isinstance(exc_info.value, QueryError) assert ( str(exc_info.value) - == "Field order_lines.TOTAL_ITEM_COSTS is not present in either source query, so it cannot be applied" + == "Field order_lines.total_item_costs is not present in either source query, so it cannot be applied" " as a filter. Please add it to one of the source queries." 
) diff --git a/tests/test_join_query.py b/tests/test_join_query.py index 97fcb13..b07ef00 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -2,6 +2,7 @@ import pytest +from metrics_layer import MetricsLayerConnection from metrics_layer.core.exceptions import JoinError, QueryError from metrics_layer.core.model import Definitions from metrics_layer.core.sql.query_errors import ParseError @@ -1271,3 +1272,66 @@ def test_query_with_or_filters_alternate_syntax(connection): " order_lines_total_item_revenue DESC NULLS LAST;" ) assert query == correct + + +# TODO DELETE BEFORE MERGE +@pytest.mark.queryy +def test_query_with_or_filters_alternate_syntaxx(connection): + connection = MetricsLayerConnection("/Users/pb/src/data_models/demo-data-model") + connection.load() + + query = connection.get_sql_query( + query_type="SNOWFLAKE", + metrics=["number_of_orders"], + dimensions=[], + where=[ + {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, + { + "field": "date", + "expression": "less_or_equal_than", + "value": datetime(2024, 12, 31, 23, 59, 59), + }, + ], + having=[ + { + "conditional_filter_logic": { + "conditions": [ + { + "field": "order_lines.total_net_revenue", + "expression": "less_than", + "value": 5, + }, + { + "field": "order_lines.total_gross_revenue", + "expression": "greater_than", + "value": 6, + }, + { + "conditions": [ + { + "field": "roas", + "expression": "greater_than", + "value": 1, + }, + ], + "logical_operator": "AND", + }, + ], + "logical_operator": "OR", + } + }, + ], + ) + + correct = ( + "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" + " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" + " analytics.customers customers ON order_lines.customer_id=customers.customer_id WHERE" + " customers.gender IN ('M') AND DATE_TRUNC('DAY', order_lines.order_date)>='2024-01-01T00:00:00' AND" + " DATE_TRUNC('DAY', order_lines.order_date)<='2024-12-31T23:59:59' GROUP BY order_lines.sales_channel" + " HAVING SUM(order_lines.revenue)>=100.0 AND SUM(order_lines.revenue)<=200.0 AND" + " (SUM(order_lines.revenue)>100.0 OR SUM(order_lines.revenue)<200.0 OR" + " (SUM(order_lines.revenue)>100.0 AND SUM(order_lines.revenue)<200.0)) ORDER BY" + " order_lines_total_item_revenue DESC NULLS LAST;" + ) + assert query == correct From 0092f17e63d3e4ccf9cf15037cf96e5475e6c9ae Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Fri, 20 Sep 2024 11:44:21 +0100 Subject: [PATCH 46/53] fix merged results issue with or filter in where --- .../core/sql/merged_query_resolve.py | 84 +++++----- metrics_layer/core/sql/query_filter.py | 3 +- tests/test_join_query.py | 150 ++++++++++++++---- 3 files changed, 171 insertions(+), 66 deletions(-) diff --git a/metrics_layer/core/sql/merged_query_resolve.py b/metrics_layer/core/sql/merged_query_resolve.py index e1abd6d..8c8b5f6 100644 --- a/metrics_layer/core/sql/merged_query_resolve.py +++ b/metrics_layer/core/sql/merged_query_resolve.py @@ -256,42 +256,10 @@ def derive_sub_queries(self): keys = list(self.query_metrics.keys()) + list(self.query_dimensions.keys()) unique_keys = sorted(list(set(keys)), key=lambda x: keys.index(x)) self.query_where = defaultdict(list) - for where in self.where: - metric_canon_dates = {f.canon_date for v in self.query_metrics.values() for f in v} - field = self.project.get_field(where["field"]) - is_canon_date = any(f"{field.view.name}.{field.name}" == d for d in metric_canon_dates) - dimension_group = 
field.dimension_group - join_group_hash = self._join_hash_key(field) - added_filter = {join_hash: False for join_hash in unique_keys} - for join_hash in unique_keys: - join_hash_with_canon_date = f"{field.view.name}_{field.name}__{join_group_hash}" - joinable_graphs = join_hash.split("__")[-1] - # The field is joinable if the subquery is the same as one in the main join hash's subquery - joinable_subqueries = [ - f"subquery{g}".strip("_") for g in joinable_graphs.split("subquery") if g != "" - ] - joinable_not_canon_date = not is_canon_date and join_group_hash in joinable_subqueries - is_canon_date_same = is_canon_date and join_hash_with_canon_date in join_hash - if joinable_not_canon_date or is_canon_date_same: - self.query_where[join_hash].append(where) - added_filter[join_hash] = True - else: - key = f"{field.view.name}.{field.name}" - for mapping_info in dimension_mapping[key]: - if mapping_info["from_join_hash"] == join_hash: - if dimension_group: - key = f"{mapping_info['field']}_{dimension_group}" - else: - key = mapping_info["field"] - ref_field = self.project.get_field(key) - mapped_where = deepcopy(where) - mapped_where["field"] = ref_field.id() - self.query_where[join_hash].append(mapped_where) - added_filter[join_hash] = True - # This handles the case where the where field is joined in and not in a mapping - for join_hash in self.query_metrics.keys(): - if not added_filter[join_hash]: - self.query_where[join_hash].append(where) + for join_hash in unique_keys: + for where in self.where: + resolved_where = self._parse_where_filter(where, dimension_mapping, join_hash) + self.query_where[join_hash].append(resolved_where) clean_wheres = defaultdict(list) for k, v in self.query_where.items(): @@ -305,6 +273,50 @@ def derive_sub_queries(self): clean_wheres[k] = [lookup[h] for h in sorted_hashes] self.query_where = clean_wheres + def _parse_where_filter(self, where_filter, dimension_mapping, join_hash): + def recurse(filter_obj): + if isinstance(filter_obj, dict): + if "conditions" in filter_obj: + return {**filter_obj, "conditions": [recurse(f) for f in filter_obj["conditions"]]} + + else: + return self._resolve_where_mapped_filter(filter_obj, dimension_mapping, join_hash) + + return recurse(where_filter) + + def _resolve_where_mapped_filter(self, where_filter_object, dimension_mapping, join_hash): + metric_canon_dates = {f.canon_date for v in self.query_metrics.values() for f in v} + field = self.project.get_field(where_filter_object["field"]) + is_canon_date = any(f"{field.view.name}.{field.name}" == d for d in metric_canon_dates) + dimension_group = field.dimension_group + join_group_hash = self._join_hash_key(field) + + join_hash_with_canon_date = f"{field.view.name}_{field.name}__{join_group_hash}" + joinable_graphs = join_hash.split("__")[-1] + + # The field is joinable if the subquery is the same as one in the main join hash's subquery + joinable_subqueries = [ + f"subquery{g}".strip("_") for g in joinable_graphs.split("subquery") if g != "" + ] + joinable_not_canon_date = not is_canon_date and join_group_hash in joinable_subqueries + is_canon_date_same = is_canon_date and join_hash_with_canon_date in join_hash + if joinable_not_canon_date or is_canon_date_same: + return where_filter_object + else: + key = f"{field.view.name}.{field.name}" + for mapping_info in dimension_mapping[key]: + if mapping_info["from_join_hash"] == join_hash: + if dimension_group: + key = f"{mapping_info['field']}_{dimension_group}" + else: + key = mapping_info["field"] + ref_field = 
self.project.get_field(key) + mapped_where = deepcopy(where_filter_object) + mapped_where["field"] = ref_field.id() + return mapped_where + + return where_filter_object + def _canon_date_mapping(self): canon_dates, join_hashes = [], [] dimension_mapping = defaultdict(list) diff --git a/metrics_layer/core/sql/query_filter.py b/metrics_layer/core/sql/query_filter.py index c858be1..5993e31 100644 --- a/metrics_layer/core/sql/query_filter.py +++ b/metrics_layer/core/sql/query_filter.py @@ -131,7 +131,8 @@ def validate(self, definition: Dict) -> None: # If the value is a string, it might be a field reference. # If it is a field reference, we need to replace it with the actual # field's sql as a LiteralValue - if "value" in definition and isinstance(definition["value"], str): + # Note: it must be a fully qualified reference, so the '.' is required + if "value" in definition and isinstance(definition["value"], str) and "." in definition["value"]: try: value_field = self.design.get_field(definition["value"]) functional_pk = self.design.functional_pk() diff --git a/tests/test_join_query.py b/tests/test_join_query.py index b07ef00..fb7c173 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1274,17 +1274,44 @@ def test_query_with_or_filters_alternate_syntax(connection): assert query == correct -# TODO DELETE BEFORE MERGE -@pytest.mark.queryy -def test_query_with_or_filters_alternate_syntaxx(connection): - connection = MetricsLayerConnection("/Users/pb/src/data_models/demo-data-model") - connection.load() - +@pytest.mark.query +def test_query_with_or_filters_alternate_syntax_merged_result(connection): query = connection.get_sql_query( - query_type="SNOWFLAKE", - metrics=["number_of_orders"], - dimensions=[], + metrics=["total_item_revenue"], + dimensions=["date"], where=[ + { + "conditional_filter_logic": { + "conditions": [ + { + "field": "orders.campaign", + "expression": "equal_to", + "value": "Email", + }, + { + "field": "orders.sub_channel", + "expression": "isin", + "value": ["FB", "TikTok"], + }, + { + "conditions": [ + { + "field": "orders.sub_channel", + "expression": "equal_to", + "value": "Snap", + }, + { + "field": "customers.gender", + "expression": "equal_to", + "value": "M", + }, + ], + "logical_operator": "AND", + }, + ], + "logical_operator": "OR", + } + }, {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, { "field": "date", @@ -1293,25 +1320,87 @@ def test_query_with_or_filters_alternate_syntaxx(connection): }, ], having=[ + { + "field": "costs_per_session", + "expression": "greater_than", + "value": 1, + }, + ], + ) + + correct = ( + "WITH order_lines_order__cte_subquery_0 AS (SELECT DATE_TRUNC('DAY', order_lines.order_date) as" + " order_lines_order_date,SUM(case when order_lines.product_name='Portable Charger' and" + " order_lines.product_name IN ('Portable Charger','Dual Charger') and orders.revenue * 100>100 then" + " order_lines.item_costs end) as order_lines_total_item_costs,COUNT(case when" + " order_lines.sales_channel='Email' then order_lines.order_id end) as" + " order_lines_number_of_email_purchased_items,SUM(order_lines.revenue) as" + " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" + " analytics.orders orders ON order_lines.order_unique_id=orders.id LEFT JOIN analytics.customers" + " customers ON order_lines.customer_id=customers.customer_id WHERE (orders.campaign='Email' OR" + " orders.sub_channel IN ('FB','TikTok') OR (orders.sub_channel='Snap' AND 
customers.gender='M')) AND" + " DATE_TRUNC('DAY', order_lines.order_date)>='2024-01-01T00:00:00' AND DATE_TRUNC('DAY'," + " order_lines.order_date)<='2024-12-31T23:59:59' GROUP BY DATE_TRUNC('DAY', order_lines.order_date)" + " ORDER BY order_lines_total_item_costs DESC NULLS LAST) ,sessions_session__cte_subquery_1 AS (SELECT" + " DATE_TRUNC('DAY', sessions.session_date) as sessions_session_date,COUNT(sessions.id) as" + " sessions_number_of_sessions FROM analytics.sessions sessions LEFT JOIN analytics.customers" + " customers ON sessions.customer_id=customers.customer_id WHERE (sessions.utm_campaign='Email' OR" + " sessions.utm_source IN ('FB','TikTok') OR (sessions.utm_source='Snap' AND customers.gender='M'))" + " AND DATE_TRUNC('DAY', sessions.session_date)>='2024-01-01T00:00:00' AND DATE_TRUNC('DAY'," + " sessions.session_date)<='2024-12-31T23:59:59' GROUP BY DATE_TRUNC('DAY', sessions.session_date)" + " ORDER BY sessions_number_of_sessions DESC NULLS LAST) SELECT" + " order_lines_order__cte_subquery_0.order_lines_total_item_costs as" + " order_lines_total_item_costs,order_lines_order__cte_subquery_0.order_lines_number_of_email_purchased_items" # noqa + " as order_lines_number_of_email_purchased_items,order_lines_order__cte_subquery_0.order_lines_total_item_revenue" # noqa + " as order_lines_total_item_revenue,sessions_session__cte_subquery_1.sessions_number_of_sessions as" + " sessions_number_of_sessions,ifnull(order_lines_order__cte_subquery_0.order_lines_order_date," + " sessions_session__cte_subquery_1.sessions_session_date) as" + " order_lines_order_date,ifnull(sessions_session__cte_subquery_1.sessions_session_date," + " order_lines_order__cte_subquery_0.order_lines_order_date) as" + " sessions_session_date,(order_lines_total_item_costs * order_lines_number_of_email_purchased_items)" + " / nullif(sessions_number_of_sessions, 0) as order_lines_costs_per_session FROM" + " order_lines_order__cte_subquery_0 FULL OUTER JOIN sessions_session__cte_subquery_1 ON" + " order_lines_order__cte_subquery_0.order_lines_order_date=sessions_session__cte_subquery_1.sessions_session_date" # noqa + " WHERE order_lines_costs_per_session>1;" + ) + assert query == correct + + +# TODO DELETE BEFORE MERGE +@pytest.mark.queryy +def test_query_with_or_filters_alternate_syntaxx(connection): + connection = MetricsLayerConnection("/Users/pb/src/data_models/demo-data-model") + connection.load() + + query = connection.get_sql_query( + query_type="SNOWFLAKE", + metrics=["number_of_orders"], + dimensions=["date"], + where=[ { "conditional_filter_logic": { "conditions": [ { - "field": "order_lines.total_net_revenue", - "expression": "less_than", - "value": 5, + "field": "order_lines.marketing_channel", + "expression": "equal_to", + "value": "Email", }, { - "field": "order_lines.total_gross_revenue", - "expression": "greater_than", - "value": 6, + "field": "order_lines.marketing_channel", + "expression": "isin", + "value": ["Email", "Paid Social"], }, { "conditions": [ { - "field": "roas", - "expression": "greater_than", - "value": 1, + "field": "order_lines.campaign", + "expression": "equal_to", + "value": "Campaign A", + }, + { + "field": "order_lines.campaign", + "expression": "equal_to", + "value": "Campaign B", }, ], "logical_operator": "AND", @@ -1320,18 +1409,21 @@ def test_query_with_or_filters_alternate_syntaxx(connection): "logical_operator": "OR", } }, + {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, + { + "field": "date", + "expression": "less_or_equal_than", 
+ "value": datetime(2024, 12, 31, 23, 59, 59), + }, + ], + having=[ + { + "field": "cac", + "expression": "greater_than", + "value": 1, + }, ], ) - correct = ( - "SELECT order_lines.sales_channel as order_lines_channel,SUM(order_lines.revenue) as" - " order_lines_total_item_revenue FROM analytics.order_line_items order_lines LEFT JOIN" - " analytics.customers customers ON order_lines.customer_id=customers.customer_id WHERE" - " customers.gender IN ('M') AND DATE_TRUNC('DAY', order_lines.order_date)>='2024-01-01T00:00:00' AND" - " DATE_TRUNC('DAY', order_lines.order_date)<='2024-12-31T23:59:59' GROUP BY order_lines.sales_channel" - " HAVING SUM(order_lines.revenue)>=100.0 AND SUM(order_lines.revenue)<=200.0 AND" - " (SUM(order_lines.revenue)>100.0 OR SUM(order_lines.revenue)<200.0 OR" - " (SUM(order_lines.revenue)>100.0 AND SUM(order_lines.revenue)<200.0)) ORDER BY" - " order_lines_total_item_revenue DESC NULLS LAST;" - ) + correct = "" assert query == correct From 2c9e546391dc2d5d5db89d6e718464cea31005f0 Mon Sep 17 00:00:00 2001 From: tlokvenec Date: Fri, 20 Sep 2024 13:14:16 -0400 Subject: [PATCH 47/53] resolved TODO --- tests/test_join_query.py | 63 ---------------------------------------- 1 file changed, 63 deletions(-) diff --git a/tests/test_join_query.py b/tests/test_join_query.py index fb7c173..196c11d 100644 --- a/tests/test_join_query.py +++ b/tests/test_join_query.py @@ -1364,66 +1364,3 @@ def test_query_with_or_filters_alternate_syntax_merged_result(connection): " WHERE order_lines_costs_per_session>1;" ) assert query == correct - - -# TODO DELETE BEFORE MERGE -@pytest.mark.queryy -def test_query_with_or_filters_alternate_syntaxx(connection): - connection = MetricsLayerConnection("/Users/pb/src/data_models/demo-data-model") - connection.load() - - query = connection.get_sql_query( - query_type="SNOWFLAKE", - metrics=["number_of_orders"], - dimensions=["date"], - where=[ - { - "conditional_filter_logic": { - "conditions": [ - { - "field": "order_lines.marketing_channel", - "expression": "equal_to", - "value": "Email", - }, - { - "field": "order_lines.marketing_channel", - "expression": "isin", - "value": ["Email", "Paid Social"], - }, - { - "conditions": [ - { - "field": "order_lines.campaign", - "expression": "equal_to", - "value": "Campaign A", - }, - { - "field": "order_lines.campaign", - "expression": "equal_to", - "value": "Campaign B", - }, - ], - "logical_operator": "AND", - }, - ], - "logical_operator": "OR", - } - }, - {"field": "date", "expression": "greater_or_equal_than", "value": datetime(2024, 1, 1, 0, 0)}, - { - "field": "date", - "expression": "less_or_equal_than", - "value": datetime(2024, 12, 31, 23, 59, 59), - }, - ], - having=[ - { - "field": "cac", - "expression": "greater_than", - "value": 1, - }, - ], - ) - - correct = "" - assert query == correct From 546486a8250ac97a599a6930c164f1bcf72837e2 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Fri, 20 Sep 2024 22:27:12 +0100 Subject: [PATCH 48/53] bump version to 0.12.38 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ccaae5b..d771f81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.37" +version = "0.12.38" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] From 93c8651b919c61e3a00ecfa3d0625657a56da483 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Fri, 20 Sep 2024 22:27:27 +0100 Subject: [PATCH 49/53] Release v0.12.38 From 48c9cb14095337309352ce0a26f5e44e505a3293 Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Wed, 25 Sep 2024 16:42:00 -0600 Subject: [PATCH 50/53] fix non additive or filters issue (#235) * fix non additive or filters issue * bump version --- metrics_layer/cli/seeding.py | 2 + metrics_layer/core/sql/query_generator.py | 5 +- .../core/sql/single_query_resolve.py | 17 +--- metrics_layer/core/utils.py | 18 +++++ pyproject.toml | 2 +- tests/test_cli.py | 18 +++++ tests/test_non_additive_dimensions.py | 77 +++++++++++++++++++ 7 files changed, 121 insertions(+), 18 deletions(-) diff --git a/metrics_layer/cli/seeding.py b/metrics_layer/cli/seeding.py index 381f011..df8dc68 100644 --- a/metrics_layer/cli/seeding.py +++ b/metrics_layer/cli/seeding.py @@ -369,6 +369,8 @@ def make_fields(self, column_data, schema_name: str, table_name: str, auto_tag_s Definitions.duck_db, Definitions.postgres, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: column_name = '"' + row["COLUMN_NAME"] + '"' else: diff --git a/metrics_layer/core/sql/query_generator.py b/metrics_layer/core/sql/query_generator.py index ecee606..5fea256 100644 --- a/metrics_layer/core/sql/query_generator.py +++ b/metrics_layer/core/sql/query_generator.py @@ -14,6 +14,7 @@ from metrics_layer.core.sql.query_dialect import NullSorting, query_lookup from metrics_layer.core.sql.query_errors import ArgumentError from metrics_layer.core.sql.query_filter import MetricsLayerFilter +from metrics_layer.core.utils import flatten_filters class MetricsLayerQuery(MetricsLayerQueryBase): @@ -80,7 +81,7 @@ def parse_definition(self, definition: dict): # them as CTE's for the appropriate filters self.non_additive_ctes = [] metrics_in_select = definition.get("metrics", []) - metrics_in_having = [h.field.id() for h in self.having_filters if h.field] + metrics_in_having = [h["field"] for h in flatten_filters(having)] for metric in metrics_in_select + metrics_in_having: metric_field = self.design.get_field(metric) for ref_field in [metric_field] + metric_field.referenced_fields(metric_field.sql): @@ -385,7 +386,7 @@ def _non_additive_cte(self, definition: dict, group_by_dimensions: list): field_lookup[non_additive_dimension.id()] = non_additive_dimension # We also need to make all fields in the where clause available to the query - for f in self.where: + for f in flatten_filters(self.where): field = self.design.get_field(f["field"]) field_lookup[field.id()] = field diff --git a/metrics_layer/core/sql/single_query_resolve.py b/metrics_layer/core/sql/single_query_resolve.py index 8b17f04..8438af3 100644 --- a/metrics_layer/core/sql/single_query_resolve.py +++ b/metrics_layer/core/sql/single_query_resolve.py @@ -4,6 +4,7 @@ from metrics_layer.core.sql.query_design import MetricsLayerDesign from metrics_layer.core.sql.query_funnel import FunnelQuery from metrics_layer.core.sql.query_generator import MetricsLayerQuery +from metrics_layer.core.utils import flatten_filters class SingleSQLQueryResolver: @@ -232,21 +233,7 @@ def parse_identifiers_from_dicts(conditions: list): @staticmethod def flatten_filters(filters: list): - flat_list = [] - - def recurse(filter_obj): - if isinstance(filter_obj, dict): - if "conditions" in 
filter_obj: - for f in filter_obj["conditions"]: - recurse(f) - else: - flat_list.append(filter_obj) - elif isinstance(filter_obj, list): - for item in filter_obj: - recurse(item) - - recurse(filters) - return flat_list + return flatten_filters(filters) @staticmethod def _check_for_dict(conditions: list): diff --git a/metrics_layer/core/utils.py b/metrics_layer/core/utils.py index d053722..89d1dd4 100644 --- a/metrics_layer/core/utils.py +++ b/metrics_layer/core/utils.py @@ -14,3 +14,21 @@ def generate_random_password(length): letters = string.ascii_letters result_str = "".join(random.choice(letters) for i in range(length)) return result_str + + +def flatten_filters(filters: list): + flat_list = [] + + def recurse(filter_obj): + if isinstance(filter_obj, dict): + if "conditions" in filter_obj: + for f in filter_obj["conditions"]: + recurse(f) + else: + flat_list.append(filter_obj) + elif isinstance(filter_obj, list): + for item in filter_obj: + recurse(item) + + recurse(filters) + return flat_list diff --git a/pyproject.toml b/pyproject.toml index d771f81..549b688 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.38" +version = "0.12.39" description = "The open source metrics layer." authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_cli.py b/tests/test_cli.py index c14a031..0f7972d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -176,6 +176,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert social["sql"] == '${TABLE}."ON_SOCIAL_NETWORK"' else: @@ -195,6 +197,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert acq_date["sql"] == '${TABLE}."ACQUISITION_DATE"' else: @@ -231,6 +235,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert date["sql"] == '${TABLE}."ORDER_CREATED_AT"' else: @@ -244,6 +250,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert new["sql"] == '${TABLE}."NEW_VS_REPEAT"' else: @@ -257,6 +265,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert num["sql"] == '${TABLE}."REVENUE"' else: @@ -307,6 +317,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert cross_sell["sql"] == '${TABLE}."@CRoSSell P-roduct:"' else: @@ -344,6 +356,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert date["sql"] == '${TABLE}."SESSION_DATE"' else: @@ -357,6 +371,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + Definitions.azure_synapse, }: assert pk["sql"] == '${TABLE}."SESSION_ID"' else: @@ -370,6 +386,8 @@ def yaml_dump_assert(slf, data, file): Definitions.postgres, Definitions.trino, Definitions.redshift, + Definitions.sql_server, + 
Definitions.azure_synapse, }: assert num["sql"] == '${TABLE}."CONVERSION"' else: diff --git a/tests/test_non_additive_dimensions.py b/tests/test_non_additive_dimensions.py index 4d66c68..5ec78f9 100644 --- a/tests/test_non_additive_dimensions.py +++ b/tests/test_non_additive_dimensions.py @@ -672,3 +672,80 @@ def test_mrr_non_additive_dimension_merged_result_sub_join_where(connection): " mrr_record__cte_subquery_0.mrr_record_date=z_customer_accounts_created__cte_subquery_1.z_customer_accounts_created_date;" # noqa ) assert query == correct + + +@pytest.mark.query +def test_mrr_non_additive_dimension_or_filters_with_select(connection): + query = connection.get_sql_query( + metrics=["mrr.mrr_end_of_month"], + dimensions=[], + where=[ + { + "conditional_filter_logic": { + "conditions": [ + {"field": "mrr.plan_name", "expression": "equal_to", "value": "Enterprise"} + ], + "logical_operator": "AND", + } + }, + ], + having=[ + { + "conditional_filter_logic": { + "conditions": [ + {"field": "number_of_billed_accounts", "expression": "greater_than", "value": 1100} + ], + "logical_operator": "AND", + } + } + ], + ) + + correct = ( + "WITH cte_mrr_end_of_month_record_raw AS (SELECT MAX(mrr.record_date) as mrr_max_record_raw FROM" + " analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' ORDER BY mrr_max_record_raw DESC" + " NULLS LAST) SELECT SUM(case when mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw" + " then mrr.mrr else 0 end) as mrr_mrr_end_of_month FROM analytics.mrr_by_customer mrr LEFT JOIN" + " cte_mrr_end_of_month_record_raw ON 1=1 WHERE mrr.plan_name='Enterprise' HAVING" + " COUNT(mrr.parent_account_id)>1100 ORDER BY mrr_mrr_end_of_month DESC NULLS LAST;" + ) + assert query == correct + + +@pytest.mark.query +def test_mrr_non_additive_dimension_or_filters(connection): + query = connection.get_sql_query( + metrics=["number_of_billed_accounts"], + dimensions=[], + where=[ + { + "conditional_filter_logic": { + "conditions": [ + {"field": "mrr.plan_name", "expression": "equal_to", "value": "Enterprise"} + ], + "logical_operator": "AND", + } + }, + ], + having=[ + { + "conditional_filter_logic": { + "conditions": [ + {"field": "mrr.mrr_end_of_month", "expression": "greater_than", "value": 1100} + ], + "logical_operator": "AND", + } + } + ], + ) + + correct = ( + "WITH cte_mrr_end_of_month_record_raw AS (SELECT MAX(mrr.record_date) as mrr_max_record_raw FROM" + " analytics.mrr_by_customer mrr WHERE mrr.plan_name='Enterprise' ORDER BY mrr_max_record_raw DESC" + " NULLS LAST) SELECT COUNT(mrr.parent_account_id) as mrr_number_of_billed_accounts FROM" + " analytics.mrr_by_customer mrr LEFT JOIN cte_mrr_end_of_month_record_raw ON 1=1 WHERE" + " mrr.plan_name='Enterprise' HAVING SUM(case when" + " mrr.record_date=cte_mrr_end_of_month_record_raw.mrr_max_record_raw then mrr.mrr else 0 end)>1100" + " ORDER BY mrr_number_of_billed_accounts DESC NULLS LAST;" + ) + assert query == correct From 82ce90a7bb7e6befdb25fc28bea125966450c477 Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Wed, 25 Sep 2024 16:42:16 -0600 Subject: [PATCH 51/53] Release v0.12.39 From b7f4418246bbc9ea48242297be2f32d5068f61fd Mon Sep 17 00:00:00 2001 From: Paul Blankley <31221512+pblankley@users.noreply.github.com> Date: Thu, 26 Sep 2024 17:53:42 -0600 Subject: [PATCH 52/53] add more robust support for multi connections (#230) * add more robust support for multi connections * fix tests and only add models to merged results * fix query func call * fix tests * bump version --- metrics_layer/cli/seeding.py 
| 7 +- metrics_layer/core/model/field.py | 8 +- metrics_layer/core/model/project.py | 10 +- .../core/sql/arbitrary_merge_resolve.py | 2 + metrics_layer/core/sql/resolve.py | 19 ++- pyproject.toml | 2 +- tests/test_merged_results.py | 156 +++++++++--------- 7 files changed, 112 insertions(+), 92 deletions(-) diff --git a/metrics_layer/cli/seeding.py b/metrics_layer/cli/seeding.py index df8dc68..60439e6 100644 --- a/metrics_layer/cli/seeding.py +++ b/metrics_layer/cli/seeding.py @@ -266,8 +266,13 @@ def seed(self, auto_tag_searchable_fields: bool = False): dumper.dump_yaml_file(project_data, zenlytic_project_path) def get_model_name(self, current_models: list): - if len(current_models) > 0: + if len(current_models) == 1: return current_models[0].name + elif len(current_models) > 1: + for model in current_models: + if self.connection and model.connection == self.connection.name: + return model.name + raise ValueError("Multiple models found, but none match the connection name") return self.default_model_name def make_models(self): diff --git a/metrics_layer/core/model/field.py b/metrics_layer/core/model/field.py index 6e6fd39..8408d23 100644 --- a/metrics_layer/core/model/field.py +++ b/metrics_layer/core/model/field.py @@ -2778,8 +2778,14 @@ def join_graphs(self): return base edges = self.view.project.join_graph.merged_results_graph(self.view.model).in_edges(self.id()) - extended = [f"merged_result_{mr}" for mr, _ in edges] + extended = self._wrap_with_model_name([f"merged_result_{mr}" for mr, _ in edges]) if self.loses_join_ability_with_other_views(): return extended return list(sorted(base + extended)) + + def _wrap_with_model_name(self, join_graphs: list): + if len(models := self.view.project.models()) > 1: + model_index = [m.name for m in models].index(self.view.model.name) + return [f"m{model_index}_{jg}" for jg in join_graphs] + return join_graphs diff --git a/metrics_layer/core/model/project.py b/metrics_layer/core/model/project.py index 771a4ae..9ea90cb 100644 --- a/metrics_layer/core/model/project.py +++ b/metrics_layer/core/model/project.py @@ -116,9 +116,13 @@ def remove_field(self, field_name: str, view_name: str, refresh_cache: bool = Tr def timezone(self): if self._timezone: return self._timezone - for m in self.models(): - if m.timezone: - return m.timezone + timezones = list(set(m.timezone for m in self.models() if m.timezone)) + if len(timezones) == 1: + return timezones[0] + elif len(timezones) > 1: + raise QueryError( + "Multiple timezones found in models, please specify only one timezone across models" + ) return None @property diff --git a/metrics_layer/core/sql/arbitrary_merge_resolve.py b/metrics_layer/core/sql/arbitrary_merge_resolve.py index afed927..20b06e9 100644 --- a/metrics_layer/core/sql/arbitrary_merge_resolve.py +++ b/metrics_layer/core/sql/arbitrary_merge_resolve.py @@ -37,6 +37,7 @@ def __init__( self.project = project self.connections = connections self.connection = None + self.model = None # All queries are merged queries (obviously) self.query_kind = QueryKindTypes.merged self.kwargs = kwargs @@ -76,6 +77,7 @@ def get_query(self, semicolon: bool = True): ) mapping_lookup = self._mapping_lookup + self.model = resolver.model clean_where = [{**w, "field": mapping_lookup.get(w["field"].lower(), w["field"])} for w in self.where] clean_having = [ {**h, "field": mapping_lookup.get(h["field"].lower(), h["field"])} for h in self.having diff --git a/metrics_layer/core/sql/resolve.py b/metrics_layer/core/sql/resolve.py index 7b4780c..f248065 100644 --- 
a/metrics_layer/core/sql/resolve.py +++ b/metrics_layer/core/sql/resolve.py @@ -1,4 +1,4 @@ -from collections import defaultdict +from collections import Counter, defaultdict from copy import deepcopy from typing import List, Union @@ -424,7 +424,7 @@ def _get_model_for_query(self, model_name: str = None, metrics: list = [], dimen return self._derive_model(metrics, dimensions) def _derive_model(self, metrics: list, dimensions: list): - all_model_names = [] + all_model_names, mapping_model_names = [], [] models = self.project.models() for f in metrics + dimensions: try: @@ -434,18 +434,21 @@ def _derive_model(self, metrics: list, dimensions: list): for model in models: try: self.project.get_mapped_field(f, model=model) - all_model_names.append(model.name) - break + mapping_model_names.append(model.name) except Exception: pass all_model_names = list(set(all_model_names)) - - if len(all_model_names) == 0: + if len(all_model_names) == 0 and len(mapping_model_names) > 0: # In a case that there are no models in the query, we'll just use the first model # in the project. This case should be limited to only mapping-only queries, so this is safe. - return self.project.models()[0] - elif len(all_model_names) == 1: + model_counts = Counter(mapping_model_names) + sorted_models = [m for m, _ in model_counts.most_common()] + return self.project.get_model(sorted_models[0]) + elif len(all_model_names) == 1 and ( + len(mapping_model_names) == 0 + or (len(mapping_model_names) > 0 and all_model_names[0] in mapping_model_names) + ): return self.project.get_model(list(all_model_names)[0]) else: raise QueryError( diff --git a/pyproject.toml b/pyproject.toml index 549b688..d48aa3b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metrics_layer" -version = "0.12.39" +version = "0.12.40" description = "The open source metrics layer." 
authors = ["Paul Blankley "] keywords = ["Metrics Layer", "Business Intelligence", "Analytics"] diff --git a/tests/test_merged_results.py b/tests/test_merged_results.py index bb98eec..66cd495 100644 --- a/tests/test_merged_results.py +++ b/tests/test_merged_results.py @@ -150,20 +150,20 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): "year", ] core_tf = ["raw", "time", "date", "week", "month", "quarter", "year"] - sub_q_cr = _blow_out_by_time_frame("merged_result_canon_date_core", core_tf) - sub_q_0_4 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_4", core_tf) - sub_q_0_2 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_7", core_tf) - sub_q_0_3 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_3", core_tf) - sub_q_0_5 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_5", core_tf) - sub_q_0_8 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_8", core_tf) - sub_q_0_9 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_9", core_tf) - sub_q_0_10 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_10", core_tf) - sub_q_0_11 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_11", core_tf) - sub_q_0_12 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_12", core_tf) - sub_q_0_14 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_13", core_tf) - sub_q_0_15 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_15", core_tf) - sub_q_0_16 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_16", core_tf) - sub_q_0_1 = _blow_out_by_time_frame("merged_result_subquery_0_subquery_1", core_tf) + sub_q_cr = _blow_out_by_time_frame("m0_merged_result_canon_date_core", core_tf) + sub_q_0_4 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_4", core_tf) + sub_q_0_2 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_7", core_tf) + sub_q_0_3 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_3", core_tf) + sub_q_0_5 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_5", core_tf) + sub_q_0_8 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_8", core_tf) + sub_q_0_9 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_9", core_tf) + sub_q_0_10 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_10", core_tf) + sub_q_0_11 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_11", core_tf) + sub_q_0_12 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_12", core_tf) + sub_q_0_14 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_13", core_tf) + sub_q_0_15 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_15", core_tf) + sub_q_0_16 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_16", core_tf) + sub_q_0_1 = _blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_1", core_tf) revenue_set = [ *sub_q_cr, *sub_q_0_4, @@ -189,47 +189,47 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): field = connection.get_field("order_lines.order_date") order_lines_date_graphs = [ "subquery_0", - "merged_result_canon_date_core_date", - "merged_result_subquery_0_subquery_10_date", - "merged_result_subquery_0_subquery_11_date", - "merged_result_subquery_0_subquery_12_date", - "merged_result_subquery_0_subquery_13_date", - "merged_result_subquery_0_subquery_15_date", - "merged_result_subquery_0_subquery_16_date", - "merged_result_subquery_0_subquery_1_date", - "merged_result_subquery_0_subquery_4_date", - 
"merged_result_subquery_0_subquery_7_date", - "merged_result_subquery_0_subquery_3_date", - "merged_result_subquery_0_subquery_5_date", - "merged_result_subquery_0_subquery_8_date", - "merged_result_subquery_0_subquery_9_date", + "m0_merged_result_canon_date_core_date", + "m0_merged_result_subquery_0_subquery_10_date", + "m0_merged_result_subquery_0_subquery_11_date", + "m0_merged_result_subquery_0_subquery_12_date", + "m0_merged_result_subquery_0_subquery_13_date", + "m0_merged_result_subquery_0_subquery_15_date", + "m0_merged_result_subquery_0_subquery_16_date", + "m0_merged_result_subquery_0_subquery_1_date", + "m0_merged_result_subquery_0_subquery_4_date", + "m0_merged_result_subquery_0_subquery_7_date", + "m0_merged_result_subquery_0_subquery_3_date", + "m0_merged_result_subquery_0_subquery_5_date", + "m0_merged_result_subquery_0_subquery_8_date", + "m0_merged_result_subquery_0_subquery_9_date", ] assert field.join_graphs() == list(sorted(order_lines_date_graphs)) field = connection.get_field("orders.order_date") order_date_graphs = [ "subquery_0", - "merged_result_canon_date_core_date", - "merged_result_subquery_0_subquery_10_date", - "merged_result_subquery_0_subquery_11_date", - "merged_result_subquery_0_subquery_12_date", - "merged_result_subquery_0_subquery_13_date", - "merged_result_subquery_0_subquery_15_date", - "merged_result_subquery_0_subquery_16_date", - "merged_result_subquery_0_subquery_1_date", - "merged_result_subquery_0_subquery_4_date", - "merged_result_subquery_0_subquery_7_date", - "merged_result_subquery_0_subquery_3_date", - "merged_result_subquery_0_subquery_5_date", - "merged_result_subquery_0_subquery_8_date", - "merged_result_subquery_0_subquery_9_date", + "m0_merged_result_canon_date_core_date", + "m0_merged_result_subquery_0_subquery_10_date", + "m0_merged_result_subquery_0_subquery_11_date", + "m0_merged_result_subquery_0_subquery_12_date", + "m0_merged_result_subquery_0_subquery_13_date", + "m0_merged_result_subquery_0_subquery_15_date", + "m0_merged_result_subquery_0_subquery_16_date", + "m0_merged_result_subquery_0_subquery_1_date", + "m0_merged_result_subquery_0_subquery_4_date", + "m0_merged_result_subquery_0_subquery_7_date", + "m0_merged_result_subquery_0_subquery_3_date", + "m0_merged_result_subquery_0_subquery_5_date", + "m0_merged_result_subquery_0_subquery_8_date", + "m0_merged_result_subquery_0_subquery_9_date", ] assert field.join_graphs() == list(sorted(order_date_graphs)) field = connection.get_field("sub_channel") sub_channel_graphs = [ "subquery_0", - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_4", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_4", tf), ] assert field.join_graphs() == list(sorted(sub_channel_graphs)) @@ -246,47 +246,47 @@ def _blow_out_by_time_frame(join_graph: str, tf: list): "subquery_11", "subquery_12", "subquery_13", - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_1", tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_3", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_12", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_11", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_3", tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_12", tf), - *_blow_out_by_time_frame("merged_result_subquery_12_subquery_3", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_12_subquery_4", core_tf), - 
*_blow_out_by_time_frame("merged_result_subquery_11_subquery_12", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_11_subquery_3", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_3_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_4", tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_11", tf), - *_blow_out_by_time_frame("merged_result_subquery_13_subquery_3", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_13_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_11_subquery_13", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_12_subquery_13", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_13", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_13", tf), - *_blow_out_by_time_frame("merged_result_subquery_11_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_1", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_1_subquery_3", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_1_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_1_subquery_12", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_1_subquery_11", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_3", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_12", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_12_subquery_3", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_12_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_11_subquery_12", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_11_subquery_3", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_3_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_4", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_11", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_13_subquery_3", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_13_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_11_subquery_13", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_12_subquery_13", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_1_subquery_13", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_13", tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_11_subquery_4", core_tf), ] assert field.join_graphs() == list(sorted(gender_graphs)) field = connection.get_field("number_of_sessions") sessions_graphs = [ "subquery_4", - *_blow_out_by_time_frame("merged_result_canon_date_core", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_1_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_10_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_11_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_12_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_0_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_3_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_4_subquery_7", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_4_subquery_5", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_13_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_15_subquery_4", core_tf), - 
*_blow_out_by_time_frame("merged_result_subquery_16_subquery_4", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_4_subquery_8", core_tf), - *_blow_out_by_time_frame("merged_result_subquery_4_subquery_9", core_tf), + *_blow_out_by_time_frame("m0_merged_result_canon_date_core", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_1_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_10_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_11_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_12_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_0_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_3_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_4_subquery_7", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_4_subquery_5", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_13_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_15_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_16_subquery_4", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_4_subquery_8", core_tf), + *_blow_out_by_time_frame("m0_merged_result_subquery_4_subquery_9", core_tf), ] assert field.join_graphs() == list(sorted(sessions_graphs)) From 3805643feb7e3a575b3f0d379b738ddf450c133b Mon Sep 17 00:00:00 2001 From: Paul Blankley Date: Thu, 26 Sep 2024 17:54:07 -0600 Subject: [PATCH 53/53] Release v0.12.40