diff --git a/.changes/unreleased/Under the Hood-20240718-193206.yaml b/.changes/unreleased/Under the Hood-20240718-193206.yaml
new file mode 100644
index 000000000..32b3084f5
--- /dev/null
+++ b/.changes/unreleased/Under the Hood-20240718-193206.yaml
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Simplify linting environment and dev dependencies
+time: 2024-07-18T19:32:06.044016-04:00
+custom:
+  Author: mikealfare
+  Issue: "1291"
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index da7e039fd..000000000
--- a/.flake8
+++ /dev/null
@@ -1,16 +0,0 @@
-[flake8]
-select =
-    E
-    W
-    F
-ignore =
-    # makes Flake8 work like black
-    W503,
-    W504,
-    # makes Flake8 work like black
-    E203,
-    E741,
-    E501,
-exclude = tests
-per-file-ignores =
-    */__init__.py: F401
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 631bc7652..a88793bde 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -58,7 +58,6 @@ jobs:
           python -m pip install -r dev-requirements.txt
           python -m pip --version
           pre-commit --version
-          mypy --version
           dbt --version
       - name: Run pre-commit hooks
         run: pre-commit run --all-files --show-diff-on-failure
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 74dbdf99a..0739a7cc4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,66 +1,58 @@
 # For more on configuring pre-commit hooks (see https://pre-commit.com/)
-
 default_language_version:
-  python: python3
+    python: python3
 
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
-  hooks:
-  - id: check-yaml
-    args: [--unsafe]
-  - id: check-json
-  - id: end-of-file-fixer
-  - id: trailing-whitespace
-  - id: check-case-conflict
-- repo: https://github.com/dbt-labs/pre-commit-hooks
-  rev: v0.1.0a1
-  hooks:
-  - id: dbt-core-in-adapters-check
-- repo: https://github.com/psf/black
-  rev: 23.1.0
-  hooks:
-  - id: black
-    additional_dependencies: ['click~=8.1']
-    args:
-    - "--line-length=99"
-    - "--target-version=py38"
-  - id: black
-    alias: black-check
-    stages: [manual]
-    additional_dependencies: ['click~=8.1']
-    args:
-    - "--line-length=99"
-    - "--target-version=py38"
-    - "--check"
-    - "--diff"
-- repo: https://github.com/pycqa/flake8
-  rev: 6.0.0
-  hooks:
-  - id: flake8
-  - id: flake8
-    alias: flake8-check
-    stages: [manual]
-- repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.1.1
-  hooks:
-  - id: mypy
-    # N.B.: Mypy is... a bit fragile.
-    #
-    # By using `language: system` we run this hook in the local
-    # environment instead of a pre-commit isolated one. This is needed
-    # to ensure mypy correctly parses the project.
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.6.0
+  hooks:
+  - id: check-yaml
+    args: [--unsafe]
+  - id: check-json
+  - id: end-of-file-fixer
+  - id: trailing-whitespace
+  - id: check-case-conflict
+
+- repo: https://github.com/dbt-labs/pre-commit-hooks
+  rev: v0.1.0a1
+  hooks:
+  - id: dbt-core-in-adapters-check
+
+- repo: https://github.com/psf/black
+  rev: 24.4.2
+  hooks:
+  - id: black
+    args:
+    - --line-length=99
+    - --target-version=py38
+    - --target-version=py39
+    - --target-version=py310
+    - --target-version=py311
+    additional_dependencies: [flaky]
+
+- repo: https://github.com/pycqa/flake8
+  rev: 7.0.0
+  hooks:
+  - id: flake8
+    exclude: tests/
+    args:
+    - --max-line-length=99
+    - --select=E,F,W
+    - --ignore=E203,E501,E741,W503,W504
+    - --per-file-ignores=*/__init__.py:F401
-    # It may cause trouble in that it adds environmental variables out
-    # of our control to the mix. Unfortunately, there's nothing we can
-    # do about per pre-commit's author.
-    # See https://github.com/pre-commit/pre-commit/issues/730 for details.
-    args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases]
-    files: ^dbt/adapters/.*
-    language: system
-  - id: mypy
-    alias: mypy-check
-    stages: [manual]
-    args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases]
-    files: ^dbt/adapters
-    language: system
+- repo: https://github.com/pre-commit/mirrors-mypy
+  rev: v1.10.0
+  hooks:
+  - id: mypy
+    args:
+    - --explicit-package-bases
+    - --ignore-missing-imports
+    - --pretty
+    - --show-error-codes
+    - --warn-unused-ignores
+    files: ^dbt/adapters/bigquery
+    additional_dependencies:
+    - types-protobuf
+    - types-pytz
+    - types-requests
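Reviewer note on the mypy hook above: dropping `language: system` means mypy now runs in pre-commit's isolated environment, so the stub packages it needs are declared as `additional_dependencies`. A minimal sketch of why the stubs matter; the module use below is illustrative, not taken from this diff:

```python
import requests


def fetch_status(url: str) -> int:
    # With types-requests installed, mypy knows `requests.get()` returns a
    # Response whose `.status_code` is an int. Without the stubs, and with
    # --ignore-missing-imports in effect, `requests` is typed as Any, so a
    # typo such as `.status_cod` would slip through unchecked.
    return requests.get(url).status_code
```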
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e9432d363..1af648741 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -67,7 +67,7 @@ $EDITOR test.env
 There are a few methods for running tests locally.
 
 #### `tox`
-`tox` takes care of managing Python virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel, for example you can run unit tests for Python 3.8, Python 3.9, and `flake8` checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration of these tests are located in `tox.ini`.
+`tox` takes care of managing Python virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10, and Python 3.11 in parallel with `tox -p`. You can also run unit tests for a specific Python version with `tox -e py38`. The configuration of these tests is located in `tox.ini`.
 
 #### `pytest`
 Finally, you can also run a specific test or group of tests using `pytest` directly. With a Python virtualenv active and dev dependencies installed you can do things like:
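The CONTRIBUTING hunk above ends at "you can do things like:". A hedged illustration of the kind of invocation it refers to, driven from Python via `pytest.main`; the test path is an example, not something this PR prescribes:

```python
import pytest

# Run one unit-test module verbosely; equivalent to `python -m pytest -v <path>`.
exit_code = pytest.main(["-v", "tests/unit/test_bigquery_adapter.py"])
raise SystemExit(exit_code)
```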
diff --git a/dbt/adapters/bigquery/__init__.py b/dbt/adapters/bigquery/__init__.py
index adbd67590..5fe68e786 100644
--- a/dbt/adapters/bigquery/__init__.py
+++ b/dbt/adapters/bigquery/__init__.py
@@ -8,5 +8,5 @@ from dbt.include import bigquery
 
 Plugin = AdapterPlugin(
-    adapter=BigQueryAdapter, credentials=BigQueryCredentials, include_path=bigquery.PACKAGE_PATH  # type: ignore[arg-type]
+    adapter=BigQueryAdapter, credentials=BigQueryCredentials, include_path=bigquery.PACKAGE_PATH
 )
diff --git a/dbt/adapters/bigquery/column.py b/dbt/adapters/bigquery/column.py
index 1bdf4323d..4a12e211f 100644
--- a/dbt/adapters/bigquery/column.py
+++ b/dbt/adapters/bigquery/column.py
@@ -18,7 +18,7 @@ class BigQueryColumn(Column):
         "INTEGER": "INT64",
     }
     fields: List[Self]  # type: ignore
-    mode: str  # type: ignore
+    mode: str
 
     def __init__(
         self,
@@ -110,7 +110,7 @@ def is_numeric(self) -> bool:
     def is_float(self):
         return self.dtype.lower() == "float64"
 
-    def can_expand_to(self: Self, other_column: Self) -> bool:  # type: ignore
+    def can_expand_to(self: Self, other_column: Self) -> bool:
         """returns True if both columns are strings"""
         return self.is_string() and other_column.is_string()
 
@@ -124,7 +124,7 @@ def column_to_bq_schema(self) -> SchemaField:
         fields = [field.column_to_bq_schema() for field in self.fields]  # type: ignore[attr-defined]
         kwargs = {"fields": fields}
 
-        return SchemaField(self.name, self.dtype, self.mode, **kwargs)  # type: ignore[arg-type]
+        return SchemaField(self.name, self.dtype, self.mode, **kwargs)
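The `can_expand_to` annotation cleanup above encodes a simple rule: only string columns can expand into one another. A small sketch, assuming a `BigQueryColumn(column, dtype)` constructor with `mode` defaulting to `"NULLABLE"`:

```python
from dbt.adapters.bigquery.column import BigQueryColumn

name = BigQueryColumn("name", "STRING")
alias = BigQueryColumn("alias", "STRING")
count = BigQueryColumn("count", "INT64")

assert name.can_expand_to(alias)      # string -> string: expandable
assert not count.can_expand_to(name)  # non-string columns never expand

# column_to_bq_schema() round-trips the column into a google-cloud-bigquery
# SchemaField, which is why the ignore on that call could be dropped.
field = name.column_to_bq_schema()
```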
diff --git a/dbt/adapters/bigquery/connections.py b/dbt/adapters/bigquery/connections.py
index 14f958a05..0d57d22c3 100644
--- a/dbt/adapters/bigquery/connections.py
+++ b/dbt/adapters/bigquery/connections.py
@@ -116,8 +116,8 @@ class BigQueryCredentials(Credentials):
 
     # BigQuery allows an empty database / project, where it defers to the
     # environment for the project
-    database: Optional[str] = None  # type: ignore
-    schema: Optional[str] = None  # type: ignore
+    database: Optional[str] = None
+    schema: Optional[str] = None
     execution_project: Optional[str] = None
     location: Optional[str] = None
     priority: Optional[Priority] = None
@@ -568,7 +568,7 @@ def execute(
         else:
             message = f"{code}"
 
-        response = BigQueryAdapterResponse(  # type: ignore[call-arg]
+        response = BigQueryAdapterResponse(
             _message=message,
             rows_affected=num_rows,
             code=code,
diff --git a/dbt/adapters/bigquery/dataproc/batch.py b/dbt/adapters/bigquery/dataproc/batch.py
index 61dc3c18b..e7f13c913 100644
--- a/dbt/adapters/bigquery/dataproc/batch.py
+++ b/dbt/adapters/bigquery/dataproc/batch.py
@@ -20,9 +20,9 @@ def create_batch_request(
     batch: Batch, batch_id: str, project: str, region: str
 ) -> CreateBatchRequest:
     return CreateBatchRequest(
-        parent=f"projects/{project}/locations/{region}",  # type: ignore
-        batch_id=batch_id,  # type: ignore
-        batch=batch,  # type: ignore
+        parent=f"projects/{project}/locations/{region}",
+        batch_id=batch_id,
+        batch=batch,
     )
 
 
@@ -35,10 +35,10 @@ def poll_batch_job(
     run_time = 0
     while state in _BATCH_RUNNING_STATES and run_time < timeout:
         time.sleep(1)
-        response = job_client.get_batch(  # type: ignore
-            request=GetBatchRequest(name=batch_name),  # type: ignore
+        response = job_client.get_batch(
+            request=GetBatchRequest(name=batch_name),
         )
-        run_time = datetime.now().timestamp() - response.create_time.timestamp()  # type: ignore
+        run_time = datetime.now().timestamp() - response.create_time.timestamp()
         state = response.state
     if not response:
         raise ValueError("No response from Dataproc")
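For context on `poll_batch_job`: it sleeps one second per iteration until the batch leaves a running state or `timeout` seconds have elapsed since the batch's `create_time`. A hedged usage sketch; the project, region, and batch id are placeholders, and a real batch would carry PySpark configuration:

```python
from google.cloud import dataproc_v1

from dbt.adapters.bigquery.dataproc.batch import create_batch_request, poll_batch_job

client = dataproc_v1.BatchControllerClient()  # assumes default credentials
request = create_batch_request(
    batch=dataproc_v1.Batch(),     # placeholder; dbt populates this from the model
    batch_id="dbt-example-batch",  # placeholder id
    project="my-gcp-project",      # placeholder project
    region="us-central1",          # placeholder region
)
client.create_batch(request=request)

# Blocks until the batch finishes, fails, or exceeds the timeout.
result = poll_batch_job(
    parent=request.parent, batch_id="dbt-example-batch", job_client=client, timeout=600
)
```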
diff --git a/dbt/adapters/bigquery/impl.py b/dbt/adapters/bigquery/impl.py
index dc5cf6e17..a1aaf17eb 100644
--- a/dbt/adapters/bigquery/impl.py
+++ b/dbt/adapters/bigquery/impl.py
@@ -22,7 +22,7 @@ from dbt.adapters.contracts.relation import RelationConfig
 
 import dbt_common.exceptions.base
-from dbt.adapters.base import (  # type: ignore
+from dbt.adapters.base import (
     AdapterConfig,
     BaseAdapter,
     BaseRelation,
@@ -33,11 +33,15 @@
     available,
 )
 from dbt.adapters.base.impl import FreshnessResponse
-from dbt.adapters.cache import _make_ref_key_dict  # type: ignore
+from dbt.adapters.cache import _make_ref_key_dict
 from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support
 from dbt.adapters.contracts.connection import AdapterResponse
 from dbt.adapters.contracts.macros import MacroResolverProtocol
-from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint  # type: ignore
+from dbt_common.contracts.constraints import (
+    ColumnLevelConstraint,
+    ConstraintType,
+    ModelLevelConstraint,
+)
 from dbt_common.dataclass_schema import dbtClassMixin
 from dbt.adapters.events.logging import AdapterLogger
 from dbt_common.events.functions import fire_event
@@ -163,7 +167,7 @@ def is_cancelable(cls) -> bool:
         return False
 
     def drop_relation(self, relation: BigQueryRelation) -> None:
-        is_cached = self._schema_is_cached(relation.database, relation.schema)  # type: ignore[arg-type]
+        is_cached = self._schema_is_cached(relation.database, relation.schema)
         if is_cached:
             self.cache_dropped(relation)
 
@@ -258,7 +262,7 @@ def add_time_ingestion_partition_column(self, partition_by, columns) -> List[Big
         )
         return columns
 
-    def expand_column_types(self, goal: BigQueryRelation, current: BigQueryRelation) -> None:  # type: ignore[override]
+    def expand_column_types(self, goal: BigQueryRelation, current: BigQueryRelation) -> None:
         # This is a no-op on BigQuery
         pass
 
@@ -323,7 +327,7 @@ def get_relation(
     # TODO: the code below is copy-pasted from SQLAdapter.create_schema. Is there a better way?
     def create_schema(self, relation: BigQueryRelation) -> None:
         # use SQL 'create schema'
-        relation = relation.without_identifier()  # type: ignore
+        relation = relation.without_identifier()
 
         fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
         kwargs = {
@@ -410,7 +414,7 @@ def _agate_to_schema(
         for idx, col_name in enumerate(agate_table.column_names):
             inferred_type = self.convert_agate_type(agate_table, idx)
             type_ = column_override.get(col_name, inferred_type)
-            bq_schema.append(SchemaField(col_name, type_))  # type: ignore[arg-type]
+            bq_schema.append(SchemaField(col_name, type_))
         return bq_schema
 
     @available.parse(lambda *a, **k: "")
@@ -736,8 +740,8 @@ def _get_catalog_schemas(self, relation_config: Iterable[RelationConfig]) -> Sch
         for candidate, schemas in candidates.items():
             database = candidate.database
             if database not in db_schemas:
-                db_schemas[database] = set(self.list_schemas(database))  # type: ignore[index]
-            if candidate.schema in db_schemas[database]:  # type: ignore[index]
+                db_schemas[database] = set(self.list_schemas(database))
+            if candidate.schema in db_schemas[database]:
                 result[candidate] = schemas
             else:
                 logger.debug(
@@ -844,7 +848,7 @@ def describe_relation(
         return None
 
     @available.parse_none
-    def grant_access_to(self, entity, entity_type, role, grant_target_dict):
+    def grant_access_to(self, entity, entity_type, role, grant_target_dict) -> None:
         """
         Given an entity, grants it access to a dataset.
         """
@@ -873,7 +877,7 @@ def get_dataset_location(self, relation):
         dataset = client.get_dataset(dataset_ref)
         return dataset.location
 
-    def get_rows_different_sql(  # type: ignore[override]
+    def get_rows_different_sql(
         self,
         relation_a: BigQueryRelation,
         relation_b: BigQueryRelation,
@@ -921,7 +925,7 @@ def run_sql_for_tests(self, sql, fetch, conn=None):
             return list(res)
 
     def generate_python_submission_response(self, submission_result) -> BigQueryAdapterResponse:
-        return BigQueryAdapterResponse(_message="OK")  # type: ignore[call-arg]
+        return BigQueryAdapterResponse(_message="OK")
 
     @property
     def default_python_submission_method(self) -> str:
@@ -961,7 +965,7 @@ def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]])
 
     @classmethod
     def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
-        c = super().render_column_constraint(constraint)  # type: ignore
+        c = super().render_column_constraint(constraint)
         if (
             constraint.type == ConstraintType.primary_key
             or constraint.type == ConstraintType.foreign_key
@@ -971,7 +975,7 @@ def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional
 
     @classmethod
     def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
-        c = super().render_model_constraint(constraint)  # type: ignore
+        c = super().render_model_constraint(constraint)
         if (
             constraint.type == ConstraintType.primary_key
             or constraint.type == ConstraintType.foreign_key
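On the two constraint renderers at the end of this file: BigQuery accepts `PRIMARY KEY` and `FOREIGN KEY` only in unenforced form, which is why those types are special-cased. A sketch of the call shape; the "not enforced" suffix is my reading of BigQuery DDL, not text quoted from this diff:

```python
from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType

from dbt.adapters.bigquery.impl import BigQueryAdapter

pk = ColumnLevelConstraint(type=ConstraintType.primary_key)
rendered = BigQueryAdapter.render_column_constraint(pk)
# Expectation under the assumption above: something like "primary key not enforced".
print(rendered)
```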
diff --git a/dbt/adapters/bigquery/python_submissions.py b/dbt/adapters/bigquery/python_submissions.py
index 065c65d8b..368ed9d07 100644
--- a/dbt/adapters/bigquery/python_submissions.py
+++ b/dbt/adapters/bigquery/python_submissions.py
@@ -9,7 +9,7 @@ from dbt.adapters.bigquery import BigQueryConnectionManager, BigQueryCredentials
 
 from google.api_core import retry
 from google.api_core.client_options import ClientOptions
-from google.cloud import storage, dataproc_v1  # type: ignore
+from google.cloud import storage, dataproc_v1
 from google.cloud.dataproc_v1.types.batches import Batch
 
 from dbt.adapters.bigquery.dataproc.batch import (
@@ -89,7 +89,7 @@ def _get_job_client(self) -> dataproc_v1.JobControllerClient:
             raise ValueError(
                 "Need to supply dataproc_cluster_name in profile or config to submit python job with cluster submission method"
             )
-        return dataproc_v1.JobControllerClient(  # type: ignore
+        return dataproc_v1.JobControllerClient(
             client_options=self.client_options, credentials=self.GoogleCredentials
         )
 
@@ -105,7 +105,7 @@ def _submit_dataproc_job(self) -> dataproc_v1.types.jobs.Job:
                 "main_python_file_uri": self.gcs_location,
             },
         }
-        operation = self.job_client.submit_job_as_operation(  # type: ignore
+        operation = self.job_client.submit_job_as_operation(
             request={
                 "project_id": self.credential.execution_project,
                 "region": self.credential.dataproc_region,
@@ -138,13 +138,13 @@ def _submit_dataproc_job(self) -> Batch:
             batch_id=batch_id,
             region=self.credential.dataproc_region,  # type: ignore
             project=self.credential.execution_project,  # type: ignore
-        )  # type: ignore
+        )
         # make the request
-        self.job_client.create_batch(request=request)  # type: ignore
+        self.job_client.create_batch(request=request)
         return poll_batch_job(
             parent=request.parent,
             batch_id=batch_id,
-            job_client=self.job_client,  # type: ignore
+            job_client=self.job_client,
             timeout=self.timeout,
         )
         # there might be useful results here that we can parse and return
diff --git a/dbt/adapters/bigquery/relation.py b/dbt/adapters/bigquery/relation.py
index 086b4a2aa..0e2c17670 100644
--- a/dbt/adapters/bigquery/relation.py
+++ b/dbt/adapters/bigquery/relation.py
@@ -78,7 +78,7 @@ def dataset(self):
     def materialized_view_from_relation_config(
         cls, relation_config: RelationConfig
     ) -> BigQueryMaterializedViewConfig:
-        return BigQueryMaterializedViewConfig.from_relation_config(relation_config)  # type: ignore
+        return BigQueryMaterializedViewConfig.from_relation_config(relation_config)
 
     @classmethod
     def materialized_view_config_changeset(
diff --git a/dbt/adapters/bigquery/relation_configs/_base.py b/dbt/adapters/bigquery/relation_configs/_base.py
index 45e29b99f..8bc861587 100644
--- a/dbt/adapters/bigquery/relation_configs/_base.py
+++ b/dbt/adapters/bigquery/relation_configs/_base.py
@@ -32,7 +32,7 @@ def quote_policy(cls) -> Policy:
     def from_relation_config(cls, relation_config: RelationConfig) -> Self:
         relation_config_dict = cls.parse_relation_config(relation_config)
         relation = cls.from_dict(relation_config_dict)
-        return relation  # type: ignore
+        return relation
 
     @classmethod
     def parse_relation_config(cls, relation_config: RelationConfig) -> Dict:
@@ -44,7 +44,7 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict:
     def from_bq_table(cls, table: BigQueryTable) -> Self:
         relation_config = cls.parse_bq_table(table)
         relation = cls.from_dict(relation_config)
-        return relation  # type: ignore
+        return relation
 
     @classmethod
     def parse_bq_table(cls, table: BigQueryTable) -> Dict:
diff --git a/dbt/adapters/bigquery/relation_configs/_cluster.py b/dbt/adapters/bigquery/relation_configs/_cluster.py
index 53092cb35..b3dbaf2e9 100644
--- a/dbt/adapters/bigquery/relation_configs/_cluster.py
+++ b/dbt/adapters/bigquery/relation_configs/_cluster.py
@@ -25,13 +25,13 @@ class BigQueryClusterConfig(BigQueryBaseRelationConfig):
     @classmethod
     def from_dict(cls, config_dict: Dict[str, Any]) -> Self:
         kwargs_dict = {"fields": config_dict.get("fields")}
-        return super().from_dict(kwargs_dict)  # type: ignore
+        return super().from_dict(kwargs_dict)
 
     @classmethod
     def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any]:
         config_dict = {}
-        if cluster_by := relation_config.config.extra.get("cluster_by"):  # type: ignore
+        if cluster_by := relation_config.config.extra.get("cluster_by"):
             # users may input a single field as a string
             if isinstance(cluster_by, str):
                 cluster_by = [cluster_by]
@@ -40,7 +40,7 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any
         return config_dict
 
     @classmethod
-    def parse_bq_table(cls, table: BigQueryTable) -> Dict[str, Any]:  # type: ignore
+    def parse_bq_table(cls, table: BigQueryTable) -> Dict[str, Any]:
         config_dict = {"fields": frozenset(table.clustering_fields)}
         return config_dict
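The `cluster_by` branch above normalizes a scalar to a list because users may write either form in their model config. A standalone sketch of that normalization, assuming the parsed fields land in a `frozenset` as `parse_bq_table` shows:

```python
from typing import FrozenSet, List, Union


def normalize_cluster_by(cluster_by: Union[str, List[str]]) -> FrozenSet[str]:
    # Users may input a single field as a string.
    if isinstance(cluster_by, str):
        cluster_by = [cluster_by]
    return frozenset(cluster_by)


assert normalize_cluster_by("updated_at") == frozenset({"updated_at"})
assert normalize_cluster_by(["updated_at", "id"]) == frozenset({"updated_at", "id"})
```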
diff --git a/dbt/adapters/bigquery/relation_configs/_materialized_view.py b/dbt/adapters/bigquery/relation_configs/_materialized_view.py
index 81ca6b3de..7c63ba3bc 100644
--- a/dbt/adapters/bigquery/relation_configs/_materialized_view.py
+++ b/dbt/adapters/bigquery/relation_configs/_materialized_view.py
@@ -61,7 +61,7 @@ def from_dict(cls, config_dict: Dict[str, Any]) -> "BigQueryMaterializedViewConf
         if cluster := config_dict.get("cluster"):
             kwargs_dict.update({"cluster": BigQueryClusterConfig.from_dict(cluster)})
 
-        materialized_view: "BigQueryMaterializedViewConfig" = super().from_dict(kwargs_dict)  # type: ignore
+        materialized_view: "BigQueryMaterializedViewConfig" = super().from_dict(kwargs_dict)
         return materialized_view
 
     @classmethod
diff --git a/dbt/adapters/bigquery/relation_configs/_options.py b/dbt/adapters/bigquery/relation_configs/_options.py
index f0272df08..7fd8797df 100644
--- a/dbt/adapters/bigquery/relation_configs/_options.py
+++ b/dbt/adapters/bigquery/relation_configs/_options.py
@@ -103,13 +103,13 @@ def formatted_setting(name: str) -> Any:
         if kwargs_dict["enable_refresh"] is False:
             kwargs_dict.update({"refresh_interval_minutes": None, "max_staleness": None})
 
-        options: Self = super().from_dict(kwargs_dict)  # type: ignore
+        options: Self = super().from_dict(kwargs_dict)
         return options
 
     @classmethod
     def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any]:
         config_dict = {
-            option: relation_config.config.extra.get(option)  # type: ignore
+            option: relation_config.config.extra.get(option)
             for option in [
                 "enable_refresh",
                 "refresh_interval_minutes",
@@ -122,13 +122,11 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any
         }
 
         # update dbt-specific versions of these settings
-        if hours_to_expiration := relation_config.config.extra.get(  # type: ignore
-            "hours_to_expiration"
-        ):  # type: ignore
+        if hours_to_expiration := relation_config.config.extra.get("hours_to_expiration"):
             config_dict.update(
                 {"expiration_timestamp": datetime.now() + timedelta(hours=hours_to_expiration)}
             )
 
-        if not relation_config.config.persist_docs:  # type: ignore
+        if not relation_config.config.persist_docs:
             del config_dict["description"]
 
         return config_dict
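The `hours_to_expiration` branch above is the one dbt-specific translation in this parser: a relative TTL from the model config becomes BigQuery's absolute `expiration_timestamp` option. The same arithmetic in isolation, with an illustrative value:

```python
from datetime import datetime, timedelta

hours_to_expiration = 24  # e.g. from a model's config.extra
expiration_timestamp = datetime.now() + timedelta(hours=hours_to_expiration)
```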
""" - config_dict: Dict[str, Any] = relation_config.config.extra.get("partition_by") # type: ignore + config_dict: Dict[str, Any] = relation_config.config.extra.get("partition_by") if "time_ingestion_partitioning" in config_dict: del config_dict["time_ingestion_partitioning"] if "copy_partitions" in config_dict: diff --git a/dev-requirements.txt b/dev-requirements.txt index a81a841f0..d8033ac55 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,33 +1,21 @@ # install latest changes in dbt-core -# TODO: how to automate switching from develop to version branches? -git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core -git+https://github.com/dbt-labs/dbt-common.git git+https://github.com/dbt-labs/dbt-adapters.git git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter -# if version 1.x or greater -> pin to major version -# if version 0.x -> pin to minor -black>=24.3 -bumpversion~=0.6.0 -click~=8.1 +git+https://github.com/dbt-labs/dbt-common.git +git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core + +# dev ddtrace==2.3.0 -flake8~=7.0 -flaky~=3.8 -freezegun~=1.4 -ipdb~=0.13.13 -mypy==1.8.0 # patch updates have historically introduced breaking changes -pip-tools~=7.3 -pre-commit==3.7.0;python_version >="3.9" -pre-commit==3.5.0;python_version <"3.9" -pre-commit-hooks~=4.6 +pre-commit~=3.7.0;python_version>="3.9" +pre-commit~=3.5.0;python_version<"3.9" pytest~=7.4 pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 pytest-xdist~=3.6 -pytz~=2023.3 tox~=4.11 -types-pytz~=2023.3 -types-protobuf~=4.24 -types-requests~=2.31 + +# build +bumpversion~=0.6.0 twine~=5.1 -wheel~=0.42 +wheel~=0.43 diff --git a/mypy.ini b/mypy.ini index b111482fc..247a47fec 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,4 +1,2 @@ [mypy] mypy_path = third-party-stubs/ -namespace_packages = True -exclude = third-party-stubs/* diff --git a/tests/functional/python_model_tests/test_list_inference.py b/tests/functional/python_model_tests/test_list_inference.py index 143a61e88..88b1c4fa5 100644 --- a/tests/functional/python_model_tests/test_list_inference.py +++ b/tests/functional/python_model_tests/test_list_inference.py @@ -9,6 +9,7 @@ When the regression was first reported, `files.MULTI_RECORD` failed while the other models passed. """ + from dbt.tests.util import run_dbt_and_capture import pytest diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 88b09ce60..633b6d565 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -2,6 +2,7 @@ Note that all imports should be inside the functions to avoid import/mocking issues. """ + import string import os from unittest import mock