From e1db48cb1b552aeefc480da0e3f7fc2605cacada Mon Sep 17 00:00:00 2001
From: Jonathan Daniel <36337649+jond01@users.noreply.github.com>
Date: Wed, 6 Mar 2024 12:10:35 +0200
Subject: [PATCH] [Linting] Backport Ruff upgrade [1.6.x] (#5253)

---
 automation/system_test/prepare.py | 6 +-
 dev-requirements.txt | 2 +-
 mlrun/config.py | 8 +-
 mlrun/datastore/azure_blob.py | 18 +-
 mlrun/datastore/google_cloud_storage.py | 12 +-
 mlrun/db/httpdb.py | 18 +-
 mlrun/execution.py | 6 +-
 .../tf_keras/callbacks/logging_callback.py | 6 +-
 mlrun/frameworks/tf_keras/model_handler.py | 14 +-
 mlrun/kfpops.py | 6 +-
 mlrun/model_monitoring/api.py | 16 +-
 .../stores/kv_model_endpoint_store.py | 26 ++--
 .../stores/sql_model_endpoint_store.py | 1 -
 mlrun/package/packagers/pandas_packagers.py | 6 +-
 mlrun/runtimes/function.py | 18 +-
 mlrun/runtimes/mpijob/abstract.py | 12 +-
 mlrun/runtimes/pod.py | 6 +-
 mlrun/runtimes/serving.py | 6 +-
 mlrun/runtimes/sparkjob/spark3job.py | 6 +-
 mlrun/utils/async_http.py | 6 +-
 mlrun/utils/http.py | 6 +-
 .../notifications/notification_pusher.py | 12 +-
 pyproject.toml | 13 +-
 server/api/api/endpoints/runtime_resources.py | 4 +-
 server/api/api/endpoints/workflows.py | 6 +-
 server/api/api/utils.py | 8 +-
 server/api/apiuvicorn.py | 6 +-
 .../api/crud/model_monitoring/deployment.py | 6 +-
 server/api/db/sqldb/db.py | 6 +-
 server/api/main.py | 6 +-
 ...487_altering_table_datastore_profiles_2.py | 1 +
 .../28383af526f3_market_place_to_hub.py | 1 +
 ...29c_increase_timestamp_fields_precision.py | 1 +
 ...03aef6a91d_tag_foreign_key_and_cascades.py | 1 +
 .../59061f6e2a87_add_index_migration.py | 1 +
 ...351c88a19_adding_background_tasks_table.py | 1 +
 ...add_requested_logs_column_and_index_to_.py | 1 +
 ...9d16de5f03a7_adding_data_versions_table.py | 1 +
 ...7ab5dec_adding_table_datastore_profiles.py | 1 +
 .../b268044fa2f7_adding_artifacts_v2_table.py | 1 +
 ...7_adding_name_and_updated_to_runs_table.py | 1 +
 ...9cbf87203_background_task_error_message.py | 1 +
 .../c0e342d73bd0_indexing_artifact_v2_key.py | 1 +
 .../versions/c4af40b0bf61_init.py | 1 +
 .../versions/c905d15bd91d_notifications.py | 1 +
 ...dding_next_run_time_column_to_schedule_.py | 1 +
 ...3_notifications_params_to_secret_params.py | 1 +
 ...487_altering_table_datastore_profiles_2.py | 1 +
 .../0b224a1b4e0d_indexing_artifact_v2_key.py | 1 +
 ...f_notifications_params_to_secret_params.py | 1 +
 .../versions/11f8dd2dc9fe_init.py | 1 +
 .../1c954f8cb32d_schedule_last_run_uri.py | 1 +
 .../2b6d23c715aa_adding_feature_sets.py | 1 +
 ...fb7e1274d_background_task_error_message.py | 1 +
 .../4acd9430b093_market_place_to_hub.py | 1 +
 ...dding_next_run_time_column_to_schedule_.py | 1 +
 ...90a1a69bc_adding_background_tasks_table.py | 1 +
 ...531edc7_adding_table_datastore_profiles.py | 1 +
 ...cd005_add_requested_logs_column_to_runs.py | 1 +
 .../863114f0c659_refactoring_feature_set.py | 1 +
 .../versions/959ae00528ad_notifications.py | 1 +
 ...accf9fc83d38_adding_data_versions_table.py | 1 +
 .../versions/b68e8e897a28_schedule_labels.py | 1 +
 .../bcd0c1f9720c_adding_project_labels.py | 1 +
 .../bf91ff18513b_add_index_migration.py | 1 +
 .../versions/cf21882f938e_schedule_id.py | 1 +
 .../d781f58f607f_tag_object_name_string.py | 1 +
 ...871ace_adding_marketplace_sources_table.py | 1 +
 ...e1dd5983c06b_schedule_concurrency_limit.py | 1 +
 ...3_adding_name_and_updated_to_runs_table.py | 1 +
 .../f4249b4ba6fa_adding_feature_vectors.py | 1 +
 .../f7b5a1a03629_adding_feature_labels.py | 1 +
 .../fa3009d9787f_adding_artifacts_v2_table.py | 1 +
 server/api/runtime_handlers/__init__.py | 6 +-
 server/api/runtime_handlers/base.py | 14 +-
 server/api/utils/builder.py | 6 +-
 server/api/utils/clients/iguazio.py | 28 ++--
 server/api/utils/projects/leader.py | 6 +-
 tests/api/api/test_runtime_resources.py | 6 +-
 tests/api/api/test_utils.py | 16 +--
 tests/api/crud/test_runs.py | 132 ++++++++++--------
 tests/api/db/test_sqldb.py | 1 +
 tests/api/runtimes/test_kubejob.py | 18 +--
 tests/api/utils/clients/test_iguazio.py | 24 ++--
 .../automation/release_notes/test_generate.py | 16 ++-
 tests/integration/aws_s3/test_aws_s3.py | 18 ++-
 .../integration/azure_blob/test_azure_blob.py | 13 +-
 .../test_google_cloud_storage.py | 13 +-
 tests/integration/sdk_api/run/test_main.py | 6 +-
 tests/rundb/test_httpdb.py | 6 +-
 tests/runtimes/test_run.py | 36 ++---
 .../model_monitoring/test_model_monitoring.py | 18 +--
 tests/utils/test_get_secrets.py | 6 +-
 93 files changed, 385 insertions(+), 319 deletions(-)

diff --git a/automation/system_test/prepare.py b/automation/system_test/prepare.py
index 79f6d5eabbf8..478f3dddd399 100644
--- a/automation/system_test/prepare.py
+++ b/automation/system_test/prepare.py
@@ -394,9 +394,9 @@ def _enrich_env(self):
         spark_service_name = self._get_service_name("app=spark,component=spark-master")
         self._env_config["MLRUN_IGUAZIO_API_URL"] = f"https://{api_url_host}"
         self._env_config["V3IO_FRAMESD"] = f"https://{framesd_host}"
-        self._env_config[
-            "MLRUN_SYSTEM_TESTS_DEFAULT_SPARK_SERVICE"
-        ] = spark_service_name
+        self._env_config["MLRUN_SYSTEM_TESTS_DEFAULT_SPARK_SERVICE"] = (
+            spark_service_name
+        )
         self._env_config["V3IO_API"] = f"https://{v3io_api_host}"
         self._env_config["MLRUN_DBPATH"] = f"https://{mlrun_api_url}"

diff --git a/dev-requirements.txt b/dev-requirements.txt
index 2c8140f91a61..e9b6d80d7d52 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -3,7 +3,7 @@ twine~=3.1
 build~=1.0
 
 # formatting & linting
-ruff~=0.1.8
+ruff~=0.3.0
 import-linter~=1.8
 
 # testing

diff --git a/mlrun/config.py b/mlrun/config.py
index 260fc694d2e9..f46534be10e6 100644
--- a/mlrun/config.py
+++ b/mlrun/config.py
@@ -960,10 +960,10 @@ def get_default_function_pod_resources(
             with_gpu = (
                 with_gpu_requests if requirement == "requests" else with_gpu_limits
             )
-            resources[
-                requirement
-            ] = self.get_default_function_pod_requirement_resources(
-                requirement, with_gpu
+            resources[requirement] = (
+                self.get_default_function_pod_requirement_resources(
+                    requirement, with_gpu
+                )
             )
         return resources

diff --git a/mlrun/datastore/azure_blob.py b/mlrun/datastore/azure_blob.py
index 920aa08e5bf2..0da6e0d2b2fd 100644
--- a/mlrun/datastore/azure_blob.py
+++ b/mlrun/datastore/azure_blob.py
@@ -175,9 +175,9 @@ def get_spark_options(self):
 
         if "client_secret" in st or "client_id" in st or "tenant_id" in st:
             res[f"spark.hadoop.fs.azure.account.auth.type.{host}"] = "OAuth"
-            res[
-                f"spark.hadoop.fs.azure.account.oauth.provider.type.{host}"
-            ] = "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"
+            res[f"spark.hadoop.fs.azure.account.oauth.provider.type.{host}"] = (
+                "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"
+            )
             if "client_id" in st:
                 res[f"spark.hadoop.fs.azure.account.oauth2.client.id.{host}"] = st[
                     "client_id"
@@ -188,14 +188,14 @@ def get_spark_options(self):
                 ]
             if "tenant_id" in st:
                 tenant_id = st["tenant_id"]
-                res[
-                    f"spark.hadoop.fs.azure.account.oauth2.client.endpoint.{host}"
-                ] = f"https://login.microsoftonline.com/{tenant_id}/oauth2/token"
+                res[f"spark.hadoop.fs.azure.account.oauth2.client.endpoint.{host}"] = (
+                    f"https://login.microsoftonline.com/{tenant_id}/oauth2/token"
+                )
         if "sas_token" in st:
             res[f"spark.hadoop.fs.azure.account.auth.type.{host}"] = "SAS"
-            res[
-                f"spark.hadoop.fs.azure.sas.token.provider.type.{host}"
-            ] = "org.apache.hadoop.fs.azurebfs.sas.FixedSASTokenProvider"
+            res[f"spark.hadoop.fs.azure.sas.token.provider.type.{host}"] = (
+                "org.apache.hadoop.fs.azurebfs.sas.FixedSASTokenProvider"
+            )
             res[f"spark.hadoop.fs.azure.sas.fixed.token.{host}"] = st["sas_token"]
 
         return res

diff --git a/mlrun/datastore/google_cloud_storage.py b/mlrun/datastore/google_cloud_storage.py
index 04f7bc2a6ba5..980babcf3c66 100644
--- a/mlrun/datastore/google_cloud_storage.py
+++ b/mlrun/datastore/google_cloud_storage.py
@@ -147,13 +147,13 @@ def get_spark_options(self):
         if "project_id" in credentials:
             res["spark.hadoop.fs.gs.project.id"] = credentials["project_id"]
         if "private_key_id" in credentials:
-            res[
-                "spark.hadoop.fs.gs.auth.service.account.private.key.id"
-            ] = credentials["private_key_id"]
+            res["spark.hadoop.fs.gs.auth.service.account.private.key.id"] = (
+                credentials["private_key_id"]
+            )
         if "private_key" in credentials:
-            res[
-                "spark.hadoop.fs.gs.auth.service.account.private.key"
-            ] = credentials["private_key"]
+            res["spark.hadoop.fs.gs.auth.service.account.private.key"] = (
+                credentials["private_key"]
+            )
         if "client_email" in credentials:
             res["spark.hadoop.fs.gs.auth.service.account.email"] = credentials[
                 "client_email"

diff --git a/mlrun/db/httpdb.py b/mlrun/db/httpdb.py
index cab2a95bc136..7afe7ea0b0e9 100644
--- a/mlrun/db/httpdb.py
+++ b/mlrun/db/httpdb.py
@@ -1137,17 +1137,17 @@ def list_runtime_resources(
             structured_dict = {}
             for project, job_runtime_resources_map in response.json().items():
                 for job_id, runtime_resources in job_runtime_resources_map.items():
-                    structured_dict.setdefault(project, {})[
-                        job_id
-                    ] = mlrun.common.schemas.RuntimeResources(**runtime_resources)
+                    structured_dict.setdefault(project, {})[job_id] = (
+                        mlrun.common.schemas.RuntimeResources(**runtime_resources)
+                    )
             return structured_dict
         elif group_by == mlrun.common.schemas.ListRuntimeResourcesGroupByField.project:
             structured_dict = {}
             for project, kind_runtime_resources_map in response.json().items():
                 for kind, runtime_resources in kind_runtime_resources_map.items():
-                    structured_dict.setdefault(project, {})[
-                        kind
-                    ] = mlrun.common.schemas.RuntimeResources(**runtime_resources)
+                    structured_dict.setdefault(project, {})[kind] = (
+                        mlrun.common.schemas.RuntimeResources(**runtime_resources)
+                    )
             return structured_dict
         else:
             raise NotImplementedError(
@@ -1206,9 +1206,9 @@ def delete_runtime_resources(
         structured_dict = {}
         for project, kind_runtime_resources_map in response.json().items():
             for kind, runtime_resources in kind_runtime_resources_map.items():
-                structured_dict.setdefault(project, {})[
-                    kind
-                ] = mlrun.common.schemas.RuntimeResources(**runtime_resources)
+                structured_dict.setdefault(project, {})[kind] = (
+                    mlrun.common.schemas.RuntimeResources(**runtime_resources)
+                )
         return structured_dict
 
     def create_schedule(

diff --git a/mlrun/execution.py b/mlrun/execution.py
index 3121121b7769..60767c3462a9 100644
--- a/mlrun/execution.py
+++ b/mlrun/execution.py
@@ -559,9 +559,9 @@ def log_iteration_results(self, best, summary: list, task: dict, commit=False):
         for k, v in get_in(task, ["status", "results"], {}).items():
             self._results[k] = v
         for artifact in get_in(task, ["status", run_keys.artifacts], []):
-            self._artifacts_manager.artifacts[
-                artifact["metadata"]["key"]
-            ] = artifact
+            self._artifacts_manager.artifacts[artifact["metadata"]["key"]] = (
+                artifact
+            )
             self._artifacts_manager.link_artifact(
                 self.project,
                 self.name,

diff --git a/mlrun/frameworks/tf_keras/callbacks/logging_callback.py b/mlrun/frameworks/tf_keras/callbacks/logging_callback.py
index f1811ec538b0..c540410e2884 100644
--- a/mlrun/frameworks/tf_keras/callbacks/logging_callback.py
+++ b/mlrun/frameworks/tf_keras/callbacks/logging_callback.py
@@ -389,9 +389,9 @@ def _add_auto_hyperparameters(self):
         ):
             try:
                 self._get_hyperparameter(key_chain=learning_rate_key_chain)
-                self._dynamic_hyperparameters_keys[
-                    learning_rate_key
-                ] = learning_rate_key_chain
+                self._dynamic_hyperparameters_keys[learning_rate_key] = (
+                    learning_rate_key_chain
+                )
             except (KeyError, IndexError, ValueError):
                 pass

diff --git a/mlrun/frameworks/tf_keras/model_handler.py b/mlrun/frameworks/tf_keras/model_handler.py
index eaacc9b25721..039d36f56827 100644
--- a/mlrun/frameworks/tf_keras/model_handler.py
+++ b/mlrun/frameworks/tf_keras/model_handler.py
@@ -263,13 +263,13 @@ def save(
         # Update the paths and log artifacts if context is available:
         if self._weights_file is not None:
             if self._context is not None:
-                artifacts[
-                    self._get_weights_file_artifact_name()
-                ] = self._context.log_artifact(
-                    self._weights_file,
-                    local_path=self._weights_file,
-                    artifact_path=output_path,
-                    db_key=False,
+                artifacts[self._get_weights_file_artifact_name()] = (
+                    self._context.log_artifact(
+                        self._weights_file,
+                        local_path=self._weights_file,
+                        artifact_path=output_path,
+                        db_key=False,
+                    )
                 )
 
         return artifacts if self._context is not None else None

diff --git a/mlrun/kfpops.py b/mlrun/kfpops.py
index 7b81f1476938..8e11e203939f 100644
--- a/mlrun/kfpops.py
+++ b/mlrun/kfpops.py
@@ -408,9 +408,9 @@ def mlrun_pipeline(
         cmd += ["--label", f"{label}={val}"]
     for output in outputs:
         cmd += ["-o", str(output)]
-        file_outputs[
-            output.replace(".", "_")
-        ] = f"/tmp/{output}"  # not using path.join to avoid windows "\"
+        file_outputs[output.replace(".", "_")] = (
+            f"/tmp/{output}"  # not using path.join to avoid windows "\"
+        )
     if project:
         cmd += ["--project", project]
     if handler:

diff --git a/mlrun/model_monitoring/api.py b/mlrun/model_monitoring/api.py
index e7157dcd387b..19b9e3e2f961 100644
--- a/mlrun/model_monitoring/api.py
+++ b/mlrun/model_monitoring/api.py
@@ -436,9 +436,9 @@ def _generate_model_endpoint(
     ] = possible_drift_threshold
 
     model_endpoint.spec.monitoring_mode = monitoring_mode
-    model_endpoint.status.first_request = (
-        model_endpoint.status.last_request
-    ) = datetime_now().isoformat()
+    model_endpoint.status.first_request = model_endpoint.status.last_request = (
+        datetime_now().isoformat()
+    )
     if sample_set_statistics:
         model_endpoint.status.feature_stats = sample_set_statistics
 
@@ -476,11 +476,11 @@ def trigger_drift_batch_job(
     db_session = mlrun.get_run_db()
 
     # Register the monitoring batch job (do nothing if already exist) and get the job function as a dictionary
-    batch_function_dict: typing.Dict[
-        str, typing.Any
-    ] = db_session.deploy_monitoring_batch_job(
-        project=project,
-        default_batch_image=default_batch_image,
+    batch_function_dict: typing.Dict[str, typing.Any] = (
+        db_session.deploy_monitoring_batch_job(
+            project=project,
+            default_batch_image=default_batch_image,
+        )
     )
 
     # Prepare current run params

diff --git a/mlrun/model_monitoring/stores/kv_model_endpoint_store.py b/mlrun/model_monitoring/stores/kv_model_endpoint_store.py
index 37602277c328..dbffca1fb191 100644
--- a/mlrun/model_monitoring/stores/kv_model_endpoint_store.py
+++ b/mlrun/model_monitoring/stores/kv_model_endpoint_store.py
@@ -540,24 +540,24 @@ def validate_old_schema_fields(endpoint: dict):
             and endpoint[mlrun.common.schemas.model_monitoring.EventFieldType.METRICS]
             == "null"
         ):
-            endpoint[
-                mlrun.common.schemas.model_monitoring.EventFieldType.METRICS
-            ] = json.dumps(
-                {
-                    mlrun.common.schemas.model_monitoring.EventKeyMetrics.GENERIC: {
-                        mlrun.common.schemas.model_monitoring.EventLiveStats.LATENCY_AVG_1H: 0,
-                        mlrun.common.schemas.model_monitoring.EventLiveStats.PREDICTIONS_PER_SECOND: 0,
+            endpoint[mlrun.common.schemas.model_monitoring.EventFieldType.METRICS] = (
+                json.dumps(
+                    {
+                        mlrun.common.schemas.model_monitoring.EventKeyMetrics.GENERIC: {
+                            mlrun.common.schemas.model_monitoring.EventLiveStats.LATENCY_AVG_1H: 0,
+                            mlrun.common.schemas.model_monitoring.EventLiveStats.PREDICTIONS_PER_SECOND: 0,
+                        }
                     }
-                }
+                )
             )
 
         # Validate key `uid` instead of `endpoint_id`
        # For backwards compatibility reasons, we replace the `endpoint_id` with `uid` which is the updated key name
         if mlrun.common.schemas.model_monitoring.EventFieldType.ENDPOINT_ID in endpoint:
-            endpoint[
-                mlrun.common.schemas.model_monitoring.EventFieldType.UID
-            ] = endpoint[
-                mlrun.common.schemas.model_monitoring.EventFieldType.ENDPOINT_ID
-            ]
+            endpoint[mlrun.common.schemas.model_monitoring.EventFieldType.UID] = (
+                endpoint[
+                    mlrun.common.schemas.model_monitoring.EventFieldType.ENDPOINT_ID
+                ]
+            )
 
     @staticmethod
     def _encode_field(field: typing.Union[str, bytes]) -> bytes:

diff --git a/mlrun/model_monitoring/stores/sql_model_endpoint_store.py b/mlrun/model_monitoring/stores/sql_model_endpoint_store.py
index e9cc5194eafb..82aa7070f529 100644
--- a/mlrun/model_monitoring/stores/sql_model_endpoint_store.py
+++ b/mlrun/model_monitoring/stores/sql_model_endpoint_store.py
@@ -31,7 +31,6 @@
 
 
 class SQLModelEndpointStore(ModelEndpointStore):
-
     """
     Handles the DB operations when the DB target is from type SQL. For the SQL operations, we use SQLAlchemy, a Python
     SQL toolkit that handles the communication with the database.  When using SQL for storing the model endpoints

diff --git a/mlrun/package/packagers/pandas_packagers.py b/mlrun/package/packagers/pandas_packagers.py
index ddf1e1c26abf..ea14384aca86 100644
--- a/mlrun/package/packagers/pandas_packagers.py
+++ b/mlrun/package/packagers/pandas_packagers.py
@@ -838,9 +838,9 @@ def _prepare_result(obj: Union[list, dict, tuple]) -> Any:
         """
         if isinstance(obj, dict):
             for key, value in obj.items():
-                obj[
-                    PandasDataFramePackager._prepare_result(obj=key)
-                ] = PandasDataFramePackager._prepare_result(obj=value)
+                obj[PandasDataFramePackager._prepare_result(obj=key)] = (
+                    PandasDataFramePackager._prepare_result(obj=value)
+                )
         elif isinstance(obj, list):
             for i, value in enumerate(obj):
                 obj[i] = PandasDataFramePackager._prepare_result(obj=value)

diff --git a/mlrun/runtimes/function.py b/mlrun/runtimes/function.py
index 7a7acfaf1456..78e58a06f5bb 100644
--- a/mlrun/runtimes/function.py
+++ b/mlrun/runtimes/function.py
@@ -432,15 +432,15 @@ def with_http(
                 raise ValueError(
                     "gateway timeout must be greater than the worker timeout"
                 )
-            annotations[
-                "nginx.ingress.kubernetes.io/proxy-connect-timeout"
-            ] = f"{gateway_timeout}"
-            annotations[
-                "nginx.ingress.kubernetes.io/proxy-read-timeout"
-            ] = f"{gateway_timeout}"
-            annotations[
-                "nginx.ingress.kubernetes.io/proxy-send-timeout"
-            ] = f"{gateway_timeout}"
+            annotations["nginx.ingress.kubernetes.io/proxy-connect-timeout"] = (
+                f"{gateway_timeout}"
+            )
+            annotations["nginx.ingress.kubernetes.io/proxy-read-timeout"] = (
+                f"{gateway_timeout}"
+            )
+            annotations["nginx.ingress.kubernetes.io/proxy-send-timeout"] = (
+                f"{gateway_timeout}"
+            )
 
         trigger = nuclio.HttpTrigger(
             workers=workers,

diff --git a/mlrun/runtimes/mpijob/abstract.py b/mlrun/runtimes/mpijob/abstract.py
index f053eee52547..4571c2facebd 100644
--- a/mlrun/runtimes/mpijob/abstract.py
+++ b/mlrun/runtimes/mpijob/abstract.py
@@ -196,13 +196,13 @@ def with_autotune(
         if steps_per_sample is not None:
             horovod_autotune_settings["autotune-steps-per-sample"] = steps_per_sample
         if bayes_opt_max_samples is not None:
-            horovod_autotune_settings[
-                "autotune-bayes-opt-max-samples"
-            ] = bayes_opt_max_samples
+            horovod_autotune_settings["autotune-bayes-opt-max-samples"] = (
+                bayes_opt_max_samples
+            )
         if gaussian_process_noise is not None:
-            horovod_autotune_settings[
-                "autotune-gaussian-process-noise"
-            ] = gaussian_process_noise
+            horovod_autotune_settings["autotune-gaussian-process-noise"] = (
+                gaussian_process_noise
+            )
 
         self.set_envs(horovod_autotune_settings)

diff --git a/mlrun/runtimes/pod.py b/mlrun/runtimes/pod.py
index 77dfee1f3cd6..d162ebbef4ca 100644
--- a/mlrun/runtimes/pod.py
+++ b/mlrun/runtimes/pod.py
@@ -430,9 +430,9 @@ def enrich_resources_with_default_pod_resources(
                     )
                     is None
                 ):
-                    resources[resource_requirement][
-                        resource_type
-                    ] = default_resources[resource_requirement][resource_type]
+                    resources[resource_requirement][resource_type] = (
+                        default_resources[resource_requirement][resource_type]
+                    )
     # This enables the user to define that no defaults would be applied on the resources
     elif resources == {}:
         return resources

diff --git a/mlrun/runtimes/serving.py b/mlrun/runtimes/serving.py
index 10da62b5d788..8d067a14bba2 100644
--- a/mlrun/runtimes/serving.py
+++ b/mlrun/runtimes/serving.py
@@ -523,9 +523,9 @@ def _deploy_function_refs(self, builder_env: dict = None):
             function_object.metadata.tag = self.metadata.tag
 
             function_object.metadata.labels = function_object.metadata.labels or {}
-            function_object.metadata.labels[
-                "mlrun/parent-function"
-            ] = self.metadata.name
+            function_object.metadata.labels["mlrun/parent-function"] = (
+                self.metadata.name
+            )
             function_object._is_child_function = True
             if not function_object.spec.graph:
                 # copy the current graph only if the child doesnt have a graph of his own

diff --git a/mlrun/runtimes/sparkjob/spark3job.py b/mlrun/runtimes/sparkjob/spark3job.py
index 6d72bc649a5f..4a9051e1922d 100644
--- a/mlrun/runtimes/sparkjob/spark3job.py
+++ b/mlrun/runtimes/sparkjob/spark3job.py
@@ -345,9 +345,9 @@ def enrich_resources_with_default_pod_resources(
                     )
                     is None
                 ):
-                    resources[resource_requirement][
-                        resource_type
-                    ] = default_resources[resource_requirement][resource_type]
+                    resources[resource_requirement][resource_type] = (
+                        default_resources[resource_requirement][resource_type]
+                    )
         else:
             resources = default_resources
 

diff --git a/mlrun/utils/async_http.py b/mlrun/utils/async_http.py
index 346fcc9423e7..b43cd0e4a4ed 100644
--- a/mlrun/utils/async_http.py
+++ b/mlrun/utils/async_http.py
@@ -139,9 +139,9 @@ async def _do_request(self) -> aiohttp.ClientResponse:
 
             # enrich user agent
             # will help traceability and debugging
-            headers[
-                aiohttp.hdrs.USER_AGENT
-            ] = f"{aiohttp.http.SERVER_SOFTWARE} mlrun/{config.version}"
+            headers[aiohttp.hdrs.USER_AGENT] = (
+                f"{aiohttp.http.SERVER_SOFTWARE} mlrun/{config.version}"
+            )
 
             response: typing.Optional[
                 aiohttp.ClientResponse

diff --git a/mlrun/utils/http.py b/mlrun/utils/http.py
index 86a18b3f4be1..7959ccca3269 100644
--- a/mlrun/utils/http.py
+++ b/mlrun/utils/http.py
@@ -110,9 +110,9 @@ def __init__(
     def request(self, method, url, **kwargs):
         retry_count = 0
         kwargs.setdefault("headers", {})
-        kwargs["headers"][
-            "User-Agent"
-        ] = f"{requests.utils.default_user_agent()} mlrun/{config.version}"
+        kwargs["headers"]["User-Agent"] = (
+            f"{requests.utils.default_user_agent()} mlrun/{config.version}"
+        )
         while True:
             try:
                 response = super().request(method, url, **kwargs)

diff --git a/mlrun/utils/notifications/notification_pusher.py b/mlrun/utils/notifications/notification_pusher.py
index db3c329b30e4..afc8430d2db2 100644
--- a/mlrun/utils/notifications/notification_pusher.py
+++ b/mlrun/utils/notifications/notification_pusher.py
@@ -307,9 +307,9 @@ def _push_notification_sync(
                 traceback=traceback.format_exc(),
             )
             update_notification_status_kwargs["reason"] = f"Exception error: {str(exc)}"
-            update_notification_status_kwargs[
-                "status"
-            ] = mlrun.common.schemas.NotificationStatus.ERROR
+            update_notification_status_kwargs["status"] = (
+                mlrun.common.schemas.NotificationStatus.ERROR
+            )
             raise exc
         finally:
             self._update_notification_status(
@@ -356,9 +356,9 @@ async def _push_notification_async(
                 traceback=traceback.format_exc(),
             )
             update_notification_status_kwargs["reason"] = f"Exception error: {str(exc)}"
-            update_notification_status_kwargs[
-                "status"
-            ] = mlrun.common.schemas.NotificationStatus.ERROR
+            update_notification_status_kwargs["status"] = (
+                mlrun.common.schemas.NotificationStatus.ERROR
+            )
             raise exc
         finally:
             await mlrun.utils.helpers.run_in_threadpool(

diff --git a/pyproject.toml b/pyproject.toml
index ff5b3bf7fc62..fb0bbfcc850f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,23 +1,20 @@
 [tool.ruff]
+extend-include = ["*.ipynb"]
+target-version = "py39"
+
+[tool.ruff.lint]
 select = [
     "F",  # pyflakes
     "W",  # pycodestyle
     "E",  # pycodestyle
     "I",  # isort
 ]
-extend-exclude = [
-    "server/api/proto",
-    "playground",
-]
-extend-include = ["*.ipynb"]
-
-[tool.ruff.lint]
 exclude = ["*.ipynb"]
 
 [tool.ruff.lint.pycodestyle]
 max-line-length = 120
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "__init__.py" = ["F401"]
 
 [tool.pytest.ini_options]

diff --git a/server/api/api/endpoints/runtime_resources.py b/server/api/api/endpoints/runtime_resources.py
index a6bede026e24..259457e21929 100644
--- a/server/api/api/endpoints/runtime_resources.py
+++ b/server/api/api/endpoints/runtime_resources.py
@@ -215,7 +215,9 @@ async def _get_runtime_resources_allowed_projects(
         mlrun.common.schemas.AuthorizationAction.read,
         auth_info,
     )
-    grouped_by_project_runtime_resources_output: mlrun.common.schemas.GroupedByProjectRuntimeResourcesOutput
+    grouped_by_project_runtime_resources_output: (
+        mlrun.common.schemas.GroupedByProjectRuntimeResourcesOutput
+    )
     grouped_by_project_runtime_resources_output = await run_in_threadpool(
         server.api.crud.RuntimeResources().list_runtime_resources,
         project,

diff --git a/server/api/api/endpoints/workflows.py b/server/api/api/endpoints/workflows.py
index ea02e191dfc0..5c07c058e408 100644
--- a/server/api/api/endpoints/workflows.py
+++ b/server/api/api/endpoints/workflows.py
@@ -194,9 +194,9 @@ async def submit_workflow(
                 client_version
             )
         if client_python_version is not None:
-            workflow_runner.metadata.labels[
-                "mlrun/client_python_version"
-            ] = sanitize_label_value(client_python_version)
+            workflow_runner.metadata.labels["mlrun/client_python_version"] = (
+                sanitize_label_value(client_python_version)
+            )
     try:
         if workflow_spec.schedule:
             await run_in_threadpool(

diff --git a/server/api/api/utils.py b/server/api/api/utils.py
index f7de17f55dfe..bbe937bf2591 100644
--- a/server/api/api/utils.py
+++ b/server/api/api/utils.py
@@ -514,10 +514,10 @@ def _mask_v3io_volume_credentials(
                     if isinstance(
                         volume["flexVolume"], kubernetes.client.V1FlexVolumeSource
                     ):
-                        volume[
-                            "flexVolume"
-                        ] = k8s_api_client.sanitize_for_serialization(
-                            volume["flexVolume"]
+                        volume["flexVolume"] = (
+                            k8s_api_client.sanitize_for_serialization(
+                                volume["flexVolume"]
+                            )
                         )
                     else:
                         raise mlrun.errors.MLRunInvalidArgumentError(

diff --git a/server/api/apiuvicorn.py b/server/api/apiuvicorn.py
index 1c14fc5136d4..e5a3ef51c8d6 100644
--- a/server/api/apiuvicorn.py
+++ b/server/api/apiuvicorn.py
@@ -28,9 +28,9 @@ class UvicornMLRunLoggerMixin(
 
 def _get_uvicorn_log_config():
     base_log_config = uvicorn.config.LOGGING_CONFIG
 
-    base_log_config["formatters"]["default"][
-        "()"
-    ] = "server.api.apiuvicorn.UvicornMLRunLoggerMixin"
+    base_log_config["formatters"]["default"]["()"] = (
+        "server.api.apiuvicorn.UvicornMLRunLoggerMixin"
+    )
     return base_log_config

diff --git a/server/api/crud/model_monitoring/deployment.py b/server/api/crud/model_monitoring/deployment.py
index 259edf3d72e6..b91428ed7a1b 100644
--- a/server/api/crud/model_monitoring/deployment.py
+++ b/server/api/crud/model_monitoring/deployment.py
@@ -554,9 +554,9 @@ def _submit_schedule_batch_job(
             tracking_offset=tracking_offset,
         )
 
-        task.spec.parameters[
-            mm_constants.EventFieldType.BATCH_INTERVALS_DICT
-        ] = batch_dict
+        task.spec.parameters[mm_constants.EventFieldType.BATCH_INTERVALS_DICT] = (
+            batch_dict
+        )
 
         data = {
             "task": task.to_dict(),

diff --git a/server/api/db/sqldb/db.py b/server/api/db/sqldb/db.py
index 37de1b002975..2b2e1c0b369b 100644
--- a/server/api/db/sqldb/db.py
+++ b/server/api/db/sqldb/db.py
@@ -400,9 +400,9 @@ def list_runs(
                         notification
                     )
                     run_struct["spec"]["notifications"].append(notification_spec)
-                    run_struct["status"]["notifications"][
-                        notification.name
-                    ] = notification_status
+                    run_struct["status"]["notifications"][notification.name] = (
+                        notification_status
+                    )
             runs.append(run_struct)
         return runs

diff --git a/server/api/main.py b/server/api/main.py
index c5ffa26dec8a..95b15c6e8263 100644
--- a/server/api/main.py
+++ b/server/api/main.py
@@ -712,9 +712,9 @@ async def abort_run(stale_run):
         # Using semaphore to limit the chunk we get from the thread pool for run aborting
         async with semaphore:
             # mark abort as internal, it doesn't have a background task
-            stale_run[
-                "new_background_task_id"
-            ] = server.api.constants.internal_abort_task_id
+            stale_run["new_background_task_id"] = (
+                server.api.constants.internal_abort_task_id
+            )
 
             await fastapi.concurrency.run_in_threadpool(
                 server.api.db.session.run_function_with_new_db_session,
                 server.api.crud.Runs().abort_run,

diff --git a/server/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py b/server/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py
index 2af81e5bcacb..2df42df2a7f3 100644
--- a/server/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py
+++ b/server/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py
@@ -20,6 +20,7 @@
 Create Date: 2023-08-10 14:15:30.523729
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_mysql/versions/28383af526f3_market_place_to_hub.py b/server/api/migrations_mysql/versions/28383af526f3_market_place_to_hub.py
index 1a3d9f3265c3..dde2ceead385 100644
--- a/server/api/migrations_mysql/versions/28383af526f3_market_place_to_hub.py
+++ b/server/api/migrations_mysql/versions/28383af526f3_market_place_to_hub.py
@@ -19,6 +19,7 @@
 Create Date: 2023-04-24 11:06:36.177314
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.

diff --git a/server/api/migrations_mysql/versions/32bae1b0e29c_increase_timestamp_fields_precision.py b/server/api/migrations_mysql/versions/32bae1b0e29c_increase_timestamp_fields_precision.py
index 443ef380b2a3..c089ef20d420 100644
--- a/server/api/migrations_mysql/versions/32bae1b0e29c_increase_timestamp_fields_precision.py
+++ b/server/api/migrations_mysql/versions/32bae1b0e29c_increase_timestamp_fields_precision.py
@@ -19,6 +19,7 @@
 Create Date: 2022-01-16 19:32:08.676120
 
 """
+
 import sqlalchemy.dialects.mysql
 from alembic import op
 

diff --git a/server/api/migrations_mysql/versions/4903aef6a91d_tag_foreign_key_and_cascades.py b/server/api/migrations_mysql/versions/4903aef6a91d_tag_foreign_key_and_cascades.py
index 219ba1ec4b92..24244fe5f76e 100644
--- a/server/api/migrations_mysql/versions/4903aef6a91d_tag_foreign_key_and_cascades.py
+++ b/server/api/migrations_mysql/versions/4903aef6a91d_tag_foreign_key_and_cascades.py
@@ -19,6 +19,7 @@
 Create Date: 2021-11-24 17:38:11.753522
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.

diff --git a/server/api/migrations_mysql/versions/59061f6e2a87_add_index_migration.py b/server/api/migrations_mysql/versions/59061f6e2a87_add_index_migration.py
index f6e442425152..fd6d5e299105 100644
--- a/server/api/migrations_mysql/versions/59061f6e2a87_add_index_migration.py
+++ b/server/api/migrations_mysql/versions/59061f6e2a87_add_index_migration.py
@@ -19,6 +19,7 @@
 Create Date: 2023-11-05 12:43:53.787957
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.
diff --git a/server/api/migrations_mysql/versions/5f1351c88a19_adding_background_tasks_table.py b/server/api/migrations_mysql/versions/5f1351c88a19_adding_background_tasks_table.py
index 3d4569fb05ef..7d74d29ba97c 100644
--- a/server/api/migrations_mysql/versions/5f1351c88a19_adding_background_tasks_table.py
+++ b/server/api/migrations_mysql/versions/5f1351c88a19_adding_background_tasks_table.py
@@ -19,6 +19,7 @@
 Create Date: 2022-06-12 19:59:29.618366
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import mysql

diff --git a/server/api/migrations_mysql/versions/88e656800d6a_add_requested_logs_column_and_index_to_.py b/server/api/migrations_mysql/versions/88e656800d6a_add_requested_logs_column_and_index_to_.py
index d3ae248d8134..efbb80366501 100644
--- a/server/api/migrations_mysql/versions/88e656800d6a_add_requested_logs_column_and_index_to_.py
+++ b/server/api/migrations_mysql/versions/88e656800d6a_add_requested_logs_column_and_index_to_.py
@@ -19,6 +19,7 @@
 Create Date: 2023-01-11 11:21:46.882374
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_mysql/versions/9d16de5f03a7_adding_data_versions_table.py b/server/api/migrations_mysql/versions/9d16de5f03a7_adding_data_versions_table.py
index fa6fd3d81b15..69cbf6dbe37f 100644
--- a/server/api/migrations_mysql/versions/9d16de5f03a7_adding_data_versions_table.py
+++ b/server/api/migrations_mysql/versions/9d16de5f03a7_adding_data_versions_table.py
@@ -19,6 +19,7 @@
 Create Date: 2021-10-04 16:08:05.267113
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_mysql/versions/b1d1e7ab5dec_adding_table_datastore_profiles.py b/server/api/migrations_mysql/versions/b1d1e7ab5dec_adding_table_datastore_profiles.py
index 74cecee781b2..9b9c8107ae48 100644
--- a/server/api/migrations_mysql/versions/b1d1e7ab5dec_adding_table_datastore_profiles.py
+++ b/server/api/migrations_mysql/versions/b1d1e7ab5dec_adding_table_datastore_profiles.py
@@ -20,6 +20,7 @@
 Create Date: 2023-06-30 13:42:19.974990
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_mysql/versions/b268044fa2f7_adding_artifacts_v2_table.py b/server/api/migrations_mysql/versions/b268044fa2f7_adding_artifacts_v2_table.py
index 4708a453c520..cdf47a251876 100644
--- a/server/api/migrations_mysql/versions/b268044fa2f7_adding_artifacts_v2_table.py
+++ b/server/api/migrations_mysql/versions/b268044fa2f7_adding_artifacts_v2_table.py
@@ -19,6 +19,7 @@
 Create Date: 2023-11-22 20:04:18.402025
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import mysql

diff --git a/server/api/migrations_mysql/versions/b86f5b53f3d7_adding_name_and_updated_to_runs_table.py b/server/api/migrations_mysql/versions/b86f5b53f3d7_adding_name_and_updated_to_runs_table.py
index c6f31ed0ed5c..b2bd2e764b1a 100644
--- a/server/api/migrations_mysql/versions/b86f5b53f3d7_adding_name_and_updated_to_runs_table.py
+++ b/server/api/migrations_mysql/versions/b86f5b53f3d7_adding_name_and_updated_to_runs_table.py
@@ -19,6 +19,7 @@
 Create Date: 2022-01-08 19:28:45.141873
 
 """
+
 import datetime
 
 import sqlalchemy as sa

diff --git a/server/api/migrations_mysql/versions/b899cbf87203_background_task_error_message.py b/server/api/migrations_mysql/versions/b899cbf87203_background_task_error_message.py
index af31e5ee324d..070f3b0487d5 100644
--- a/server/api/migrations_mysql/versions/b899cbf87203_background_task_error_message.py
+++ b/server/api/migrations_mysql/versions/b899cbf87203_background_task_error_message.py
@@ -19,6 +19,7 @@
 Create Date: 2023-11-08 10:59:06.391117
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_mysql/versions/c0e342d73bd0_indexing_artifact_v2_key.py b/server/api/migrations_mysql/versions/c0e342d73bd0_indexing_artifact_v2_key.py
index ebfdbcafa947..5a5536fe8927 100644
--- a/server/api/migrations_mysql/versions/c0e342d73bd0_indexing_artifact_v2_key.py
+++ b/server/api/migrations_mysql/versions/c0e342d73bd0_indexing_artifact_v2_key.py
@@ -19,6 +19,7 @@
 Create Date: 2024-02-07 14:46:55.639228
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.

diff --git a/server/api/migrations_mysql/versions/c4af40b0bf61_init.py b/server/api/migrations_mysql/versions/c4af40b0bf61_init.py
index c11ee7880f09..b6f3f563659f 100644
--- a/server/api/migrations_mysql/versions/c4af40b0bf61_init.py
+++ b/server/api/migrations_mysql/versions/c4af40b0bf61_init.py
@@ -19,6 +19,7 @@
 Create Date: 2021-09-30 10:55:51.956636
 
 """
+
 import sqlalchemy as sa
 import sqlalchemy.dialects.mysql
 from alembic import op

diff --git a/server/api/migrations_mysql/versions/c905d15bd91d_notifications.py b/server/api/migrations_mysql/versions/c905d15bd91d_notifications.py
index b67b8f7e4463..c4b044c89388 100644
--- a/server/api/migrations_mysql/versions/c905d15bd91d_notifications.py
+++ b/server/api/migrations_mysql/versions/c905d15bd91d_notifications.py
@@ -19,6 +19,7 @@
 Create Date: 2022-09-20 10:44:41.727488
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import mysql

diff --git a/server/api/migrations_mysql/versions/ee041e8fdaa0_adding_next_run_time_column_to_schedule_.py b/server/api/migrations_mysql/versions/ee041e8fdaa0_adding_next_run_time_column_to_schedule_.py
index 5adbe05d0b6c..0b53256393d6 100644
--- a/server/api/migrations_mysql/versions/ee041e8fdaa0_adding_next_run_time_column_to_schedule_.py
+++ b/server/api/migrations_mysql/versions/ee041e8fdaa0_adding_next_run_time_column_to_schedule_.py
@@ -19,6 +19,7 @@
 Create Date: 2022-08-16 17:56:47.826661
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import mysql

diff --git a/server/api/migrations_mysql/versions/eefc169f7633_notifications_params_to_secret_params.py b/server/api/migrations_mysql/versions/eefc169f7633_notifications_params_to_secret_params.py
index 84f6349b29e4..6583b2b72fb2 100644
--- a/server/api/migrations_mysql/versions/eefc169f7633_notifications_params_to_secret_params.py
+++ b/server/api/migrations_mysql/versions/eefc169f7633_notifications_params_to_secret_params.py
@@ -19,6 +19,7 @@
 Create Date: 2023-08-29 10:30:57.901466
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py b/server/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py
index 19658ecbc1f6..68fb10ca6c09 100644
--- a/server/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py
+++ b/server/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py
@@ -20,6 +20,7 @@
 Create Date: 2023-08-10 14:15:30.523729
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/0b224a1b4e0d_indexing_artifact_v2_key.py b/server/api/migrations_sqlite/versions/0b224a1b4e0d_indexing_artifact_v2_key.py
index a2763d5da98c..c5d0cdefb4b2 100644
--- a/server/api/migrations_sqlite/versions/0b224a1b4e0d_indexing_artifact_v2_key.py
+++ b/server/api/migrations_sqlite/versions/0b224a1b4e0d_indexing_artifact_v2_key.py
@@ -19,6 +19,7 @@
 Create Date: 2024-02-07 14:47:10.021608
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.

diff --git a/server/api/migrations_sqlite/versions/114b2c80710f_notifications_params_to_secret_params.py b/server/api/migrations_sqlite/versions/114b2c80710f_notifications_params_to_secret_params.py
index da853a014108..3ede61214961 100644
--- a/server/api/migrations_sqlite/versions/114b2c80710f_notifications_params_to_secret_params.py
+++ b/server/api/migrations_sqlite/versions/114b2c80710f_notifications_params_to_secret_params.py
@@ -19,6 +19,7 @@
 Create Date: 2023-08-29 10:52:00.586301
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/11f8dd2dc9fe_init.py b/server/api/migrations_sqlite/versions/11f8dd2dc9fe_init.py
index 0ef27d3a54c1..4ab3d711d9ac 100644
--- a/server/api/migrations_sqlite/versions/11f8dd2dc9fe_init.py
+++ b/server/api/migrations_sqlite/versions/11f8dd2dc9fe_init.py
@@ -19,6 +19,7 @@
 Create Date: 2020-10-06 15:50:35.588592
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/1c954f8cb32d_schedule_last_run_uri.py b/server/api/migrations_sqlite/versions/1c954f8cb32d_schedule_last_run_uri.py
index e767536d8eb5..d1de1f1a74a6 100644
--- a/server/api/migrations_sqlite/versions/1c954f8cb32d_schedule_last_run_uri.py
+++ b/server/api/migrations_sqlite/versions/1c954f8cb32d_schedule_last_run_uri.py
@@ -19,6 +19,7 @@
 Create Date: 2020-11-11 09:39:09.551025
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/2b6d23c715aa_adding_feature_sets.py b/server/api/migrations_sqlite/versions/2b6d23c715aa_adding_feature_sets.py
index 2c8d8aa74137..48f5243af63f 100644
--- a/server/api/migrations_sqlite/versions/2b6d23c715aa_adding_feature_sets.py
+++ b/server/api/migrations_sqlite/versions/2b6d23c715aa_adding_feature_sets.py
@@ -19,6 +19,7 @@
 Create Date: 2020-11-05 01:42:53.395810
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/332fb7e1274d_background_task_error_message.py b/server/api/migrations_sqlite/versions/332fb7e1274d_background_task_error_message.py
index 039e54f7c085..c8a1266c2638 100644
--- a/server/api/migrations_sqlite/versions/332fb7e1274d_background_task_error_message.py
+++ b/server/api/migrations_sqlite/versions/332fb7e1274d_background_task_error_message.py
@@ -19,6 +19,7 @@
 Create Date: 2023-11-08 10:56:54.339846
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/4acd9430b093_market_place_to_hub.py b/server/api/migrations_sqlite/versions/4acd9430b093_market_place_to_hub.py
index b901709fd388..773faa355417 100644
--- a/server/api/migrations_sqlite/versions/4acd9430b093_market_place_to_hub.py
+++ b/server/api/migrations_sqlite/versions/4acd9430b093_market_place_to_hub.py
@@ -19,6 +19,7 @@
 Create Date: 2023-04-26 22:41:59.726305
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/6401142f2d7c_adding_next_run_time_column_to_schedule_.py b/server/api/migrations_sqlite/versions/6401142f2d7c_adding_next_run_time_column_to_schedule_.py
index 979aeb6e8cd2..368b69dcb063 100644
--- a/server/api/migrations_sqlite/versions/6401142f2d7c_adding_next_run_time_column_to_schedule_.py
+++ b/server/api/migrations_sqlite/versions/6401142f2d7c_adding_next_run_time_column_to_schedule_.py
@@ -19,6 +19,7 @@
 Create Date: 2022-08-16 17:51:41.624145
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/64d90a1a69bc_adding_background_tasks_table.py b/server/api/migrations_sqlite/versions/64d90a1a69bc_adding_background_tasks_table.py
index 6143594058e5..fc535f1fd270 100644
--- a/server/api/migrations_sqlite/versions/64d90a1a69bc_adding_background_tasks_table.py
+++ b/server/api/migrations_sqlite/versions/64d90a1a69bc_adding_background_tasks_table.py
@@ -19,6 +19,7 @@
 Create Date: 2022-06-12 20:00:38.183341
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/6e0c9531edc7_adding_table_datastore_profiles.py b/server/api/migrations_sqlite/versions/6e0c9531edc7_adding_table_datastore_profiles.py
index 928ceed65def..19bb018a3d0b 100644
--- a/server/api/migrations_sqlite/versions/6e0c9531edc7_adding_table_datastore_profiles.py
+++ b/server/api/migrations_sqlite/versions/6e0c9531edc7_adding_table_datastore_profiles.py
@@ -20,6 +20,7 @@
 Create Date: 2023-06-30 13:44:34.141769
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/803438ecd005_add_requested_logs_column_to_runs.py b/server/api/migrations_sqlite/versions/803438ecd005_add_requested_logs_column_to_runs.py
index 895d3deceb85..ffaf35bed5de 100644
--- a/server/api/migrations_sqlite/versions/803438ecd005_add_requested_logs_column_to_runs.py
+++ b/server/api/migrations_sqlite/versions/803438ecd005_add_requested_logs_column_to_runs.py
@@ -19,6 +19,7 @@
 Create Date: 2023-01-11 10:31:18.505231
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/863114f0c659_refactoring_feature_set.py b/server/api/migrations_sqlite/versions/863114f0c659_refactoring_feature_set.py
index 4449032d0aa6..9aaa0d864f6c 100644
--- a/server/api/migrations_sqlite/versions/863114f0c659_refactoring_feature_set.py
+++ b/server/api/migrations_sqlite/versions/863114f0c659_refactoring_feature_set.py
@@ -19,6 +19,7 @@
 Create Date: 2020-11-11 11:22:36.653049
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/959ae00528ad_notifications.py b/server/api/migrations_sqlite/versions/959ae00528ad_notifications.py
index 98f399c7974b..0593b2d7f8a2 100644
--- a/server/api/migrations_sqlite/versions/959ae00528ad_notifications.py
+++ b/server/api/migrations_sqlite/versions/959ae00528ad_notifications.py
@@ -19,6 +19,7 @@
 Create Date: 2022-09-20 10:40:41.354209
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/accf9fc83d38_adding_data_versions_table.py b/server/api/migrations_sqlite/versions/accf9fc83d38_adding_data_versions_table.py
index fcb7a78e28c6..a420a1c91041 100644
--- a/server/api/migrations_sqlite/versions/accf9fc83d38_adding_data_versions_table.py
+++ b/server/api/migrations_sqlite/versions/accf9fc83d38_adding_data_versions_table.py
@@ -19,6 +19,7 @@
 Create Date: 2021-10-04 16:05:42.095290
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/b68e8e897a28_schedule_labels.py b/server/api/migrations_sqlite/versions/b68e8e897a28_schedule_labels.py
index 71066ee5772b..537df628d7a0 100644
--- a/server/api/migrations_sqlite/versions/b68e8e897a28_schedule_labels.py
+++ b/server/api/migrations_sqlite/versions/b68e8e897a28_schedule_labels.py
@@ -19,6 +19,7 @@
 Create Date: 2020-10-07 11:30:41.810844
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/bcd0c1f9720c_adding_project_labels.py b/server/api/migrations_sqlite/versions/bcd0c1f9720c_adding_project_labels.py
index 27f82c6e436c..b87759861c81 100644
--- a/server/api/migrations_sqlite/versions/bcd0c1f9720c_adding_project_labels.py
+++ b/server/api/migrations_sqlite/versions/bcd0c1f9720c_adding_project_labels.py
@@ -19,6 +19,7 @@
 Create Date: 2020-12-20 03:42:02.763802
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/bf91ff18513b_add_index_migration.py b/server/api/migrations_sqlite/versions/bf91ff18513b_add_index_migration.py
index 63596356b161..3a1e46e7048b 100644
--- a/server/api/migrations_sqlite/versions/bf91ff18513b_add_index_migration.py
+++ b/server/api/migrations_sqlite/versions/bf91ff18513b_add_index_migration.py
@@ -19,6 +19,7 @@
 Create Date: 2023-11-05 12:43:39.286669
 
 """
+
 from alembic import op
 
 # revision identifiers, used by Alembic.

diff --git a/server/api/migrations_sqlite/versions/cf21882f938e_schedule_id.py b/server/api/migrations_sqlite/versions/cf21882f938e_schedule_id.py
index 31ccadc3b058..818116ff7f30 100644
--- a/server/api/migrations_sqlite/versions/cf21882f938e_schedule_id.py
+++ b/server/api/migrations_sqlite/versions/cf21882f938e_schedule_id.py
@@ -19,6 +19,7 @@
 Create Date: 2020-10-07 11:21:49.223077
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/d781f58f607f_tag_object_name_string.py b/server/api/migrations_sqlite/versions/d781f58f607f_tag_object_name_string.py
index bca2ed5b6618..ed7c53db4803 100644
--- a/server/api/migrations_sqlite/versions/d781f58f607f_tag_object_name_string.py
+++ b/server/api/migrations_sqlite/versions/d781f58f607f_tag_object_name_string.py
@@ -19,6 +19,7 @@
 Create Date: 2021-07-29 16:06:45.555323
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/deac06871ace_adding_marketplace_sources_table.py b/server/api/migrations_sqlite/versions/deac06871ace_adding_marketplace_sources_table.py
index 768c87a902b7..e06e844669ad 100644
--- a/server/api/migrations_sqlite/versions/deac06871ace_adding_marketplace_sources_table.py
+++ b/server/api/migrations_sqlite/versions/deac06871ace_adding_marketplace_sources_table.py
@@ -19,6 +19,7 @@
 Create Date: 2021-06-30 15:56:09.543139
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/e1dd5983c06b_schedule_concurrency_limit.py b/server/api/migrations_sqlite/versions/e1dd5983c06b_schedule_concurrency_limit.py
index 8f03f52218ed..5d137f5f32ab 100644
--- a/server/api/migrations_sqlite/versions/e1dd5983c06b_schedule_concurrency_limit.py
+++ b/server/api/migrations_sqlite/versions/e1dd5983c06b_schedule_concurrency_limit.py
@@ -19,6 +19,7 @@
 Create Date: 2021-03-15 13:36:18.703619
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/e5594ed3ab53_adding_name_and_updated_to_runs_table.py b/server/api/migrations_sqlite/versions/e5594ed3ab53_adding_name_and_updated_to_runs_table.py
index ee89a85e7f7a..8269536e25d4 100644
--- a/server/api/migrations_sqlite/versions/e5594ed3ab53_adding_name_and_updated_to_runs_table.py
+++ b/server/api/migrations_sqlite/versions/e5594ed3ab53_adding_name_and_updated_to_runs_table.py
@@ -19,6 +19,7 @@
 Create Date: 2022-01-08 12:33:59.070265
 
 """
+
 import datetime
 
 import sqlalchemy as sa

diff --git a/server/api/migrations_sqlite/versions/f4249b4ba6fa_adding_feature_vectors.py b/server/api/migrations_sqlite/versions/f4249b4ba6fa_adding_feature_vectors.py
index 1ac20d67ce91..d4e5383ea5cd 100644
--- a/server/api/migrations_sqlite/versions/f4249b4ba6fa_adding_feature_vectors.py
+++ b/server/api/migrations_sqlite/versions/f4249b4ba6fa_adding_feature_vectors.py
@@ -19,6 +19,7 @@
 Create Date: 2020-11-24 14:43:08.789873
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/f7b5a1a03629_adding_feature_labels.py b/server/api/migrations_sqlite/versions/f7b5a1a03629_adding_feature_labels.py
index 6d5b2c0799d4..1a9014b4c993 100644
--- a/server/api/migrations_sqlite/versions/f7b5a1a03629_adding_feature_labels.py
+++ b/server/api/migrations_sqlite/versions/f7b5a1a03629_adding_feature_labels.py
@@ -19,6 +19,7 @@
 Create Date: 2020-11-09 11:19:51.472174
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/migrations_sqlite/versions/fa3009d9787f_adding_artifacts_v2_table.py b/server/api/migrations_sqlite/versions/fa3009d9787f_adding_artifacts_v2_table.py
index 8701a4f63467..db32a7da6832 100644
--- a/server/api/migrations_sqlite/versions/fa3009d9787f_adding_artifacts_v2_table.py
+++ b/server/api/migrations_sqlite/versions/fa3009d9787f_adding_artifacts_v2_table.py
@@ -19,6 +19,7 @@
 Create Date: 2023-11-22 20:01:50.197379
 
 """
+
 import sqlalchemy as sa
 from alembic import op
 

diff --git a/server/api/runtime_handlers/__init__.py b/server/api/runtime_handlers/__init__.py
index 0a34c8dc3c5b..c730dfcc843d 100644
--- a/server/api/runtime_handlers/__init__.py
+++ b/server/api/runtime_handlers/__init__.py
@@ -38,9 +38,9 @@ def get_runtime_handler(kind: str) -> BaseRuntimeHandler:
         if not runtime_handler_instances_cache.setdefault(RuntimeKinds.mpijob, {}).get(
             mpijob_crd_version
         ):
-            runtime_handler_instances_cache[RuntimeKinds.mpijob][
-                mpijob_crd_version
-            ] = runtime_handler_class()
+            runtime_handler_instances_cache[RuntimeKinds.mpijob][mpijob_crd_version] = (
+                runtime_handler_class()
+            )
         return runtime_handler_instances_cache[RuntimeKinds.mpijob][mpijob_crd_version]
 
     kind_runtime_handler_map = {

diff --git a/server/api/runtime_handlers/base.py b/server/api/runtime_handlers/base.py
index b5c8374941fc..1e64010c894e 100644
--- a/server/api/runtime_handlers/base.py
+++ b/server/api/runtime_handlers/base.py
@@ -594,9 +594,9 @@ def _ensure_run_not_stuck_on_non_terminal_state(
                 "Updating run state", run_uid=run_uid, run_state=RunStates.error
             )
             run.setdefault("status", {})["state"] = RunStates.error
-            run.setdefault("status", {})[
-                "reason"
-            ] = "A runtime resource related to this run could not be found"
+            run.setdefault("status", {})["reason"] = (
+                "A runtime resource related to this run could not be found"
+            )
             run.setdefault("status", {})["last_update"] = now.isoformat()
             db.store_run(db_session, run, run_uid, project)
 
@@ -1513,10 +1513,10 @@ def _add_resource_to_grouped_by_field_resources_response(
         if first_field_value not in resources:
            resources[first_field_value] = {}
         if second_field_value not in resources[first_field_value]:
-            resources[first_field_value][
-                second_field_value
-            ] = mlrun.common.schemas.RuntimeResources(
-                pod_resources=[], crd_resources=[]
+            resources[first_field_value][second_field_value] = (
+                mlrun.common.schemas.RuntimeResources(
+                    pod_resources=[], crd_resources=[]
+                )
             )
         if not getattr(
             resources[first_field_value][second_field_value], resource_field_name

diff --git a/server/api/utils/builder.py b/server/api/utils/builder.py
index aa978eb9f23f..426f5004454d 100644
--- a/server/api/utils/builder.py
+++ b/server/api/utils/builder.py
@@ -350,9 +350,9 @@ def configure_kaniko_ecr_init_container(
         aws_credentials_file_env_value = "/tmp/aws/credentials"
 
         # set the credentials file location in the init container
-        init_container_env[
-            aws_credentials_file_env_key
-        ] = aws_credentials_file_env_value
+        init_container_env[aws_credentials_file_env_key] = (
+            aws_credentials_file_env_value
+        )
 
         # set the kaniko container AWS credentials location to the mount's path
         kpod.env.append(

diff --git a/server/api/utils/clients/iguazio.py b/server/api/utils/clients/iguazio.py
index ce78999224ad..9e62332a5a54 100644
--- a/server/api/utils/clients/iguazio.py
+++ b/server/api/utils/clients/iguazio.py
@@ -742,20 +742,20 @@ def _transform_mlrun_project_to_iguazio_project(
             }
         }
         if project.metadata.created:
-            body["data"]["attributes"][
-                "created_at"
-            ] = project.metadata.created.isoformat()
+            body["data"]["attributes"]["created_at"] = (
+                project.metadata.created.isoformat()
+            )
         if project.metadata.labels is not None:
-            body["data"]["attributes"][
-                "labels"
-            ] = Client._transform_mlrun_labels_to_iguazio_labels(
-                project.metadata.labels
+            body["data"]["attributes"]["labels"] = (
+                Client._transform_mlrun_labels_to_iguazio_labels(
+                    project.metadata.labels
+                )
             )
         if project.metadata.annotations is not None:
-            body["data"]["attributes"][
-                "annotations"
-            ] = Client._transform_mlrun_labels_to_iguazio_labels(
-                project.metadata.annotations
+            body["data"]["attributes"]["annotations"] = (
+                Client._transform_mlrun_labels_to_iguazio_labels(
+                    project.metadata.annotations
+                )
             )
         if project.spec.owner:
             body["data"]["attributes"]["owner_username"] = project.spec.owner
@@ -802,9 +802,9 @@ def _transform_iguazio_project_to_mlrun_project(
             iguazio_project["attributes"].get("mlrun_project", "{}")
         )
         # name is mandatory in the mlrun schema, without adding it the schema initialization will fail
-        mlrun_project_without_common_fields.setdefault("metadata", {})[
-            "name"
-        ] = iguazio_project["attributes"]["name"]
+        mlrun_project_without_common_fields.setdefault("metadata", {})["name"] = (
+            iguazio_project["attributes"]["name"]
+        )
         mlrun_project = mlrun.common.schemas.Project(
             **mlrun_project_without_common_fields
         )

diff --git a/server/api/utils/projects/leader.py b/server/api/utils/projects/leader.py
index 8097f046e35e..148393a1071b 100644
--- a/server/api/utils/projects/leader.py
+++ b/server/api/utils/projects/leader.py
@@ -214,9 +214,9 @@ def _sync_projects(self):
         followers_projects_map = collections.defaultdict(dict)
         for _follower_name, follower_projects in follower_projects_map.items():
             for project in follower_projects.projects:
-                followers_projects_map[_follower_name][
-                    project.metadata.name
-                ] = project
+                followers_projects_map[_follower_name][project.metadata.name] = (
+                    project
+                )
 
         # create map - leader project name -> leader project for easier searches
         leader_projects_map = {}

diff --git a/tests/api/api/test_runtime_resources.py b/tests/api/api/test_runtime_resources.py
index d5cb00907c63..a6645a2a3967 100644
--- a/tests/api/api/test_runtime_resources.py
+++ b/tests/api/api/test_runtime_resources.py
@@ -694,9 +694,9 @@ def _filter_kind_from_grouped_by_project_runtime_resources_output(
    ) in grouped_by_project_runtime_resources_output.items():
         for kind, runtime_resources in kind_runtime_resources_map.items():
             if kind == filter_kind:
-                filtered_output.setdefault(project, {})[
-                    kind
-                ] = grouped_by_project_runtime_resources_output[project][kind]
+                filtered_output.setdefault(project, {})[kind] = (
+                    grouped_by_project_runtime_resources_output[project][kind]
+                )
     return filtered_output

diff --git a/tests/api/api/test_utils.py b/tests/api/api/test_utils.py
index 273d4282f24f..f37934a98c4b 100644
--- a/tests/api/api/test_utils.py
+++ b/tests/api/api/test_utils.py
@@ -839,18 +839,18 @@ def test_mask_v3io_volume_credentials(
     v3io_volume["flexVolume"] = k8s_api_client.sanitize_for_serialization(
         v3io_volume["flexVolume"]
     )
-    no_access_key_v3io_volume[
-        "flexVolume"
-    ] = k8s_api_client.sanitize_for_serialization(
-        no_access_key_v3io_volume["flexVolume"]
+    no_access_key_v3io_volume["flexVolume"] = (
+        k8s_api_client.sanitize_for_serialization(
+            no_access_key_v3io_volume["flexVolume"]
+        )
     )
     no_name_v3io_volume["flexVolume"] = k8s_api_client.sanitize_for_serialization(
         no_name_v3io_volume["flexVolume"]
     )
-    no_matching_mount_v3io_volume[
-        "flexVolume"
-    ] = k8s_api_client.sanitize_for_serialization(
-        no_matching_mount_v3io_volume["flexVolume"]
+    no_matching_mount_v3io_volume["flexVolume"] = (
+        k8s_api_client.sanitize_for_serialization(
+            no_matching_mount_v3io_volume["flexVolume"]
+        )
     )
     v3io_volume_mount = k8s_api_client.sanitize_for_serialization(v3io_volume_mount)
     conflicting_v3io_volume_mount = k8s_api_client.sanitize_for_serialization(

diff --git a/tests/api/crud/test_runs.py b/tests/api/crud/test_runs.py
index 885fc052b7b7..7717bd119e20 100644
--- a/tests/api/crud/test_runs.py
+++ b/tests/api/crud/test_runs.py
@@ -51,35 +51,41 @@ async def test_delete_runs_with_resources(self, db: sqlalchemy.orm.Session):
         assert run["metadata"]["name"] == "run-name"
 
         k8s_helper = server.api.utils.singletons.k8s.get_k8s_helper()
-        with unittest.mock.patch.object(
-            k8s_helper.v1api, "delete_namespaced_pod"
-        ) as delete_namespaced_pod_mock, unittest.mock.patch.object(
-            k8s_helper.v1api,
-            "list_namespaced_pod",
-            side_effect=[
-                k8s_client.V1PodList(
-                    items=[
-                        k8s_client.V1Pod(
-                            metadata=k8s_client.V1ObjectMeta(
-                                name="pod-name",
-                                labels={
-                                    "mlrun/class": "job",
-                                    "mlrun/project": project,
-                                    "mlrun/uid": "uid",
-                                },
-                            ),
-                            status=k8s_client.V1PodStatus(phase="Running"),
-                        )
-                    ]
-                ),
-                # 2nd time for waiting for pod to be deleted
-                k8s_client.V1PodList(items=[]),
-            ],
-        ), unittest.mock.patch.object(
-            server.api.runtime_handlers.BaseRuntimeHandler, "_ensure_run_logs_collected"
-        ), unittest.mock.patch.object(
-            server.api.utils.clients.log_collector.LogCollectorClient, "delete_logs"
-        ) as delete_logs_mock:
+        with (
+            unittest.mock.patch.object(
+                k8s_helper.v1api, "delete_namespaced_pod"
+            ) as delete_namespaced_pod_mock,
+            unittest.mock.patch.object(
+                k8s_helper.v1api,
+                "list_namespaced_pod",
+                side_effect=[
+                    k8s_client.V1PodList(
+                        items=[
+                            k8s_client.V1Pod(
+                                metadata=k8s_client.V1ObjectMeta(
+                                    name="pod-name",
+                                    labels={
+                                        "mlrun/class": "job",
+                                        "mlrun/project": project,
+                                        "mlrun/uid": "uid",
+                                    },
+                                ),
+                                status=k8s_client.V1PodStatus(phase="Running"),
+                            )
+                        ]
+                    ),
+                    # 2nd time for waiting for pod to be deleted
+                    k8s_client.V1PodList(items=[]),
+                ],
+            ),
+            unittest.mock.patch.object(
+                server.api.runtime_handlers.BaseRuntimeHandler,
+                "_ensure_run_logs_collected",
+            ),
+            unittest.mock.patch.object(
+                server.api.utils.clients.log_collector.LogCollectorClient, "delete_logs"
+            ) as delete_logs_mock,
+        ):
             await server.api.crud.Runs().delete_run(db, "uid", 0, project)
 
             delete_namespaced_pod_mock.assert_called_once()
             delete_logs_mock.assert_called_once()
@@ -114,17 +120,23 @@ async def test_delete_runs(self, db: sqlalchemy.orm.Session):
         assert len(runs) == 20
 
         k8s_helper = server.api.utils.singletons.k8s.get_k8s_helper()
-        with unittest.mock.patch.object(
-            k8s_helper.v1api, "delete_namespaced_pod"
-        ) as delete_namespaced_pod_mock, unittest.mock.patch.object(
-            k8s_helper.v1api,
-            "list_namespaced_pod",
-            return_value=k8s_client.V1PodList(items=[]),
-        ), unittest.mock.patch.object(
-            server.api.runtime_handlers.BaseRuntimeHandler, "_ensure_run_logs_collected"
-        ), unittest.mock.patch.object(
-            server.api.utils.clients.log_collector.LogCollectorClient, "delete_logs"
-        ) as delete_logs_mock:
+        with (
+            unittest.mock.patch.object(
+                k8s_helper.v1api, "delete_namespaced_pod"
+            ) as delete_namespaced_pod_mock,
+            unittest.mock.patch.object(
+                k8s_helper.v1api,
+                "list_namespaced_pod",
+                return_value=k8s_client.V1PodList(items=[]),
+            ),
+            unittest.mock.patch.object(
+                server.api.runtime_handlers.BaseRuntimeHandler,
+                "_ensure_run_logs_collected",
+            ),
+            unittest.mock.patch.object(
+                server.api.utils.clients.log_collector.LogCollectorClient, "delete_logs"
+            ) as delete_logs_mock,
+        ):
             await server.api.crud.Runs().delete_runs(db, name=run_name, project=project)
             runs = server.api.crud.Runs().list_runs(db, run_name, project=project)
             assert len(runs) == 0
@@ -160,18 +172,21 @@ async def test_delete_runs_failure(self, db: sqlalchemy.orm.Session):
         assert len(runs) == 3
 
         k8s_helper = server.api.utils.singletons.k8s.get_k8s_helper()
-        with unittest.mock.patch.object(
-            k8s_helper.v1api, "delete_namespaced_pod"
-        ), unittest.mock.patch.object(
-            k8s_helper.v1api,
-            "list_namespaced_pod",
-            side_effect=[
-                k8s_client.V1PodList(items=[]),
-                Exception("Boom!"),
-                k8s_client.V1PodList(items=[]),
-            ],
-        ), unittest.mock.patch.object(
-            server.api.runtime_handlers.BaseRuntimeHandler, "_ensure_run_logs_collected"
+        with (
+            unittest.mock.patch.object(k8s_helper.v1api, "delete_namespaced_pod"),
+            unittest.mock.patch.object(
+                k8s_helper.v1api,
+                "list_namespaced_pod",
+                side_effect=[
+                    k8s_client.V1PodList(items=[]),
+                    Exception("Boom!"),
+                    k8s_client.V1PodList(items=[]),
+                ],
+            ),
+            unittest.mock.patch.object(
+                server.api.runtime_handlers.BaseRuntimeHandler,
+                "_ensure_run_logs_collected",
+            ),
         ):
             with pytest.raises(mlrun.errors.MLRunBadRequestError) as exc:
                 await server.api.crud.Runs().delete_runs(
@@ -230,11 +245,14 @@ def test_run_abortion_failure(self, db: sqlalchemy.orm.Session):
             run_uid,
             project=project,
         )
-        with unittest.mock.patch.object(
-            server.api.crud.RuntimeResources(),
-            "delete_runtime_resources",
-            side_effect=mlrun.errors.MLRunInternalServerError("BOOM"),
-        ), pytest.raises(mlrun.errors.MLRunInternalServerError) as exc:
+        with (
+            unittest.mock.patch.object(
+                server.api.crud.RuntimeResources(),
+                "delete_runtime_resources",
+                side_effect=mlrun.errors.MLRunInternalServerError("BOOM"),
+            ),
+            pytest.raises(mlrun.errors.MLRunInternalServerError) as exc,
+        ):
             server.api.crud.Runs().abort_run(db, project, run_uid, 0)
 
         assert "BOOM" == str(exc.value)

diff --git a/tests/api/db/test_sqldb.py b/tests/api/db/test_sqldb.py
index 6a85507a07cf..df30d9a37591 100644
--- a/tests/api/db/test_sqldb.py
+++ b/tests/api/db/test_sqldb.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 """SQLDB specific tests, common tests should be in test_dbs.py"""
+
 import copy
 from contextlib import contextmanager
 from datetime import datetime, timedelta

diff --git a/tests/api/runtimes/test_kubejob.py b/tests/api/runtimes/test_kubejob.py
index 5fcf1e1f78d8..5d1c4a0ae0df 100644
--- a/tests/api/runtimes/test_kubejob.py
+++ b/tests/api/runtimes/test_kubejob.py
@@ -1042,12 +1042,12 @@ def test_set_state_thresholds_success(self, db: Session, k8s_secrets_mock):
         self.execute_function(runtime)
         run = get_db().list_runs(db, project=self.project)[0]
         expected_state_thresholds = override_state_thresholds
-        expected_state_thresholds[
-            "image_pull_backoff"
-        ] = mlconf.function.spec.state_thresholds.default.image_pull_backoff
-        expected_state_thresholds[
-            "pending_scheduled"
-        ] = mlconf.function.spec.state_thresholds.default.pending_scheduled
+        expected_state_thresholds["image_pull_backoff"] = (
+            mlconf.function.spec.state_thresholds.default.image_pull_backoff
+        )
+        expected_state_thresholds["pending_scheduled"] = (
+            mlconf.function.spec.state_thresholds.default.pending_scheduled
+        )
         assert run["spec"]["state_thresholds"] == expected_state_thresholds
 
         patch_state_thresholds = {
@@ -1061,9 +1061,9 @@ def test_set_state_thresholds_success(self, db: Session, k8s_secrets_mock):
         run = get_db().list_runs(db, project=self.project)[0]
         expected_state_thresholds = patch_state_thresholds
         expected_state_thresholds["executing"] = override_state_thresholds["executing"]
-        expected_state_thresholds[
-            "pending_scheduled"
-        ] = mlconf.function.spec.state_thresholds.default.pending_scheduled
+        expected_state_thresholds["pending_scheduled"] = (
+            mlconf.function.spec.state_thresholds.default.pending_scheduled
+        )
         assert run["spec"]["state_thresholds"] == expected_state_thresholds
 
     @staticmethod

diff --git a/tests/api/utils/clients/test_iguazio.py b/tests/api/utils/clients/test_iguazio.py
index d9eb7cf12091..3e22043ddf27 100644
--- a/tests/api/utils/clients/test_iguazio.py
+++ b/tests/api/utils/clients/test_iguazio.py
@@ -1177,10 +1177,10 @@ def _build_project_response(
         },
     }
     if with_mlrun_project:
-        body["attributes"][
-            "mlrun_project"
-        ] = iguazio_client._transform_mlrun_project_to_iguazio_mlrun_project_attribute(
-            project
+        body["attributes"]["mlrun_project"] = (
+            iguazio_client._transform_mlrun_project_to_iguazio_mlrun_project_attribute(
+                project
+            )
         )
     if project.spec.description:
         body["attributes"]["description"] = project.spec.description
@@ -1189,16 +1189,16 @@ def _build_project_response(
     if owner_access_key:
         body["attributes"]["owner_access_key"] = owner_access_key
     if project.metadata.labels:
-        body["attributes"][
-            "labels"
-        ] = iguazio_client._transform_mlrun_labels_to_iguazio_labels(
-            project.metadata.labels
+        body["attributes"]["labels"] = (
+            iguazio_client._transform_mlrun_labels_to_iguazio_labels(
+                project.metadata.labels
+            )
         )
     if project.metadata.annotations:
-        body["attributes"][
-            "annotations"
-        ] = iguazio_client._transform_mlrun_labels_to_iguazio_labels(
-            project.metadata.annotations
+        body["attributes"]["annotations"] = (
+            iguazio_client._transform_mlrun_labels_to_iguazio_labels(
+                project.metadata.annotations
+            )
         )
     body["attributes"]["operational_status"] = (
         operational_status.value

diff --git a/tests/automation/release_notes/test_generate.py b/tests/automation/release_notes/test_generate.py
index 274ccb64a8ac..68ac5b9319b5 100644
--- a/tests/automation/release_notes/test_generate.py
+++ b/tests/automation/release_notes/test_generate.py
@@ -192,13 +192,15 @@ def
test_generate_release_notes(): ] automation.release_notes.generate.tempfile = unittest.mock.MagicMock() for case in cases: - with unittest.mock.patch( - "automation.release_notes.generate.ReleaseNotesGenerator._run_command" - ) as _run_command_mock, unittest.mock.patch( - "automation.release_notes.generate.ReleaseNotesGenerator._resolve_github_username" - ) as _resolve_github_user_mock, unittest.mock.patch( - "sys.stdout", new=io.StringIO() - ) as stdout_mock: + with ( + unittest.mock.patch( + "automation.release_notes.generate.ReleaseNotesGenerator._run_command" + ) as _run_command_mock, + unittest.mock.patch( + "automation.release_notes.generate.ReleaseNotesGenerator._resolve_github_username" + ) as _resolve_github_user_mock, + unittest.mock.patch("sys.stdout", new=io.StringIO()) as stdout_mock, + ): _run_command_mock.side_effect = case["_run_command"] _resolve_github_user_mock.side_effect = case["_resolve_github_username"] try: diff --git a/tests/integration/aws_s3/test_aws_s3.py b/tests/integration/aws_s3/test_aws_s3.py index 2cb935e51391..e38c520cde02 100644 --- a/tests/integration/aws_s3/test_aws_s3.py +++ b/tests/integration/aws_s3/test_aws_s3.py @@ -219,11 +219,10 @@ def test_directory(self, use_datastore_profile): # Create the DataFrames df1 = pd.DataFrame(data1) df2 = pd.DataFrame(data2) - with tempfile.NamedTemporaryFile( - suffix=".parquet", delete=True - ) as temp_file1, tempfile.NamedTemporaryFile( - suffix=".parquet", delete=True - ) as temp_file2: + with ( + tempfile.NamedTemporaryFile(suffix=".parquet", delete=True) as temp_file1, + tempfile.NamedTemporaryFile(suffix=".parquet", delete=True) as temp_file2, + ): # Save DataFrames as Parquet files df1.to_parquet(temp_file1.name, index=False) df2.to_parquet(temp_file2.name, index=False) @@ -255,11 +254,10 @@ def test_directory_csv(self, use_datastore_profile): # Create the DataFrames df1 = pd.DataFrame(data1) df2 = pd.DataFrame(data2) - with tempfile.NamedTemporaryFile( - suffix=".csv", delete=True - ) as temp_file1, tempfile.NamedTemporaryFile( - suffix=".csv", delete=True - ) as temp_file2: + with ( + tempfile.NamedTemporaryFile(suffix=".csv", delete=True) as temp_file1, + tempfile.NamedTemporaryFile(suffix=".csv", delete=True) as temp_file2, + ): # Save DataFrames as csv files df1.to_csv(temp_file1.name, index=False) df2.to_csv(temp_file2.name, index=False) diff --git a/tests/integration/azure_blob/test_azure_blob.py b/tests/integration/azure_blob/test_azure_blob.py index 2363f582f7ef..905c31ed26a6 100644 --- a/tests/integration/azure_blob/test_azure_blob.py +++ b/tests/integration/azure_blob/test_azure_blob.py @@ -276,11 +276,14 @@ def test_read_df_dir( # Create the DataFrames df1 = pd.DataFrame(data1) df2 = pd.DataFrame(data2) - with tempfile.NamedTemporaryFile( - suffix=f".{file_extension}", delete=True - ) as temp_file1, tempfile.NamedTemporaryFile( - suffix=f".{file_extension}", delete=True - ) as temp_file2: + with ( + tempfile.NamedTemporaryFile( + suffix=f".{file_extension}", delete=True + ) as temp_file1, + tempfile.NamedTemporaryFile( + suffix=f".{file_extension}", delete=True + ) as temp_file2, + ): first_file_path = temp_file1.name second_file_path = temp_file2.name writer(df1, temp_file1.name, index=False) diff --git a/tests/integration/google_cloud_storage/test_google_cloud_storage.py b/tests/integration/google_cloud_storage/test_google_cloud_storage.py index bbaaf1ebf883..26a33c7d84e6 100644 --- a/tests/integration/google_cloud_storage/test_google_cloud_storage.py +++ 
b/tests/integration/google_cloud_storage/test_google_cloud_storage.py @@ -241,11 +241,14 @@ def test_directory(self, use_datastore_profile, file_format, write_method): # Create the DataFrames df1 = pd.DataFrame(data1) df2 = pd.DataFrame(data2) - with tempfile.NamedTemporaryFile( - suffix=f".{file_format}", delete=True - ) as temp_file1, tempfile.NamedTemporaryFile( - suffix=f".{file_format}", delete=True - ) as temp_file2: + with ( + tempfile.NamedTemporaryFile( + suffix=f".{file_format}", delete=True + ) as temp_file1, + tempfile.NamedTemporaryFile( + suffix=f".{file_format}", delete=True + ) as temp_file2, + ): # Save DataFrames as files write_method(df1, temp_file1.name, index=False) write_method(df2, temp_file2.name, index=False) diff --git a/tests/integration/sdk_api/run/test_main.py b/tests/integration/sdk_api/run/test_main.py index 298079db8dee..40bcfd949a07 100644 --- a/tests/integration/sdk_api/run/test_main.py +++ b/tests/integration/sdk_api/run/test_main.py @@ -296,9 +296,9 @@ def test_main_run_nonpy_from_env(self): os.environ["MLRUN_EXEC_CODE"] = b64encode(nonpy_code.encode("utf-8")).decode( "utf-8" ) - os.environ[ - "MLRUN_EXEC_CONFIG" - ] = '{"spec":{},"metadata":{"uid":"123411", "name":"tst", "labels": {"kind": "job"}}}' + os.environ["MLRUN_EXEC_CONFIG"] = ( + '{"spec":{},"metadata":{"uid":"123411", "name":"tst", "labels": {"kind": "job"}}}' + ) # --kfp flag will force the logs to print (for the assert) out = self._exec_run( diff --git a/tests/rundb/test_httpdb.py b/tests/rundb/test_httpdb.py index 3be890ae949f..d5f24cef6787 100644 --- a/tests/rundb/test_httpdb.py +++ b/tests/rundb/test_httpdb.py @@ -64,9 +64,9 @@ def start_server(workdir, env_config: dict): port = free_port() env = environ.copy() env["MLRUN_httpdb__port"] = str(port) - env[ - "MLRUN_httpdb__dsn" - ] = f"sqlite:///{workdir}/mlrun.sqlite3?check_same_thread=false" + env["MLRUN_httpdb__dsn"] = ( + f"sqlite:///{workdir}/mlrun.sqlite3?check_same_thread=false" + ) env["MLRUN_httpdb__logs_path"] = workdir env.update(env_config or {}) cmd = [ diff --git a/tests/runtimes/test_run.py b/tests/runtimes/test_run.py index 29871f42ab43..c3b73dad1cf1 100644 --- a/tests/runtimes/test_run.py +++ b/tests/runtimes/test_run.py @@ -64,9 +64,9 @@ def test_new_function_from_runtime(): runtime = _get_runtime() function = mlrun.new_function(runtime=runtime) expected_runtime = runtime - expected_runtime["spec"][ - "preemption_mode" - ] = mlrun.mlconf.function_defaults.preemption_mode + expected_runtime["spec"]["preemption_mode"] = ( + mlrun.mlconf.function_defaults.preemption_mode + ) assert ( DeepDiff( function.to_dict(), @@ -82,9 +82,9 @@ def test_new_function_args_without_command(): runtime["spec"]["command"] = "" function = mlrun.new_function(runtime=runtime) expected_runtime = runtime - expected_runtime["spec"][ - "preemption_mode" - ] = mlrun.mlconf.function_defaults.preemption_mode + expected_runtime["spec"]["preemption_mode"] = ( + mlrun.mlconf.function_defaults.preemption_mode + ) assert ( DeepDiff( function.to_dict(), @@ -137,9 +137,9 @@ def test_new_function_with_resources(): ]: expected_runtime = copy.deepcopy(runtime) expected_runtime["spec"]["resources"] = test_case.get("expected_resources") - expected_runtime["spec"][ - "preemption_mode" - ] = mlrun.mlconf.function_defaults.preemption_mode + expected_runtime["spec"]["preemption_mode"] = ( + mlrun.mlconf.function_defaults.preemption_mode + ) runtime["spec"]["resources"] = test_case.get("resources", None) mlrun.mlconf.default_function_pod_resources = 
test_case.get("default_resources") function = mlrun.new_function(runtime=runtime) @@ -273,12 +273,12 @@ def test_new_function_args_with_default_image_pull_secret(): runtime = _get_runtime() function = mlrun.new_function(runtime=runtime) expected_runtime = runtime - expected_runtime["spec"][ - "image_pull_secret" - ] = mlrun.mlconf.function.spec.image_pull_secret.default - expected_runtime["spec"][ - "preemption_mode" - ] = mlrun.mlconf.function_defaults.preemption_mode + expected_runtime["spec"]["image_pull_secret"] = ( + mlrun.mlconf.function.spec.image_pull_secret.default + ) + expected_runtime["spec"]["preemption_mode"] = ( + mlrun.mlconf.function_defaults.preemption_mode + ) assert ( DeepDiff( function.to_dict(), @@ -297,9 +297,9 @@ def test_new_function_override_default_image_pull_secret(): function = mlrun.new_function(runtime=runtime) expected_runtime = runtime expected_runtime["spec"]["image_pull_secret"] = new_secret - expected_runtime["spec"][ - "preemption_mode" - ] = mlrun.mlconf.function_defaults.preemption_mode + expected_runtime["spec"]["preemption_mode"] = ( + mlrun.mlconf.function_defaults.preemption_mode + ) assert ( DeepDiff( function.to_dict(), diff --git a/tests/system/model_monitoring/test_model_monitoring.py b/tests/system/model_monitoring/test_model_monitoring.py index 92f5ff616626..2cbaa2c45f42 100644 --- a/tests/system/model_monitoring/test_model_monitoring.py +++ b/tests/system/model_monitoring/test_model_monitoring.py @@ -824,9 +824,9 @@ def test_batch_drift(self): "p0": [0, 0], } ) - infer_results_df[ - mlrun.common.schemas.EventFieldType.TIMESTAMP - ] = datetime.utcnow() + infer_results_df[mlrun.common.schemas.EventFieldType.TIMESTAMP] = ( + datetime.utcnow() + ) # Record results and trigger the monitoring batch job endpoint_id = "123123123123" @@ -990,9 +990,9 @@ def custom_setup_class(cls) -> None: cls.training_set = cls.x_train.join(cls.y_train) cls.test_set = cls.x_test.join(cls.y_test) cls.infer_results_df = cls.test_set - cls.infer_results_df[ - mlrun.common.schemas.EventFieldType.TIMESTAMP - ] = datetime.utcnow() + cls.infer_results_df[mlrun.common.schemas.EventFieldType.TIMESTAMP] = ( + datetime.utcnow() + ) cls.endpoint_id = "5d6ce0e704442c0ac59a933cb4d238baba83bb5d" cls.function_name = f"{cls.name_prefix}-function" cls._train() @@ -1085,9 +1085,9 @@ def custom_setup_class(cls) -> None: cls.model_name = "clf_model" cls.infer_results_df = cls.train_set.copy() - cls.infer_results_df[ - mlrun.common.schemas.EventFieldType.TIMESTAMP - ] = datetime.utcnow() + cls.infer_results_df[mlrun.common.schemas.EventFieldType.TIMESTAMP] = ( + datetime.utcnow() + ) def custom_setup(self): mlrun.runtimes.utils.global_context.set(None) diff --git a/tests/utils/test_get_secrets.py b/tests/utils/test_get_secrets.py index 097717e3c9b4..7e37acdce0ea 100644 --- a/tests/utils/test_get_secrets.py +++ b/tests/utils/test_get_secrets.py @@ -32,9 +32,9 @@ def test_get_secret_from_env(): os.environ[key] = value assert mlrun.get_secret_or_env(key) == value - os.environ[ - SecretsStore.k8s_env_variable_name_for_secret(key) - ] = project_secret_value + os.environ[SecretsStore.k8s_env_variable_name_for_secret(key)] = ( + project_secret_value + ) # Project secrets should not override directly set env variables assert mlrun.get_secret_or_env(key) == value