From 0e3cc34bae6655673e6ce5ac24e843aa015cedb8 Mon Sep 17 00:00:00 2001 From: Laurent Vaylet Date: Thu, 30 May 2024 22:50:27 +0200 Subject: [PATCH] chore: review and clean up `pylint` annotations (#481) --- pyproject.toml | 2 ++ samples/custom/custom_backend.py | 2 -- samples/custom/custom_exporter.py | 2 -- slo_generator/api/main.py | 8 ++++---- slo_generator/backends/cloud_monitoring.py | 5 +---- slo_generator/backends/cloud_monitoring_mql.py | 3 +-- .../backends/cloud_service_monitoring.py | 11 +++-------- slo_generator/backends/datadog.py | 1 - slo_generator/backends/dynatrace.py | 6 ++---- slo_generator/backends/elasticsearch.py | 1 - slo_generator/backends/open_search.py | 2 -- slo_generator/backends/prometheus.py | 6 +----- slo_generator/cli.py | 4 +--- slo_generator/compute.py | 5 ++--- slo_generator/constants.py | 3 ++- slo_generator/exporters/base.py | 4 ++-- slo_generator/exporters/bigquery.py | 1 - slo_generator/exporters/cloud_monitoring.py | 5 ++--- slo_generator/exporters/cloudevent.py | 1 - slo_generator/exporters/datadog.py | 1 - slo_generator/exporters/dynatrace.py | 3 ++- slo_generator/exporters/prometheus.py | 3 +-- slo_generator/exporters/pubsub.py | 4 ++-- slo_generator/migrations/migrator.py | 12 ++---------- slo_generator/report.py | 15 +++++---------- slo_generator/utils.py | 7 +------ tests/unit/backends/test_opensearch.py | 2 -- tests/unit/fixtures/dummy_backend.py | 4 ---- tests/unit/fixtures/fail_exporter.py | 1 - tests/unit/test_compute.py | 3 ++- tests/unit/test_stubs.py | 6 ------ 31 files changed, 38 insertions(+), 95 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9b234df1..1a7d1bfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,4 +24,6 @@ select = [ "SIM", # isort "I", + # Pylint + "PL", ] diff --git a/samples/custom/custom_backend.py b/samples/custom/custom_backend.py index 88ff0091..b8758c3a 100644 --- a/samples/custom/custom_backend.py +++ b/samples/custom/custom_backend.py @@ -27,7 +27,6 @@ class 
CustomBackend: def __init__(self, client=None, **kwargs): pass - # pylint: disable=unused-argument def good_bad_ratio(self, timestamp, window, slo_config): """Good bad ratio method. @@ -41,6 +40,5 @@ def good_bad_ratio(self, timestamp, window, slo_config): """ return 100000, 100 - # pylint: disable=unused-argument,missing-function-docstring def query_sli(self, timestamp, window, slo_config): return 0.999 diff --git a/samples/custom/custom_exporter.py b/samples/custom/custom_exporter.py index db07b51a..9e8aaab6 100644 --- a/samples/custom/custom_exporter.py +++ b/samples/custom/custom_exporter.py @@ -42,11 +42,9 @@ def export_metric(self, data): } -# pylint: disable=too-few-public-methods class CustomSLOExporter: """Custom exporter for SLO data.""" - # pylint: disable=unused-argument def export(self, data, **config): """Export data to custom destination. diff --git a/slo_generator/api/main.py b/slo_generator/api/main.py index 5c3b79e9..67cc8e4f 100644 --- a/slo_generator/api/main.py +++ b/slo_generator/api/main.py @@ -106,7 +106,7 @@ def run_export(request): # Construct exporters block spec = {} # pytype: disable=attribute-error - # pylint: disable=fixme + # FIXME `load_config()` returns `Optional[dict]` so `config` can be `None` default_exporters = config.get("default_exporters", []) # pytype: enable=attribute-error @@ -211,10 +211,10 @@ def process_batch_req(request, data, config): for url in urls: if "pubsub_batch_handler" in config: LOGGER.info(f"Sending {url} to pubsub batch handler.") - from google.cloud import pubsub_v1 # pylint: disable=C0415 + from google.cloud import pubsub_v1 # pytype: disable=attribute-error - # pylint: disable=fixme + # FIXME `load_config()` returns `Optional[dict]` so `config` can be `None` # so `config` can be `None` exporter_conf = config.get("pubsub_batch_handler") @@ -222,7 +222,7 @@ def process_batch_req(request, data, config): client = pubsub_v1.PublisherClient() project_id = exporter_conf["project_id"] topic_name = 
exporter_conf["topic_name"] - # pylint: disable=no-member + topic_path = client.topic_path(project_id, topic_name) data = url.encode("utf-8") client.publish(topic_path, data=data).result() diff --git a/slo_generator/backends/cloud_monitoring.py b/slo_generator/backends/cloud_monitoring.py index d0f8015a..8997957c 100644 --- a/slo_generator/backends/cloud_monitoring.py +++ b/slo_generator/backends/cloud_monitoring.py @@ -44,7 +44,6 @@ def __init__(self, project_id, client=None): self.client = monitoring_v3.MetricServiceClient() self.parent = self.client.common_project_path(project_id) - # pylint: disable=duplicate-code def good_bad_ratio(self, timestamp, window, slo_config): """Query two timeseries, one containing 'good' events, one containing 'bad' events. @@ -97,7 +96,6 @@ def good_bad_ratio(self, timestamp, window, slo_config): return good_event_count, bad_event_count - # pylint: disable=duplicate-code,too-many-locals def distribution_cut(self, timestamp, window, slo_config): """Query one timeseries of type 'exponential'. 
@@ -174,8 +172,7 @@ def exponential_distribution_cut(self, *args, **kwargs): ) return self.distribution_cut(*args, **kwargs) - # pylint: disable=redefined-builtin,too-many-arguments - def query( + def query( # noqa: PLR0913 self, timestamp, window, diff --git a/slo_generator/backends/cloud_monitoring_mql.py b/slo_generator/backends/cloud_monitoring_mql.py index c0a6e421..40351416 100644 --- a/slo_generator/backends/cloud_monitoring_mql.py +++ b/slo_generator/backends/cloud_monitoring_mql.py @@ -98,7 +98,6 @@ def good_bad_ratio( return good_event_count, bad_event_count - # pylint: disable=too-many-locals,disable=unused-argument def distribution_cut( self, timestamp: int, @@ -177,7 +176,7 @@ def exponential_distribution_cut(self, *args, **kwargs) -> Tuple[int, int]: def query_sli( self, - timestamp: int, # pylint: disable=unused-argument + timestamp: int, window: int, slo_config: dict, ) -> float: diff --git a/slo_generator/backends/cloud_service_monitoring.py b/slo_generator/backends/cloud_service_monitoring.py index a37307e8..3a332f41 100644 --- a/slo_generator/backends/cloud_service_monitoring.py +++ b/slo_generator/backends/cloud_service_monitoring.py @@ -46,7 +46,6 @@ SID_MESH_ISTIO: str = "ist:{mesh_uid}-{service_namespace}-{service_name}" -# pylint: disable=too-many-public-methods class CloudServiceMonitoringBackend: """Cloud Service Monitoring backend class. @@ -119,7 +118,6 @@ def window(self, timestamp: int, window: int, slo_config: dict) -> tuple: """ return self.retrieve_slo(timestamp, window, slo_config) - # pylint: disable=unused-argument def delete(self, timestamp: int, window: int, slo_config: dict) -> Optional[dict]: """Delete method. @@ -159,7 +157,7 @@ def retrieve_slo(self, timestamp: int, window: int, slo_config: dict): # Now that we have our SLO, retrieve the TimeSeries from Cloud # Monitoring API for that particular SLO id. 
metric_filter = self.build_slo_id(window, slo_config, full=True) - # pylint: disable=redefined-builtin + filter = f'select_slo_counts("{metric_filter}")' # Query SLO timeseries @@ -374,10 +372,8 @@ def create_slo(self, window: int, slo_config: dict) -> dict: ) return SSM.to_json(slo) - # pylint: disable=R0912,R0915 @staticmethod - # pylint: disable=R0912,R0915,too-many-locals - def build_slo(window: int, slo_config: dict) -> dict: + def build_slo(window: int, slo_config: dict) -> dict: # noqa: PLR0912, PLR0915 """Get SLO JSON representation in Cloud Service Monitoring API from SLO configuration. @@ -459,7 +455,6 @@ def build_slo(window: int, slo_config: dict) -> dict: sli["distribution_cut"]["range"]["min"] = float(range_min) elif method == "windows": - # pylint: disable=redefined-builtin filter = measurement.get("filter") # threshold = conf.get('threshold') # mean_in_range = conf.get('filter') @@ -728,7 +723,7 @@ def to_json(response): Returns: dict: Response object serialized as JSON. """ - # pylint: disable=protected-access + return json.loads(MessageToJson(response._pb)) diff --git a/slo_generator/backends/datadog.py b/slo_generator/backends/datadog.py index fe8fbb88..cc348834 100644 --- a/slo_generator/backends/datadog.py +++ b/slo_generator/backends/datadog.py @@ -45,7 +45,6 @@ def __init__(self, client=None, api_key=None, app_key=None, **kwargs): datadog.initialize(**options) self.client = datadog.api - # pylint: disable=too-many-locals def good_bad_ratio(self, timestamp, window, slo_config): """Query SLI value from good and valid queries. 
diff --git a/slo_generator/backends/dynatrace.py b/slo_generator/backends/dynatrace.py index 9735c39a..94eb1ab2 100644 --- a/slo_generator/backends/dynatrace.py +++ b/slo_generator/backends/dynatrace.py @@ -117,8 +117,7 @@ def threshold(self, timestamp, window, slo_config): response, threshold, good_below_threshold ) - # pylint: disable=too-many-arguments - def query( + def query( # noqa: PLR0913 self, start, end, @@ -270,8 +269,7 @@ def __init__(self, api_url, api_key): wait_exponential_max=10000, stop_max_delay=10000, ) - # pylint: disable=too-many-arguments,too-many-locals - def request( + def request( # noqa: PLR0913 self, method, endpoint, diff --git a/slo_generator/backends/elasticsearch.py b/slo_generator/backends/elasticsearch.py index d780024a..278a4add 100644 --- a/slo_generator/backends/elasticsearch.py +++ b/slo_generator/backends/elasticsearch.py @@ -59,7 +59,6 @@ def __init__(self, client=None, **es_config): # Note: Either `hosts` or `cloud_id` must be specified in v8.x.x self.client = Elasticsearch(**conf) - # pylint: disable=unused-argument,too-many-locals def good_bad_ratio(self, timestamp, window, slo_config): """Query two timeseries, one containing 'good' events, one containing 'bad' events. diff --git a/slo_generator/backends/open_search.py b/slo_generator/backends/open_search.py index 5d1ce58c..72f4dde5 100644 --- a/slo_generator/backends/open_search.py +++ b/slo_generator/backends/open_search.py @@ -13,7 +13,6 @@ LOGGER = logging.getLogger(__name__) -# pylint: disable=duplicate-code class OpenSearchBackend: """Backend for querying metrics from OpenSearch. @@ -38,7 +37,6 @@ def __init__(self, client=None, **os_config): self.client = OpenSearch(**conf) - # pylint: disable=unused-argument def good_bad_ratio(self, timestamp, window, slo_config): """Query two timeseries, one containing 'good' events, one containing 'bad' events. 
diff --git a/slo_generator/backends/prometheus.py b/slo_generator/backends/prometheus.py index 6f89833e..d08df896 100644 --- a/slo_generator/backends/prometheus.py +++ b/slo_generator/backends/prometheus.py @@ -97,7 +97,6 @@ def good_bad_ratio(self, timestamp, window, slo_config): return (good_count, bad_count) - # pylint: disable=unused-argument def distribution_cut( self, timestamp: int, window: int, slo_config: dict ) -> Tuple[float, float]: @@ -138,9 +137,7 @@ def distribution_cut( LOGGER.debug(f"Good events: {good_count} | " f"Bad events: {bad_count}") return (good_count, bad_count) - # pylint: disable=unused-argument,redefined-builtin,dangerous-default-value - # pylint: disable=too-many-arguments - def query( + def query( # noqa: PLR0913 self, filter: str, window: int, @@ -189,7 +186,6 @@ def count(response: dict) -> float: return NO_DATA # no events in timeseries @staticmethod - # pylint: disable=dangerous-default-value def _fmt_query( query: str, window: int, diff --git a/slo_generator/cli.py b/slo_generator/cli.py index aa678bad..db84bec0 100644 --- a/slo_generator/cli.py +++ b/slo_generator/cli.py @@ -127,7 +127,6 @@ def compute(slo_config, config, export, delete, timestamp): return all_reports -# pylint: disable=import-error,import-outside-toplevel @main.command() @click.pass_context @click.option( @@ -164,8 +163,7 @@ def compute(slo_config, config, export, delete, timestamp): default=8080, help="HTTP port", ) -# pylint: disable=too-many-arguments -def api(ctx, config, exporters, signature_type, target, port): +def api(ctx, config, exporters, signature_type, target, port): # noqa: PLR0913 """Run an API that can receive requests (supports both 'http' and 'cloudevents' signature types).""" from functions_framework._cli import _cli diff --git a/slo_generator/compute.py b/slo_generator/compute.py index 788557cd..1d489fe4 100644 --- a/slo_generator/compute.py +++ b/slo_generator/compute.py @@ -28,8 +28,7 @@ LOGGER = logging.getLogger(__name__) -# pylint: 
disable=too-many-arguments,too-many-locals -def compute( +def compute( # noqa: PLR0913 slo_config: dict, config: dict, timestamp: Optional[float] = None, @@ -140,7 +139,7 @@ def export(data: dict, exporters: list, raise_on_error: bool = False) -> list: response = instance().export(json_data, **exporter) LOGGER.info(f'{info} | SLO report sent to "{name}" exporter successfully.') LOGGER.debug(f"{info} | {response}") - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: if raise_on_error: raise exc tbk = utils.fmt_traceback(exc) diff --git a/slo_generator/constants.py b/slo_generator/constants.py index 8b9dd74c..cd47f62a 100644 --- a/slo_generator/constants.py +++ b/slo_generator/constants.py @@ -78,7 +78,8 @@ # Colors / Status -# pylint: disable=too-few-public-methods + + class Colors: """Colors for console output.""" diff --git a/slo_generator/exporters/base.py b/slo_generator/exporters/base.py index 8167a7fa..882be59c 100644 --- a/slo_generator/exporters/base.py +++ b/slo_generator/exporters/base.py @@ -84,14 +84,14 @@ class `export_metric` method. 
LOGGER.debug(f"Exporting {len(metrics)} metrics with {self.__class__.__name__}") for metric_cfg in metrics: if isinstance(metric_cfg, str): # short form - metric_cfg = { + metric_cfg = { # noqa: PLW2901 "name": metric_cfg, "alias": metric_cfg, "description": "", "labels": DEFAULT_METRIC_LABELS, } if metric_cfg["name"] == "error_budget_burn_rate": - metric_cfg = MetricsExporter.use_deprecated_fields( + metric_cfg = MetricsExporter.use_deprecated_fields( # noqa: PLW2901 config=config, metric=metric_cfg ) metric = metric_cfg.copy() diff --git a/slo_generator/exporters/bigquery.py b/slo_generator/exporters/bigquery.py index 62e1f636..3522faa7 100644 --- a/slo_generator/exporters/bigquery.py +++ b/slo_generator/exporters/bigquery.py @@ -155,7 +155,6 @@ def create_table(self, project_id, dataset_id, table_id, schema=None): ) return self.client.create_table(table) - # pylint: disable=dangerous-default-value def update_schema(self, table_ref, keep=None): """Updates a BigQuery table schema if needed. diff --git a/slo_generator/exporters/cloud_monitoring.py b/slo_generator/exporters/cloud_monitoring.py index 38f24cff..4c7bf1be 100644 --- a/slo_generator/exporters/cloud_monitoring.py +++ b/slo_generator/exporters/cloud_monitoring.py @@ -59,7 +59,7 @@ def create_timeseries(self, data: dict): series.resource.type = "global" labels = data["labels"] for key, value in labels.items(): - series.metric.labels[key] = value # pylint: disable=E1101 + series.metric.labels[key] = value # Define end point timestamp. timestamp = data["timestamp"] @@ -88,7 +88,7 @@ def create_timeseries(self, data: dict): # Record the timeseries to Cloud Monitoring. 
project = self.client.common_project_path(data["project_id"]) self.client.create_time_series(name=project, time_series=[series]) - # pylint: disable=E1101 + labels = series.metric.labels LOGGER.debug( f"timestamp: {timestamp}" @@ -96,4 +96,3 @@ def create_timeseries(self, data: dict): f"{labels['service_name']}-{labels['feature_name']}-" f"{labels['slo_name']}-{labels['error_budget_policy_step_name']}" ) - # pylint: enable=E1101 diff --git a/slo_generator/exporters/cloudevent.py b/slo_generator/exporters/cloudevent.py index 0c8ab145..0da13874 100644 --- a/slo_generator/exporters/cloudevent.py +++ b/slo_generator/exporters/cloudevent.py @@ -26,7 +26,6 @@ LOGGER = logging.getLogger(__name__) -# pylint: disable=too-few-public-methods class CloudeventExporter: """Cloudevent exporter class. diff --git a/slo_generator/exporters/datadog.py b/slo_generator/exporters/datadog.py index bc1e1e88..e1885207 100644 --- a/slo_generator/exporters/datadog.py +++ b/slo_generator/exporters/datadog.py @@ -28,7 +28,6 @@ DEFAULT_API_HOST = "https://api.datadoghq.com" -# pylint: disable=too-few-public-methods class DatadogExporter(MetricsExporter): """Datadog exporter class. diff --git a/slo_generator/exporters/dynatrace.py b/slo_generator/exporters/dynatrace.py index db09d1d6..104bee0b 100644 --- a/slo_generator/exporters/dynatrace.py +++ b/slo_generator/exporters/dynatrace.py @@ -55,7 +55,8 @@ def export_metric(self, data): self.client = DynatraceClient(api_url, api_token) metric = self.get_custom_metric(data) code = int(metric.get("error", {}).get("code", "200")) - if code == 404: + NOT_FOUND = 404 + if code == NOT_FOUND: LOGGER.warning("Custom metric doesn't exist. 
Creating it.") metric = self.create_custom_metric(data) response = self.create_timeseries(data) diff --git a/slo_generator/exporters/prometheus.py b/slo_generator/exporters/prometheus.py index abcd5f59..594b4d49 100644 --- a/slo_generator/exporters/prometheus.py +++ b/slo_generator/exporters/prometheus.py @@ -89,8 +89,7 @@ def create_timeseries(self, data): handler=handler, ) - # pylint: disable=too-many-arguments - def auth_handler(self, url, method, timeout, headers, data): + def auth_handler(self, url, method, timeout, headers, data): # noqa: PLR0913 """Handles authentication for pushing to Prometheus gateway. Args: diff --git a/slo_generator/exporters/pubsub.py b/slo_generator/exporters/pubsub.py index 2dc10141..ac8e8b84 100644 --- a/slo_generator/exporters/pubsub.py +++ b/slo_generator/exporters/pubsub.py @@ -24,7 +24,7 @@ LOGGER = logging.getLogger(__name__) -class PubsubExporter: # pylint: disable=too-few-public-methods +class PubsubExporter: """Pubsub exporter class.""" def __init__(self): @@ -44,7 +44,7 @@ def export(self, data, **config): """ project_id = config["project_id"] topic_name = config["topic_name"] - # pylint: disable=no-member + topic_path = self.publisher.topic_path(project_id, topic_name) data = json.dumps(data, indent=4).encode("utf-8") return self.publisher.publish(topic_path, data=data).result() diff --git a/slo_generator/migrations/migrator.py b/slo_generator/migrations/migrator.py index 2ddae44f..9ed55849 100644 --- a/slo_generator/migrations/migrator.py +++ b/slo_generator/migrations/migrator.py @@ -16,7 +16,6 @@ Migrate utilities for migrating slo-generator configs from v1 to v2. 
""" -# pylint: disable=line-too-long, too-many-statements, too-many-ancestors, too-many-locals, too-many-nested-blocks, unused-argument # flake8: noqa # pytype: skip-file import copy @@ -54,7 +53,6 @@ yaml.preserve_quotes = True # type: ignore[attr-defined] -# pylint: disable=too-many-arguments def do_migrate( source, target, @@ -233,7 +231,6 @@ def do_migrate( # 3.3 - Replace `error_budget_policy.yaml` local variable to `config.yaml` -# pylint: disable=dangerous-default-value def exporters_v1tov2( exporters_paths: list, shared_config: dict = {}, quiet: bool = False ) -> list: @@ -250,7 +247,6 @@ def exporters_v1tov2( exp_keys = [] for exp_path in exporters_paths: with open(exp_path, encoding="utf-8") as conf: - # pylint: disable=E1111 content = yaml.load(conf, Loader=yaml.SafeLoader) exporters = content @@ -271,7 +267,6 @@ def exporters_v1tov2( return exp_keys -# pylint: disable=dangerous-default-value def ebp_v1tov2(ebp_paths: list, shared_config: dict = {}, quiet: bool = False) -> list: """Translate error budget policies to v2 and put into shared config @@ -286,9 +281,8 @@ def ebp_v1tov2(ebp_paths: list, shared_config: dict = {}, quiet: bool = False) - ebp_keys = [] for ebp_path in ebp_paths: with open(ebp_path, encoding="utf-8") as conf: - # pylint: disable=E1111 error_budget_policy = yaml.load(conf, Loader=yaml.SafeLoader) - # pylint: disable=E1133 + for step in error_budget_policy: step["name"] = step.pop("error_budget_policy_step_name") step["burn_rate_threshold"] = step.pop("alerting_burn_rate_threshold") @@ -313,7 +307,6 @@ def ebp_v1tov2(ebp_paths: list, shared_config: dict = {}, quiet: bool = False) - return ebp_keys -# pylint: disable=dangerous-default-value def slo_config_v1tov2( slo_config: dict, shared_config: dict = {}, @@ -575,7 +568,6 @@ def peek(iterable): return first, itertools.chain([first], iterable) -# pylint: disable=too-few-public-methods class CustomDumper(yaml.RoundTripDumper): """Dedicated YAML dumper to insert lines between top-level 
objects. @@ -585,7 +577,7 @@ class CustomDumper(yaml.RoundTripDumper): # HACK: insert blank lines between top-level objects # inspired by https://stackoverflow.com/a/44284819/3786245 - # pylint: disable=missing-function-docstring + def write_line_break(self, data: Optional[str] = None): super().write_line_break(data) diff --git a/slo_generator/report.py b/slo_generator/report.py index 26873bd3..0a754393 100644 --- a/slo_generator/report.py +++ b/slo_generator/report.py @@ -26,7 +26,6 @@ LOGGER = logging.getLogger(__name__) -# pylint: disable=too-many-arguments,too-many-locals @dataclass(init=False) class SLOReport: """SLO report dataclass. Compute an SLO report out of an SLO config and an @@ -41,8 +40,6 @@ class SLOReport: delete (bool): Backend delete action. """ - # pylint: disable=too-many-instance-attributes - # SLO name: str description: str @@ -90,8 +87,7 @@ class SLOReport: # Data validation errors: List[str] = field(default_factory=list) - # pylint: disable=too-many-arguments - def __init__(self, config, backend, step, timestamp, client=None, delete=False): + def __init__(self, config, backend, step, timestamp, client=None, delete=False): # noqa: PLR0913 # Init dataclass fields from SLO config and Error Budget Policy spec = config["spec"] self.exporters = [] @@ -224,7 +220,7 @@ def run_backend(self, config, backend, client=None, delete=False): try: data = method(self.timestamp, self.window, config) LOGGER.debug(f"{self.info} | Backend response: {data}") - except Exception as exc: # pylint:disable=broad-except + except Exception as exc: self.errors.append(utils.fmt_traceback(exc)) return None return data @@ -273,8 +269,7 @@ def to_json(self) -> dict: } return asdict(self) - # pylint: disable=too-many-return-statements - def _validate(self, data) -> bool: + def _validate(self, data) -> bool: # noqa: PLR0911 """Validate backend results. Invalid data will result in SLO report not being built. 
@@ -303,7 +298,8 @@ def _validate(self, data) -> bool: # Good / Bad tuple if isinstance(data, tuple): # Tuple length should be 2 - if len(data) != 2: + EXPECTED_LENGTH = 2 + if len(data) != EXPECTED_LENGTH: error = ( f"Backend method returned a tuple with {len(data)} items." f"Expected 2 items." @@ -368,7 +364,6 @@ def _post_validate(self) -> bool: return True - # pylint: disable=dangerous-default-value def __set_fields(self, lambdas=None, **kwargs): """Set all fields in dataclasses from configs passed and apply function on values whose key match one in the dictionaries. diff --git a/slo_generator/utils.py b/slo_generator/utils.py index b00dca5c..b88534fc 100644 --- a/slo_generator/utils.py +++ b/slo_generator/utils.py @@ -46,7 +46,6 @@ LOGGER = logging.getLogger(__name__) -# pylint: disable=dangerous-default-value def load_configs( path: str, ctx: os._Environ = os.environ, kind: Optional[str] = None ) -> list: @@ -67,7 +66,6 @@ def load_configs( return [cfg for cfg in configs if cfg] -# pylint: disable=dangerous-default-value def load_config( path: str, ctx: os._Environ = os.environ, kind: Optional[str] = None ) -> Optional[dict]: @@ -112,7 +110,6 @@ def load_config( raise -# pylint: disable=dangerous-default-value def parse_config( path: Optional[str] = None, content=None, ctx: os._Environ = os.environ ): @@ -191,7 +188,6 @@ def setup_logging(): # Ignore Cloud SDK warning when using a user instead of service account try: - # pylint: disable=import-outside-toplevel from google.auth._default import _CLOUD_SDK_CREDENTIALS_WARNING warnings.filterwarnings("ignore", message=_CLOUD_SDK_CREDENTIALS_WARNING) @@ -357,7 +353,7 @@ def import_dynamic(package: str, name: str, prefix: str = "class"): """ try: return getattr(importlib.import_module(package), name) - except Exception as exception: # pylint: disable=W0703 + except Exception as exception: dep = package.split(".")[-1] warnings.warn( f'{prefix} "{package}.{name}" not found.\nPlease ensure that:\n' @@ -489,7 +485,6 @@ 
def decode_gcs_url(url: str) -> tuple: return (bucket_name, file_path) -# pylint: disable=dangerous-default-value def get_files(source, extensions=None) -> list: """Get all files matching extensions. diff --git a/tests/unit/backends/test_opensearch.py b/tests/unit/backends/test_opensearch.py index 9136fd59..cad4be0a 100644 --- a/tests/unit/backends/test_opensearch.py +++ b/tests/unit/backends/test_opensearch.py @@ -4,8 +4,6 @@ class TestOpenSearchBackend(unittest.TestCase): - assert 1 == 1 - def test_build_query_with_empty_query(self): assert OpenSearchBackend.build_query(None, 3600, "date") is None diff --git a/tests/unit/fixtures/dummy_backend.py b/tests/unit/fixtures/dummy_backend.py index be12591b..3d213b75 100644 --- a/tests/unit/fixtures/dummy_backend.py +++ b/tests/unit/fixtures/dummy_backend.py @@ -4,18 +4,14 @@ """ -# pylint: disable=missing-class-docstring class DummyBackend: - # pylint: disable=unused-argument def __init__(self, client=None, **config): self.good_events = config.get("good_events", None) self.bad_events = config.get("bad_events", None) self.sli_value = config.get("sli", None) - # pylint: disable=missing-function-docstring,unused-argument def good_bad_ratio(self, timestamp, window, slo_config): return (self.good_events, self.bad_events) - # pylint: disable=missing-function-docstring,unused-argument def sli(self, timestamp, window, slo_config): return self.sli_value diff --git a/tests/unit/fixtures/fail_exporter.py b/tests/unit/fixtures/fail_exporter.py index d3797a85..7a3d5ba4 100644 --- a/tests/unit/fixtures/fail_exporter.py +++ b/tests/unit/fixtures/fail_exporter.py @@ -6,7 +6,6 @@ from slo_generator.exporters.base import MetricsExporter -# pylint: disable=missing-class-docstring class FailExporter(MetricsExporter): def export_metric(self, data): raise ValueError("Oops !") diff --git a/tests/unit/test_compute.py b/tests/unit/test_compute.py index 558a4b53..768d7018 100644 --- a/tests/unit/test_compute.py +++ b/tests/unit/test_compute.py 
@@ -198,7 +198,8 @@ def test_export_dynatrace(self, mock): def test_export_dynatrace_error(self, mock): responses = export(SLO_REPORT, EXPORTERS[5]) codes = [r[0]["response"]["error"]["code"] for r in responses] - self.assertTrue(all(code == 429 for code in codes)) + TOO_MANY_REQUESTS = 429 + self.assertTrue(all(code == TOO_MANY_REQUESTS for code in codes)) def test_metrics_exporter_build_data_labels(self): exporter = MetricsExporter() diff --git a/tests/unit/test_stubs.py b/tests/unit/test_stubs.py index 8a9ecd03..99d67094 100644 --- a/tests/unit/test_stubs.py +++ b/tests/unit/test_stubs.py @@ -107,7 +107,6 @@ def mock_slo_report(key): } -# pylint: disable=too-few-public-methods class MultiCallableStub: """Stub for the grpc.UnaryUnaryMultiCallable interface.""" @@ -115,7 +114,6 @@ def __init__(self, method, channel_stub): self.method = method self.channel_stub = channel_stub - # pylint: disable=inconsistent-return-statements def __call__(self, request, timeout=None, metadata=None, credentials=None): self.channel_stub.requests.append((self.method, request)) @@ -130,7 +128,6 @@ def __call__(self, request, timeout=None, metadata=None, credentials=None): return response -# pylint: disable=R0903 class ChannelStub: """Stub for the grpc.Channel interface.""" @@ -140,7 +137,6 @@ def __init__(self, responses=None): self.responses = responses self.requests = [] - # pylint: disable=C0116,W0613 def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) @@ -178,7 +174,6 @@ def mock_sd(nresp=1): ) -# pylint: disable=W0613,R1721 def mock_prom(self, metric): """Fake Prometheus query response. @@ -201,7 +196,6 @@ def mock_prom(self, metric): return json.dumps(data) -# pylint: disable=W0613 def mock_es(self, index, body): """Fake ElasticSearch response.