Skip to content

Commit

Permalink
chore: review and clean up pylint annotations (#481)
Browse files Browse the repository at this point in the history
  • Loading branch information
lvaylet authored May 30, 2024
1 parent b6c50be commit 0e3cc34
Show file tree
Hide file tree
Showing 31 changed files with 38 additions and 95 deletions.
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,6 @@ select = [
"SIM",
# isort
"I",
# Pylint
"PL",
]
2 changes: 0 additions & 2 deletions samples/custom/custom_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ class CustomBackend:
def __init__(self, client=None, **kwargs):
pass

# pylint: disable=unused-argument
def good_bad_ratio(self, timestamp, window, slo_config):
"""Good bad ratio method.
Expand All @@ -41,6 +40,5 @@ def good_bad_ratio(self, timestamp, window, slo_config):
"""
return 100000, 100

# pylint: disable=unused-argument,missing-function-docstring
def query_sli(self, timestamp, window, slo_config):
return 0.999
2 changes: 0 additions & 2 deletions samples/custom/custom_exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,11 +42,9 @@ def export_metric(self, data):
}


# pylint: disable=too-few-public-methods
class CustomSLOExporter:
"""Custom exporter for SLO data."""

# pylint: disable=unused-argument
def export(self, data, **config):
"""Export data to custom destination.
Expand Down
8 changes: 4 additions & 4 deletions slo_generator/api/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ def run_export(request):
# Construct exporters block
spec = {}
# pytype: disable=attribute-error
# pylint: disable=fixme

# FIXME `load_config()` returns `Optional[dict]` so `config` can be `None`
default_exporters = config.get("default_exporters", [])
# pytype: enable=attribute-error
Expand Down Expand Up @@ -211,18 +211,18 @@ def process_batch_req(request, data, config):
for url in urls:
if "pubsub_batch_handler" in config:
LOGGER.info(f"Sending {url} to pubsub batch handler.")
from google.cloud import pubsub_v1 # pylint: disable=C0415
from google.cloud import pubsub_v1

# pytype: disable=attribute-error
# pylint: disable=fixme

# FIXME `load_config()` returns `Optional[dict]`,
# so `config` can be `None`
exporter_conf = config.get("pubsub_batch_handler")
# pytype: enable=attribute-error
client = pubsub_v1.PublisherClient()
project_id = exporter_conf["project_id"]
topic_name = exporter_conf["topic_name"]
# pylint: disable=no-member

topic_path = client.topic_path(project_id, topic_name)
data = url.encode("utf-8")
client.publish(topic_path, data=data).result()
Expand Down
5 changes: 1 addition & 4 deletions slo_generator/backends/cloud_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ def __init__(self, project_id, client=None):
self.client = monitoring_v3.MetricServiceClient()
self.parent = self.client.common_project_path(project_id)

# pylint: disable=duplicate-code
def good_bad_ratio(self, timestamp, window, slo_config):
"""Query two timeseries, one containing 'good' events, one containing
'bad' events.
Expand Down Expand Up @@ -97,7 +96,6 @@ def good_bad_ratio(self, timestamp, window, slo_config):

return good_event_count, bad_event_count

# pylint: disable=duplicate-code,too-many-locals
def distribution_cut(self, timestamp, window, slo_config):
"""Query one timeseries of type 'exponential'.
Expand Down Expand Up @@ -174,8 +172,7 @@ def exponential_distribution_cut(self, *args, **kwargs):
)
return self.distribution_cut(*args, **kwargs)

# pylint: disable=redefined-builtin,too-many-arguments
def query(
def query( # noqa: PLR0913
self,
timestamp,
window,
Expand Down
3 changes: 1 addition & 2 deletions slo_generator/backends/cloud_monitoring_mql.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,6 @@ def good_bad_ratio(

return good_event_count, bad_event_count

# pylint: disable=too-many-locals,disable=unused-argument
def distribution_cut(
self,
timestamp: int,
Expand Down Expand Up @@ -177,7 +176,7 @@ def exponential_distribution_cut(self, *args, **kwargs) -> Tuple[int, int]:

def query_sli(
self,
timestamp: int, # pylint: disable=unused-argument
timestamp: int,
window: int,
slo_config: dict,
) -> float:
Expand Down
11 changes: 3 additions & 8 deletions slo_generator/backends/cloud_service_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@
SID_MESH_ISTIO: str = "ist:{mesh_uid}-{service_namespace}-{service_name}"


# pylint: disable=too-many-public-methods
class CloudServiceMonitoringBackend:
"""Cloud Service Monitoring backend class.
Expand Down Expand Up @@ -119,7 +118,6 @@ def window(self, timestamp: int, window: int, slo_config: dict) -> tuple:
"""
return self.retrieve_slo(timestamp, window, slo_config)

# pylint: disable=unused-argument
def delete(self, timestamp: int, window: int, slo_config: dict) -> Optional[dict]:
"""Delete method.
Expand Down Expand Up @@ -159,7 +157,7 @@ def retrieve_slo(self, timestamp: int, window: int, slo_config: dict):
# Now that we have our SLO, retrieve the TimeSeries from Cloud
# Monitoring API for that particular SLO id.
metric_filter = self.build_slo_id(window, slo_config, full=True)
# pylint: disable=redefined-builtin

filter = f'select_slo_counts("{metric_filter}")'

# Query SLO timeseries
Expand Down Expand Up @@ -374,10 +372,8 @@ def create_slo(self, window: int, slo_config: dict) -> dict:
)
return SSM.to_json(slo)

# pylint: disable=R0912,R0915
@staticmethod
# pylint: disable=R0912,R0915,too-many-locals
def build_slo(window: int, slo_config: dict) -> dict:
def build_slo(window: int, slo_config: dict) -> dict: # noqa: PLR0912, PLR0915
"""Get SLO JSON representation in Cloud Service Monitoring API from SLO
configuration.
Expand Down Expand Up @@ -459,7 +455,6 @@ def build_slo(window: int, slo_config: dict) -> dict:
sli["distribution_cut"]["range"]["min"] = float(range_min)

elif method == "windows":
# pylint: disable=redefined-builtin
filter = measurement.get("filter")
# threshold = conf.get('threshold')
# mean_in_range = conf.get('filter')
Expand Down Expand Up @@ -728,7 +723,7 @@ def to_json(response):
Returns:
dict: Response object serialized as JSON.
"""
# pylint: disable=protected-access

return json.loads(MessageToJson(response._pb))


Expand Down
1 change: 0 additions & 1 deletion slo_generator/backends/datadog.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@ def __init__(self, client=None, api_key=None, app_key=None, **kwargs):
datadog.initialize(**options)
self.client = datadog.api

# pylint: disable=too-many-locals
def good_bad_ratio(self, timestamp, window, slo_config):
"""Query SLI value from good and valid queries.
Expand Down
6 changes: 2 additions & 4 deletions slo_generator/backends/dynatrace.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,7 @@ def threshold(self, timestamp, window, slo_config):
response, threshold, good_below_threshold
)

# pylint: disable=too-many-arguments
def query(
def query( # noqa: PLR0913
self,
start,
end,
Expand Down Expand Up @@ -270,8 +269,7 @@ def __init__(self, api_url, api_key):
wait_exponential_max=10000,
stop_max_delay=10000,
)
# pylint: disable=too-many-arguments,too-many-locals
def request(
def request( # noqa: PLR0913
self,
method,
endpoint,
Expand Down
1 change: 0 additions & 1 deletion slo_generator/backends/elasticsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@ def __init__(self, client=None, **es_config):
# Note: Either `hosts` or `cloud_id` must be specified in v8.x.x
self.client = Elasticsearch(**conf)

# pylint: disable=unused-argument,too-many-locals
def good_bad_ratio(self, timestamp, window, slo_config):
"""Query two timeseries, one containing 'good' events, one containing
'bad' events.
Expand Down
2 changes: 0 additions & 2 deletions slo_generator/backends/open_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
LOGGER = logging.getLogger(__name__)


# pylint: disable=duplicate-code
class OpenSearchBackend:
"""Backend for querying metrics from OpenSearch.
Expand All @@ -38,7 +37,6 @@ def __init__(self, client=None, **os_config):

self.client = OpenSearch(**conf)

# pylint: disable=unused-argument
def good_bad_ratio(self, timestamp, window, slo_config):
"""Query two timeseries, one containing 'good' events, one containing
'bad' events.
Expand Down
6 changes: 1 addition & 5 deletions slo_generator/backends/prometheus.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,6 @@ def good_bad_ratio(self, timestamp, window, slo_config):

return (good_count, bad_count)

# pylint: disable=unused-argument
def distribution_cut(
self, timestamp: int, window: int, slo_config: dict
) -> Tuple[float, float]:
Expand Down Expand Up @@ -138,9 +137,7 @@ def distribution_cut(
LOGGER.debug(f"Good events: {good_count} | " f"Bad events: {bad_count}")
return (good_count, bad_count)

# pylint: disable=unused-argument,redefined-builtin,dangerous-default-value
# pylint: disable=too-many-arguments
def query(
def query( # noqa: PLR0913
self,
filter: str,
window: int,
Expand Down Expand Up @@ -189,7 +186,6 @@ def count(response: dict) -> float:
return NO_DATA # no events in timeseries

@staticmethod
# pylint: disable=dangerous-default-value
def _fmt_query(
query: str,
window: int,
Expand Down
4 changes: 1 addition & 3 deletions slo_generator/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,6 @@ def compute(slo_config, config, export, delete, timestamp):
return all_reports


# pylint: disable=import-error,import-outside-toplevel
@main.command()
@click.pass_context
@click.option(
Expand Down Expand Up @@ -164,8 +163,7 @@ def compute(slo_config, config, export, delete, timestamp):
default=8080,
help="HTTP port",
)
# pylint: disable=too-many-arguments
def api(ctx, config, exporters, signature_type, target, port):
def api(ctx, config, exporters, signature_type, target, port): # noqa: PLR0913
"""Run an API that can receive requests (supports both 'http' and
'cloudevents' signature types)."""
from functions_framework._cli import _cli
Expand Down
5 changes: 2 additions & 3 deletions slo_generator/compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,7 @@
LOGGER = logging.getLogger(__name__)


# pylint: disable=too-many-arguments,too-many-locals
def compute(
def compute( # noqa: PLR0913
slo_config: dict,
config: dict,
timestamp: Optional[float] = None,
Expand Down Expand Up @@ -140,7 +139,7 @@ def export(data: dict, exporters: list, raise_on_error: bool = False) -> list:
response = instance().export(json_data, **exporter)
LOGGER.info(f'{info} | SLO report sent to "{name}" exporter successfully.')
LOGGER.debug(f"{info} | {response}")
except Exception as exc: # pylint: disable=broad-except
except Exception as exc:
if raise_on_error:
raise exc
tbk = utils.fmt_traceback(exc)
Expand Down
3 changes: 2 additions & 1 deletion slo_generator/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@


# Colors / Status
# pylint: disable=too-few-public-methods


class Colors:
"""Colors for console output."""

Expand Down
4 changes: 2 additions & 2 deletions slo_generator/exporters/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,14 +84,14 @@ class `export_metric` method.
LOGGER.debug(f"Exporting {len(metrics)} metrics with {self.__class__.__name__}")
for metric_cfg in metrics:
if isinstance(metric_cfg, str): # short form
metric_cfg = {
metric_cfg = { # noqa: PLW2901
"name": metric_cfg,
"alias": metric_cfg,
"description": "",
"labels": DEFAULT_METRIC_LABELS,
}
if metric_cfg["name"] == "error_budget_burn_rate":
metric_cfg = MetricsExporter.use_deprecated_fields(
metric_cfg = MetricsExporter.use_deprecated_fields( # noqa: PLW2901
config=config, metric=metric_cfg
)
metric = metric_cfg.copy()
Expand Down
1 change: 0 additions & 1 deletion slo_generator/exporters/bigquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,6 @@ def create_table(self, project_id, dataset_id, table_id, schema=None):
)
return self.client.create_table(table)

# pylint: disable=dangerous-default-value
def update_schema(self, table_ref, keep=None):
"""Updates a BigQuery table schema if needed.
Expand Down
5 changes: 2 additions & 3 deletions slo_generator/exporters/cloud_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def create_timeseries(self, data: dict):
series.resource.type = "global"
labels = data["labels"]
for key, value in labels.items():
series.metric.labels[key] = value # pylint: disable=E1101
series.metric.labels[key] = value

# Define end point timestamp.
timestamp = data["timestamp"]
Expand Down Expand Up @@ -88,12 +88,11 @@ def create_timeseries(self, data: dict):
# Record the timeseries to Cloud Monitoring.
project = self.client.common_project_path(data["project_id"])
self.client.create_time_series(name=project, time_series=[series])
# pylint: disable=E1101

labels = series.metric.labels
LOGGER.debug(
f"timestamp: {timestamp}"
f"value: {point.value.double_value}"
f"{labels['service_name']}-{labels['feature_name']}-"
f"{labels['slo_name']}-{labels['error_budget_policy_step_name']}"
)
# pylint: enable=E1101
1 change: 0 additions & 1 deletion slo_generator/exporters/cloudevent.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
LOGGER = logging.getLogger(__name__)


# pylint: disable=too-few-public-methods
class CloudeventExporter:
"""Cloudevent exporter class.
Expand Down
1 change: 0 additions & 1 deletion slo_generator/exporters/datadog.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
DEFAULT_API_HOST = "https://api.datadoghq.com"


# pylint: disable=too-few-public-methods
class DatadogExporter(MetricsExporter):
"""Datadog exporter class.
Expand Down
3 changes: 2 additions & 1 deletion slo_generator/exporters/dynatrace.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ def export_metric(self, data):
self.client = DynatraceClient(api_url, api_token)
metric = self.get_custom_metric(data)
code = int(metric.get("error", {}).get("code", "200"))
if code == 404:
NOT_FOUND = 404
if code == NOT_FOUND:
LOGGER.warning("Custom metric doesn't exist. Creating it.")
metric = self.create_custom_metric(data)
response = self.create_timeseries(data)
Expand Down
3 changes: 1 addition & 2 deletions slo_generator/exporters/prometheus.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,7 @@ def create_timeseries(self, data):
handler=handler,
)

# pylint: disable=too-many-arguments
def auth_handler(self, url, method, timeout, headers, data):
def auth_handler(self, url, method, timeout, headers, data): # noqa: PLR0913
"""Handles authentication for pushing to Prometheus gateway.
Args:
Expand Down
4 changes: 2 additions & 2 deletions slo_generator/exporters/pubsub.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
LOGGER = logging.getLogger(__name__)


class PubsubExporter: # pylint: disable=too-few-public-methods
class PubsubExporter:
"""Pubsub exporter class."""

def __init__(self):
Expand All @@ -44,7 +44,7 @@ def export(self, data, **config):
"""
project_id = config["project_id"]
topic_name = config["topic_name"]
# pylint: disable=no-member

topic_path = self.publisher.topic_path(project_id, topic_name)
data = json.dumps(data, indent=4).encode("utf-8")
return self.publisher.publish(topic_path, data=data).result()
Loading

0 comments on commit 0e3cc34

Please sign in to comment.