Skip to content

Commit

Permalink
Merge pull request #13 from infn-datacloud/errors
Browse files Browse the repository at this point in the history
Add logic to notify that at least one error happened
  • Loading branch information
giosava94 authored Jul 8, 2024
2 parents 6c0d55b + c6ffd58 commit d05828c
Show file tree
Hide file tree
Showing 7 changed files with 60 additions and 28 deletions.
6 changes: 3 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# Create requirements.txt from poetry dependencies
FROM python:3.9-slim AS requirements
FROM python:3.10-slim AS requirements

WORKDIR /tmp

Expand All @@ -12,7 +12,7 @@ RUN poetry export -f requirements.txt --output requirements.txt --without-hashes


# Stage used in production
FROM python:3.9-slim AS production
FROM python:3.10 AS production

WORKDIR /app/

Expand All @@ -26,7 +26,7 @@ RUN apt-get update \
&& apt-get clean

# Upgrade pip and install requirements
RUN pip install --user --upgrade pip==20.2.4 \
RUN pip install --user --upgrade pip \
&& pip install --user --no-cache-dir --upgrade -r /app/requirements.txt

ENV PYTHONPATH="${PYTHONPATH}:/app/src"
Expand Down
2 changes: 1 addition & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

33 changes: 20 additions & 13 deletions src/crud.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import os
from typing import Any, Dict, List, Optional
from logging import Logger
from typing import List, Optional

import requests
from fastapi import status
Expand All @@ -9,27 +10,31 @@
ProviderRead,
ProviderReadExtended,
)
from pydantic import AnyHttpUrl, BaseModel, Field, validator
from pydantic import AnyHttpUrl

TIMEOUT = 30 # s


class CRUD(BaseModel):
class CRUD:
"""Class with create read update and delete operations.
Each operation makes a call to the Federation-Registry.
"""

multi_url: AnyHttpUrl = Field(alias="url")
read_headers: Dict[str, str]
write_headers: Dict[str, str]
single_url: Optional[AnyHttpUrl]
logger: Any

@validator("single_url", pre=True, always=True)
@classmethod
def build_single_url(cls, v: Optional[str], values: Dict[str, Any]) -> str:
return os.path.join(values.get("multi_url"), "{uid}")
def __init__(
    self,
    *,
    url: AnyHttpUrl,
    read_headers: dict[str, str],
    write_headers: dict[str, str],
    logger: Logger,
) -> None:
    """Store endpoints, auth headers and logger for Federation-Registry calls.

    Args:
        url: Base URL of the providers collection endpoint.
        read_headers: HTTP headers sent on read (GET) requests.
        write_headers: HTTP headers sent on write (POST/PATCH/DELETE) requests.
        logger: Logger used to report the outcome of each operation.
    """
    self.multi_url = url
    # Build the single-item endpoint from the collection URL.
    # NOTE: os.path.join is platform-dependent (backslash separator on
    # Windows) and must not be used for URLs; join with '/' explicitly.
    self.single_url = f"{str(self.multi_url).rstrip('/')}/{{uid}}"
    self.read_headers = read_headers
    self.write_headers = write_headers
    self.logger = logger
    # Flipped to True by create/update when a request fails; callers read
    # it to decide the process exit code.
    self.error = False

def read(self) -> List[ProviderRead]:
"""Retrieve all providers from the Federation-Registry."""
Expand Down Expand Up @@ -67,6 +72,7 @@ def create(self, *, data: ProviderCreateExtended) -> ProviderReadExtended:
elif resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY:
self.logger.error("Provider=%s has not been created.", data.name)
self.logger.error(resp.json())
self.error = True
return None

self.logger.debug("Status code: %s", resp.status_code)
Expand Down Expand Up @@ -117,6 +123,7 @@ def update(
elif resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY:
self.logger.error("Provider=%s has not been updated.", new_data.name)
self.logger.error(resp.json())
self.error = True
return None

self.logger.debug("Status code: %s", resp.status_code)
Expand Down
15 changes: 10 additions & 5 deletions src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,31 +27,36 @@ def main(log_level: str) -> None:

# Read all yaml files containing providers configurations.
yaml_files = get_conf_files(settings=settings, logger=logger)
site_configs = get_site_configs(yaml_files=yaml_files, log_level=log_level)
site_configs, error = get_site_configs(yaml_files=yaml_files, log_level=log_level)

# Prepare data (merge issuers and provider configurations)
prov_iss_list: list[ProviderThread] = []
pthreads: list[ProviderThread] = []
for config in site_configs:
prov_configs = [*config.openstack, *config.kubernetes]
issuers = config.trusted_idps
for conf in prov_configs:
prov_iss_list.append(
pthreads.append(
ProviderThread(provider_conf=conf, issuers=issuers, log_level=log_level)
)

# Multithreading read
providers = []
with ThreadPoolExecutor() as executor:
providers = executor.map(lambda x: x.get_provider(), prov_iss_list)
providers = executor.map(lambda x: x.get_provider(), pthreads)
providers: list[ProviderCreateExtended] = list(filter(lambda x: x, providers))
error |= any([x.error for x in pthreads])

# Update the Federation-Registry
token = site_configs[0].trusted_idps[0].token if len(site_configs) > 0 else ""
fed_reg_endpoints = infer_service_endpoints(settings=settings, logger=logger)
update_database(
error |= update_database(
service_api_url=fed_reg_endpoints, token=token, items=providers, logger=logger
)

if error:
logger.error("Found at least one error.")
exit(1)


if __name__ == "__main__":
args = parser.parse_args()
Expand Down
8 changes: 8 additions & 0 deletions src/providers/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,12 +39,15 @@ def __init__(
self.region_conf = region_conf
self.project_conf = project_conf
self.issuers = issuers

self.log_level = log_level
logger_name = f"Provider {self.provider_conf.name}, "
logger_name += f"Region {self.region_conf.name}, "
logger_name += f"Project {self.project_conf.id}"
self.logger = create_logger(logger_name, level=log_level)

self.error = False

def get_provider_siblings(self) -> Optional[ProviderSiblings]:
"""Retrieve the provider region, project and identity provider.
Expand All @@ -60,6 +63,7 @@ def get_provider_siblings(self) -> Optional[ProviderSiblings]:
except ValueError as e:
self.logger.error(e)
self.logger.error("Skipping project")
self.error = True
return None

if self.provider_conf.type == ProviderType.OS.value:
Expand All @@ -72,7 +76,9 @@ def get_provider_siblings(self) -> Optional[ProviderSiblings]:
token=token,
logger=self.logger,
)
self.error |= data.error
except ProviderException:
self.error = True
return None
elif self.provider_conf.type == ProviderType.K8S.value:
self.logger.warning("Not yet implemented")
Expand Down Expand Up @@ -176,6 +182,7 @@ def __init__(
self.logger = create_logger(
f"Provider {self.provider_conf.name}", level=log_level
)
self.error = False

def get_provider(self) -> ProviderCreateExtended:
"""Generate a list of generic providers.
Expand Down Expand Up @@ -214,6 +221,7 @@ def get_provider(self) -> ProviderCreateExtended:
siblings: list[ProviderSiblings] = list(
filter(lambda x: x is not None, siblings)
)
self.error = any([x.error for x in connections])

identity_providers: dict[str, IdentityProviderCreateExtended] = {}
projects: dict[str, ProjectCreate] = {}
Expand Down
6 changes: 6 additions & 0 deletions src/providers/openstack.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ def __init__(
self.identity_provider = identity_provider
self.region_name = region_name
self.logger = logger
self.error = False

# Connection can stay outside the try because it is only defined, not yet opened
self.conn = self.connect_to_provider(token=token)
Expand Down Expand Up @@ -131,6 +132,7 @@ def get_block_storage_quotas(
data = quota.to_dict()
except ForbiddenException as e:
self.logger.error(e)
self.error = True
data = {}
self.logger.debug("Block storage service quotas=%s", data)
data_limits = {**data}
Expand Down Expand Up @@ -205,6 +207,7 @@ def get_flavor_projects(self, flavor: Flavor) -> List[str]:
projects.add(i.get("tenant_id"))
except ForbiddenException as e:
self.logger.error(e)
self.error = True
return list(projects)

def get_flavors(self) -> List[FlavorCreateExtended]:
Expand Down Expand Up @@ -347,6 +350,7 @@ def get_block_storage_service(self) -> Optional[BlockStorageServiceCreateExtende
endpoint = self.conn.block_storage.get_endpoint()
except EndpointNotFound as e:
self.logger.error(e)
self.error = True
return None
if not endpoint:
return None
Expand Down Expand Up @@ -377,6 +381,7 @@ def get_compute_service(self) -> Optional[ComputeServiceCreateExtended]:
endpoint = self.conn.compute.get_endpoint()
except EndpointNotFound as e:
self.logger.error(e)
self.error = True
return None
if not endpoint:
return None
Expand All @@ -402,6 +407,7 @@ def get_network_service(self) -> Optional[NetworkServiceCreateExtended]:
endpoint = self.conn.network.get_endpoint()
except EndpointNotFound as e:
self.logger.error(e)
self.error = True
return None
if not endpoint:
return None
Expand Down
18 changes: 12 additions & 6 deletions src/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,24 +52,26 @@ def load_config(
config = SiteConfig(**config)
logger.info("Configuration loaded")
logger.debug("%r", config)
return config
except ValueError as e:
logger.error(e)
config = None
return None
else:
logger.error("Empty configuration")

return config
return None


def get_site_configs(
    *, yaml_files: List[str], log_level: str | int | None = None
) -> tuple[List[SiteConfig], bool]:
    """Create a list of SiteConfig from a list of yaml files.

    Files are loaded concurrently. Returns the successfully parsed
    configurations and an error flag that is True when at least one file
    failed to load (i.e. load_config returned None).
    """
    with ThreadPoolExecutor() as executor:
        # Materialize the results: executor.map returns a one-shot iterator,
        # and we must scan it twice (filter out failures AND detect them).
        # Iterating it a second time after filter() exhausted it would
        # always yield nothing, so the error flag could never become True.
        results = list(
            executor.map(lambda x: load_config(fname=x, log_level=log_level), yaml_files)
        )
    site_configs = [config for config in results if config is not None]
    error = any(config is None for config in results)
    return site_configs, error


def get_read_write_headers(*, token: str) -> Tuple[Dict[str, str], Dict[str, str]]:
Expand All @@ -89,7 +91,7 @@ def update_database(
items: List[ProviderCreateExtended],
token: str,
logger: Logger,
) -> None:
) -> bool:
"""Update the Federation-Registry data.
Create the read and write headers to use in requests.
Expand All @@ -99,6 +101,8 @@ def update_database(
data. Once all the current federated providers have been added or updated, remove
the remaining providers retrieved from the Federation-Registry, they are no more
tracked.
    Return True if at least one error happened, otherwise False.
"""
read_header, write_header = get_read_write_headers(token=token)
crud = CRUD(
Expand All @@ -118,3 +122,5 @@ def update_database(
crud.update(new_data=item, old_data=db_item)
for db_item in db_items.values():
crud.remove(item=db_item)

return crud.error

0 comments on commit d05828c

Please sign in to comment.