Validation Workflow enhancement to support run with different options for staging and local artifacts (#3993)

Signed-off-by: Divya Madala <[email protected]>
Divyaasm authored Oct 2, 2023
1 parent f981a04 commit 464254d
Showing 16 changed files with 673 additions and 179 deletions.
22 changes: 11 additions & 11 deletions src/validation_workflow/api_test_cases.py
@@ -9,7 +9,6 @@
from typing import Any

from validation_workflow.api_request import ApiTest
from validation_workflow.validation_args import ValidationArgs

'''
This class is the collection of test cases to run.
@@ -19,24 +18,25 @@
class ApiTestCases:

def __init__(self) -> None:
self.opensearch_image_version = ValidationArgs().stg_tag('opensearch').split(' ')[0]
self.opensearch_dashboards_image_version = ValidationArgs().stg_tag('opensearch_dashboards').split(' ')[0]
pass

def test_cases(self) -> Any:
@staticmethod
def test_apis(projects: list) -> Any:
pass_counter, fail_counter = 0, 0

# the test case parameters are formated as ['<request_url>',<success_status_code>,'<validate_string(optional)>']
test_cases = [
['https://localhost:9200/', 200, '"number" : "' + self.opensearch_image_version + '"'],
test_apis = [
['https://localhost:9200/', 200, ''],
['https://localhost:9200/_cat/plugins?v', 200, ''],
['https://localhost:9200/_cat/health?v', 200, 'green'],
['http://localhost:5601/api/status', 200, '"number":"' + self.opensearch_dashboards_image_version + '"']
]
if ("opensearch-dashboards" in projects):
test_apis.append(['http://localhost:5601/api/status', 200, ''])

for test_case in test_cases:
request_url = test_case.__getitem__(0)
success_status_code = test_case.__getitem__(1)
validate_string = test_case.__getitem__(2)
for test_api in test_apis:
request_url = test_api.__getitem__(0)
success_status_code = test_api.__getitem__(1)
validate_string = test_api.__getitem__(2)

status_code, response_text = ApiTest(str(request_url)).api_get()
logging.info(f"\nRequest_url ->{str(request_url)} \n")
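To make the `api_test_cases.py` refactor above easier to follow, here is a minimal standalone sketch of the new conditional test-matrix pattern: the OpenSearch endpoints are always checked, and the Dashboards status endpoint is appended only when `opensearch-dashboards` is among the requested projects. `build_test_apis` is a hypothetical helper, not part of the PR; the real `test_apis` also issues the requests through `ApiTest` and tallies pass/fail counts.

```python
# Standalone sketch (assumption: a plain helper instead of the real ApiTestCases class).
from typing import List, Tuple


def build_test_apis(projects: List[str]) -> List[Tuple[str, int, str]]:
    # Base OpenSearch endpoints are always validated:
    # (request_url, expected_status_code, optional string to look for in the response).
    test_apis = [
        ("https://localhost:9200/", 200, ""),
        ("https://localhost:9200/_cat/plugins?v", 200, ""),
        ("https://localhost:9200/_cat/health?v", 200, "green"),
    ]
    # The Dashboards status endpoint is only added when that project was requested.
    if "opensearch-dashboards" in projects:
        test_apis.append(("http://localhost:5601/api/status", 200, ""))
    return test_apis


print(build_test_apis(["opensearch"]))                           # 3 test cases
print(build_test_apis(["opensearch", "opensearch-dashboards"]))  # 4 test cases
```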
29 changes: 13 additions & 16 deletions src/validation_workflow/docker/validation_docker.py
@@ -33,14 +33,14 @@ def download_artifacts(self) -> bool:
assert self.is_container_daemon_running(), 'Docker daemon is not running. Exiting the docker validation.'

# STEP 1 . pull the images for OS and OSD
product_names = ["opensearch", "opensearch_dashboards"]
product_names = self.args.projects
using_staging_artifact_only = 'staging' if self.args.using_staging_artifact_only else 'production'
get_image_id = lambda product: self.get_image_id( # noqa: E731
self.get_artifact_image_name(product, using_staging_artifact_only),
self.args.version if not self.args.using_staging_artifact_only else ValidationArgs().stg_tag(product).replace(" ", ""))
self.image_ids = list(map(get_image_id, product_names))
logging.info(f'the opensearch image ID is : {self.image_ids[0]}')
logging.info(f'the opensearch-dashboards image ID is : {self.image_ids[1]} \n\n')
self.image_ids = {key: value for key, value in zip(product_names, list(map(get_image_id, product_names)))}
self.image_ids = {key: value.strip() for key, value in self.image_ids.items()}

return True

except AssertionError as e:
@@ -62,8 +62,8 @@ def validation(self) -> bool:
# STEP 2 . inspect image digest between opensearchproject(downloaded/local) and opensearchstaging(dockerHub)
if not self.args.using_staging_artifact_only:
self.image_names_list = [self.args.OS_image, self.args.OSD_image]
self.image_digests = list(map(lambda x: self.inspect_docker_image(x[0], x[1]), zip(self.image_ids, self.image_names_list))) # type: ignore

self.image_names_list = [x for x in self.image_names_list if (os.path.basename(x) in self.args.projects)]
self.image_digests = list(map(lambda x: self.inspect_docker_image(x[0], x[1]), zip(self.image_ids.values(), self.image_names_list))) # type: ignore
if all(self.image_digests):
logging.info('Image digest is validated.\n\n')
if self.args.validate_digest_only:
@@ -75,16 +75,15 @@
# STEP 3 . spin-up OS/OSD cluster
if not self.args.validate_digest_only:
return_code, self._target_yml_file = self.run_container(
self.image_ids[0],
self.image_ids[1],
self.image_ids,
self.args.version
)
if return_code:
logging.info('Checking if cluster is ready for API test in every 10 seconds\n\n')

if self.check_cluster_readiness():
# STEP 4 . OS, OSD API validation
_test_result, _counter = ApiTestCases().test_cases()
_test_result, _counter = ApiTestCases().test_apis(self.args.projects)

if _test_result:
logging.info(f'All tests Pass : {_counter}')
@@ -167,7 +166,7 @@ def get_artifact_image_name(self, artifact: str, using_staging_artifact_only: st
'staging': 'opensearchstaging/opensearch',
'production': 'opensearchproject/opensearch'
},
'opensearch_dashboards': {
'opensearch-dashboards': {
'staging': 'opensearchstaging/opensearch-dashboards',
'production': 'opensearchproject/opensearch-dashboards'
}
@@ -177,7 +176,7 @@ def get_artifact_image_name(self, artifact: str, using_staging_artifact_only: st
'staging': 'public.ecr.aws/opensearchstaging/opensearch',
'production': 'public.ecr.aws/opensearchproject/opensearch'
},
'opensearch_dashboards': {
'opensearch-dashboards': {
'staging': 'public.ecr.aws/opensearchstaging/opensearch-dashboards',
'production': 'public.ecr.aws/opensearchproject/opensearch-dashboards'
}
@@ -236,7 +235,7 @@ def pull_image(self, image_name: str, image_version: str) -> str:
else:
raise Exception(f'error on pulling image : return code {str(result_pull.returncode)}')

def run_container(self, OpenSearch_image_id: str, OpenSearchDashboard_image_id: str, version: str) -> Any:
def run_container(self, image_ids: dict, version: str) -> Any:
self.docker_compose_files = {
'1': 'docker-compose-1.x.yml',
'2': 'docker-compose-2.x.yml'
@@ -250,10 +249,8 @@ def run_container(self, OpenSearch_image_id: str, OpenSearchDashboard_image_id:
self.source_file = os.path.join('docker', 'release', 'dockercomposefiles', self.docker_compose_files[self.major_version_number])
shutil.copy2(self.source_file, self.target_yml_file)

self.replacements = [
(f'opensearchproject/opensearch:{self.major_version_number}', f'{OpenSearch_image_id}'),
(f'opensearchproject/opensearch-dashboards:{self.major_version_number}', f'{OpenSearchDashboard_image_id}')
]
self.replacements = [(f'opensearchproject/{key}:{self.major_version_number}', value) for key, value in image_ids.items()]

list(map(lambda r: self.inplace_change(self.target_yml_file, r[0], r[1]), self.replacements))

# spin up containers
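The docker changes above replace the positional image-id list with a dictionary keyed by project name, which then drives the docker-compose image substitutions in `run_container`. The following sketch illustrates that mapping in isolation; `get_image_id` here is a hypothetical stub standing in for the real Docker lookup, and version/tag handling is omitted.

```python
# Illustrative-only sketch of the project -> image-id mapping and compose substitutions.
def get_image_id(product: str) -> str:
    # Stand-in for the real lookup; pretend Docker returned trailing whitespace.
    return f"{product}-image-id\n"


product_names = ["opensearch", "opensearch-dashboards"]

# Map each requested project to its pulled image id, stripping stray whitespace,
# mirroring the zip/strip steps in download_artifacts().
image_ids = {name: get_image_id(name).strip() for name in product_names}

major_version_number = "2"
# One (search, replace) pair per project, mirroring the comprehension in run_container().
replacements = [
    (f"opensearchproject/{key}:{major_version_number}", value)
    for key, value in image_ids.items()
]
print(replacements)
```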
2 changes: 1 addition & 1 deletion src/validation_workflow/download_utils.py
@@ -17,7 +17,7 @@ class DownloadUtils:
@staticmethod
def is_url_valid(url: str) -> bool:
response = requests.head(url)
status = bool(response.status_code == 200)
status = bool(response.status_code in [200, 302])
return status

@staticmethod
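The one-line change to `is_url_valid` widens the accepted status codes from `200` to `200` or `302`. A small usage sketch, assuming network access and an illustrative artifact URL: `requests.head` does not follow redirects by default, so a link that answers with a `302` redirect would have failed the old equality check even though the artifact is reachable.

```python
# Usage sketch of the relaxed status check (URL below is an example, not from the PR).
import requests


def is_url_valid(url: str) -> bool:
    # HEAD request; requests does not follow redirects for head() by default,
    # so staging/CI links that answer 302 still count as valid.
    response = requests.head(url)
    return response.status_code in [200, 302]


example_url = "https://artifacts.opensearch.org/releases/bundle/opensearch/2.10.0/opensearch-2.10.0-linux-x64.tar.gz"
print(is_url_valid(example_url))
```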
33 changes: 19 additions & 14 deletions src/validation_workflow/rpm/validation_rpm.py
@@ -21,36 +21,40 @@ class ValidateRpm(Validation, DownloadUtils):

def __init__(self, args: ValidationArgs) -> None:
super().__init__(args)
self.base_url = "https://artifacts.opensearch.org/releases/bundle/"
self.base_url_production = "https://artifacts.opensearch.org/releases/bundle/"
self.base_url_staging = "https://ci.opensearch.org/ci/dbc/distribution-build-"
self.tmp_dir = TemporaryDirectory()

def download_artifacts(self) -> bool:
architectures = ["x64", "arm64"]
isFilePathEmpty = bool(self.args.file_path)
for project in self.args.projects:
for architecture in architectures:
url = f"{self.base_url}{project}/{self.args.version}/{project}-{self.args.version}-linux-{architecture}.rpm"
if ValidateRpm.is_url_valid(url) and ValidateRpm.download(url, self.tmp_dir):
logging.info(f"Valid URL - {url} and Download Successful !")
if (isFilePathEmpty):
if ("https:" not in self.args.file_path.get(project)):
self.copy_artifact(self.args.file_path.get(project), str(self.tmp_dir.path))
else:
logging.info(f"Invalid URL - {url}")
raise Exception(f"Invalid url - {url}")
self.check_url(self.args.file_path.get(project))
else:
if (self.args.artifact_type == "staging"):
self.args.file_path[project] = f"{self.base_url_staging}{project}/{self.args.version}/{self.args.build_number[project]}/linux/{self.args.arch}/{self.args.distribution}/dist/{project}/{project}-{self.args.version}-linux-{self.args.arch}.rpm" # noqa: E501
else:
self.args.file_path[project] = f"{self.base_url_production}{project}/{self.args.version}/{project}-{self.args.version}-linux-{self.args.arch}.rpm"
self.check_url(self.args.file_path.get(project))
return True

def installation(self) -> bool:
try:
execute('sudo rpm --import https://artifacts.opensearch.org/publickeys/opensearch.pgp', str(self.tmp_dir.path), True, False)
for project in self.args.projects:
self.filename = os.path.basename(self.args.file_path.get(project))
execute(f'sudo yum remove {project} -y', ".")
path = os.path.join(self.tmp_dir.path, project + "-" + self.args.version + "-linux-" + self.args.arch + ".rpm")
execute(f'sudo rpm -ivh {path}', str(self.tmp_dir.path), True, False)
execute(f'sudo rpm -ivh {os.path.join(self.tmp_dir.path, self.filename)}', str(self.tmp_dir.path), True, False)
except:
raise Exception('Failed to install Opensearch')
return True

def start_cluster(self) -> bool:
try:
for project in self.args.projects:
execute(f'sudo systemctl enable {project}', ".")
execute(f'sudo systemctl start {project}', ".")
time.sleep(20)
(stdout, stderr, status) = execute(f'sudo systemctl status {project}', ".")
@@ -64,7 +68,7 @@ def start_cluster(self) -> bool:
return True

def validation(self) -> bool:
test_result, counter = ApiTestCases().test_cases()
test_result, counter = ApiTestCases().test_apis(self.args.projects)
if (test_result):
logging.info(f'All tests Pass : {counter}')
return True
@@ -75,6 +79,7 @@ def cleanup(self) -> bool:
try:
for project in self.args.projects:
execute(f'sudo systemctl stop {project}', ".")
except:
raise Exception('Failed to Stop Cluster')
execute(f'sudo yum remove {project} -y', ".")
except Exception as e:
raise Exception(f'Exception occurred either while attempting to stop cluster or removing OpenSearch/OpenSearch-Dashboards. {str(e)}')
return True
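The RPM workflow now derives a single artifact URL from the run options instead of probing both architectures. A hedged sketch of that selection logic, factored into a hypothetical `resolve_rpm_url` helper; the parameter names mirror the `ValidationArgs` fields referenced in the diff (`version`, `arch`, `distribution`, `build_number`), and the sample values are illustrative only.

```python
# Hypothetical helper mirroring the staging/production URL branches in download_artifacts().
BASE_URL_PRODUCTION = "https://artifacts.opensearch.org/releases/bundle/"
BASE_URL_STAGING = "https://ci.opensearch.org/ci/dbc/distribution-build-"


def resolve_rpm_url(project: str, version: str, arch: str, distribution: str,
                    build_number: str, artifact_type: str) -> str:
    if artifact_type == "staging":
        # Staging RPMs live under the distribution-build CI location and need a build number.
        return (f"{BASE_URL_STAGING}{project}/{version}/{build_number}/"
                f"linux/{arch}/{distribution}/dist/{project}/"
                f"{project}-{version}-linux-{arch}.rpm")
    # Production RPMs are published under the release bundle location.
    return f"{BASE_URL_PRODUCTION}{project}/{version}/{project}-{version}-linux-{arch}.rpm"


print(resolve_rpm_url("opensearch", "2.10.0", "x64", "rpm", "8905", "staging"))
print(resolve_rpm_url("opensearch", "2.10.0", "x64", "rpm", "8905", "production"))
```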
41 changes: 24 additions & 17 deletions src/validation_workflow/tar/validation_tar.py
@@ -22,45 +22,51 @@ class ValidateTar(Validation, DownloadUtils):

def __init__(self, args: ValidationArgs) -> None:
super().__init__(args)
self.base_url = "https://artifacts.opensearch.org/releases/bundle/"
self.base_url_production = "https://artifacts.opensearch.org/releases/bundle/"
self.base_url_staging = "https://ci.opensearch.org/ci/dbc/distribution-build-"
self.tmp_dir = TemporaryDirectory()
self.os_process = Process()
self.osd_process = Process()

def download_artifacts(self) -> bool:
architectures = ["x64", "arm64"]
isFilePathEmpty = bool(self.args.file_path)
for project in self.args.projects:
for architecture in architectures:
url = f"{self.base_url}{project}/{self.args.version}/{project}-{self.args.version}-linux-{architecture}.tar.gz"
if ValidateTar.is_url_valid(url) and ValidateTar.download(url, self.tmp_dir):
logging.info(f"Valid URL - {url} and Download Successful !")
if (isFilePathEmpty):
if ("https:" not in self.args.file_path.get(project)):
self.copy_artifact(self.args.file_path.get(project), str(self.tmp_dir.path))
else:
logging.info(f"Invalid URL - {url}")
raise Exception(f"Invalid url - {url}")
self.check_url(self.args.file_path.get(project))
else:
if (self.args.artifact_type == "staging"):
self.args.file_path[project] = f"{self.base_url_staging}{project}/{self.args.version}/{self.args.build_number[project]}/linux/{self.args.arch}/{self.args.distribution}/dist/{project}/{project}-{self.args.version}-linux-{self.args.arch}.tar.gz" # noqa: E501
else:
self.args.file_path[project] = f"{self.base_url_production}{project}/{self.args.version}/{project}-{self.args.version}-linux-{self.args.arch}.tar.gz"
self.check_url(self.args.file_path.get(project))
return True

def installation(self) -> bool:
try:
for project in self.args.projects:
fileName = f"{project}-{self.args.version}-{self.args.platform}-{self.args.arch}"
execute('tar -zxf ' + os.path.join(str(self.tmp_dir.path), fileName) + '.tar.gz -C ' + str(self.tmp_dir.path), ".", True, False)
self.filename = os.path.basename(self.args.file_path.get(project))
execute('mkdir ' + os.path.join(self.tmp_dir.path, project) + ' | tar -xzf ' + os.path.join(str(self.tmp_dir.path), self.filename) + ' -C ' + os.path.join(self.tmp_dir.path, project) + ' --strip-components=1', ".", True, False) # noqa: E501
except:
raise Exception('Failed to Install Opensearch')
raise Exception('Failed to install Opensearch')
return True

def start_cluster(self) -> bool:
try:
self.os_process.start(os.path.join(self.tmp_dir.path, "opensearch-" + self.args.version, "opensearch-tar-install.sh"), ".")
self.os_process.start(os.path.join(self.tmp_dir.path, "opensearch", "opensearch-tar-install.sh"), ".")
time.sleep(85)
if ("opensearch-dashboards" in self.args.projects):
self.osd_process.start(os.path.join(str(self.tmp_dir.path), "opensearch-dashboards", "bin", "opensearch-dashboards"), ".")
time.sleep(20)
logging.info('Started cluster')
self.osd_process.start(os.path.join(str(self.tmp_dir.path), "opensearch-dashboards-" + self.args.version, "bin", "opensearch-dashboards"), ".")
time.sleep(20)
except:
raise Exception('Failed to Start Cluster')
return True

def validation(self) -> bool:
test_result, counter = ApiTestCases().test_cases()
test_result, counter = ApiTestCases().test_apis(self.args.projects)
if (test_result):
logging.info(f'All tests Pass : {counter}')
else:
@@ -70,7 +76,8 @@ def validation(self) -> bool:
def cleanup(self) -> bool:
try:
self.os_process.terminate()
self.osd_process.terminate()
if ("opensearch-dashboards" in self.args.projects):
self.osd_process.terminate()
except:
raise Exception('Failed to Stop Cluster')
raise Exception('Failed to terminate the processes that started OpenSearch and OpenSearch-Dashboards')
return True
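In the tar workflow, `installation` now unpacks each artifact into a per-project directory with the leading path component stripped, so later steps can rely on stable paths such as `<tmp>/opensearch` instead of version-specific folder names. The sketch below assembles that shell command for illustration only; `build_extract_command` is hypothetical, and the `mkdir ... | tar ...` chaining simply mirrors the command string built in the diff.

```python
# Illustrative assembly of the per-project extraction command (not executed here).
import os


def build_extract_command(tmp_dir: str, project: str, filename: str) -> str:
    target = os.path.join(tmp_dir, project)       # e.g. /tmp/validate/opensearch
    archive = os.path.join(tmp_dir, filename)     # downloaded or copied tarball
    # --strip-components=1 drops the versioned top-level folder inside the archive.
    return f"mkdir {target} | tar -xzf {archive} -C {target} --strip-components=1"


print(build_extract_command("/tmp/validate", "opensearch",
                            "opensearch-2.10.0-linux-x64.tar.gz"))
```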
23 changes: 20 additions & 3 deletions src/validation_workflow/validation.py
@@ -6,26 +6,43 @@
# compatible open source license.


import logging
import shutil
from abc import ABC, abstractmethod
from typing import Any

from validation_workflow.download_utils import DownloadUtils
from validation_workflow.validation_args import ValidationArgs


class Validation(ABC):
"""
Abstract class for all types of artifact validation
Abstract class for all types of artifact validation
"""

def __init__(self, args: ValidationArgs) -> None:
super().__init__()
self.args = args

def check_url(self, url: str) -> bool:
if DownloadUtils().download(url, self.tmp_dir) and DownloadUtils().is_url_valid(url): # type: ignore
logging.info(f"Valid URL - {url} and Download Successful !")
return True
else:
raise Exception(f"Invalid url - {url}")

def copy_artifact(self, filepath: str, tempdir_path: str) -> bool:
if filepath:
shutil.copy2(filepath, tempdir_path)
return True
else:
raise Exception("Provided path for local artifacts does not exist")

def run(self) -> Any:
try:
return self.download_artifacts() and self.installation() and self.start_cluster() and self.validation() and self.cleanup()
except Exception:
return False
except Exception as e:
raise Exception(f'An error occurred while running the validation tests: {str(e)}')

@abstractmethod
def download_artifacts(self) -> bool:
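The base `Validation` class gains shared `check_url`/`copy_artifact` helpers, and `run()` now re-raises failures with context instead of silently returning `False`. Below is a minimal, self-contained sketch of that template-method flow, assuming a toy subclass rather than one of the real distribution validators: each stage returns a boolean, `and` short-circuits on the first failing stage, and exceptions surface with the added message.

```python
# Minimal sketch of the run() template-method chain (toy classes, not the real workflow).
from abc import ABC, abstractmethod


class MiniValidation(ABC):
    def run(self) -> bool:
        try:
            # Short-circuits: a stage returning False skips the remaining stages.
            return (self.download_artifacts() and self.installation()
                    and self.start_cluster() and self.validation() and self.cleanup())
        except Exception as e:
            # Mirrors the new behavior of re-raising with context.
            raise Exception(f"An error occurred while running the validation tests: {e}")

    @abstractmethod
    def download_artifacts(self) -> bool: ...
    @abstractmethod
    def installation(self) -> bool: ...
    @abstractmethod
    def start_cluster(self) -> bool: ...
    @abstractmethod
    def validation(self) -> bool: ...
    @abstractmethod
    def cleanup(self) -> bool: ...


class DummyValidation(MiniValidation):
    def download_artifacts(self) -> bool: return True
    def installation(self) -> bool: return True
    def start_cluster(self) -> bool: return True
    def validation(self) -> bool: return True
    def cleanup(self) -> bool: return True


print(DummyValidation().run())  # True when every stage succeeds
```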