diff --git a/backend/app/api/v1/commons/hce.py b/backend/app/api/v1/commons/hce.py index cbef5192..a9b66e7a 100644 --- a/backend/app/api/v1/commons/hce.py +++ b/backend/app/api/v1/commons/hce.py @@ -3,23 +3,27 @@ from app.services.search import ElasticService -async def getData(start_datetime: date, end_datetime: date, configpath: str): +async def getData( + start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str +): query = { "query": {"bool": {"filter": {"range": {"date": {"format": "yyyy-MM-dd"}}}}} } - es = ElasticService(configpath=configpath) response = await es.post( query=query, + size=size, start_date=start_datetime, end_date=end_datetime, timestamp_field="date", ) await es.close() - tasks = [item["_source"] for item in response] + tasks = [item["_source"] for item in response["data"]] jobs = pd.json_normalize(tasks) + if len(jobs) == 0: + return {"data": jobs, "total": response["total"]} + jobs[["group"]] = jobs[["group"]].fillna(0) jobs.fillna("", inplace=True) - if len(jobs) == 0: - return jobs - return jobs + + return {"data": jobs, "total": response["total"]} diff --git a/backend/app/api/v1/commons/ocm.py b/backend/app/api/v1/commons/ocm.py index e9d6f588..35179bf2 100644 --- a/backend/app/api/v1/commons/ocm.py +++ b/backend/app/api/v1/commons/ocm.py @@ -3,27 +3,32 @@ from app.services.search import ElasticService -async def getData(start_datetime: date, end_datetime: date, configpath: str): +async def getData( + start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str +): query = { + "size": size, + "from": offset, "query": { "bool": { "filter": {"range": {"metrics.earliest": {"format": "yyyy-MM-dd"}}} } - } + }, } es = ElasticService(configpath=configpath) response = await es.post( query=query, + size=size, start_date=start_datetime, end_date=end_datetime, timestamp_field="metrics.earliest", ) await es.close() - tasks = [item["_source"] for item in response] + tasks = [item["_source"] for item in response["data"]] jobs = pd.json_normalize(tasks) if len(jobs) == 0: - return jobs + return {"data": jobs, "total": response["total"]} if "buildUrl" not in jobs.columns: jobs.insert(len(jobs.columns), "buildUrl", "") @@ -31,7 +36,7 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str): jobs.insert(len(jobs.columns), "ciSystem", "") jobs.fillna("", inplace=True) jobs["jobStatus"] = jobs.apply(convertJobStatus, axis=1) - return jobs + return {"data": jobs, "total": response["total"]} def fillCiSystem(row): diff --git a/backend/app/api/v1/commons/ocp.py b/backend/app/api/v1/commons/ocp.py index 759eabe7..1eaa3c54 100644 --- a/backend/app/api/v1/commons/ocp.py +++ b/backend/app/api/v1/commons/ocp.py @@ -4,25 +4,30 @@ from app.services.search import ElasticService -async def getData(start_datetime: date, end_datetime: date, configpath: str): +async def getData( + start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str +): query = { + "size": size, + "from": offset, "query": { "bool": {"filter": {"range": {"timestamp": {"format": "yyyy-MM-dd"}}}} - } + }, } es = ElasticService(configpath=configpath) response = await es.post( query=query, + size=size, start_date=start_datetime, end_date=end_datetime, timestamp_field="timestamp", ) await es.close() - tasks = [item["_source"] for item in response] + tasks = [item["_source"] for item in response["data"]] jobs = pd.json_normalize(tasks) if len(jobs) == 0: - return jobs + return {"data": jobs, "total": response["total"]} jobs[ 
["masterNodesCount", "workerNodesCount", "infraNodesCount", "totalNodesCount"] @@ -52,7 +57,7 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str): jbs = cleanJobs jbs["shortVersion"] = jbs["ocpVersion"].str.slice(0, 4) - return jbs + return {"data": jbs, "total": response["total"]} def fillEncryptionType(row): diff --git a/backend/app/api/v1/commons/quay.py b/backend/app/api/v1/commons/quay.py index acee5649..71deeb8f 100644 --- a/backend/app/api/v1/commons/quay.py +++ b/backend/app/api/v1/commons/quay.py @@ -4,11 +4,15 @@ from app.services.search import ElasticService -async def getData(start_datetime: date, end_datetime: date, configpath: str): +async def getData( + start_datetime: date, end_datetime: date, size, offset, configpath: str +): query = { + "size": size, + "from": offset, "query": { "bool": {"filter": {"range": {"timestamp": {"format": "yyyy-MM-dd"}}}} - } + }, } es = ElasticService(configpath=configpath) @@ -19,10 +23,10 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str): timestamp_field="timestamp", ) await es.close() - tasks = [item["_source"] for item in response] + tasks = [item["_source"] for item in response["data"]] jobs = pd.json_normalize(tasks) if len(jobs) == 0: - return jobs + return {"data": jobs, "total": response["total"]} jobs[ ["masterNodesCount", "workerNodesCount", "infraNodesCount", "totalNodesCount"] @@ -38,4 +42,6 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str): jobs["build"] = jobs.apply(utils.getBuild, axis=1) jobs["shortVersion"] = jobs["ocpVersion"].str.slice(0, 4) - return jobs[jobs["platform"] != ""] + cleanJobs = jobs[jobs["platform"] != ""] + + return {"data": cleanJobs, "total": response["total"]} diff --git a/backend/app/api/v1/commons/telco.py b/backend/app/api/v1/commons/telco.py index e9d47ac4..b55e746f 100644 --- a/backend/app/api/v1/commons/telco.py +++ b/backend/app/api/v1/commons/telco.py @@ -8,7 +8,9 @@ import app.api.v1.endpoints.telco.telcoGraphs as telcoGraphs -async def getData(start_datetime: date, end_datetime: date, configpath: str): +async def getData( + start_datetime: date, end_datetime: date, size: int, offset: int, configpath: str +): test_types = [ "oslat", "cyclictest", @@ -41,10 +43,12 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str): ['test_type="{}"'.format(test_type) for test_type in test_types] ) splunk = SplunkService(configpath=configpath) - response = await splunk.query(query=query, searchList=searchList) + response = await splunk.query( + query=query, size=size, offset=offset, searchList=searchList + ) mapped_list = [] - for each_response in response: + for each_response in response["data"]: end_timestamp = int(each_response["timestamp"]) test_data = each_response["data"] threshold = await telcoGraphs.process_json(test_data, True) @@ -83,7 +87,5 @@ async def getData(start_datetime: date, end_datetime: date, configpath: str): ) jobs = pd.json_normalize(mapped_list) - if len(jobs) == 0: - return jobs - return jobs + return {"data": jobs, "total": response["total"]} diff --git a/backend/app/api/v1/commons/utils.py b/backend/app/api/v1/commons/utils.py index 517cc420..ddcef9d2 100644 --- a/backend/app/api/v1/commons/utils.py +++ b/backend/app/api/v1/commons/utils.py @@ -7,7 +7,7 @@ async def getMetadata(uuid: str, configpath: str): es = ElasticService(configpath=configpath) response = await es.post(query=query) await es.close() - meta = [item["_source"] for item in response] + meta = 
[item["_source"] for item in response["data"]] return meta[0] diff --git a/backend/app/api/v1/endpoints/cpt/cptJobs.py b/backend/app/api/v1/endpoints/cpt/cptJobs.py index 6c9a4089..3c9839cb 100644 --- a/backend/app/api/v1/endpoints/cpt/cptJobs.py +++ b/backend/app/api/v1/endpoints/cpt/cptJobs.py @@ -28,7 +28,7 @@ @router.get( "/api/v1/cpt/jobs", summary="Returns a job list from all the products.", - description="Returns a list of jobs in the specified dates. \ + description="Returns a list of jobs in the specified dates of requested size \ If not dates are provided the API will default the values. \ `startDate`: will be set to the day of the request minus 5 days.\ `endDate`: will be set to the day of the request.", @@ -48,7 +48,10 @@ async def jobs( description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"], ), - pretty: bool = Query(False, description="Output contet in pretty format."), + pretty: bool = Query(False, description="Output content in pretty format."), + size: int = Query(None, description="Number of jobs to fetch"), + offset: int = Query(None, description="Offset Number to fetch jobs from"), + totalJobs: int = Query(None, description="Total number of jobs"), ): if start_date is None: start_date = datetime.utcnow().date() @@ -66,23 +69,40 @@ async def jobs( ) results_df = pd.DataFrame() + total_dict = {} + total = 0 with ProcessPoolExecutor(max_workers=cpu_count()) as executor: futures = { - executor.submit(fetch_product, product, start_date, end_date): product + executor.submit( + fetch_product, product, start_date, end_date, size, offset + ): product for product in products } for future in as_completed(futures): product = futures[future] try: result = future.result() - results_df = pd.concat([results_df, result]) + total_dict[product] = result["total"] + results_df = pd.concat([results_df, result["data"]]) except Exception as e: print(f"Error fetching data for product {product}: {e}") + jobsCount = totalJobs + # The total is determined by summing the counts of all products and is included in the response. + # However, during pagination, if the count of any product drops to zero, + # the total becomes lower than the actual value, which is undesirable. 
+ + # on the first request, totalJobs is unset (0 from the UI, None if omitted) + if not totalJobs: + for product in total_dict: + total += int(total_dict[product]) + jobsCount = total response = { "startDate": start_date.__str__(), "endDate": end_date.__str__(), "results": results_df.to_dict("records"), + "total": jobsCount, + "offset": offset + size, } if pretty: @@ -93,28 +113,33 @@ async def jobs( return jsonstring -async def fetch_product_async(product, start_date, end_date): +async def fetch_product_async(product, start_date, end_date, size, offset): try: - df = await products[product](start_date, end_date) - return ( - df.loc[ - :, - [ - "ciSystem", - "uuid", - "releaseStream", - "jobStatus", - "buildUrl", - "startDate", - "endDate", - "product", - "version", - "testName", - ], - ] - if len(df) != 0 - else df - ) + response = await products[product](start_date, end_date, size, offset) + if response: + df = response["data"] + return { + "data": ( + df.loc[ + :, + [ + "ciSystem", + "uuid", + "releaseStream", + "jobStatus", + "buildUrl", + "startDate", + "endDate", + "product", + "version", + "testName", + ], + ] + if len(df) != 0 + else df + ), + "total": response["total"], + } except ConnectionError: print("Connection Error in mapper for product " + product) except Exception as e: @@ -122,5 +147,36 @@ async def fetch_product_async(product, start_date, end_date): return pd.DataFrame() -def fetch_product(product, start_date, end_date): - return asyncio.run(fetch_product_async(product, start_date, end_date)) +def fetch_product(product, start_date, end_date, size, offset): + return asyncio.run(fetch_product_async(product, start_date, end_date, size, offset)) + + +def is_requested_size_available(total_count, offset, requested_size): + """ + Check if the requested size of data is available starting from a given offset. + + Args: + total_count (int): Total number of available records. + offset (int): The starting position in the dataset. + requested_size (int): The number of records requested. + + Returns: + bool: True if the requested size is available, False otherwise. + """ + return (offset + requested_size) <= total_count + + +def calculate_remaining_data(total_count, offset, requested_size): + """ + Calculate the remaining number of data items that can be fetched based on the requested size. + + Args: + total_count (int): Total number of available records. + offset (int): The starting position in the dataset. + requested_size (int): The number of records requested. + + Returns: + int: The number of records that can be fetched, which may be less than or equal to requested_size.
+ """ + available_data = total_count - offset # Data available from the offset + return min(available_data, requested_size) diff --git a/backend/app/api/v1/endpoints/cpt/maps/hce.py b/backend/app/api/v1/endpoints/cpt/maps/hce.py index 7f9a8636..3c812d0d 100644 --- a/backend/app/api/v1/endpoints/cpt/maps/hce.py +++ b/backend/app/api/v1/endpoints/cpt/maps/hce.py @@ -1,10 +1,10 @@ from ....commons.hce import getData from datetime import date - +import pandas as pd ################################################################ -# This will return a DataFrame from HCE required by the CPT -# endpoint, it contians the following columns: +# This will return a Dictionary from HCE required by the CPT +# endpoint, it contians totalJobs and a Dataframe with the following columns: # "ciSystem" # "uuid" # "releaseStream" @@ -16,26 +16,33 @@ # "version" # "testName" ################################################################ -async def hceMapper(start_datetime: date, end_datetime: date): - df = await getData(start_datetime, end_datetime, f"hce.elasticsearch") - if len(df) == 0: - return df - df["releaseStream"] = "Nightly" - df["ciSystem"] = "Jenkins" - df["testName"] = df["product"] + ":" + df["test"] - df["product"] = df["group"] - df["jobStatus"] = df["result"].apply( - lambda x: "SUCCESS" if x == "PASS" else "FAILURE" - ) - df["version"] = df["version"].apply( - lambda x: x if len(x.split(":")) == 1 else x.split(":")[1][:7] + + +async def hceMapper(start_datetime: date, end_datetime: date, size: int, offset: int): + response = await getData( + start_datetime, end_datetime, size, offset, f"hce.elasticsearch" ) - df["uuid"] = df["result_id"] - df["buildUrl"] = df["link"] - df["startDate"] = df["date"] - df["endDate"] = df["date"] - df = dropColumns(df) - return df + if response: + df = response["data"] + if len(df) == 0: + return df + df["releaseStream"] = "Nightly" + df["ciSystem"] = "Jenkins" + df["testName"] = df["product"] + ":" + df["test"] + df["product"] = df["group"] + df["jobStatus"] = df["result"].apply( + lambda x: "SUCCESS" if x == "PASS" else "FAILURE" + ) + df["version"] = df["version"].apply( + lambda x: x if len(x.split(":")) == 1 else x.split(":")[1][:7] + ) + df["uuid"] = df["result_id"] + df["buildUrl"] = df["link"] + df["startDate"] = df["date"] + df["endDate"] = df["date"] + df = dropColumns(df) + return {"data": df, "total": response["total"]} + return {"data": pd.DataFrame(), "total": 0} def dropColumns(df): diff --git a/backend/app/api/v1/endpoints/cpt/maps/ocm.py b/backend/app/api/v1/endpoints/cpt/maps/ocm.py index 3f38bfd0..6d8a2821 100644 --- a/backend/app/api/v1/endpoints/cpt/maps/ocm.py +++ b/backend/app/api/v1/endpoints/cpt/maps/ocm.py @@ -1,18 +1,23 @@ from ....commons.ocm import getData from datetime import date +import pandas as pd ################################################################ # This will return a DataFrame from OCM required by the CPT endpoint ################################################################ -async def ocmMapper(start_datetime: date, end_datetime: date): - df = await getData(start_datetime, end_datetime, f"ocm.elasticsearch") - if len(df) == 0: - return df - df.insert(len(df.columns), "product", "ocm") - df.insert(len(df.columns), "releaseStream", "Nightly") - df["testName"] = df["attack"] - df["startDate"] = df["metrics.earliest"] - df["endDate"] = df["metrics.end"] - - return df +async def ocmMapper(start_datetime: date, end_datetime: date, size: int, offset: int): + response = await getData( + start_datetime, end_datetime, 
size, offset, f"ocm.elasticsearch" + ) + if not isinstance(response, pd.DataFrame) and response: + df = response["data"] + if len(df) == 0: + return df + df.insert(len(df.columns), "product", "ocm") + df.insert(len(df.columns), "releaseStream", "Nightly") + df["testName"] = df["attack"] + df["startDate"] = df["metrics.earliest"] + df["endDate"] = df["metrics.end"] + return {"data": df, "total": response["total"]} + return {"data": pd.DataFrame(), "total": 0} diff --git a/backend/app/api/v1/endpoints/cpt/maps/ocp.py b/backend/app/api/v1/endpoints/cpt/maps/ocp.py index af20220b..8d5c86d5 100644 --- a/backend/app/api/v1/endpoints/cpt/maps/ocp.py +++ b/backend/app/api/v1/endpoints/cpt/maps/ocp.py @@ -1,17 +1,23 @@ from ....commons.ocp import getData from ....commons.utils import getReleaseStream from datetime import date +import pandas as pd ################################################################ # This will return a DataFrame from OCP required by the CPT endpoint ################################################################ -async def ocpMapper(start_datetime: date, end_datetime: date): - df = await getData(start_datetime, end_datetime, f"ocp.elasticsearch") - if len(df) == 0: - return df - df.insert(len(df.columns), "product", "ocp") - df["releaseStream"] = df.apply(getReleaseStream, axis=1) - df["version"] = df["shortVersion"] - df["testName"] = df["benchmark"] - return df +async def ocpMapper(start_datetime: date, end_datetime: date, size: int, offset: int): + response = await getData( + start_datetime, end_datetime, size, offset, f"ocp.elasticsearch" + ) + if not isinstance(response, pd.DataFrame) and response: + df = response["data"] + if len(df) == 0: + return df + df.insert(len(df.columns), "product", "ocp") + df["releaseStream"] = df.apply(getReleaseStream, axis=1) + df["version"] = df["shortVersion"] + df["testName"] = df["benchmark"] + return {"data": df, "total": response["total"]} + return {"data": pd.DataFrame(), "total": response["total"]} diff --git a/backend/app/api/v1/endpoints/cpt/maps/quay.py b/backend/app/api/v1/endpoints/cpt/maps/quay.py index c0c61e8f..dceb0e14 100644 --- a/backend/app/api/v1/endpoints/cpt/maps/quay.py +++ b/backend/app/api/v1/endpoints/cpt/maps/quay.py @@ -1,15 +1,21 @@ from ....commons.quay import getData from datetime import date +import pandas as pd -##################################################################### -# This will return a DataFrame from Quay required by the CPT endpoint -##################################################################### -async def quayMapper(start_datetime: date, end_datetime: date): - df = await getData(start_datetime, end_datetime, f"quay.elasticsearch") - if len(df) == 0: - return df - df.insert(len(df.columns), "product", "quay") - df["version"] = df["releaseStream"] - df["testName"] = df["benchmark"] - return df +##################################################################################### +# This will return a DataFrame from Quay required by the CPT endpoint with Total jobs +##################################################################################### +async def quayMapper(start_datetime: date, end_datetime: date, size: int, offset: int): + response = await getData( + start_datetime, end_datetime, size, offset, f"quay.elasticsearch" + ) + if not isinstance(response, pd.DataFrame) and response: + df = response["data"] + if len(df) == 0: + return df + df.insert(len(df.columns), "product", "quay") + df["version"] = df["releaseStream"] + df["testName"] = df["benchmark"] + return 
{"data": df, "total": response["total"]} + return {"data": pd.DataFrame(), "total": 0} diff --git a/backend/app/api/v1/endpoints/cpt/maps/telco.py b/backend/app/api/v1/endpoints/cpt/maps/telco.py index e169d410..39d7c21a 100644 --- a/backend/app/api/v1/endpoints/cpt/maps/telco.py +++ b/backend/app/api/v1/endpoints/cpt/maps/telco.py @@ -1,17 +1,23 @@ from ....commons.telco import getData from ....commons.utils import getReleaseStream from datetime import date +import pandas as pd ##################################################################### # This will return a DataFrame from Telco required by the CPT endpoint ##################################################################### -async def telcoMapper(start_datetime: date, end_datetime: date): - df = await getData(start_datetime, end_datetime, f"telco.splunk") - if len(df) == 0: - return df - df.insert(len(df.columns), "product", "telco") - df["releaseStream"] = df.apply(getReleaseStream, axis=1) - df["version"] = df["shortVersion"] - df["testName"] = df["benchmark"] - return df +async def telcoMapper(start_datetime: date, end_datetime: date, size: int, offset: int): + response = await getData( + start_datetime, end_datetime, size, offset, f"telco.splunk" + ) + if not isinstance(response, pd.DataFrame) and response: + df = response["data"] + if len(df) == 0: + return df + df.insert(len(df.columns), "product", "telco") + df["releaseStream"] = df.apply(getReleaseStream, axis=1) + df["version"] = df["shortVersion"] + df["testName"] = df["benchmark"] + return {"data": df, "total": response["total"]} + return {"data": pd.DataFrame(), "total": 0} diff --git a/backend/app/api/v1/endpoints/ocm/ocmJobs.py b/backend/app/api/v1/endpoints/ocm/ocmJobs.py index de89285a..bbcc39c7 100644 --- a/backend/app/api/v1/endpoints/ocm/ocmJobs.py +++ b/backend/app/api/v1/endpoints/ocm/ocmJobs.py @@ -1,7 +1,7 @@ import json from fastapi import Response from datetime import datetime, timedelta, date -from fastapi import APIRouter +from fastapi import APIRouter, HTTPException from ...commons.ocm import getData from ...commons.example_responses import ocp_200_response, response_422 from fastapi.param_functions import Query @@ -32,7 +32,9 @@ async def jobs( description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"], ), - pretty: bool = Query(False, description="Output contet in pretty format."), + pretty: bool = Query(False, description="Output content in pretty format."), + size: int = Query(None, description="Number of jobs to fetch"), + offset: int = Query(None, description="Offset Number to fetch jobs from"), ): if start_date is None: start_date = datetime.utcnow().date() @@ -49,20 +51,27 @@ async def jobs( status_code=422, ) - results = await getData(start_date, end_date, "ocm.elasticsearch") + if offset and not size: + raise HTTPException(400, f"offset {offset} specified without size") + elif not offset and not size: + size = 10000 + offset = 0 + elif not offset: + offset = 0 - if len(results) >= 1: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": results.to_dict("records"), - } - else: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": [], - } + results = await getData(start_date, end_date, size, offset, "ocm.elasticsearch") + + jobs = [] + if "data" in results and len(results["data"]) >= 1: + jobs = results["data"].to_dict("records") + + response = { + "startDate": start_date.__str__(), + "endDate": end_date.__str__(), + 
"results": jobs, + "total": 0, + "offset": 0, + } if pretty: json_str = json.dumps(response, indent=4) diff --git a/backend/app/api/v1/endpoints/ocp/graph.py b/backend/app/api/v1/endpoints/ocp/graph.py index eb05252e..e5ad5a2a 100644 --- a/backend/app/api/v1/endpoints/ocp/graph.py +++ b/backend/app/api/v1/endpoints/ocp/graph.py @@ -245,7 +245,7 @@ async def jobSummary(uuids: list): es = ElasticService(configpath="ocp.elasticsearch", index=index) response = await es.post(query=query) await es.close() - runs = [item["_source"] for item in response] + runs = [item["_source"] for item in response["data"]] return runs @@ -374,7 +374,7 @@ async def getBurnerResults(uuid: str, uuids: list, index: str): es = ElasticService(configpath="ocp.elasticsearch", index=index) response = await es.post(query=query) await es.close() - runs = [item["_source"] for item in response] + runs = [item["_source"] for item in response["data"]] return runs @@ -388,7 +388,7 @@ async def getResults(uuid: str, uuids: list, index: str): es = ElasticService(configpath="ocp.elasticsearch", index=index) response = await es.post(query=query) await es.close() - runs = [item["_source"] for item in response] + runs = [item["_source"] for item in response["data"]] return runs @@ -443,7 +443,7 @@ async def getMatchRuns(meta: dict, workerCount: False): es = ElasticService(configpath="ocp.elasticsearch") response = await es.post(query=query) await es.close() - runs = [item["_source"] for item in response] + runs = [item["_source"] for item in response["data"]] uuids = [] for run in runs: uuids.append(run["uuid"]) diff --git a/backend/app/api/v1/endpoints/ocp/ocpJobs.py b/backend/app/api/v1/endpoints/ocp/ocpJobs.py index 561d4ea2..5637b29d 100644 --- a/backend/app/api/v1/endpoints/ocp/ocpJobs.py +++ b/backend/app/api/v1/endpoints/ocp/ocpJobs.py @@ -1,7 +1,7 @@ import json from fastapi import Response from datetime import datetime, timedelta, date -from fastapi import APIRouter +from fastapi import APIRouter, HTTPException from ...commons.ocp import getData from ...commons.example_responses import ocp_200_response, response_422 from fastapi.param_functions import Query @@ -32,7 +32,9 @@ async def jobs( description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"], ), - pretty: bool = Query(False, description="Output contet in pretty format."), + pretty: bool = Query(False, description="Output content in pretty format."), + size: int = Query(None, description="Number of jobs to fetch"), + offset: int = Query(None, description="Offset Number to fetch jobs from"), ): if start_date is None: start_date = datetime.utcnow().date() @@ -49,20 +51,26 @@ async def jobs( status_code=422, ) - results = await getData(start_date, end_date, "ocp.elasticsearch") + if offset and not size: + raise HTTPException(400, f"offset {offset} specified without size") + elif not offset and not size: + size = 10000 + offset = 0 + elif not offset: + offset = 0 - if len(results) >= 1: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": results.to_dict("records"), - } - else: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": [], - } + results = await getData(start_date, end_date, size, offset, "ocp.elasticsearch") + jobs = [] + if "data" in results and len(results["data"]) >= 1: + jobs = results["data"].to_dict("records") + + response = { + "startDate": start_date.__str__(), + "endDate": end_date.__str__(), + "results": jobs, + "total": 
results["total"], + "offset": offset + size, + } if pretty: json_str = json.dumps(response, indent=4) diff --git a/backend/app/api/v1/endpoints/quay/quayJobs.py b/backend/app/api/v1/endpoints/quay/quayJobs.py index b141b107..be05d074 100644 --- a/backend/app/api/v1/endpoints/quay/quayJobs.py +++ b/backend/app/api/v1/endpoints/quay/quayJobs.py @@ -1,7 +1,7 @@ import json from fastapi import Response from datetime import datetime, timedelta, date -from fastapi import APIRouter +from fastapi import APIRouter, HTTPException from ...commons.quay import getData from ...commons.example_responses import quay_200_response, response_422 from fastapi.param_functions import Query @@ -12,7 +12,7 @@ @router.get( "/api/v1/quay/jobs", summary="Returns a job list", - description="Returns a list of jobs in the specified dates. \ + description="Returns a list of jobs in the specified dates of requested size. \ If not dates are provided the API will default the values. \ `startDate`: will be set to the day of the request minus 5 days.\ `endDate`: will be set to the day of the request.", @@ -32,7 +32,9 @@ async def jobs( description="End date for searching jobs, format: 'YYYY-MM-DD'", examples=["2020-11-15"], ), - pretty: bool = Query(False, description="Output contet in pretty format."), + pretty: bool = Query(False, description="Output content in pretty format."), + size: int = Query(None, description="Number of jobs to fetch"), + offset: int = Query(None, description="Offset Number to fetch jobs from"), ): if start_date is None: start_date = datetime.utcnow().date() @@ -49,20 +51,27 @@ async def jobs( status_code=422, ) - results = await getData(start_date, end_date, "quay.elasticsearch") + if offset and not size: + raise HTTPException(400, f"offset {offset} specified without size") + elif not offset and not size: + size = 10000 + offset = 0 + elif not offset: + offset = 0 - if len(results) >= 1: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": results.to_dict("records"), - } - else: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": [], - } + results = await getData(start_date, end_date, size, offset, "quay.elasticsearch") + + jobs = [] + if "data" in results and len(results["data"]) >= 1: + jobs = results["data"].to_dict("records") + + response = { + "startDate": start_date.__str__(), + "endDate": end_date.__str__(), + "results": jobs, + "total": 0, + "offset": 0, + } if pretty: json_str = json.dumps(response, indent=4) diff --git a/backend/app/api/v1/endpoints/telco/telcoJobs.py b/backend/app/api/v1/endpoints/telco/telcoJobs.py index 32f6a041..c9a2d18f 100644 --- a/backend/app/api/v1/endpoints/telco/telcoJobs.py +++ b/backend/app/api/v1/endpoints/telco/telcoJobs.py @@ -12,7 +12,7 @@ @router.get( "/api/v1/telco/jobs", summary="Returns a job list", - description="Returns a list of jobs in the specified dates. \ + description="Returns a list of jobs in the specified dates of requested size. \ If not dates are provided the API will default the values. 
\ `startDate`: will be set to the day of the request minus 5 days.\ `endDate`: will be set to the day of the request.", @@ -33,6 +33,8 @@ async def jobs( examples=["2020-11-15"], ), pretty: bool = Query(False, description="Output content in pretty format."), + size: int = Query(None, description="Number of jobs to fetch"), + offset: int = Query(None, description="Offset Number to fetch jobs from"), ): if start_date is None: start_date = datetime.utcnow().date() @@ -49,20 +51,18 @@ async def jobs( status_code=422, ) - results = await getData(start_date, end_date, "telco.splunk") + results = await getData(start_date, end_date, size, offset, "telco.splunk") + jobs = [] + if len(results["data"]) >= 1: + jobs = results["data"].to_dict("records") - if len(results) >= 1: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": results.to_dict("records"), - } - else: - response = { - "startDate": start_date.__str__(), - "endDate": end_date.__str__(), - "results": [], - } + response = { + "startDate": start_date.__str__(), + "endDate": end_date.__str__(), + "results": jobs, + "total": results["total"], + "offset": 0, + } if pretty: json_str = json.dumps(response, indent=4) diff --git a/backend/app/services/search.py b/backend/app/services/search.py index 61a501b5..9f1837fb 100644 --- a/backend/app/services/search.py +++ b/backend/app/services/search.py @@ -80,7 +80,7 @@ async def post( today = datetime.today().date() seven_days_ago = today - timedelta(days=7) if start_date and start_date > seven_days_ago: - previous_results = [] + previous_results = {} else: new_end_date = ( min(end_date, seven_days_ago) @@ -100,17 +100,20 @@ async def post( body=jsonable_encoder(query), size=size, ) - previous_results = response["hits"]["hits"] + previous_results = { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } else: - previous_results = await self.scan_indices( - self.prev_es, - self.prev_index, - query, - timestamp_field, - start_date, - new_end_date, - size, + response = await self.prev_es.search( + index=self.prev_index + "*", + body=jsonable_encoder(query), + size=size, + ) + previous_results = { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } if self.prev_es and self.new_es: self.new_index = self.new_index_prefix + ( self.new_index if indice is None else indice ) @@ -138,18 +141,31 @@ async def post( body=jsonable_encoder(query), size=size, ) - new_results = response["hits"]["hits"] + new_results = { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } else: - new_results = await self.scan_indices( - self.new_es, - self.new_index, - query, - timestamp_field, - new_start_date, - end_date, - size, + response = await self.new_es.search( + index=self.new_index + "*", + body=jsonable_encoder(query), + size=size, ) - return await self.remove_duplicates(previous_results + new_results) + new_results = { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } + unique_data = await self.remove_duplicates( + (previous_results["data"] if ("data" in previous_results) else []) + + (new_results["data"] if ("data" in new_results) else []) + ) + totalVal = ( + previous_results["total"] if ("total" in previous_results) else 0 + ) + (new_results["total"] if ("total" in new_results) else 0) + return {"data": unique_data, "total": totalVal} else: if start_date and end_date: query["query"]["bool"]["filter"]["range"][timestamp_field][ "gte" ] = str(start_date) @@ -158,22 +174,15 @@ async def 
post( query["query"]["bool"]["filter"]["range"][timestamp_field][ "lte" ] = str(end_date) - return await self.scan_indices( - self.new_es, - self.new_index, - query, - timestamp_field, - start_date, - end_date, - size, - ) - else: response = await self.new_es.search( index=self.new_index + "*", body=jsonable_encoder(query), size=size, ) - return response["hits"]["hits"] + return { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } else: """Handles queries that do not have a timestamp field""" previous_results = [] @@ -186,15 +195,33 @@ async def post( body=jsonable_encoder(query), size=size, ) - previous_results = response["hits"]["hits"] + previous_results = { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } self.new_index = self.new_index_prefix + ( self.new_index if indice is None else indice ) response = await self.new_es.search( index=self.new_index + "*", body=jsonable_encoder(query), size=size ) - new_results = response["hits"]["hits"] - return await self.remove_duplicates(previous_results + new_results) + new_results = { + "data": response["hits"]["hits"], + "total": response["hits"]["total"]["value"], + } + + unique_data = await self.remove_duplicates( + previous_results["data"] + if ("data" in previous_results) + else [] + new_results["data"] if ("data" in new_results) else [] + ) + totalVal = ( + previous_results["total"] + if ("total" in previous_results) + else 0 + new_results["total"] if ("total" in new_results) else 0 + ) + + return {"data": unique_data, "total": totalVal} async def scan_indices( self, es_client, indice, query, timestamp_field, start_date, end_date, size @@ -207,7 +234,8 @@ async def scan_indices( for index in indices: sorted_index_list.insert( IndexTimestamp( - index, await self.get_timestamps(es_client, index, timestamp_field) + index, + await self.get_timestamps(es_client, index, timestamp_field, size), ) ) filtered_indices = sorted_index_list.get_indices_in_given_range( @@ -225,7 +253,9 @@ async def scan_indices( index=each_index.index, body=jsonable_encoder(query), size=size ) results.extend(response["hits"]["hits"]) - return await self.remove_duplicates(results) + total += response["hits"]["total"]["value"] + + return {"data": await self.remove_duplicates(results), "total": total} async def remove_duplicates(self, all_results): seen = set() @@ -239,7 +269,7 @@ async def remove_duplicates(self, all_results): seen.add(tuple(sorted(flat_doc.items()))) return filtered_results - async def get_timestamps(self, es_client, index, timestamp_field): + async def get_timestamps(self, es_client, index, timestamp_field, size): """Returns start and end timestamps of a index""" query = { "size": 0, diff --git a/backend/app/services/splunk.py b/backend/app/services/splunk.py index f8f07272..70b00263 100644 --- a/backend/app/services/splunk.py +++ b/backend/app/services/splunk.py @@ -1,6 +1,7 @@ import orjson from app import config from splunklib import client, results +import json class SplunkService: @@ -32,7 +33,9 @@ def __init__(self, configpath="", index=""): print(f"Error connecting to splunk: {e}") return None - async def query(self, query, searchList="", max_results=10000): + async def query( + self, query, searchList="", size=None, offset=None, max_results=10000 + ): """ Query data from splunk server using splunk lib sdk @@ -40,7 +43,8 @@ async def query(self, query, searchList="", max_results=10000): query (string): splunk query OPTIONAL: searchList (string): additional query parameters 
for index """ - query["count"] = max_results + query["count"] = size + query["offset"] = offset # If additional search parameters are provided, include those in searchindex searchindex = ( @@ -48,12 +52,40 @@ async def query(self, query, searchList="", max_results=10000): if searchList else "search index={}".format(self.indice) ) + + search_query = ( + "search index={} {} | stats count AS total_records".format( + self.indice, searchList + ) + if searchList + else "search index={} | stats count AS total_records".format(self.indice) + ) + try: + # Run the job and retrieve results + job = self.service.jobs.create( + search_query, + exec_mode="normal", + earliest_time=query["earliest_time"], + latest_time=query["latest_time"], + ) + + # Wait for the job to finish + while not job.is_done(): + job.refresh() + oneshotsearch_results = self.service.jobs.oneshot(searchindex, **query) + except Exception as e: print("Error querying splunk: {}".format(e)) return None + # Fetch the results + for result in job.results(output_mode="json"): + decoded_data = json.loads(result.decode("utf-8")) + value = decoded_data.get("results") + total_records = value[0]["total_records"] + # Get the results and display them using the JSONResultsReader res_array = [] async for record in self._stream_results(oneshotsearch_results): @@ -72,7 +104,7 @@ async def query(self, query, searchList="", max_results=10000): except Exception as e: print(f"Error on including Splunk record query in results array: {e}") - return res_array + return {"data": res_array, "total": total_records} async def _stream_results(self, oneshotsearch_results): for record in results.JSONResultsReader(oneshotsearch_results): diff --git a/frontend/src/actions/commonActions.js b/frontend/src/actions/commonActions.js index 1b2af030..0551bd0f 100644 --- a/frontend/src/actions/commonActions.js +++ b/frontend/src/actions/commonActions.js @@ -1,12 +1,10 @@ import * as TYPES from "@/actions/types.js"; -import { setCPTCatFilters, sliceCPTTableRows } from "./homeActions"; -import { setOCPCatFilters, sliceOCPTableRows } from "./ocpActions"; -import { setQuayCatFilters, sliceQuayTableRows } from "./quayActions"; -import { setTelcoCatFilters, sliceTelcoTableRows } from "./telcoActions"; - -import { DEFAULT_PER_PAGE } from "@/assets/constants/paginationConstants"; import { cloneDeep } from "lodash"; +import { setCPTCatFilters } from "./homeActions"; +import { setOCPCatFilters } from "./ocpActions"; +import { setQuayCatFilters } from "./quayActions"; +import { setTelcoCatFilters } from "./telcoActions"; const getSortableRowValues = (result, tableColumns) => { const tableKeys = tableColumns.map((item) => item.value); @@ -47,7 +45,6 @@ const sortedTableRows = (currState, sortedResults) => (dispatch) => { type: TYPES.SET_FILTERED_DATA, payload: sortedResults, }); - dispatch(sliceCPTTableRows(0, DEFAULT_PER_PAGE)); return; } if (currState === "ocp") { @@ -55,7 +52,6 @@ const sortedTableRows = (currState, sortedResults) => (dispatch) => { type: TYPES.SET_OCP_FILTERED_DATA, payload: sortedResults, }); - dispatch(sliceOCPTableRows(0, DEFAULT_PER_PAGE)); return; } if (currState === "quay") { @@ -63,7 +59,6 @@ const sortedTableRows = (currState, sortedResults) => (dispatch) => { type: TYPES.SET_QUAY_FILTERED_DATA, payload: sortedResults, }); - dispatch(sliceQuayTableRows(0, DEFAULT_PER_PAGE)); return; } if (currState === "telco") { @@ -71,7 +66,6 @@ const sortedTableRows = (currState, sortedResults) => (dispatch) => { type: TYPES.SET_TELCO_FILTERED_DATA, payload: sortedResults, 
}); - dispatch(sliceTelcoTableRows(0, DEFAULT_PER_PAGE)); } }; @@ -207,3 +201,16 @@ export const getSelectedFilter = return selectedFilters; }; + +export const getRequestParams = (type) => (dispatch, getState) => { + const { start_date, end_date, size, offset } = getState()[type]; + const params = { + pretty: true, + ...(start_date && { start_date }), + ...(end_date && { end_date }), + size: size, + offset: offset, + }; + + return params; +}; diff --git a/frontend/src/actions/filterActions.js b/frontend/src/actions/filterActions.js index 7f565887..ebdb0b78 100644 --- a/frontend/src/actions/filterActions.js +++ b/frontend/src/actions/filterActions.js @@ -1,29 +1,29 @@ import { + applyCPTDateFilter, removeCPTAppliedFilters, setCPTAppliedFilters, setCPTCatFilters, - setCPTDateFilter, setCPTOtherSummaryFilter, } from "./homeActions"; import { + applyOCPDateFilter, removeOCPAppliedFilters, setOCPAppliedFilters, setOCPCatFilters, - setOCPDateFilter, setOCPOtherSummaryFilter, } from "./ocpActions"; import { + applyQuayDateFilter, removeQuayAppliedFilters, setQuayAppliedFilters, setQuayCatFilters, - setQuayDateFilter, setQuayOtherSummaryFilter, } from "./quayActions"; import { + applyTelcoDateFilter, removeTelcoAppliedFilters, setTelcoAppliedFilters, setTelcoCatFilters, - setTelcoDateFilter, setTelcoOtherSummaryFilter, } from "./telcoActions"; @@ -69,13 +69,13 @@ export const removeAppliedFilters = (key, value, navigation, currType) => { export const setDateFilter = (date, key, navigation, currType) => { if (currType === "cpt") { - dispatch(setCPTDateFilter(date, key, navigation)); + dispatch(applyCPTDateFilter(date, key, navigation)); } else if (currType === "ocp") { - dispatch(setOCPDateFilter(date, key, navigation)); + dispatch(applyOCPDateFilter(date, key, navigation)); } else if (currType === "quay") { - dispatch(setQuayDateFilter(date, key, navigation)); + dispatch(applyQuayDateFilter(date, key, navigation)); } else if (currType === "telco") { - dispatch(setTelcoDateFilter(date, key, navigation)); + dispatch(applyTelcoDateFilter(date, key, navigation)); } }; diff --git a/frontend/src/actions/homeActions.js b/frontend/src/actions/homeActions.js index 68109d60..7a6dd735 100644 --- a/frontend/src/actions/homeActions.js +++ b/frontend/src/actions/homeActions.js @@ -1,16 +1,13 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "@/actions/types.js"; -import { - DEFAULT_PER_PAGE, - START_PAGE, -} from "@/assets/constants/paginationConstants"; import { appendDateFilter, appendQueryString } from "@/utils/helper"; import { buildFilterData, calculateMetrics, deleteAppliedFilters, getFilteredData, + getRequestParams, getSelectedFilter, sortTable, } from "./commonActions"; @@ -19,46 +16,54 @@ import API from "@/utils/axiosInstance"; import { cloneDeep } from "lodash"; import { showFailureToast } from "@/actions/toastActions"; -export const fetchOCPJobsData = () => async (dispatch, getState) => { - try { - dispatch({ type: TYPES.LOADING }); - const { start_date, end_date } = getState().cpt; - const response = await API.get(API_ROUTES.CPT_JOBS_API_V1, { - params: { - pretty: true, - ...(start_date && { start_date }), - ...(end_date && { end_date }), - }, - }); - if (response.status === 200) { - const startDate = response.data.startDate, - endDate = response.data.endDate; - //on initial load startDate and endDate are empty, so from response append to url - appendDateFilter(startDate, endDate); - dispatch({ - type: TYPES.SET_CPT_DATE_FILTER, - payload: { - start_date: startDate, - 
end_date: endDate, - }, - }); - } - - if (response?.data?.results?.length > 0) { - dispatch({ - type: TYPES.SET_CPT_JOBS_DATA, - payload: response.data.results, - }); - - dispatch(applyFilters()); - dispatch(sortTable("cpt")); - dispatch(tableReCalcValues()); +export const fetchOCPJobsData = + (isNewSearch = false) => + async (dispatch, getState) => { + try { + dispatch({ type: TYPES.LOADING }); + + const params = dispatch(getRequestParams("cpt")); + const results = getState().cpt.results; + params["totalJobs"] = getState().cpt.totalJobs; + const response = await API.get(API_ROUTES.CPT_JOBS_API_V1, { params }); + if (response.status === 200) { + const startDate = response.data.startDate, + endDate = response.data.endDate; + //on initial load startDate and endDate are empty, so from response append to url + appendDateFilter(startDate, endDate); + dispatch({ + type: TYPES.SET_CPT_DATE_FILTER, + payload: { + start_date: startDate, + end_date: endDate, + }, + }); + } + + if (response?.data?.results?.length > 0) { + dispatch({ + type: TYPES.SET_CPT_JOBS_DATA, + payload: isNewSearch + ? response.data.results + : [...results, ...response.data.results], + }); + dispatch({ + type: TYPES.SET_CPT_PAGE_TOTAL, + payload: { + total: response.data.total, + offset: response.data.offset, + }, + }); + + dispatch(applyFilters()); + dispatch(sortTable("cpt")); + dispatch(tableReCalcValues()); + } + } catch (error) { + dispatch(showFailureToast()); } - } catch (error) { - dispatch(showFailureToast()); - } - dispatch({ type: TYPES.COMPLETED }); -}; + dispatch({ type: TYPES.COMPLETED }); + }; export const setCPTSortIndex = (index) => ({ type: TYPES.SET_CPT_SORT_INDEX, @@ -70,6 +75,11 @@ export const setCPTSortDir = (direction) => ({ payload: direction, }); +export const setCPTOffset = (offset) => ({ + type: TYPES.SET_CPT_OFFSET, + payload: offset, +}); + export const sliceCPTTableRows = (startIdx, endIdx) => (dispatch, getState) => { const results = [...getState().cpt.filteredResults]; @@ -200,7 +210,11 @@ export const setCPTDateFilter = }); appendQueryString({ ...appliedFilters, start_date, end_date }, navigate); + }; +export const applyCPTDateFilter = + (start_date, end_date, navigate) => (dispatch) => { + dispatch(setCPTDateFilter(start_date, end_date, navigate)); dispatch(fetchOCPJobsData()); }; @@ -224,8 +238,11 @@ export const getCPTSummary = () => (dispatch, getState) => { }); }; -export const tableReCalcValues = () => (dispatch) => { +export const tableReCalcValues = () => (dispatch, getState) => { + const { page, perPage } = getState().cpt; dispatch(getCPTSummary()); - dispatch(setCPTPageOptions(START_PAGE, DEFAULT_PER_PAGE)); - dispatch(sliceCPTTableRows(0, DEFAULT_PER_PAGE)); + dispatch(setCPTPageOptions(page, perPage)); + const startIdx = page !== 0 ? 
(page - 1) * perPage : 0; + const endIdx = startIdx + perPage; // slice() end is exclusive + dispatch(sliceCPTTableRows(startIdx, endIdx)); + }; diff --git a/frontend/src/actions/ocpActions.js b/frontend/src/actions/ocpActions.js index 9a2f14c4..94657b15 100644 --- a/frontend/src/actions/ocpActions.js +++ b/frontend/src/actions/ocpActions.js @@ -1,16 +1,13 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "./types.js"; -import { - DEFAULT_PER_PAGE, - START_PAGE, -} from "@/assets/constants/paginationConstants"; import { appendDateFilter, appendQueryString } from "@/utils/helper.js"; import { buildFilterData, calculateMetrics, deleteAppliedFilters, getFilteredData, + getRequestParams, getSelectedFilter, sortTable, } from "./commonActions"; @@ -19,17 +16,14 @@ import API from "@/utils/axiosInstance"; import { cloneDeep } from "lodash"; import { showFailureToast } from "./toastActions"; -export const fetchOCPJobs = () => async (dispatch, getState) => { +export const fetchOCPJobs = () => async (dispatch) => { try { dispatch({ type: TYPES.LOADING }); - const { start_date, end_date } = getState().ocp; - const response = await API.get(API_ROUTES.OCP_JOBS_API_V1, { - params: { - pretty: true, - ...(start_date && { start_date }), - ...(end_date && { end_date }), - }, - }); + + const params = dispatch(getRequestParams("ocp")); + + const response = await API.get(API_ROUTES.OCP_JOBS_API_V1, { params }); + if (response.status === 200) { const startDate = response.data.startDate, endDate = response.data.endDate; @@ -49,6 +43,14 @@ export const fetchOCPJobs = () => async (dispatch, getState) => { payload: response.data.results, }); + dispatch({ + type: TYPES.SET_OCP_PAGE_TOTAL, + payload: { + total: response.data.total, + offset: response.data.offset, + }, + }); + dispatch(applyFilters()); dispatch(sortTable("ocp")); dispatch(tableReCalcValues()); @@ -69,14 +71,10 @@ export const setOCPPageOptions = (page, perPage) => ({ payload: { page, perPage }, }); -export const sliceOCPTableRows = (startIdx, endIdx) => (dispatch, getState) => { - const results = [...getState().ocp.filteredResults]; - - dispatch({ - type: TYPES.SET_OCP_INIT_JOBS, - payload: results.slice(startIdx, endIdx), - }); -}; +export const setOCPOffset = (offset) => ({ + type: TYPES.SET_OCP_OFFSET, + payload: offset, +}); export const setOCPSortIndex = (index) => ({ type: TYPES.SET_OCP_SORT_INDEX, @@ -188,10 +186,13 @@ export const setOCPDateFilter = }); appendQueryString({ ...appliedFilters, start_date, end_date }, navigate); + }; +export const applyOCPDateFilter = + (start_date, end_date, navigate) => (dispatch) => { + dispatch(setOCPDateFilter(start_date, end_date, navigate)); dispatch(fetchOCPJobs()); }; - export const setFilterFromURL = (searchParams) => ({ type: TYPES.SET_OCP_APPLIED_FILTERS, payload: searchParams, }); @@ -261,8 +262,9 @@ export const setTableColumns = (key, isAdding) => (dispatch, getState) => { payload: tableColumns, }); }; -export const tableReCalcValues = () => (dispatch) => { +export const tableReCalcValues = () => (dispatch, getState) => { + const { page, perPage } = getState().ocp; + dispatch(getOCPSummary()); - dispatch(setOCPPageOptions(START_PAGE, DEFAULT_PER_PAGE)); - dispatch(sliceOCPTableRows(0, DEFAULT_PER_PAGE)); + dispatch(setOCPPageOptions(page, perPage)); }; diff --git a/frontend/src/actions/paginationActions.js b/frontend/src/actions/paginationActions.js index 80a7dff1..09416cc8 100644 --- a/frontend/src/actions/paginationActions.js +++ b/frontend/src/actions/paginationActions.js @@ -1,39 +1,82 @@ import 
{ + fetchOCPJobs, + setOCPOffset, + setOCPPage, + setOCPPageOptions, +} from "./ocpActions"; +import { + fetchOCPJobsData, setCPTPage, setCPTPageOptions, sliceCPTTableRows, } from "./homeActions"; -import { setOCPPage, setOCPPageOptions, sliceOCPTableRows } from "./ocpActions"; -import { setQuayPage, setQuayPageOptions } from "./quayActions"; -import { setTelcoPage, setTelcoPageOptions } from "./telcoActions"; +import { + fetchQuayJobsData, + setQuayOffset, + setQuayPage, + setQuayPageOptions, +} from "./quayActions"; +import { + fetchTelcoJobsData, + setTelcoOffset, + setTelcoPage, + setTelcoPageOptions, +} from "./telcoActions"; + export const setPage = (newPage, currType) => (dispatch) => { - if (currType === "cpt") { - dispatch(setCPTPage(newPage)); - } else if (currType === "ocp") { - dispatch(setOCPPage(newPage)); - } else if (currType === "quay") { - dispatch(setQuayPage(newPage)); - } else if (currType === "telco") { - dispatch(setTelcoPage(newPage)); - } + const actions = { + cpt: setCPTPage, + ocp: setOCPPage, + quay: setQuayPage, + telco: setTelcoPage, + }; + dispatch(actions[currType](newPage)); }; export const setPageOptions = (newPage, newPerPage, currType) => (dispatch) => { - if (currType === "cpt") { - dispatch(setCPTPageOptions(newPage, newPerPage)); - } else if (currType === "ocp") { - dispatch(setOCPPageOptions(newPage, newPerPage)); - } else if (currType === "quay") { - dispatch(setQuayPageOptions(newPage, newPerPage)); - } else if (currType === "telco") { - dispatch(setTelcoPageOptions(newPage, newPerPage)); - } + const actions = { + cpt: setCPTPageOptions, + ocp: setOCPPageOptions, + quay: setQuayPageOptions, + telco: setTelcoPageOptions, + }; + dispatch(actions[currType](newPage, newPerPage)); +}; + +const calculateOffset = (pageNumber, itemsPerPage) => { + return (pageNumber - 1) * itemsPerPage; }; -export const sliceTableRows = (startIdx, endIdx, currType) => (dispatch) => { - if (currType === "cpt") { - dispatch(sliceCPTTableRows(startIdx, endIdx)); - } else if (currType === "ocp") { - dispatch(sliceOCPTableRows(startIdx, endIdx)); +export const checkTableData = (newPage, currType) => (dispatch, getState) => { + const { results, totalJobs, perPage, page } = getState()[currType]; + const fetchActions = { + ocp: fetchOCPJobs, + quay: fetchQuayJobsData, + telco: fetchTelcoJobsData, + }; + const offsetActions = { + ocp: setOCPOffset, + quay: setQuayOffset, + telco: setTelcoOffset, + }; + const hasPageData = results.length >= newPage * perPage; + const offset = calculateOffset(newPage, perPage); + if (results.length < totalJobs && !hasPageData) { + if (currType === "cpt") { + const startIdx = (page - 1) * perPage; + const endIdx = startIdx + perPage - 1; + if (results[startIdx] === undefined || results[endIdx] === undefined) { + dispatch(fetchOCPJobsData()); + } + } else { + dispatch(offsetActions[currType](offset)); + dispatch(fetchActions[currType]()); + } + } else { + if (currType === "cpt") { + const startIdx = (page - 1) * perPage; + const endIdx = startIdx + perPage; // slice() end is exclusive + dispatch(sliceCPTTableRows(startIdx, endIdx)); + } } }; diff --git a/frontend/src/actions/quayActions.js b/frontend/src/actions/quayActions.js index 59795ed7..71791884 100644 --- a/frontend/src/actions/quayActions.js +++ b/frontend/src/actions/quayActions.js @@ -1,16 +1,13 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "@/actions/types.js"; -import { - DEFAULT_PER_PAGE, - START_PAGE, -} from "@/assets/constants/paginationConstants"; import { appendDateFilter, 
appendQueryString } from "@/utils/helper.js"; import { buildFilterData, calculateMetrics, deleteAppliedFilters, getFilteredData, + getRequestParams, getSelectedFilter, } from "./commonActions"; @@ -18,17 +15,12 @@ import API from "@/utils/axiosInstance"; import { cloneDeep } from "lodash"; import { showFailureToast } from "@/actions/toastActions"; -export const fetchQuayJobsData = () => async (dispatch, getState) => { +export const fetchQuayJobsData = () => async (dispatch) => { try { dispatch({ type: TYPES.LOADING }); - const { start_date, end_date } = getState().quay; - const response = await API.get(API_ROUTES.QUAY_JOBS_API_V1, { - params: { - pretty: true, - ...(start_date && { start_date }), - ...(end_date && { end_date }), - }, - }); + + const params = dispatch(getRequestParams("quay")); + const response = await API.get(API_ROUTES.QUAY_JOBS_API_V1, { params }); if (response.status === 200) { const startDate = response.data.startDate, endDate = response.data.endDate; @@ -51,6 +43,13 @@ export const fetchQuayJobsData = () => async (dispatch, getState) => { type: TYPES.SET_QUAY_FILTERED_DATA, payload: response.data.results, }); + dispatch({ + type: TYPES.SET_QUAY_PAGE_TOTAL, + payload: { + total: response.data.total, + offset: response.data.offset, + }, + }); dispatch(applyFilters()); dispatch(tableReCalcValues()); } @@ -69,6 +68,12 @@ export const setQuayPageOptions = (page, perPage) => ({ type: TYPES.SET_QUAY_PAGE_OPTIONS, payload: { page, perPage }, }); + +export const setQuayOffset = (offset) => ({ + type: TYPES.SET_QUAY_OFFSET, + payload: offset, +}); + export const setQuaySortIndex = (index) => ({ type: TYPES.SET_QUAY_SORT_INDEX, payload: index, @@ -78,15 +83,6 @@ export const setQuaySortDir = (direction) => ({ type: TYPES.SET_QUAY_SORT_DIR, payload: direction, }); -export const sliceQuayTableRows = - (startIdx, endIdx) => (dispatch, getState) => { - const results = [...getState().quay.filteredResults]; - - dispatch({ - type: TYPES.SET_QUAY_INIT_JOBS, - payload: results.slice(startIdx, endIdx), - }); - }; export const setQuayCatFilters = (category) => (dispatch, getState) => { const filterData = [...getState().quay.filterData]; @@ -195,7 +191,11 @@ export const setQuayDateFilter = }); appendQueryString({ ...appliedFilters, start_date, end_date }, navigate); + }; +export const applyQuayDateFilter = + (start_date, end_date, navigate) => (dispatch) => { + dispatch(setQuayDateFilter(start_date, end_date, navigate)); dispatch(fetchQuayJobsData()); }; @@ -265,8 +265,8 @@ export const fetchGraphData = (uuid) => async (dispatch, getState) => { dispatch({ type: TYPES.GRAPH_COMPLETED }); }; -export const tableReCalcValues = () => (dispatch) => { +export const tableReCalcValues = () => (dispatch, getState) => { + const { page, perPage } = getState().quay; dispatch(getQuaySummary()); - dispatch(setQuayPageOptions(START_PAGE, DEFAULT_PER_PAGE)); - dispatch(sliceQuayTableRows(0, DEFAULT_PER_PAGE)); + dispatch(setQuayPageOptions(page, perPage)); }; diff --git a/frontend/src/actions/telcoActions.js b/frontend/src/actions/telcoActions.js index beaf04f9..923c9e6a 100644 --- a/frontend/src/actions/telcoActions.js +++ b/frontend/src/actions/telcoActions.js @@ -1,16 +1,13 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "@/actions/types.js"; -import { - DEFAULT_PER_PAGE, - START_PAGE, -} from "@/assets/constants/paginationConstants"; import { appendDateFilter, appendQueryString } from "@/utils/helper.js"; import { buildFilterData, calculateMetrics, deleteAppliedFilters, 
getFilteredData, + getRequestParams, getSelectedFilter, } from "./commonActions"; @@ -18,17 +15,13 @@ import API from "@/utils/axiosInstance"; import { cloneDeep } from "lodash"; import { showFailureToast } from "@/actions/toastActions"; -export const fetchTelcoJobsData = () => async (dispatch, getState) => { +export const fetchTelcoJobsData = () => async (dispatch) => { try { dispatch({ type: TYPES.LOADING }); - const { start_date, end_date } = getState().telco; - const response = await API.get(API_ROUTES.TELCO_JOBS_API_V1, { - params: { - pretty: true, - ...(start_date && { start_date }), - ...(end_date && { end_date }), - }, - }); + + const params = dispatch(getRequestParams("telco")); + + const response = await API.get(API_ROUTES.TELCO_JOBS_API_V1, { params }); if (response.status === 200) { const startDate = response.data.startDate, endDate = response.data.endDate; @@ -51,6 +44,13 @@ export const fetchTelcoJobsData = () => async (dispatch, getState) => { type: TYPES.SET_TELCO_FILTERED_DATA, payload: response.data.results, }); + dispatch({ + type: TYPES.SET_TELCO_PAGE_TOTAL, + payload: { + total: response.data.total, + offset: response.data.offset, + }, + }); dispatch(applyFilters()); dispatch(tableReCalcValues()); @@ -73,20 +73,15 @@ export const setTelcoSortIndex = (index) => ({ type: TYPES.SET_TELCO_SORT_INDEX, payload: index, }); - +export const setTelcoOffset = (offset) => ({ + type: TYPES.SET_TELCO_OFFSET, + payload: offset, +}); export const setTelcoSortDir = (direction) => ({ type: TYPES.SET_TELCO_SORT_DIR, payload: direction, }); -export const sliceTelcoTableRows = - (startIdx, endIdx) => (dispatch, getState) => { - const results = [...getState().telco.filteredResults]; - dispatch({ - type: TYPES.SET_TELCO_INIT_JOBS, - payload: results.slice(startIdx, endIdx), - }); - }; export const setTelcoCatFilters = (category) => (dispatch, getState) => { const filterData = [...getState().telco.filterData]; const options = filterData.filter((item) => item.name === category)[0].value; @@ -199,7 +194,11 @@ export const setTelcoDateFilter = }); appendQueryString({ ...appliedFilters, start_date, end_date }, navigate); + }; +export const applyTelcoDateFilter = + (start_date, end_date, navigate) => (dispatch) => { + dispatch(setTelcoDateFilter(start_date, end_date, navigate)); dispatch(fetchTelcoJobsData()); }; @@ -284,8 +283,8 @@ export const fetchGraphData = } dispatch({ type: TYPES.GRAPH_COMPLETED }); }; -export const tableReCalcValues = () => (dispatch) => { +export const tableReCalcValues = () => (dispatch, getState) => { + const { page, perPage } = getState().telco; dispatch(getTelcoSummary()); - dispatch(setTelcoPageOptions(START_PAGE, DEFAULT_PER_PAGE)); - dispatch(sliceTelcoTableRows(0, DEFAULT_PER_PAGE)); + dispatch(setTelcoPageOptions(page, perPage)); }; diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index 1804cf21..734d6568 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -26,6 +26,8 @@ export const SET_PAGE = "SET_PAGE"; export const SET_PAGE_OPTIONS = "SET_PAGE_OPTIONS"; export const SET_CPT_SUMMARY = "SET_CPT_SUMMARY"; export const SET_SELECTED_FILTERS = "SET_SELECTED_FILTERS"; +export const SET_CPT_OFFSET = "SET_CPT_OFFSET"; +export const SET_CPT_PAGE_TOTAL = "SET_CPT_PAGE_TOTAL"; /* OCP Jobs */ export const SET_OCP_JOBS_DATA = "SET_OCP_JOBS_DATA"; export const SET_OCP_DATE_FILTER = "SET_OCP_DATE_FILTER"; @@ -43,6 +45,8 @@ export const SET_OCP_APPLIED_FILTERS = "SET_OCP_APPLIED_FILTERS"; export const SET_OCP_GRAPH_DATA 
diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js
index 1804cf21..734d6568 100644
--- a/frontend/src/actions/types.js
+++ b/frontend/src/actions/types.js
@@ -26,6 +26,8 @@ export const SET_PAGE = "SET_PAGE";
 export const SET_PAGE_OPTIONS = "SET_PAGE_OPTIONS";
 export const SET_CPT_SUMMARY = "SET_CPT_SUMMARY";
 export const SET_SELECTED_FILTERS = "SET_SELECTED_FILTERS";
+export const SET_CPT_OFFSET = "SET_CPT_OFFSET";
+export const SET_CPT_PAGE_TOTAL = "SET_CPT_PAGE_TOTAL";
 /* OCP Jobs */
 export const SET_OCP_JOBS_DATA = "SET_OCP_JOBS_DATA";
 export const SET_OCP_DATE_FILTER = "SET_OCP_DATE_FILTER";
@@ -43,6 +45,8 @@ export const SET_OCP_APPLIED_FILTERS = "SET_OCP_APPLIED_FILTERS";
 export const SET_OCP_GRAPH_DATA = "SET_OCP_GRAPH_DATA";
 export const SET_OCP_COLUMNS = "SET_OCP_COLUMNS";
 export const SET_SELECTED_OCP_FILTERS = "SET_SELECTED_OCP_FILTERS";
+export const SET_OCP_OFFSET = "SET_OCP_OFFSET";
+export const SET_OCP_PAGE_TOTAL = "SET_OCP_PAGE_TOTAL";
 /* QUAY Jobs*/
 export const SET_QUAY_JOBS_DATA = "SET_QUAY_JOBS_DATA";
 export const SET_QUAY_DATE_FILTER = "SET_QUAY_DATE_FILTER";
@@ -60,6 +64,8 @@ export const SET_QUAY_SELECTED_FILTERS = "SET_QUAY_SELECTED_FILTERS";
 export const SET_QUAY_SUMMARY = "SET_QUAY_SUMMARY";
 export const SET_QUAY_COLUMNS = "SET_QUAY_COLUMNS";
 export const SET_QUAY_GRAPH_DATA = "SET_QUAY_GRAPH_DATA";
+export const SET_QUAY_OFFSET = "SET_QUAY_OFFSET";
+export const SET_QUAY_PAGE_TOTAL = "SET_QUAY_PAGE_TOTAL";
 /* Telco Jobs */
 export const SET_TELCO_JOBS_DATA = "SET_TELCO_JOBS_DATA";
 export const SET_TELCO_DATE_FILTER = "SET_TELCO_DATE_FILTER";
@@ -77,3 +83,5 @@ export const SET_TELCO_SELECTED_FILTERS = "SET_TELCO_SELECTED_FILTERS";
 export const SET_TELCO_SUMMARY = "SET_TELCO_SUMMARY";
 export const SET_TELCO_COLUMNS = "SET_TELCO_COLUMNS";
 export const SET_TELCO_GRAPH_DATA = "SET_TELCO_GRAPH_DATA";
+export const SET_TELCO_OFFSET = "SET_TELCO_OFFSET";
+export const SET_TELCO_PAGE_TOTAL = "SET_TELCO_PAGE_TOTAL";
diff --git a/frontend/src/assets/constants/paginationConstants.js b/frontend/src/assets/constants/paginationConstants.js
index 90460014..34251c96 100644
--- a/frontend/src/assets/constants/paginationConstants.js
+++ b/frontend/src/assets/constants/paginationConstants.js
@@ -1,2 +1,3 @@
 export const DEFAULT_PER_PAGE = 25;
 export const START_PAGE = 1;
+export const INITAL_OFFSET = 0;
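
// Context: INITAL_OFFSET [sic] is the 0-based record offset the paginated API
// starts from. With PatternFly's 1-based page numbers, the usual mapping is
// offset = (page - 1) * perPage; a hypothetical helper, for illustration only:

import {
  DEFAULT_PER_PAGE,
  INITAL_OFFSET,
} from "@/assets/constants/paginationConstants";

// Hypothetical (not in this diff): translate a 1-based page into the record
// offset sent to the backend as the `offset` query parameter.
export const pageToOffset = (page, perPage = DEFAULT_PER_PAGE) =>
  page > 1 ? (page - 1) * perPage : INITAL_OFFSET;
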
diff --git a/frontend/src/components/organisms/LoadingComponent/index.less b/frontend/src/components/organisms/LoadingComponent/index.less
index 84e6cfe7..135fdb39 100644
--- a/frontend/src/components/organisms/LoadingComponent/index.less
+++ b/frontend/src/components/organisms/LoadingComponent/index.less
@@ -14,16 +14,16 @@
     }
   }
 }
-// .main-with-spinner {
-//   pointer-events: none;
-//   height: 100%;
-//   // position: fixed; /* Sit on top of the page content */
-//   width: 100%; /* Full width (cover the whole page) */
-//   top: 0;
-//   left: 0;
-//   right: 0;
-//   bottom: 0;
-//   background-color: rgba(0, 0, 0, 0.08); /* background with opacity */
-//   z-index: 2; /* Specify a stack order in case you're using a different order for other elements */
-//   cursor: pointer; /* Add a pointer on hover */
-// }
+.main-with-spinner {
+  pointer-events: none;
+  height: 100%;
+  // position: fixed; /* Sit on top of the page content */
+  width: 100%; /* Full width (cover the whole page) */
+  top: 0;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  opacity: 0.4;
+  z-index: 2; /* Specify a stack order in case you're using a different order for other elements */
+  cursor: pointer; /* Add a pointer on hover */
+}
diff --git a/frontend/src/components/organisms/Pagination/index.jsx b/frontend/src/components/organisms/Pagination/index.jsx
index 7b316a21..5b6454b3 100644
--- a/frontend/src/components/organisms/Pagination/index.jsx
+++ b/frontend/src/components/organisms/Pagination/index.jsx
@@ -1,8 +1,8 @@
 import { Pagination, PaginationVariant } from "@patternfly/react-core";
 import {
+  checkTableData,
   setPage,
   setPageOptions,
-  sliceTableRows,
 } from "@/actions/paginationActions";
 
 import PropTypes from "prop-types";
@@ -21,14 +21,22 @@ const RenderPagination = (props) => {
   const onSetPage = useCallback(
     (_evt, newPage, _perPage, startIdx, endIdx) => {
       dispatch(setPage(newPage, props.type));
-      dispatch(sliceTableRows(startIdx, endIdx, props.type));
     },
     [dispatch, props.type]
   );
   const onPerPageSelect = useCallback(
     (_evt, newPerPage, newPage, startIdx, endIdx) => {
       dispatch(setPageOptions(newPage, newPerPage, props.type));
-      dispatch(sliceTableRows(startIdx, endIdx, props.type));
+    },
+    [dispatch, props.type]
+  );
+
+  const onNextClick = useCallback(
+    (_evt, newPage) => {
+      if (props.type === "cpt") {
+        dispatch(setPage(newPage, props.type));
+      }
+      dispatch(checkTableData(newPage, props.type));
     },
     [dispatch, props.type]
   );
@@ -43,6 +51,9 @@ const RenderPagination = (props) => {
       perPageOptions={perPageOptions}
       onSetPage={onSetPage}
       onPerPageSelect={onPerPageSelect}
+      onNextClick={onNextClick}
+      onPageInput={onNextClick}
+      isCompact={props.type === "cpt" ? true : false}
     />
   );
 };
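
// Context: checkTableData comes from paginationActions and is not shown in
// this diff; only its call sites (onNextClick, onPageInput) are. The sketch
// below is one plausible reading, assuming it re-queries the backend when the
// requested page runs past the rows already cached for that dashboard type.

export const checkTableData = (newPage, type) => (dispatch, getState) => {
  const { results, perPage, totalJobs } = getState()[type];
  const hasAllRows = results.length >= totalJobs;
  // Only hit the API when the requested page is beyond the cached window.
  if (!hasAllRows && newPage * perPage > results.length) {
    // setOffset and fetchJobsData are hypothetical stand-ins for the
    // per-dashboard offset setters (e.g. setTelcoOffset) and fetch thunks
    // (e.g. fetchTelcoJobsData) added elsewhere in this change.
    dispatch(setOffset(results.length, type));
    dispatch(fetchJobsData(type));
  }
};
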
diff --git a/frontend/src/components/templates/Home/index.jsx b/frontend/src/components/templates/Home/index.jsx
index 02d8b2a9..52b281b0 100644
--- a/frontend/src/components/templates/Home/index.jsx
+++ b/frontend/src/components/templates/Home/index.jsx
@@ -19,7 +19,6 @@ const Home = () => {
   const navigate = useNavigate();
 
   const {
-    filteredResults,
     tableColumns,
     activeSortDir,
     activeSortIndex,
@@ -35,6 +34,7 @@ const Home = () => {
     perPage,
     summary,
     selectedFilters,
+    totalJobs,
   } = useSelector((state) => state.cpt);
 
   useEffect(() => {
@@ -67,7 +67,7 @@ const Home = () => {
       activeSortDir={activeSortDir}
       page={page}
       perPage={perPage}
-      totalItems={filteredResults.length}
+      totalItems={totalJobs}
       addExpansion={false}
      type={"cpt"}
     />
diff --git a/frontend/src/components/templates/OCP/index.jsx b/frontend/src/components/templates/OCP/index.jsx
index 3d5932ba..74db5d00 100644
--- a/frontend/src/components/templates/OCP/index.jsx
+++ b/frontend/src/components/templates/OCP/index.jsx
@@ -20,11 +20,10 @@ const OCP = () => {
   const navigate = useNavigate();
   const [searchParams] = useSearchParams();
   const {
-    filteredResults,
+    results,
     tableColumns,
     activeSortDir,
     activeSortIndex,
-    tableData,
     page,
     perPage,
     summary,
@@ -37,6 +36,7 @@ const OCP = () => {
     end_date,
     graphData,
     selectedFilters,
+    totalJobs,
   } = useSelector((state) => state.ocp);
 
   useEffect(() => {
@@ -98,7 +98,7 @@ const OCP = () => {
-      totalItems={filteredResults.length}
+      totalItems={totalJobs}
     />
diff --git a/frontend/src/components/templates/Quay/index.jsx b/frontend/src/components/templates/Quay/index.jsx
--- a/frontend/src/components/templates/Quay/index.jsx
+++ b/frontend/src/components/templates/Quay/index.jsx
@@ ... @@ const Quay = () => {
   const [searchParams] = useSearchParams();
   const {
-    tableData,
+    results,
     tableColumns,
     activeSortIndex,
     activeSortDir,
     page,
     perPage,
-    filteredResults,
     tableFilters,
     filterOptions,
     categoryFilterValue,
@@ -38,6 +37,7 @@ const Quay = () => {
     selectedFilters,
     graphData,
     summary,
+    totalJobs,
   } = useSelector((state) => state.quay);
 
   useEffect(() => {
@@ -99,7 +99,7 @@ const Quay = () => {
-      totalItems={filteredResults.length}
+      totalItems={totalJobs}
       navigation={navigate}
     />
diff --git a/frontend/src/components/templates/Telco/index.jsx b/frontend/src/components/templates/Telco/index.jsx
--- a/frontend/src/components/templates/Telco/index.jsx
+++ b/frontend/src/components/templates/Telco/index.jsx
@@ ... @@ const Telco = () => {
   const navigate = useNavigate();
   const [searchParams] = useSearchParams();
   const {
-    tableData,
+    results,
     tableColumns,
     activeSortIndex,
     activeSortDir,
     page,
     perPage,
-    filteredResults,
     tableFilters,
     filterOptions,
     categoryFilterValue,
@@ -37,6 +36,7 @@ const Telco = () => {
     selectedFilters,
     summary,
     graphData,
+    totalJobs,
   } = useSelector((state) => state.telco);
 
   useEffect(() => {
@@ -98,7 +98,7 @@ const Telco = () => {
-      totalItems={filteredResults.length}
+      totalItems={totalJobs}
       navigation={navigate}
     />
diff --git a/frontend/src/reducers/cptReducer.js b/frontend/src/reducers/cptReducer.js
--- a/frontend/src/reducers/cptReducer.js
+++ b/frontend/src/reducers/cptReducer.js
@@ ... @@
         ...state,
         results: payload,
       };
+    case TYPES.SET_CPT_PAGE_TOTAL:
+      return {
+        ...state,
+        totalJobs: payload.total,
+        offset: payload.offset,
+      };
+    case TYPES.SET_CPT_OFFSET:
+      return { ...state, offset: payload };
     case TYPES.SET_CPT_DATE_FILTER:
       return {
         ...state,
diff --git a/frontend/src/reducers/ocpReducer.js b/frontend/src/reducers/ocpReducer.js
index c422ff5e..20232ad8 100644
--- a/frontend/src/reducers/ocpReducer.js
+++ b/frontend/src/reducers/ocpReducer.js
@@ -2,6 +2,7 @@ import * as TYPES from "@/actions/types";
 
 import {
   DEFAULT_PER_PAGE,
+  INITAL_OFFSET,
   START_PAGE,
 } from "@/assets/constants/paginationConstants";
 
@@ -43,7 +44,10 @@ const initialState = {
   activeSortIndex: null,
   page: START_PAGE,
   perPage: DEFAULT_PER_PAGE,
-  tableData: [],
+  size: DEFAULT_PER_PAGE,
+  offset: INITAL_OFFSET,
+  totalJobs: 0,
+  //tableData: [],
   filterData: [],
   categoryFilterValue: "",
   filterOptions: [],
@@ -111,6 +115,14 @@ const OCPReducer = (state = initialState, action = {}) => {
         ...state,
         results: payload,
       };
+    case TYPES.SET_OCP_PAGE_TOTAL:
+      return {
+        ...state,
+        totalJobs: payload.total,
+        offset: payload.offset,
+      };
+    case TYPES.SET_OCP_OFFSET:
+      return { ...state, offset: payload };
     case TYPES.SET_OCP_DATE_FILTER:
       return {
         ...state,
@@ -125,8 +137,6 @@ const OCPReducer = (state = initialState, action = {}) => {
       return { ...state, page: payload };
     case TYPES.SET_OCP_PAGE_OPTIONS:
       return { ...state, page: payload.page, perPage: payload.perPage };
-    case TYPES.SET_OCP_INIT_JOBS:
-      return { ...state, tableData: payload };
     case TYPES.SET_OCP_SUMMARY:
       return { ...state, summary: payload };
     case TYPES.SET_OCP_FILTER_DATA:
diff --git a/frontend/src/reducers/quayReducer.js b/frontend/src/reducers/quayReducer.js
index bc3ff723..9e33d160 100644
--- a/frontend/src/reducers/quayReducer.js
+++ b/frontend/src/reducers/quayReducer.js
@@ -2,6 +2,7 @@ import * as TYPES from "@/actions/types";
 
 import {
   DEFAULT_PER_PAGE,
+  INITAL_OFFSET,
   START_PAGE,
 } from "@/assets/constants/paginationConstants";
 
@@ -60,10 +61,12 @@ const initialState = {
   appliedFilters: {},
   activeSortDir: null,
   activeSortIndex: null,
-  tableData: [],
   graphData: [],
   page: START_PAGE,
   perPage: DEFAULT_PER_PAGE,
+  size: DEFAULT_PER_PAGE,
+  offset: INITAL_OFFSET,
+  totalJobs: 0,
   summary: {},
 };
 
@@ -76,6 +79,14 @@ const QuayReducer = (state = initialState, action = {}) => {
       ...state,
       results: payload,
     };
+    case TYPES.SET_QUAY_PAGE_TOTAL:
+      return {
+        ...state,
+        totalJobs: payload.total,
+        offset: payload.offset,
+      };
+    case TYPES.SET_QUAY_OFFSET:
+      return { ...state, offset: payload };
     case TYPES.SET_QUAY_DATE_FILTER:
       return {
         ...state,
@@ -90,8 +101,6 @@ const QuayReducer = (state = initialState, action = {}) => {
       return { ...state, page: payload };
     case TYPES.SET_QUAY_PAGE_OPTIONS:
       return { ...state, page: payload.page, perPage: payload.perPage };
-    case TYPES.SET_QUAY_INIT_JOBS:
-      return { ...state, tableData: payload };
     case TYPES.SET_QUAY_FILTERED_DATA:
       return { ...state, filteredResults: payload };
     case TYPES.SET_QUAY_FILTER_OPTIONS:
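
// Summary of the state contract shared by the reducers above and below: every
// dashboard slice now initializes the same server-side pagination fields.

const paginationState = {
  page: START_PAGE, // 1-based page rendered by the PatternFly Pagination
  perPage: DEFAULT_PER_PAGE, // rows shown per page
  size: DEFAULT_PER_PAGE, // page size sent to the backend as `size`
  offset: INITAL_OFFSET, // record offset sent to the backend as `offset`
  totalJobs: 0, // total hit count reported back by the API
};
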
diff --git a/frontend/src/reducers/telcoReducer.js b/frontend/src/reducers/telcoReducer.js
index 15ab045f..5dd1a140 100644
--- a/frontend/src/reducers/telcoReducer.js
+++ b/frontend/src/reducers/telcoReducer.js
@@ -2,6 +2,7 @@ import * as TYPES from "@/actions/types";
 
 import {
   DEFAULT_PER_PAGE,
+  INITAL_OFFSET,
   START_PAGE,
 } from "@/assets/constants/paginationConstants";
 
@@ -57,10 +58,12 @@ const initialState = {
   appliedFilters: {},
   activeSortDir: null,
   activeSortIndex: null,
-  tableData: [],
   graphData: [],
   page: START_PAGE,
   perPage: DEFAULT_PER_PAGE,
+  size: DEFAULT_PER_PAGE,
+  offset: INITAL_OFFSET,
+  totalJobs: 0,
   summary: {},
 };
 
@@ -73,6 +76,14 @@ const TelcoReducer = (state = initialState, action = {}) => {
       ...state,
       results: payload,
     };
+    case TYPES.SET_TELCO_PAGE_TOTAL:
+      return {
+        ...state,
+        totalJobs: payload.total,
+        offset: payload.offset,
+      };
+    case TYPES.SET_TELCO_OFFSET:
+      return { ...state, offset: payload };
     case TYPES.SET_TELCO_DATE_FILTER:
       return {
         ...state,
@@ -87,8 +98,6 @@ const TelcoReducer = (state = initialState, action = {}) => {
       return { ...state, page: payload };
     case TYPES.SET_TELCO_PAGE_OPTIONS:
       return { ...state, page: payload.page, perPage: payload.perPage };
-    case TYPES.SET_TELCO_INIT_JOBS:
-      return { ...state, tableData: payload };
     case TYPES.SET_TELCO_FILTERED_DATA:
       return { ...state, filteredResults: payload };
     case TYPES.SET_TELCO_CATEGORY_FILTER:
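
// End-to-end, the next-page flow wired up by this change (telco shown; ocp and
// quay are identical in shape): store the new offset, keep page/perPage in
// sync, and re-fetch so the backend returns the next window plus the running
// total. The action names are from the diff; only the composition here is
// illustrative.

const loadNextTelcoPage = (newPage) => (dispatch, getState) => {
  const { perPage } = getState().telco;
  dispatch(setTelcoOffset((newPage - 1) * perPage));
  dispatch(setTelcoPageOptions(newPage, perPage));
  // fetchTelcoJobsData sends size/offset via getRequestParams("telco") and
  // dispatches SET_TELCO_PAGE_TOTAL with the total/offset echoed by the API.
  dispatch(fetchTelcoJobsData());
};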