Dynamic Pagination in CPT Dashboard #135

Merged 9 commits on Dec 13, 2024
Changes from 1 commit
3 changes: 1 addition & 2 deletions backend/app/api/v1/commons/quay.py
@@ -44,5 +44,4 @@ async def getData(
 
     cleanJobs = jobs[jobs["platform"] != ""]
 
-    jbs = cleanJobs
-    return {"data": jbs, "total": response["total"]}
+    return {"data": cleanJobs, "total": response["total"]}
32 changes: 15 additions & 17 deletions backend/app/api/v1/endpoints/ocm/ocmJobs.py
@@ -1,7 +1,7 @@
 import json
 from fastapi import Response
 from datetime import datetime, timedelta, date
-from fastapi import APIRouter
+from fastapi import APIRouter, HTTPException
 from ...commons.ocm import getData
 from ...commons.example_responses import ocp_200_response, response_422
 from fastapi.param_functions import Query
@@ -54,28 +54,26 @@ async def jobs(
     if not offset:
         offset = 0
 
-    if not size:
+    if not offset and not size:
         size = 10000
         offset = 0
 
+    if offset and not size:
+        raise HTTPException(400, f"offset {offset} specified without size")
+
     results = await getData(start_date, end_date, size, offset, "ocm.elasticsearch")
 
+    jobs = []
     if "data" in results and len(results["data"]) >= 1:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": results["data"].to_dict("records"),
-            "total": results["total"],
-            "offset": offset + size,
-        }
-    else:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": [],
-            "total": 0,
-            "offset": 0,
-        }
+        jobs = results["data"].to_dict("records")
+
+    response = {
+        "startDate": start_date.__str__(),
+        "endDate": end_date.__str__(),
+        "results": jobs,
+        "total": 0,
+        "offset": 0,
+    }
 
     if pretty:
         json_str = json.dumps(response, indent=4)
33 changes: 14 additions & 19 deletions backend/app/api/v1/endpoints/ocp/ocpJobs.py
@@ -1,7 +1,7 @@
 import json
 from fastapi import Response
 from datetime import datetime, timedelta, date
-from fastapi import APIRouter
+from fastapi import APIRouter, HTTPException
 from ...commons.ocp import getData
 from ...commons.example_responses import ocp_200_response, response_422
 from fastapi.param_functions import Query
@@ -53,29 +53,24 @@ async def jobs(
 
     if not offset:
         offset = 0
-
-    if not size:
+    if not offset and not size:
         size = 10000
         offset = 0
-
+    if offset and not size:
+        raise HTTPException(400, f"offset {offset} specified without size")
     results = await getData(start_date, end_date, size, offset, "ocp.elasticsearch")
 
+    jobs = []
     if "data" in results and len(results["data"]) >= 1:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": results["data"].to_dict("records"),
-            "total": results["total"],
-            "offset": offset + size,
-        }
-    else:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": [],
-            "total": 0,
-            "offset": 0,
-        }
+        jobs = results["data"].to_dict("records")
+
+    response = {
+        "startDate": start_date.__str__(),
+        "endDate": end_date.__str__(),
+        "results": jobs,
+        "total": results["total"],
+        "offset": offset + size,
+    }
 
     if pretty:
         json_str = json.dumps(response, indent=4)
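Taken together, the ocpJobs change defines the pagination contract the dashboard relies on: the request carries size and offset, and the response reports results, total, and the next offset (offset + size). The sketch below is a hypothetical client-side paging loop against that contract; the base URL, route path, and query-parameter names are assumptions, not taken from this PR.

```python
import requests

# Assumed base URL and route; adjust both to the actual deployment/router prefix.
ENDPOINT = "http://localhost:8000/api/v1/ocpJobs"

def fetch_all_jobs(start_date: str, end_date: str, page_size: int = 500):
    """Yield every job by walking the size/offset pagination described above."""
    offset = 0
    while True:
        resp = requests.get(
            ENDPOINT,
            params={
                "start_date": start_date,
                "end_date": end_date,
                "size": page_size,
                "offset": offset,
            },
        )
        resp.raise_for_status()
        body = resp.json()
        yield from body["results"]

        # The response's "offset" is the next offset (offset + size);
        # stop once it reaches or passes the reported total.
        offset = body["offset"]
        if offset >= body["total"]:
            break

# Example usage:
# for job in fetch_all_jobs("2024-11-01", "2024-12-01"):
#     print(job)
```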
32 changes: 15 additions & 17 deletions backend/app/api/v1/endpoints/quay/quayJobs.py
@@ -1,7 +1,7 @@
 import json
 from fastapi import Response
 from datetime import datetime, timedelta, date
-from fastapi import APIRouter
+from fastapi import APIRouter, HTTPException
 from ...commons.quay import getData
 from ...commons.example_responses import quay_200_response, response_422
 from fastapi.param_functions import Query
@@ -54,28 +54,26 @@ async def jobs(
     if not offset:
         offset = 0
 
-    if not size:
+    if not offset and not size:
         size = 10000
         offset = 0
 
+    if offset and not size:
+        raise HTTPException(400, f"offset {offset} specified without size")
+
     results = await getData(start_date, end_date, size, offset, "quay.elasticsearch")
 
+    jobs = []
     if "data" in results and len(results["data"]) >= 1:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": results["data"].to_dict("records"),
-            "total": results["total"],
-            "offset": offset + size,
-        }
-    else:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": [],
-            "total": 0,
-            "offset": 0,
-        }
+        jobs = results["data"].to_dict("records")
+
+    response = {
+        "startDate": start_date.__str__(),
+        "endDate": end_date.__str__(),
+        "results": jobs,
+        "total": 0,
+        "offset": 0,
+    }
 
     if pretty:
         json_str = json.dumps(response, indent=4)
26 changes: 10 additions & 16 deletions backend/app/api/v1/endpoints/telco/telcoJobs.py
@@ -52,23 +52,17 @@ async def jobs(
     )
-
     results = await getData(start_date, end_date, size, offset, "telco.splunk")
 
+    jobs = []
     if len(results["data"]) >= 1:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": results["data"].to_dict("records"),
-            "total": results["total"],
-            "offset": offset + size,
-        }
-    else:
-        response = {
-            "startDate": start_date.__str__(),
-            "endDate": end_date.__str__(),
-            "results": [],
-            "total": 0,
-            "offset": 0,
-        }
+        jobs = results["data"].to_dict("records")
+
+    response = {
+        "startDate": start_date.__str__(),
+        "endDate": end_date.__str__(),
+        "results": jobs,
+        "total": 0,
+        "offset": 0,
+    }
 
     if pretty:
         json_str = json.dumps(response, indent=4)
4 changes: 0 additions & 4 deletions backend/app/services/search.py
@@ -114,7 +114,6 @@ async def post(
                 "data": response["hits"]["hits"],
                 "total": response["hits"]["total"]["value"],
             }
-            # previous_results = await self.scan_indices(self.prev_es, self.prev_index, query, timestamp_field, start_date, new_end_date, size)
         if self.prev_es and self.new_es:
             self.new_index = self.new_index_prefix + (
                 self.new_index if indice is None else indice
@@ -156,7 +155,6 @@ async def post(
                 "data": response["hits"]["hits"],
                 "total": response["hits"]["total"]["value"],
             }
-            # new_results = await self.scan_indices(self.new_es, self.new_index, query, timestamp_field, new_start_date, end_date, size)
             unique_data = await self.remove_duplicates(
                 previous_results["data"]
                 if ("data" in previous_results)
@@ -176,8 +174,6 @@
             query["query"]["bool"]["filter"]["range"][timestamp_field][
                 "lte"
             ] = str(end_date)
-            # return await self.scan_indices(self.new_es, self.new_index, query, timestamp_field, start_date, end_date, size)
-            # else:
             response = await self.new_es.search(
                 index=self.new_index + "*",
                 body=jsonable_encoder(query),
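The lines removed above are only leftover commented-out scan_indices calls, but this service is where the size/offset values ultimately reach Elasticsearch. As a point of reference, the snippet below is a generic, minimal sketch of offset pagination against Elasticsearch, not the repository's actual query builder; the index pattern, match_all filter, and client setup are assumptions.

```python
from elasticsearch import AsyncElasticsearch

async def search_page(es: AsyncElasticsearch, index: str, offset: int, size: int):
    """Illustrative offset pagination: size/offset map to ES "size"/"from"."""
    query = {
        "from": offset,              # skip the first `offset` hits
        "size": size,                # return at most `size` hits
        "query": {"match_all": {}},  # placeholder; the real service builds a date-range filter
    }
    response = await es.search(index=f"{index}*", body=query)
    # Mirrors the service's return shape: raw hits plus the overall hit count.
    return {
        "data": response["hits"]["hits"],
        "total": response["hits"]["total"]["value"],
    }
```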