Commit 3d3af86

ruff formatting

AndrewPlayer3 committed Dec 16, 2024
1 parent 061f35f commit 3d3af86
Showing 35 changed files with 540 additions and 663 deletions.
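All of the changes below are mechanical reformatting, presumably from running ruff format with a single-quote preference configured; no behavior changes. Judging from the diff, the formatter rewrites strings to single quotes, collapses short literals and calls onto one line, explodes anything too long into one element per line with a trailing comma, and keeps lines within roughly 120 characters. A minimal sketch of those conventions (the names and values here are illustrative, not taken from the repository):

# Short literals collapse onto one line, and strings use single quotes:
error = {'status': 503, 'title': 'Service Unavailable', 'type': 'about:blank'}

# Anything that no longer fits on one line is exploded to one element per line,
# indented four spaces, with a trailing comma before the closing bracket:
message = '{detail} (status {status})'.format(
    detail='HyP3 is currently unavailable. Please try again later.',
    status=error['status'],
)
print(message)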
2 changes: 1 addition & 1 deletion apps/api/src/hyp3_api/__main__.py
@@ -1,4 +1,4 @@
 from hyp3_api import app

-if __name__ == "__main__":
+if __name__ == '__main__':
     app.run(port=8080)
12 changes: 4 additions & 8 deletions apps/api/src/hyp3_api/handlers.py
@@ -10,12 +10,7 @@


 def problem_format(status, message):
-    response = jsonify({
-        'status': status,
-        'detail': message,
-        'title': responses[status],
-        'type': 'about:blank'
-    })
+    response = jsonify({'status': status, 'detail': message, 'title': responses[status], 'type': 'about:blank'})
     response.headers['Content-Type'] = 'application/problem+json'
     response.status_code = status
     return response
@@ -49,8 +44,9 @@ def get_jobs(user, start=None, end=None, status_code=None, name=None, job_type=N
     payload = {'jobs': jobs}
     if last_evaluated_key is not None:
         next_token = util.serialize(last_evaluated_key)
-        payload['next'] = util.build_next_url(request.url, next_token, request.headers.get('X-Forwarded-Host'),
-                                              request.root_path)
+        payload['next'] = util.build_next_url(
+            request.url, next_token, request.headers.get('X-Forwarded-Host'), request.root_path
+        )
     return payload


16 changes: 7 additions & 9 deletions apps/api/src/hyp3_api/routes.py
@@ -26,14 +26,9 @@

 @app.before_request
 def check_system_available():
-    if environ['SYSTEM_AVAILABLE'] != "true":
+    if environ['SYSTEM_AVAILABLE'] != 'true':
         message = 'HyP3 is currently unavailable. Please try again later.'
-        error = {
-            'detail': message,
-            'status': 503,
-            'title': 'Service Unavailable',
-            'type': 'about:blank'
-        }
+        error = {'detail': message, 'status': 503, 'title': 'Service Unavailable', 'type': 'about:blank'}
         return make_response(jsonify(error), 503)


@@ -71,8 +66,11 @@ def render_ui():

 @app.errorhandler(404)
 def error404(_):
-    return handlers.problem_format(404, 'The requested URL was not found on the server.'
-                                   ' If you entered the URL manually please check your spelling and try again.')
+    return handlers.problem_format(
+        404,
+        'The requested URL was not found on the server.'
+        ' If you entered the URL manually please check your spelling and try again.',
+    )


 class CustomEncoder(json.JSONEncoder):
13 changes: 5 additions & 8 deletions apps/api/src/hyp3_api/validation.py
@@ -57,8 +57,9 @@ def get_cmr_metadata(granules):
     granules = [
         {
             'name': entry.get('producer_granule_id', entry.get('title')),
-            'polygon': Polygon(format_points(entry['polygons'][0][0]))
-        } for entry in response.json()['feed']['entry']
+            'polygon': Polygon(format_points(entry['polygons'][0][0])),
+        }
+        for entry in response.json()['feed']['entry']
     ]
     return granules

@@ -93,9 +94,7 @@ def check_same_burst_ids(job, _):
         )
     for i in range(len(ref_ids)):
         if ref_ids[i] != sec_ids[i]:
-            raise GranuleValidationError(
-                f'Burst IDs do not match for {refs[i]} and {secs[i]}.'
-            )
+            raise GranuleValidationError(f'Burst IDs do not match for {refs[i]} and {secs[i]}.')
     if len(set(ref_ids)) != len(ref_ids):
         duplicate_pair_id = next(ref_id for ref_id in ref_ids if ref_ids.count(ref_id) > 1)
         raise GranuleValidationError(
@@ -174,9 +173,7 @@ def check_granules_intersecting_bounds(job, granule_metadata):
         if not bbox.intersection(bounds):
             bad_granules.append(granule['name'])
     if bad_granules:
-        raise GranuleValidationError(
-            f'The following granules do not intersect the provided bounds: {bad_granules}.'
-        )
+        raise GranuleValidationError(f'The following granules do not intersect the provided bounds: {bad_granules}.')


 def check_same_relative_orbits(job, granule_metadata):
2 changes: 1 addition & 1 deletion apps/disable-private-dns/src/disable_private_dns.py
@@ -28,7 +28,7 @@ def set_private_dns_disabled(endpoint_id):
     response = CLIENT.modify_vpc_endpoint(VpcEndpointId=endpoint_id, PrivateDnsEnabled=False)
     # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2/client/modify_vpc_endpoint.html
     assert response['Return'] is True, response
-    print(f"Private DNS disabled for VPC Endpoint: {endpoint_id}.")
+    print(f'Private DNS disabled for VPC Endpoint: {endpoint_id}.')


 def disable_private_dns(vpc_id, endpoint_name):
40 changes: 23 additions & 17 deletions apps/get-files/src/get_files.py
@@ -40,12 +40,16 @@ def visible_product(product_path: Union[str, Path]) -> bool:


 def get_products(files):
-    return [{
-        'url': item['download_url'],
-        'size': item['size'],
-        'filename': item['filename'],
-        's3': item['s3'],
-    } for item in files if item['file_type'] == 'product' and visible_product(item['filename'])]
+    return [
+        {
+            'url': item['download_url'],
+            'size': item['size'],
+            'filename': item['filename'],
+            's3': item['s3'],
+        }
+        for item in files
+        if item['file_type'] == 'product' and visible_product(item['filename'])
+    ]


 def get_file_urls_by_type(file_list, file_type):
@@ -61,16 +65,18 @@ def organize_files(files_dict, bucket):
     for item in files_dict:
         download_url = get_download_url(bucket, item['Key'])
         file_type = get_object_file_type(bucket, item['Key'])
-        all_files.append({
-            'download_url': download_url,
-            'file_type': file_type,
-            'size': item['Size'],
-            'filename': basename(item['Key']),
-            's3': {
-                'bucket': bucket,
-                'key': item['Key'],
-            },
-        })
+        all_files.append(
+            {
+                'download_url': download_url,
+                'file_type': file_type,
+                'size': item['Size'],
+                'filename': basename(item['Key']),
+                's3': {
+                    'bucket': bucket,
+                    'key': item['Key'],
+                },
+            }
+        )
         if expiration is None and file_type in ['product', 'log']:
             expiration = get_expiration_time(bucket, item['Key'])

@@ -79,7 +85,7 @@ def organize_files(files_dict, bucket):
         'browse_images': get_file_urls_by_type(all_files, 'browse'),
         'thumbnail_images': get_file_urls_by_type(all_files, 'thumbnail'),
         'logs': get_file_urls_by_type(all_files, 'log'),
-        'expiration_time': expiration
+        'expiration_time': expiration,
     }


40 changes: 11 additions & 29 deletions apps/render_cf.py
@@ -37,9 +37,7 @@ def get_state_for_job_step(step: dict, index: int, next_state_name: str, job_spe
         {
             'Catch': [
                 {
-                    'ErrorEquals': [
-                        'States.ALL'
-                    ],
+                    'ErrorEquals': ['States.ALL'],
                     'ResultPath': f'$.results.processing_results.step_{index}',
                     'Next': 'PROCESSING_FAILED',
                 },
@@ -72,8 +70,8 @@ def get_map_state(job_spec: dict, step: dict) -> dict:
             'StartAt': submit_job_state_name,
             'States': {
                 submit_job_state_name: submit_job_state,
-            }
-        }
+            },
+        },
     }


@@ -98,29 +96,16 @@ def get_batch_submit_job_state(job_spec: dict, step: dict, filter_batch_params=F
             'SchedulingPriorityOverride.$': '$.priority',
             parameters_key: batch_job_parameters,
             'ContainerOverrides.$': '$.container_overrides',
-            'RetryStrategy': {
-                'Attempts': 3
-            },
+            'RetryStrategy': {'Attempts': 3},
         },
         'ResultSelector': {
             'StartedAt.$': '$.StartedAt',
             'StoppedAt.$': '$.StoppedAt',
         },
         'Retry': [
-            {
-                'ErrorEquals': [
-                    'Batch.ServerException',
-                    'Batch.AWSBatchException'
-                ],
-                'MaxAttempts': 2
-            },
-            {
-                'ErrorEquals': [
-                    'States.ALL'
-                ],
-                'MaxAttempts': 0
-            }
-        ]
+            {'ErrorEquals': ['Batch.ServerException', 'Batch.AWSBatchException'], 'MaxAttempts': 2},
+            {'ErrorEquals': ['States.ALL'], 'MaxAttempts': 0},
+        ],
     }


@@ -151,11 +136,7 @@ def get_batch_job_parameters(job_spec: dict, step: dict, map_item: str = None) -

 def get_batch_param_names_for_job_step(step: dict) -> set[str]:
     ref_prefix = 'Ref::'
-    return {
-        arg.removeprefix(ref_prefix)
-        for arg in step['command']
-        if arg.startswith(ref_prefix)
-    }
+    return {arg.removeprefix(ref_prefix) for arg in step['command'] if arg.startswith(ref_prefix)}


 def render_templates(job_types: dict, compute_envs: dict, security_environment: str, api_name: str):
@@ -217,7 +198,8 @@ def render_batch_params_by_job_type(job_types: dict) -> None:
 def render_default_params_by_job_type(job_types: dict) -> None:
     default_params_by_job_type = {
         job_type: {
-            key: value['api_schema']['default'] for key, value in job_spec['parameters'].items()
+            key: value['api_schema']['default']
+            for key, value in job_spec['parameters'].items()
             if key not in job_spec['required_parameters']
         }
         for job_type, job_spec in job_types.items()
@@ -255,7 +237,7 @@ def validate_job_spec(job_type: str, job_spec: dict) -> None:
         if actual_param_fields != expected_param_fields:
             raise ValueError(
                 f"parameter '{param_name}' for {job_type} has fields {actual_param_fields} "
-                f"but should have {expected_param_fields}"
+                f'but should have {expected_param_fields}'
             )


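One detail worth noting in the validate_job_spec hunk above: the first f-string keeps its double quotes while the line after it switches to single quotes. That is consistent with a single-quote preference that leaves a string double-quoted when the text already contains a single quote, so no escaping is introduced. A small illustration (hypothetical strings, not from the repository):

param_name = 'bounds'
# Kept double-quoted because the text itself contains single quotes:
first = f"parameter '{param_name}' has unexpected fields "
# Otherwise the single-quote preference applies:
second = 'but should have the documented fields'
print(first + second)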
30 changes: 16 additions & 14 deletions apps/scale-cluster/src/scale_cluster.py
@@ -12,10 +12,7 @@
 def get_time_period(today: date):
     start = today.replace(day=1)
     end = start + dateutil.relativedelta.relativedelta(months=1)
-    return {
-        'Start': str(start),
-        'End': str(end)
-    }
+    return {'Start': str(start), 'End': str(end)}


 def get_month_to_date_spending(today: date):
@@ -41,16 +38,19 @@ def set_max_vcpus(compute_environment_arn: str, target_max_vcpus: int, current_d
             state='ENABLED',
         )
     else:
-        print(f'Disabling {compute_environment_arn}. Current desiredvCpus {current_desired_vcpus} is larger than '
-              f'target maxvCpus {target_max_vcpus}')
+        print(
+            f'Disabling {compute_environment_arn}. Current desiredvCpus {current_desired_vcpus} is larger than '
+            f'target maxvCpus {target_max_vcpus}'
+        )
         BATCH.update_compute_environment(
             computeEnvironment=compute_environment_arn,
             state='DISABLED',
         )


-def get_target_max_vcpus(today, monthly_budget, month_to_date_spending, default_max_vcpus, expanded_max_vcpus,
-                         required_surplus):
+def get_target_max_vcpus(
+    today, monthly_budget, month_to_date_spending, default_max_vcpus, expanded_max_vcpus, required_surplus
+):
     days_in_month = calendar.monthrange(today.year, today.month)[1]
     month_to_date_budget = monthly_budget * today.day / days_in_month
     available_surplus = month_to_date_budget - month_to_date_spending
@@ -68,12 +68,14 @@ def get_target_max_vcpus(today, monthly_budget, month_to_date_spending, default_


 def lambda_handler(event, context):
-    target_max_vcpus = get_target_max_vcpus(today=date.today(),
-                                            monthly_budget=int(environ['MONTHLY_BUDGET']),
-                                            month_to_date_spending=get_month_to_date_spending(date.today()),
-                                            default_max_vcpus=int(environ['DEFAULT_MAX_VCPUS']),
-                                            expanded_max_vcpus=int(environ['EXPANDED_MAX_VCPUS']),
-                                            required_surplus=int(environ['REQUIRED_SURPLUS']))
+    target_max_vcpus = get_target_max_vcpus(
+        today=date.today(),
+        monthly_budget=int(environ['MONTHLY_BUDGET']),
+        month_to_date_spending=get_month_to_date_spending(date.today()),
+        default_max_vcpus=int(environ['DEFAULT_MAX_VCPUS']),
+        expanded_max_vcpus=int(environ['EXPANDED_MAX_VCPUS']),
+        required_surplus=int(environ['REQUIRED_SURPLUS']),
+    )
     current_desired_vcpus = get_current_desired_vcpus(environ['COMPUTE_ENVIRONMENT_ARN'])
     set_max_vcpus(
         compute_environment_arn=environ['COMPUTE_ENVIRONMENT_ARN'],
6 changes: 2 additions & 4 deletions apps/start-execution-manager/src/start_execution_manager.py
@@ -10,9 +10,7 @@


 def invoke_worker(worker_function_arn: str, jobs: list[dict]) -> dict:
-    payload = json.dumps(
-        {'jobs': dynamo.util.convert_decimals_to_numbers(jobs)}
-    )
+    payload = json.dumps({'jobs': dynamo.util.convert_decimals_to_numbers(jobs)})
     return LAMBDA_CLIENT.invoke(
         FunctionName=worker_function_arn,
         InvocationType='Event',
@@ -30,7 +28,7 @@ def lambda_handler(event, context) -> None:

     batch_size = 250
     for i in range(0, len(pending_jobs), batch_size):
-        jobs = pending_jobs[i:i + batch_size]
+        jobs = pending_jobs[i : i + batch_size]
         logger.info(f'Invoking worker for {len(jobs)} jobs')
         response = invoke_worker(worker_function_arn, jobs)
         logger.info(f'Got response status code {response["StatusCode"]}')
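The slice change above, pending_jobs[i:i + batch_size] becoming pending_jobs[i : i + batch_size], follows the PEP 8 rule that when a slice bound is an expression rather than a simple name or number, the colon is treated like a binary operator and gets a space on each side. A short illustration (hypothetical values, not from the repository):

pending_jobs = list(range(1000))
batch_size = 250
i = 0
first = pending_jobs[:batch_size]         # simple bound: compact form
batch = pending_jobs[i : i + batch_size]  # compound bound: spaces around the colon
print(len(first), len(batch))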
5 changes: 3 additions & 2 deletions apps/upload-log/src/upload_log.py
@@ -23,8 +23,9 @@ def get_log_content(log_group, log_stream):
     next_token = None
     while response['nextForwardToken'] != next_token:
         next_token = response['nextForwardToken']
-        response = CLOUDWATCH.get_log_events(logGroupName=log_group, logStreamName=log_stream, startFromHead=True,
-                                             nextToken=next_token)
+        response = CLOUDWATCH.get_log_events(
+            logGroupName=log_group, logStreamName=log_stream, startFromHead=True, nextToken=next_token
+        )
         messages.extend([event['message'] for event in response['events']])

     return '\n'.join(messages)
5 changes: 2 additions & 3 deletions lib/dynamo/dynamo/exceptions.py
@@ -22,14 +22,13 @@ def __init__(self, user_id: str, application_status: str):

 class UnexpectedApplicationStatusError(Exception):
     """Raised for an unexpected user application status."""
+
     help_url = 'https://hyp3-docs.asf.alaska.edu/using/requesting_access'


 class NotStartedApplicationError(UnexpectedApplicationStatusError):
     def __init__(self, user_id: str):
-        super().__init__(
-            f'{user_id} must request access before submitting jobs. Visit {self.help_url}'
-        )
+        super().__init__(f'{user_id} must request access before submitting jobs. Visit {self.help_url}')


 class PendingApplicationError(UnexpectedApplicationStatusError):
15 changes: 7 additions & 8 deletions lib/dynamo/dynamo/jobs.py
@@ -82,12 +82,12 @@ def _raise_for_application_status(application_status: str, user_id: str) -> None


 def _prepare_job_for_database(
-        job: dict,
-        user_id: str,
-        request_time: str,
-        remaining_credits: Optional[Decimal],
-        priority_override: Optional[int],
-        running_cost: Decimal,
+    job: dict,
+    user_id: str,
+    request_time: str,
+    remaining_credits: Optional[Decimal],
+    priority_override: Optional[int],
+    running_cost: Decimal,
 ) -> dict:
     if priority_override:
         priority = priority_override
@@ -107,7 +107,7 @@ def _prepare_job_for_database(
     if 'job_type' in prepared_job:
         prepared_job['job_parameters'] = {
             **DEFAULT_PARAMS_BY_JOB_TYPE[prepared_job['job_type']],
-            **prepared_job.get('job_parameters', {})
+            **prepared_job.get('job_parameters', {}),
         }
         prepared_job['credit_cost'] = _get_credit_cost(prepared_job, COSTS)
     else:
@@ -119,7 +119,6 @@ def _get_credit_cost(job: dict, costs: list[dict]) -> Decimal:
     job_type = job['job_type']
     for cost_definition in costs:
        if cost_definition['job_type'] == job_type:
-
            if cost_definition.keys() not in ({'job_type', 'cost_parameter', 'cost_table'}, {'job_type', 'cost'}):
                raise ValueError(f'Cost definition for job type {job_type} has invalid keys: {cost_definition.keys()}')
