
Commit

Apply suggestions from code review
Co-authored-by: kirkrodrigues <[email protected]>
haiqi96 and kirkrodrigues authored Jan 16, 2025
1 parent 996692e commit 351f239
Showing 4 changed files with 16 additions and 14 deletions.
@@ -43,16 +43,16 @@ def _generate_logs_list(
                     # Skip empty paths
                     continue
                 resolved_path = pathlib.Path(stripped_path_str).resolve()
-                mounted_path = CONTAINER_INPUT_LOGS_ROOT_DIR / pathlib.Path(
-                    resolved_path
-                ).relative_to(resolved_path.anchor)
+                mounted_path = CONTAINER_INPUT_LOGS_ROOT_DIR / resolved_path.relative_to(
+                    resolved_path.anchor
+                )
                 container_logs_list_file.write(f"{mounted_path}\n")

         for path in parsed_args.paths:
             resolved_path = pathlib.Path(path).resolve()
-            mounted_path = CONTAINER_INPUT_LOGS_ROOT_DIR / pathlib.Path(
-                resolved_path
-            ).relative_to(resolved_path.anchor)
+            mounted_path = CONTAINER_INPUT_LOGS_ROOT_DIR / resolved_path.relative_to(
+                resolved_path.anchor
+            )
             container_logs_list_file.write(f"{mounted_path}\n")

     elif InputType.S3 == input_type:
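
For context, a minimal sketch of what the simplified expression computes: the resolved host path is re-rooted under the container's input-logs directory by stripping its anchor (the leading `/` or drive). Only the name `CONTAINER_INPUT_LOGS_ROOT_DIR` comes from the diff; the mount-point value and sample path below are assumptions for illustration.

import pathlib

CONTAINER_INPUT_LOGS_ROOT_DIR = pathlib.Path("/mnt/logs")  # assumed mount point

resolved_path = pathlib.Path("/var/log/app/app.log").resolve()
# resolved_path is already a pathlib.Path, so the old pathlib.Path(resolved_path)
# wrapper was redundant; relative_to() can be called on it directly.
mounted_path = CONTAINER_INPUT_LOGS_ROOT_DIR / resolved_path.relative_to(resolved_path.anchor)
print(mounted_path)  # /mnt/logs/var/log/app/app.log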
@@ -93,7 +93,7 @@ def _generate_compress_cmd(
     if parsed_args.aws_credentials_file:
         default_credentials_user = "default"
         aws_access_key_id, aws_secret_access_key = parse_aws_credentials_file(
-            pathlib.Path(parsed_args.aws_credentials_file, default_credentials_user)
+            pathlib.Path(parsed_args.aws_credentials_file), default_credentials_user
         )
         if aws_access_key_id and aws_secret_access_key:
             compress_cmd.append("--aws-access-key-id")
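
The fix above changes argument grouping: previously `default_credentials_user` was passed as a second segment to `pathlib.Path(...)`, yielding a path like `<credentials_file>/default` instead of being passed as the function's `user` argument. A short sketch of the difference, with an assumed credentials path:

import pathlib

credentials_file = "~/.aws/credentials"  # assumed path for illustration
default_credentials_user = "default"

# Old (buggy): "default" becomes a path component rather than the user argument.
print(pathlib.Path(credentials_file, default_credentials_user))  # ~/.aws/credentials/default

# New: the path and the user are passed as separate arguments.
# parse_aws_credentials_file(pathlib.Path(credentials_file), default_credentials_user)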
@@ -170,7 +170,8 @@ def main(argv):
     default_config_file_path = clp_home / CLP_DEFAULT_CONFIG_FILE_RELATIVE_PATH

     args_parser = argparse.ArgumentParser(description="Compresses logs")
-    # package-level config option
+
+    # Package-level config option
     args_parser.add_argument(
         "--config",
         "-c",
@@ -197,7 +197,8 @@ def main(argv):
     clp_home = get_clp_home()
     default_config_file_path = clp_home / CLP_DEFAULT_CONFIG_FILE_RELATIVE_PATH
     args_parser = argparse.ArgumentParser(description="Compresses logs")
-    # package-level config option
+
+    # Package-level config option
     args_parser.add_argument(
         "--config",
         "-c",
components/clp-py-utils/clp_py_utils/s3_utils.py (8 changes: 4 additions & 4 deletions)
@@ -18,7 +18,7 @@ def parse_aws_credentials_file(
     credentials_file_path: Path, user: str = "default"
 ) -> Tuple[str, str]:
     """
-    Parses the `aws_access_key_id` and `aws_secret_access_key` of 'user' from the given
+    Parses the `aws_access_key_id` and `aws_secret_access_key` of `user` from the given
     credentials_file_path.
     :param credentials_file_path:
     :param user:
@@ -37,7 +37,7 @@ def parse_aws_credentials_file(

     user_credentials = config_reader[user]
     if "aws_session_token" in user_credentials:
-        raise ValueError(f"Short-term credentials with session token is not supported.")
+        raise ValueError(f"Session tokens (short-term credentials) are not supported.")

     aws_access_key_id = user_credentials.get("aws_access_key_id")
     aws_secret_access_key = user_credentials.get("aws_secret_access_key")
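
For reference, a hedged sketch of the kind of INI-style credentials file this function reads via configparser; the profile name matches the default `user` and the key values are placeholders. A profile that also contained `aws_session_token` would hit the ValueError above.

import configparser

sample = """
[default]
aws_access_key_id = AKIAEXAMPLEKEYID
aws_secret_access_key = examplesecretkey
"""

config_reader = configparser.ConfigParser()
config_reader.read_string(sample)
user_credentials = config_reader["default"]
print(user_credentials.get("aws_access_key_id"))  # AKIAEXAMPLEKEYID
print("aws_session_token" in user_credentials)    # False, so no ValueError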
@@ -146,8 +146,8 @@ def s3_put(
     :param src_file: Local file to upload.
     :param dest_file_name: The name for the uploaded file in the S3 bucket.
     :param total_max_attempts: Maximum number of retry attempts for the upload.
-    :raises: ValueError if `src_file` doesn't exist, doesn't resolve to a file or
-    is larger than the s3_put limit.
+    :raises: ValueError if `src_file` doesn't exist, doesn't resolve to a file or is larger than the
+    S3 PutObject limit.
     :raises: Propagates `boto3.client`'s exceptions.
     :raises: Propagates `boto3.client.put_object`'s exceptions.
     """
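
As a rough illustration of the ValueError conditions the updated docstring lists (a sketch only, not the actual s3_put body; the helper name is hypothetical, and 5 GiB is the standard single-request S3 PutObject size cap):

from pathlib import Path

S3_PUT_OBJECT_SIZE_LIMIT = 5 * 1024**3  # 5 GiB single PutObject request cap

def _validate_src_file(src_file: Path) -> None:
    # Mirrors the docstring: exists, resolves to a regular file, fits in one PutObject.
    if not src_file.exists():
        raise ValueError(f"{src_file} doesn't exist.")
    if not src_file.resolve().is_file():
        raise ValueError(f"{src_file} doesn't resolve to a file.")
    if src_file.stat().st_size > S3_PUT_OBJECT_SIZE_LIMIT:
        raise ValueError(f"{src_file} is larger than the S3 PutObject limit.")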
@@ -276,7 +276,7 @@ def run_clp(
         logger.error(f"Unsupported storage engine {clp_storage_engine}")
         return False, {"error_message": f"Unsupported storage engine {clp_storage_engine}"}

-    # generate list of logs to compress
+    # Generate list of logs to compress
     input_type = clp_config.input.type
     logs_list_path = data_dir / f"{instance_id_str}-log-paths.txt"
     if InputType.FS == input_type:
