feat: New param to remove memory limit in invoke #7892

Merged · 2 commits · Feb 15, 2025
Changes from 1 commit
8 changes: 7 additions & 1 deletion samcli/commands/local/cli_common/invoke_context.py
@@ -102,6 +102,7 @@ def __init__(
add_host: Optional[dict] = None,
invoke_images: Optional[str] = None,
mount_symlinks: Optional[bool] = False,
no_mem_limit: Optional[bool] = False,
) -> None:
"""
Initialize the context
@@ -202,6 +203,7 @@ def __init__(
self._debug_function = debug_function

self._mount_symlinks: Optional[bool] = mount_symlinks
self._no_mem_limit = no_mem_limit

# Note(xinhol): despite self._function_provider and self._stacks are initialized as None
# they will be assigned with a non-None value in __enter__() and
@@ -412,9 +414,13 @@ def lambda_runtime(self) -> LambdaRuntime:
self._container_manager,
image_builder,
mount_symlinks=self._mount_symlinks,
no_mem_limit=self._no_mem_limit,
),
ContainersMode.COLD: LambdaRuntime(
self._container_manager, image_builder, mount_symlinks=self._mount_symlinks
self._container_manager,
image_builder,
mount_symlinks=self._mount_symlinks,
no_mem_limit=self._no_mem_limit,
),
}
return self._lambda_runtimes[self._containers_mode]
10 changes: 10 additions & 0 deletions samcli/commands/local/invoke/cli.py
@@ -66,6 +66,12 @@
)
@click.option("--no-event", is_flag=True, default=True, help="DEPRECATED: By default no event is assumed.", hidden=True)
@mount_symlinks_option
@click.option(
"--no-memory-limit",
default=False,
is_flag=True,
help="Remove the Memory limit during local invoke. This runs the container without the --memory parameter",
Contributor:

Are we doing this only for sam local invoke? I think the issue happens for sam local start-api and sam local start-lambda as well.

The message here is specific to local invoke. Suggesting a more generic message:
"Remove the memory limit when running the Docker container."

Contributor (Author):

Good point. We might need the same for the other commands. I'll update that, also with a generic message.

)
@invoke_common_options
@local_common_options
@cli_framework_options
@@ -106,6 +112,7 @@ def cli(
skip_prepare_infra,
terraform_plan_file,
mount_symlinks,
no_memory_limit,
):
"""
`sam local invoke` command entry point
@@ -137,6 +144,7 @@
invoke_image,
hook_name,
mount_symlinks,
no_memory_limit,
) # pragma: no cover


@@ -165,6 +173,7 @@ def do_cli( # pylint: disable=R0914
invoke_image,
hook_name,
mount_symlinks,
no_mem_limit,
):
"""
Implementation of the ``cli`` method, just separated out for unit testing purposes
@@ -214,6 +223,7 @@ def do_cli( # pylint: disable=R0914
add_host=add_host,
invoke_images=processed_invoke_images,
mount_symlinks=mount_symlinks,
no_mem_limit=no_mem_limit,
) as context:
# Invoke the function
context.local_lambda_runner.invoke(
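For reference, a minimal usage sketch of the new flag once this change is installed. Only the --no-memory-limit flag name comes from the diff above; the function name and event file are illustrative, and `sam` is assumed to be on PATH.

```python
# Illustrative only: invoke a built function locally without a Docker memory cap.
# HelloWorldFunction and events/event.json are hypothetical names, not part of this PR.
import subprocess

subprocess.run(
    [
        "sam", "local", "invoke", "HelloWorldFunction",
        "--event", "events/event.json",
        "--no-memory-limit",
    ],
    check=True,
)
```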
1 change: 1 addition & 0 deletions samcli/commands/local/invoke/core/options.py
@@ -36,6 +36,7 @@
"add_host",
"invoke_image",
"mount_symlinks",
"no_memory_limit",
]

CONFIGURATION_OPTION_NAMES: List[str] = ["config_env", "config_file"] + SAVE_PARAMS_OPTIONS
9 changes: 5 additions & 4 deletions samcli/local/lambdafn/runtime.py
@@ -35,7 +35,7 @@ class LambdaRuntime:

SUPPORTED_ARCHIVE_EXTENSIONS = (".zip", ".jar", ".ZIP", ".JAR")

def __init__(self, container_manager, image_builder, mount_symlinks=False):
def __init__(self, container_manager, image_builder, mount_symlinks=False, no_mem_limit=False):
"""
Initialize the Local Lambda runtime

@@ -53,6 +53,7 @@ def __init__(self, container_manager, image_builder, mount_symlinks=False):
self._temp_uncompressed_paths_to_be_cleaned = []
self._lock = threading.Lock()
self._mount_symlinks = mount_symlinks
self._no_mem_limit = no_mem_limit

def create(
self, function_config, debug_context=None, container_host=None, container_host_interface=None, extra_hosts=None
@@ -106,7 +107,7 @@ def create(
layers,
self._image_builder,
function_config.architecture,
memory_mb=function_config.memory,
memory_mb=(None if self._no_mem_limit else function_config.memory),
env_vars=env_vars,
debug_options=debug_context,
container_host=container_host,
@@ -394,7 +395,7 @@ class WarmLambdaRuntime(LambdaRuntime):
warm containers life cycle.
"""

def __init__(self, container_manager, image_builder, observer=None, mount_symlinks=False):
def __init__(self, container_manager, image_builder, observer=None, mount_symlinks=False, no_mem_limit=False):
"""
Initialize the Local Lambda runtime

@@ -412,7 +413,7 @@ def __init__(self, container_manager, image_builder, observer=None, mount_symlin

self._observer = observer if observer else LambdaFunctionObserver(self._on_code_change)

super().__init__(container_manager, image_builder, mount_symlinks=mount_symlinks)
super().__init__(container_manager, image_builder, mount_symlinks=mount_symlinks, no_mem_limit=no_mem_limit)

def create(
self, function_config, debug_context=None, container_host=None, container_host_interface=None, extra_hosts=None
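The key behavioral change above is that `memory_mb` becomes `None` when `no_mem_limit` is set. A minimal sketch of how a missing memory value can translate into a container with no memory cap, assuming docker-py; the helper below is illustrative and is not the actual samcli.local.docker.container.Container implementation.

```python
# Minimal sketch, assuming docker-py. Illustrative helper, not SAM CLI's own
# container code: when memory_mb is None, no mem_limit is passed to Docker,
# which is the equivalent of running `docker run` without the --memory flag.
from typing import Optional

import docker


def run_lambda_container(image: str, memory_mb: Optional[int]):
    client = docker.from_env()
    kwargs = {"detach": True}
    if memory_mb:
        kwargs["mem_limit"] = f"{memory_mb}m"  # e.g. "128m"
    return client.containers.run(image, **kwargs)
```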
7 changes: 6 additions & 1 deletion schema/samcli.json
@@ -406,7 +406,7 @@
"properties": {
"parameters": {
"title": "Parameters for the local invoke command",
"description": "Available parameters for the local invoke command:\n* terraform_plan_file:\nUsed for passing a custom plan file when executing the Terraform hook.\n* hook_name:\nHook package id to extend AWS SAM CLI commands functionality. \n\nExample: `terraform` to extend AWS SAM CLI commands functionality to support terraform applications. \n\nAvailable Hook Names: ['terraform']\n* skip_prepare_infra:\nSkip preparation stage when there are no infrastructure changes. Only used in conjunction with --hook-name.\n* event:\nJSON file containing event data passed to the Lambda function during invoke. If this option is not specified, no event is assumed. Pass in the value '-' to input JSON via stdin\n* no_event:\nDEPRECATED: By default no event is assumed.\n* mount_symlinks:\nSpecify if symlinks at the top level of the code should be mounted inside the container. Activating this flag could allow access to locations outside of your workspace by using a symbolic link. By default symlinks are not mounted.\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* env_vars:\nJSON file containing values for Lambda function's environment variables.\n* parameter_overrides:\nString that contains AWS CloudFormation parameter overrides encoded as key=value pairs.\n* debug_port:\nWhen specified, Lambda function container will start in debug mode and will expose this port on localhost.\n* debugger_path:\nHost path to a debugger that will be mounted into the Lambda container.\n* debug_args:\nAdditional arguments to be passed to the debugger.\n* container_env_vars:\nJSON file containing additional environment variables to be set within the container when used in a debugging session locally.\n* docker_volume_basedir:\nSpecify the location basedir where the SAM template exists. If Docker is running on a remote machine, Path of the SAM template must be mounted on the Docker machine and modified to match the remote machine.\n* log_file:\nFile to capture output logs.\n* layer_cache_basedir:\nSpecify the location basedir where the lambda layers used by the template will be downloaded to.\n* skip_pull_image:\nSkip pulling down the latest Docker image for Lambda runtime.\n* docker_network:\nName or ID of an existing docker network for AWS Lambda docker containers to connect to, along with the default bridge network. If not specified, the Lambda containers will only connect to the default bridge docker network.\n* force_image_build:\nForce rebuilding the image used for invoking functions with layers.\n* shutdown:\nEmulate a shutdown event after invoke completes, to test extension handling of shutdown behavior.\n* container_host:\nHost of locally emulated Lambda container. This option is useful when the container runs on a different host than AWS SAM CLI. For example, if one wants to run AWS SAM CLI in a Docker container on macOS, this option could specify `host.docker.internal`\n* container_host_interface:\nIP address of the host network interface that container ports should bind to. Use 0.0.0.0 to bind to all interfaces.\n* add_host:\nPasses a hostname to IP address mapping to the Docker container's host file. This parameter can be passed multiple times.Example:--add-host example.com:127.0.0.1\n* invoke_image:\nContainer image URIs for invoking functions or starting api and function. One can specify the image URI used for the local function invocation (--invoke-image public.ecr.aws/sam/build-nodejs20.x:latest). 
One can also specify for each individual function with (--invoke-image Function1=public.ecr.aws/sam/build-nodejs20.x:latest). If a function does not have invoke image specified, the default AWS SAM CLI emulation image will be used.\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* save_params:\nSave the parameters provided via the command line to the configuration file.",
"description": "Available parameters for the local invoke command:\n* terraform_plan_file:\nUsed for passing a custom plan file when executing the Terraform hook.\n* hook_name:\nHook package id to extend AWS SAM CLI commands functionality. \n\nExample: `terraform` to extend AWS SAM CLI commands functionality to support terraform applications. \n\nAvailable Hook Names: ['terraform']\n* skip_prepare_infra:\nSkip preparation stage when there are no infrastructure changes. Only used in conjunction with --hook-name.\n* event:\nJSON file containing event data passed to the Lambda function during invoke. If this option is not specified, no event is assumed. Pass in the value '-' to input JSON via stdin\n* no_event:\nDEPRECATED: By default no event is assumed.\n* mount_symlinks:\nSpecify if symlinks at the top level of the code should be mounted inside the container. Activating this flag could allow access to locations outside of your workspace by using a symbolic link. By default symlinks are not mounted.\n* no_memory_limit:\nRemove the Memory limit during local invoke. This runs the container without the --memory parameter\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* env_vars:\nJSON file containing values for Lambda function's environment variables.\n* parameter_overrides:\nString that contains AWS CloudFormation parameter overrides encoded as key=value pairs.\n* debug_port:\nWhen specified, Lambda function container will start in debug mode and will expose this port on localhost.\n* debugger_path:\nHost path to a debugger that will be mounted into the Lambda container.\n* debug_args:\nAdditional arguments to be passed to the debugger.\n* container_env_vars:\nJSON file containing additional environment variables to be set within the container when used in a debugging session locally.\n* docker_volume_basedir:\nSpecify the location basedir where the SAM template exists. If Docker is running on a remote machine, Path of the SAM template must be mounted on the Docker machine and modified to match the remote machine.\n* log_file:\nFile to capture output logs.\n* layer_cache_basedir:\nSpecify the location basedir where the lambda layers used by the template will be downloaded to.\n* skip_pull_image:\nSkip pulling down the latest Docker image for Lambda runtime.\n* docker_network:\nName or ID of an existing docker network for AWS Lambda docker containers to connect to, along with the default bridge network. If not specified, the Lambda containers will only connect to the default bridge docker network.\n* force_image_build:\nForce rebuilding the image used for invoking functions with layers.\n* shutdown:\nEmulate a shutdown event after invoke completes, to test extension handling of shutdown behavior.\n* container_host:\nHost of locally emulated Lambda container. This option is useful when the container runs on a different host than AWS SAM CLI. For example, if one wants to run AWS SAM CLI in a Docker container on macOS, this option could specify `host.docker.internal`\n* container_host_interface:\nIP address of the host network interface that container ports should bind to. Use 0.0.0.0 to bind to all interfaces.\n* add_host:\nPasses a hostname to IP address mapping to the Docker container's host file. This parameter can be passed multiple times.Example:--add-host example.com:127.0.0.1\n* invoke_image:\nContainer image URIs for invoking functions or starting api and function. 
One can specify the image URI used for the local function invocation (--invoke-image public.ecr.aws/sam/build-nodejs20.x:latest). One can also specify for each individual function with (--invoke-image Function1=public.ecr.aws/sam/build-nodejs20.x:latest). If a function does not have invoke image specified, the default AWS SAM CLI emulation image will be used.\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* save_params:\nSave the parameters provided via the command line to the configuration file.",
"type": "object",
"properties": {
"terraform_plan_file": {
@@ -440,6 +440,11 @@
"type": "boolean",
"description": "Specify if symlinks at the top level of the code should be mounted inside the container. Activating this flag could allow access to locations outside of your workspace by using a symbolic link. By default symlinks are not mounted."
},
"no_memory_limit": {
"title": "no_memory_limit",
"type": "boolean",
"description": "Remove the Memory limit during local invoke. This runs the container without the --memory parameter"
},
"template_file": {
"title": "template_file",
"type": "string",
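A small sanity check of the schema fragment added above; the fragment is copied from the diff, and the jsonschema package is assumed to be available. This is only an illustration, not a test included in this PR.

```python
# Illustrative only: the new no_memory_limit key validates as a boolean
# against the fragment added to schema/samcli.json above.
from jsonschema import validate

fragment = {
    "type": "object",
    "properties": {
        "no_memory_limit": {"title": "no_memory_limit", "type": "boolean"},
    },
}

validate(instance={"no_memory_limit": True}, schema=fragment)
```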
25 changes: 20 additions & 5 deletions tests/unit/commands/local/cli_common/test_invoke_context.py
@@ -611,6 +611,7 @@ def test_must_create_runner(
aws_profile="profile",
aws_region="region",
mount_symlinks=True,
no_mem_limit=True,
)
self.context.get_cwd = Mock()
self.context.get_cwd.return_value = cwd
@@ -629,7 +630,9 @@
result = self.context.local_lambda_runner
self.assertEqual(result, runner_mock)

LambdaRuntimeMock.assert_called_with(container_manager_mock, image_mock, mount_symlinks=True)
LambdaRuntimeMock.assert_called_with(
container_manager_mock, image_mock, mount_symlinks=True, no_mem_limit=True
)
lambda_image_patch.assert_called_once_with(download_mock, True, True, invoke_images=None)
LocalLambdaMock.assert_called_with(
local_runtime=runtime_mock,
@@ -694,6 +697,7 @@ def test_must_create_runner_using_warm_containers(
aws_region="region",
warm_container_initialization_mode=ContainersInitializationMode.EAGER,
mount_symlinks=False,
no_mem_limit=False,
)
self.context.get_cwd = Mock()
self.context.get_cwd.return_value = cwd
@@ -712,7 +716,9 @@
result = self.context.local_lambda_runner
self.assertEqual(result, runner_mock)

WarmLambdaRuntimeMock.assert_called_with(container_manager_mock, image_mock, mount_symlinks=False)
WarmLambdaRuntimeMock.assert_called_with(
container_manager_mock, image_mock, mount_symlinks=False, no_mem_limit=False
)
lambda_image_patch.assert_called_once_with(download_mock, True, True, invoke_images=None)
LocalLambdaMock.assert_called_with(
local_runtime=runtime_mock,
@@ -783,6 +789,7 @@ def test_must_create_runner_with_container_host_option(
container_host="abcdef",
container_host_interface="192.168.100.101",
mount_symlinks=False,
no_mem_limit=False,
)
self.context.get_cwd = Mock()
self.context.get_cwd.return_value = cwd
@@ -801,7 +808,9 @@
result = self.context.local_lambda_runner
self.assertEqual(result, runner_mock)

LambdaRuntimeMock.assert_called_with(container_manager_mock, image_mock, mount_symlinks=False)
LambdaRuntimeMock.assert_called_with(
container_manager_mock, image_mock, mount_symlinks=False, no_mem_limit=False
)
lambda_image_patch.assert_called_once_with(download_mock, True, True, invoke_images=None)
LocalLambdaMock.assert_called_with(
local_runtime=runtime_mock,
@@ -872,6 +881,7 @@ def test_must_create_runner_with_extra_hosts_option(
container_host="abcdef",
add_host={"prod-na.host": "10.11.12.13", "gamma-na.host": "10.22.23.24"},
mount_symlinks=False,
no_mem_limit=False,
)
self.context.get_cwd = Mock()
self.context.get_cwd.return_value = cwd
@@ -890,7 +900,9 @@
result = self.context.local_lambda_runner
self.assertEqual(result, runner_mock)

LambdaRuntimeMock.assert_called_with(container_manager_mock, image_mock, mount_symlinks=False)
LambdaRuntimeMock.assert_called_with(
container_manager_mock, image_mock, mount_symlinks=False, no_mem_limit=False
)
lambda_image_patch.assert_called_once_with(download_mock, True, True, invoke_images=None)
LocalLambdaMock.assert_called_with(
local_runtime=runtime_mock,
@@ -963,6 +975,7 @@ def test_must_create_runner_with_invoke_image_option(
aws_region="region",
invoke_images={None: "image"},
mount_symlinks=False,
no_mem_limit=False,
)
self.context.get_cwd = Mock()
self.context.get_cwd.return_value = cwd
@@ -981,7 +994,9 @@
result = self.context.local_lambda_runner
self.assertEqual(result, runner_mock)

LambdaRuntimeMock.assert_called_with(container_manager_mock, image_mock, mount_symlinks=False)
LambdaRuntimeMock.assert_called_with(
container_manager_mock, image_mock, mount_symlinks=False, no_mem_limit=False
)
lambda_image_patch.assert_called_once_with(download_mock, True, True, invoke_images={None: "image"})
LocalLambdaMock.assert_called_with(
local_runtime=runtime_mock,