
Error Converting max_pool2d #2456

Open
TobyRoseman opened this issue Feb 24, 2025 · 0 comments
Labels
bug: Unexpected behaviour that should be corrected (type)
PyTorch (traced)
triaged: Reviewed and examined, release has been assigned if applicable (status)

Comments

@TobyRoseman (Collaborator)

🐞Describing the bug

import coremltools as ct
import torch
import torch.nn.functional as F


class Model(torch.nn.Module):
    def __init__(self, window_size=3):
        super().__init__()
        self.window_size = window_size

    def forward(self, image):
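        # Keep only the indices output of max pooling (return_indices=True).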
        _, ixs = F.max_pool2d(
            image,
            kernel_size=self.window_size,
            stride=1,
            padding=self.window_size // 2,
            return_indices=True,
        )
        return ixs

pytorch_model = Model()
example = torch.rand(1, 3, 224, 224)

pytorch_model.eval()
traced = torch.jit.trace(pytorch_model, example)

model = ct.convert(traced, inputs=[ct.TensorType(shape=example.shape)])
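
For context (an untested sketch, not part of the original report): the failure appears specific to the indices output. Presumably the same pooling without return_indices=True converts cleanly; ValuesOnlyModel below is a hypothetical name introduced only for this comparison.

class ValuesOnlyModel(torch.nn.Module):
    def __init__(self, window_size=3):
        super().__init__()
        self.window_size = window_size

    def forward(self, image):
        # Same pooling, but return only the values (no indices).
        return F.max_pool2d(
            image,
            kernel_size=self.window_size,
            stride=1,
            padding=self.window_size // 2,
        )

values_only = torch.jit.trace(ValuesOnlyModel().eval(), example)
# Assumed to convert without error, since only the indices output appears unsupported.
ct.convert(values_only, inputs=[ct.TensorType(shape=example.shape)])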

Stack Trace

----> 1 model = ct.convert(traced, inputs=[ct.TensorType(shape=example.shape)])

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/_converters_entry.py:635, in convert(model, source, inputs, outputs, classifier_config, minimum_deployment_target, convert_to, compute_precision, skip_model_load, compute_units, package_dir, debug, pass_pipeline, states)
    632 if len(states) > 0 and exact_source != "pytorch":
    633     raise ValueError("'states' can only be passed with pytorch source model.")
--> 635 mlmodel = mil_convert(
    636     model,
    637     convert_from=exact_source,
    638     convert_to=exact_target,
    639     inputs=inputs,
    640     outputs=outputs_as_tensor_or_image_types,  # None or list[ct.ImageType/ct.TensorType]
    641     classifier_config=classifier_config,
    642     skip_model_load=skip_model_load,
    643     compute_units=compute_units,
    644     package_dir=package_dir,
    645     debug=debug,
    646     specification_version=specification_version,
    647     main_pipeline=pass_pipeline,
    648     use_default_fp16_io=use_default_fp16_io,
    649     states=states,
    650 )
    652 if exact_target == "mlprogram" and mlmodel._input_has_infinite_upper_bound():
    653     raise ValueError(
    654         "For mlprogram, inputs with infinite upper_bound is not allowed. Please set upper_bound"
    655         ' to a positive value in "RangeDim()" for the "inputs" param in ct.convert().'
    656     )

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/converter.py:186, in mil_convert(model, convert_from, convert_to, compute_units, **kwargs)
    147 @_profile
    148 def mil_convert(
    149     model,
   (...)
    153     **kwargs
    154 ):
    155     """
    156     Convert model from a specified frontend `convert_from` to a specified
    157     converter backend `convert_to`.
   (...)
    184         See `coremltools.converters.convert`
    185     """
--> 186     return _mil_convert(
    187         model,
    188         convert_from,
    189         convert_to,
    190         ConverterRegistry,
    191         ct.models.MLModel,
    192         compute_units,
    193         **kwargs,
    194     )

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/converter.py:218, in _mil_convert(model, convert_from, convert_to, registry, modelClass, compute_units, **kwargs)
    215     weights_dir = _tempfile.TemporaryDirectory()
    216     kwargs["weights_dir"] = weights_dir.name
--> 218 proto, mil_program = mil_convert_to_proto(
    219                         model,
    220                         convert_from,
    221                         convert_to,
    222                         registry,
    223                         **kwargs
    224                      )
    226 _reset_conversion_state()
    228 if convert_to == 'milinternal':

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/converter.py:294, in mil_convert_to_proto(model, convert_from, convert_to, converter_registry, main_pipeline, **kwargs)
    289 frontend_pipeline, backend_pipeline = _construct_other_pipelines(
    290     main_pipeline, convert_from, convert_to
    291 )
    293 frontend_converter = frontend_converter_type()
--> 294 prog = frontend_converter(model, **kwargs)
    295 PassPipelineManager.apply_pipeline(prog, frontend_pipeline)
    297 PassPipelineManager.apply_pipeline(prog, main_pipeline)

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/converter.py:106, in TorchFrontend.__call__(self, *args, **kwargs)
    103 def __call__(self, *args, **kwargs):
    104     from .frontend.torch.load import load
--> 106     return load(*args, **kwargs)

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/frontend/torch/load.py:88, in load(spec, inputs, specification_version, debug, outputs, cut_at_symbols, use_default_fp16_io, states, **kwargs)
     76     model = _torchscript_from_spec(spec)
     78 converter = TorchConverter(
     79     model,
     80     inputs,
   (...)
     85     states,
     86 )
---> 88 return _perform_torch_convert(converter, debug)

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/frontend/torch/load.py:151, in _perform_torch_convert(converter, debug)
    149 def _perform_torch_convert(converter: TorchConverter, debug: bool) -> Program:
    150     try:
--> 151         prog = converter.convert()
    152     except RuntimeError as e:
    153         if debug and "convert function" in str(e):

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/frontend/torch/converter.py:1402, in TorchConverter.convert(self)
   1397             cast_value = mb.cast(
   1398                 x=output_var, dtype=builtin_to_string(buffer_var.dtype)
   1399             )
   1400             mb.coreml_update_state(state=buffer_var, value=cast_value)
-> 1402 graph_outputs = [self.context[name] for name in self.graph.outputs]
   1404 # An output can be None when it's a None constant, which happens
   1405 # in Fairseq MT.
   1406 for g in graph_outputs:

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/frontend/torch/converter.py:1402, in <listcomp>(.0)
   1397             cast_value = mb.cast(
   1398                 x=output_var, dtype=builtin_to_string(buffer_var.dtype)
   1399             )
   1400             mb.coreml_update_state(state=buffer_var, value=cast_value)
-> 1402 graph_outputs = [self.context[name] for name in self.graph.outputs]
   1404 # An output can be None when it's a None constant, which happens
   1405 # in Fairseq MT.
   1406 for g in graph_outputs:

File ~/miniconda3/envs/prod/lib/python3.10/site-packages/coremltools/converters/mil/frontend/torch/converter.py:485, in TranscriptionContext.__getitem__(self, torch_name)
    483     if torch_name in current_graph:
    484         return self._current_graph[idx][torch_name]
--> 485 raise ValueError(f"Torch var {torch_name} not found in context {self.name}")

ValueError: Torch var 16 not found in context 
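
A hedged reading of the error (an assumption, not confirmed in this report): tracing F.max_pool2d with return_indices=True emits an aten::max_pool2d_with_indices node with two outputs, and the ValueError suggests the converter never registers the second (indices) output, so the graph output name 16 has no entry in the transcription context. The node can be inspected in the traced graph:

# Prints the TorchScript graph of the trace above; look for
# aten::max_pool2d_with_indices and its second (indices) output.
print(traced.graph)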
TobyRoseman added the bug, PyTorch (traced), and triaged labels on Feb 24, 2025