PyTorch slice causes runtime error #387

@KeKsBoTer

Description

Passing a PyTorch tensor slice to a SlangPy function causes a runtime error.

Python / PyTorch code:

import torch
import slangpy as spy

device = spy.create_device(type=spy.DeviceType.automatic, enable_cuda_interop=True)
module = spy.TorchModule.load_from_file(device, "vecmul.slang")

a = torch.rand(100).cuda()
b = torch.rand(4).cuda()

module.vecmul(a,b[:3])

Cloning the sliced tensor before passing it to vecmul is a workaround:

module.vecmul(a,b[:3].clone())
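
The difference is presumably that b[:3] is a view that still shares the full 4-element storage of b, while clone() allocates fresh storage sized for just the 3 elements. A minimal check of that assumption in plain PyTorch (no SlangPy involved):

import torch

b = torch.rand(4).cuda()
view = b[:3]

# The slice is a view: it shares the base tensor's full 16-byte storage.
print(view.shape)                               # torch.Size([3])
print(view.untyped_storage().nbytes())          # 16
print(view.clone().untyped_storage().nbytes())  # 12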

vecmul.slang:

float vecmul(float a, float[3] b)
{
    float result = 0.0f;
    for (int i = 0; i < 3; i++) {
        result += b[i] * a;
    }
    return result;
}

Error:

RuntimeError: size does not match, self was 12 bytes but src was 16 bytes

RuntimeError Traceback (most recent call last)
Cell In[1], line 10
7 a = torch.rand(100).cuda()
8 b = torch.rand(4).cuda()
---> 10 module.vecmul(a,b[:3])

File ~/miniconda3/envs/my_env/lib/python3.13/site-packages/torch/nn/modules/module.py:1751, in Module._wrapped_call_impl(self, *args, **kwargs)
1749 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1750 else:
-> 1751 return self._call_impl(*args, **kwargs)

File ~/miniconda3/envs/my_env/lib/python3.13/site-packages/torch/nn/modules/module.py:1762, in Module._call_impl(self, *args, **kwargs)
1757 # If we don't have any hooks, we want to skip the rest of the logic in
1758 # this function, and just call forward.
1759 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1760 or _global_backward_pre_hooks or _global_backward_hooks
1761 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1762 return forward_call(*args, **kwargs)
1764 result = None
1765 called_always_called_hooks = set()

File ~/miniconda3/envs/my_env/lib/python3.13/site-packages/slangpy/torchintegration/torchfunction.py:243, in TorchFunction.forward(self, *args, **kwargs)
240 self.function.module.device.sync_to_cuda(stream)
242 # Get the result
--> 243 result = self.function(*unpacked_args, **unpacked_kwargs)
245 if isinstance(result, WrappedTensor):
246 assert result.primal is not None

File ~/miniconda3/envs/my_env/lib/python3.13/site-packages/slangpy/core/function.py:329, in FunctionNode.call(self, *args, **kwargs)
325 def call(self, *args: Any, **kwargs: Any):
326 """
327 Call operator, maps to call method.
328 """
--> 329 return self.call(*args, **kwargs)

File ~/miniconda3/envs/my_env/lib/python3.13/site-packages/slangpy/core/function.py:213, in FunctionNode.call(self, *args, **kwargs)
211 else:
212 try:
--> 213 return self._native_call(self.module.call_data_cache, *args, **kwargs)
214 except ValueError as e:
215 # If runtime returned useful information, reformat it and raise a new exception
216 # Otherwise just throw the original.
217 if (
218 len(e.args) != 1
219 or not isinstance(e.args[0], dict)
(...) 222 or not "context" in e.args[0]
223 ):

File ~/miniconda3/envs/my_env/lib/python3.13/site-packages/slangpy/torchintegration/wrappedtensor.py:184, in WrappedTensorMarshall.create_calldata(self, context, binding, data)
181 data.temp_storage_buffer = temp_storage
182 data.temp_storage_tensor = temp_storage_tensor
--> 184 temp_storage_tensor.untyped_storage().copy_(
185 data.primal.untyped_storage(), non_blocking=False
186 )
188 primal_calldata = {
189 "buffer": temp_storage,
190 "layout": {"offset": offset, "strides": strides},
191 "_shape": shape,
192 }
194 if not self.d_in and not self.d_out:

RuntimeError: size does not match, self was 12 bytes but src was 16 bytes
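
If I read wrappedtensor.py correctly, the staging tensor is sized from the slice's shape, but copy_() receives the view's whole untyped storage. Assuming that, the failing copy can be reproduced without SlangPy:

import torch

src = torch.rand(4).cuda()[:3]        # view over a 4-float (16-byte) storage
dst = torch.empty(3, device="cuda")   # staging tensor sized for 3 floats (12 bytes)

# Mirrors the failing call in wrappedtensor.py
dst.untyped_storage().copy_(src.untyped_storage(), non_blocking=False)
# RuntimeError: size does not match, self was 12 bytes but src was 16 bytes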

Versions:

slangpy==0.31.0
torch==2.7.1+cu128
