File tree Expand file tree Collapse file tree 5 files changed +10
-9
lines changed
deep_test/src/mock_plugin Expand file tree Collapse file tree 5 files changed +10
-9
lines changed Original file line number Diff line number Diff line change @@ -138,7 +138,7 @@ class DeepNodeBase : public rclcpp_lifecycle::LifecycleNode
138138 * @param inputs Input tensor for inference
139139 * @return Output tensor from inference
140140 */
141- Tensor run_inference(Tensor && inputs);
141+ Tensor run_inference(const Tensor & inputs);
142142
143143 /**
144144 * @brief Check if a backend plugin is loaded
Original file line number Diff line number Diff line change @@ -50,7 +50,7 @@ class BackendInferenceExecutor
5050 * @throws std::invalid_argument if input tensor is invalid
5151 * @throws std::runtime_error if no model is loaded
5252 */
53- Tensor run_inference(Tensor && input);
53+ Tensor run_inference(const Tensor & input);
5454
5555 /**
5656 * @brief Unload the currently loaded model
@@ -81,7 +81,7 @@ class BackendInferenceExecutor
8181 /**
8282 * @brief Implementation of run_inference (to be overridden by backends)
8383 */
84- virtual Tensor run_inference_impl(Tensor && input) = 0;
84+ virtual Tensor run_inference_impl(const Tensor & input) = 0;
8585
8686 /**
8787 * @brief Implementation of unload_model (to be overridden by backends)
Original file line number Diff line number Diff line change @@ -34,7 +34,7 @@ bool BackendInferenceExecutor::load_model(const std::filesystem::path & model_pa
3434 return success;
3535}
3636
37- Tensor BackendInferenceExecutor::run_inference(Tensor && input)
37+ Tensor BackendInferenceExecutor::run_inference(const Tensor & input)
3838{
3939 // Validate input tensor
4040 if (input.data () == nullptr ) {
@@ -55,7 +55,7 @@ Tensor BackendInferenceExecutor::run_inference(Tensor && input)
5555 throw std::runtime_error("No model loaded for inference");
5656 }
5757
58- return run_inference_impl(std::move(input));
58+ return run_inference_impl(input);
5959}
6060
6161void BackendInferenceExecutor::unload_model ()
Original file line number Diff line number Diff line change @@ -191,7 +191,7 @@ void DeepNodeBase::unload_model()
191191 }
192192}
193193
194- Tensor DeepNodeBase::run_inference(Tensor && inputs)
194+ Tensor DeepNodeBase::run_inference(const Tensor & inputs)
195195{
196196 if (!plugin_) {
197197 throw std::runtime_error("No plugin loaded");
@@ -206,7 +206,7 @@ Tensor DeepNodeBase::run_inference(Tensor && inputs)
206206 throw std::runtime_error("No inference executor available");
207207 }
208208
209- return executor->run_inference(std::move(inputs));
209+ return executor->run_inference(inputs);
210210}
211211
212212std::string DeepNodeBase::get_backend_name () const
Original file line number Diff line number Diff line change @@ -80,9 +80,10 @@ class MockInferenceExecutor : public BackendInferenceExecutor
8080 return true ;
8181 }
8282
83- Tensor run_inference_impl(Tensor && input) override
83+ Tensor run_inference_impl(const Tensor & input) override
8484 {
85- return std::move(input);
85+ // For testing, just return a copy of the input tensor
86+ return input;
8687 }
8788
8889 void unload_model_impl () override
You can’t perform that action at this time.
0 commit comments