@@ -234,7 +234,7 @@ struct ProviderHostImpl : ProviderHost {
   void* CPUAllocator__Alloc(CPUAllocator* p, size_t size) override { return p->CPUAllocator::Alloc(size); }
   void CPUAllocator__Free(CPUAllocator* p, void* allocation) override { return p->CPUAllocator::Free(allocation); }
 
-#ifdef USE_CUDA
+// #ifdef USE_CUDA
   std::unique_ptr<IAllocator> CreateCUDAAllocator(int16_t device_id, const char* name) override { return GetProviderInfo_CUDA().CreateCUDAAllocator(device_id, name); }
   std::unique_ptr<IAllocator> CreateCUDAPinnedAllocator(const char* name) override { return GetProviderInfo_CUDA().CreateCUDAPinnedAllocator(name); }
   std::unique_ptr<IDataTransfer> CreateGPUDataTransfer() override { return GetProviderInfo_CUDA().CreateGPUDataTransfer(); }
@@ -247,7 +247,7 @@ struct ProviderHostImpl : ProviderHost {
 
   Status CudaCall_false(int retCode, const char* exprString, const char* libName, int successCode, const char* msg, const char* file, const int line) override { return GetProviderInfo_CUDA().CudaCall_false(retCode, exprString, libName, successCode, msg, file, line); }
   void CudaCall_true(int retCode, const char* exprString, const char* libName, int successCode, const char* msg, const char* file, const int line) override { GetProviderInfo_CUDA().CudaCall_true(retCode, exprString, libName, successCode, msg, file, line); }
-#endif
+// #endif
 
 #ifdef USE_MIGRAPHX
   std::unique_ptr<IAllocator> CreateMIGraphXAllocator(int16_t device_id, const char* name) override { return GetProviderInfo_MIGraphX().CreateMIGraphXAllocator(device_id, name); }
@@ -1419,9 +1419,9 @@ struct ProviderHostImpl : ProviderHost {
   training::DistributedRunContext& GetDistributedRunContextInstance() override { return training::DistributedRunContext::GetInstance(); }
 #endif
 
-#if defined(USE_CUDA) || defined(USE_ROCM)
+// #if defined(USE_CUDA) || defined(USE_ROCM)
   PhiloxGenerator& PhiloxGenerator__Default() override { return PhiloxGenerator::Default(); }
-#endif
+// #endif
 
 #ifdef ENABLE_TRAINING_TORCH_INTEROP
   void contrib__PythonOpBase__Init(contrib::PythonOpBase* p, const OpKernelInfo& info) override { p->PythonOpBase::Init(info); }
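
Every method touched by this diff follows the same provider-bridge pattern: ProviderHostImpl overrides a pure-virtual entry point declared on ProviderHost and forwards the call to a provider-info singleton (GetProviderInfo_CUDA() and friends). With the #ifdef guards commented out as above, those forwarding overrides compile unconditionally, so the GetProviderInfo_CUDA() declaration must be visible even in non-CUDA builds. The C++ sketch below illustrates only that delegation pattern; Host, HostImpl, ProviderInfo_GPU, and GetProviderInfo_GPU are illustrative stand-ins, not the actual ONNX Runtime types.

// Minimal, self-contained sketch of the provider-bridge delegation pattern.
// All names here are hypothetical; they mirror the shape of ProviderHost /
// ProviderHostImpl / GetProviderInfo_CUDA() without using ORT's real types.
#include <cstdio>
#include <memory>
#include <string>

// Abstract interface handed across the shared-library boundary.
struct Host {
  virtual ~Host() = default;
  virtual std::unique_ptr<std::string> CreateAllocatorName(int device_id) = 0;
};

// Stand-in for a provider-info object: a lazily constructed singleton that
// owns the backend-specific factory functions.
struct ProviderInfo_GPU {
  std::unique_ptr<std::string> CreateAllocatorName(int device_id) {
    return std::make_unique<std::string>("gpu_allocator_" + std::to_string(device_id));
  }
};

ProviderInfo_GPU& GetProviderInfo_GPU() {
  static ProviderInfo_GPU instance;
  return instance;
}

// Mirrors the overrides in the diff: each one is a one-line forward to the
// provider-info singleton, so the host implementation stays backend-agnostic.
struct HostImpl : Host {
  std::unique_ptr<std::string> CreateAllocatorName(int device_id) override {
    return GetProviderInfo_GPU().CreateAllocatorName(device_id);
  }
};

int main() {
  HostImpl host;
  std::printf("%s\n", host.CreateAllocatorName(0)->c_str());
  return 0;
}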