
Commit f9e6609

Changming Sun authored and guschmue committed
Bump clang-format from 18.1.8 to 19.1.6 (#23346)
To replace #23327
1 parent 1f55321 commit f9e6609

File tree: 20 files changed (+68, -82 lines)

include/onnxruntime/core/common/profiler_common.h

Lines changed: 2 additions & 2 deletions
@@ -81,8 +81,8 @@ class EpProfiler {
   virtual ~EpProfiler() = default;
   virtual bool StartProfiling(TimePoint profiling_start_time) = 0;      // called when profiling starts
   virtual void EndProfiling(TimePoint start_time, Events& events) = 0;  // called when profiling ends, save all captures numbers to "events"
-  virtual void Start(uint64_t){};  // called before op start, accept an id as argument to identify the op
-  virtual void Stop(uint64_t){};   // called after op stop, accept an id as argument to identify the op
+  virtual void Start(uint64_t) {}  // called before op start, accept an id as argument to identify the op
+  virtual void Stop(uint64_t) {}   // called after op stop, accept an id as argument to identify the op
 };
 
 // Demangle C++ symbols
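These are behavior-preserving formatting fixes: the old "{};" spelling leaves a stray semicolon (a harmless empty declaration) after each empty inline body, and the reformatted code writes the bodies as "{}" with a space before the braces. A minimal sketch of the resulting style, using a hypothetical class rather than code from this commit:

// Hypothetical interface illustrating the empty-body style after this change.
class NullEpProfiler {
 public:
  virtual ~NullEpProfiler() = default;
  // Empty inline bodies end with "{}"; the old "{};" left a redundant
  // semicolon (an empty declaration) after each body.
  virtual void Start(uint64_t /*op_id*/) {}
  virtual void Stop(uint64_t /*op_id*/) {}
};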

include/onnxruntime/core/platform/EigenNonBlockingThreadPool.h

Lines changed: 9 additions & 9 deletions
@@ -218,18 +218,18 @@ class ThreadPoolProfiler {
     WAIT_REVOKE,
     MAX_EVENT
   };
-  ThreadPoolProfiler(int, const CHAR_TYPE*) {};
+  ThreadPoolProfiler(int, const CHAR_TYPE*) {}
   ~ThreadPoolProfiler() = default;
   ORT_DISALLOW_COPY_ASSIGNMENT_AND_MOVE(ThreadPoolProfiler);
-  void Start() {};
+  void Start() {}
   std::string Stop() { return "not available for minimal build"; }
-  void LogStart() {};
-  void LogEnd(ThreadPoolEvent){};
-  void LogEndAndStart(ThreadPoolEvent){};
-  void LogStartAndCoreAndBlock(std::ptrdiff_t){};
-  void LogCoreAndBlock(std::ptrdiff_t){};
-  void LogThreadId(int) {};
-  void LogRun(int) {};
+  void LogStart() {}
+  void LogEnd(ThreadPoolEvent) {}
+  void LogEndAndStart(ThreadPoolEvent) {}
+  void LogStartAndCoreAndBlock(std::ptrdiff_t) {}
+  void LogCoreAndBlock(std::ptrdiff_t) {}
+  void LogThreadId(int) {}
+  void LogRun(int) {}
   std::string DumpChildThreadStat() { return {}; }
 };
 #else

js/react_native/e2e/ios/MNISTDataHandler.mm

Lines changed: 2 additions & 8 deletions
@@ -46,10 +46,7 @@ @implementation MNISTDataHandler
 
 // It gets raw input data, which can be uri or byte array and others,
 // returns cooked data formatted as input of a model.
-RCT_EXPORT_METHOD(preprocess
-                  : (NSString*)uri resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject) {
+RCT_EXPORT_METHOD(preprocess : (NSString*)uri resolve : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject) {
   @try {
     NSDictionary* inputDataMap = [self preprocess:uri];
     resolve(inputDataMap);
@@ -60,10 +57,7 @@ @implementation MNISTDataHandler
 
 // It gets a result from onnxruntime and a duration of session time for input data,
 // returns output data formatted as React Native map.
-RCT_EXPORT_METHOD(postprocess
-                  : (NSDictionary*)result resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject) {
+RCT_EXPORT_METHOD(postprocess : (NSDictionary*)result resolve : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject) {
   @try {
     NSDictionary* cookedMap = [self postprocess:result];
     resolve(cookedMap);

js/react_native/ios/OnnxruntimeModule.mm

Lines changed: 4 additions & 21 deletions
@@ -73,11 +73,7 @@ - (void)setBlobManager:(RCTBlobManager*)manager {
  * @param reject callback for returning an error back to react native js
  * @note when run() is called, the same modelPath must be passed into the first parameter.
  */
-RCT_EXPORT_METHOD(loadModel
-                  : (NSString*)modelPath options
-                  : (NSDictionary*)options resolver
-                  : (RCTPromiseResolveBlock)resolve rejecter
-                  : (RCTPromiseRejectBlock)reject) {
+RCT_EXPORT_METHOD(loadModel : (NSString*)modelPath options : (NSDictionary*)options resolver : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
   @try {
     NSDictionary* resultMap = [self loadModel:modelPath options:options];
     resolve(resultMap);
@@ -95,11 +91,7 @@ - (void)setBlobManager:(RCTBlobManager*)manager {
  * @param reject callback for returning an error back to react native js
  * @note when run() is called, the same modelPath must be passed into the first parameter.
  */
-RCT_EXPORT_METHOD(loadModelFromBlob
-                  : (NSDictionary*)modelDataBlob options
-                  : (NSDictionary*)options resolver
-                  : (RCTPromiseResolveBlock)resolve rejecter
-                  : (RCTPromiseRejectBlock)reject) {
+RCT_EXPORT_METHOD(loadModelFromBlob : (NSDictionary*)modelDataBlob options : (NSDictionary*)options resolver : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
   @try {
     [self checkBlobManager];
     NSString* blobId = [modelDataBlob objectForKey:@"blobId"];
@@ -121,10 +113,7 @@ - (void)setBlobManager:(RCTBlobManager*)manager {
  * @param resolve callback for returning output back to react native js
  * @param reject callback for returning an error back to react native js
  */
-RCT_EXPORT_METHOD(dispose
-                  : (NSString*)key resolver
-                  : (RCTPromiseResolveBlock)resolve rejecter
-                  : (RCTPromiseRejectBlock)reject) {
+RCT_EXPORT_METHOD(dispose : (NSString*)key resolver : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
   @try {
     [self dispose:key];
     resolve(nil);
@@ -143,13 +132,7 @@ - (void)setBlobManager:(RCTBlobManager*)manager {
  * @param resolve callback for returning an inference result back to react native js
  * @param reject callback for returning an error back to react native js
  */
-RCT_EXPORT_METHOD(run
-                  : (NSString*)url input
-                  : (NSDictionary*)input output
-                  : (NSArray*)output options
-                  : (NSDictionary*)options resolver
-                  : (RCTPromiseResolveBlock)resolve rejecter
-                  : (RCTPromiseRejectBlock)reject) {
+RCT_EXPORT_METHOD(run : (NSString*)url input : (NSDictionary*)input output : (NSArray*)output options : (NSDictionary*)options resolver : (RCTPromiseResolveBlock)resolve rejecter : (RCTPromiseRejectBlock)reject) {
   @try {
     NSDictionary* resultMap = [self run:url input:input output:output options:options];
     resolve(resultMap);

js/react_native/ios/OnnxruntimeModuleTest/OnnxruntimeModuleTest.mm

Lines changed: 3 additions & 1 deletion
@@ -144,7 +144,9 @@ - (void)testOnnxruntimeModule_AppendCoreml {
     XCTAssertEqualObjects(outputNames[0], @"output");
   }
 
-  { [onnxruntimeModule dispose:sessionKey]; }
+  {
+    [onnxruntimeModule dispose:sessionKey];
+  }
 }
 
 @end

onnxruntime/core/graph/graph.cc

Lines changed: 3 additions & 1 deletion
@@ -50,7 +50,9 @@ namespace onnxruntime {
 #define NO_CHANGE_ON_SYNC_FLAG(...)                  \
   do {                                               \
     const bool sync_needed = GraphProtoSyncNeeded(); \
-    { __VA_ARGS__; }                                 \
+    {                                                \
+      __VA_ARGS__;                                   \
+    }                                                \
     GraphProtoSyncNeeded(sync_needed);               \
   } while (0)
 
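Only the brace layout inside the macro changes here; its save-run-restore behavior is identical. For readers unfamiliar with the pattern, this is a minimal, self-contained sketch of the same idea using stand-in names (NO_CHANGE_ON_SYNC_FLAG_DEMO and free-function flag accessors), not the onnxruntime implementation:

#include <iostream>

// Stand-in flag and accessors used only for this illustration.
static bool g_sync_needed = false;
static bool GraphProtoSyncNeededDemo() { return g_sync_needed; }
static void GraphProtoSyncNeededDemo(bool value) { g_sync_needed = value; }

// Save the flag, run the caller's statements inside a block, then restore the flag.
#define NO_CHANGE_ON_SYNC_FLAG_DEMO(...)                 \
  do {                                                   \
    const bool sync_needed = GraphProtoSyncNeededDemo(); \
    {                                                    \
      __VA_ARGS__;                                       \
    }                                                    \
    GraphProtoSyncNeededDemo(sync_needed);               \
  } while (0)

int main() {
  NO_CHANGE_ON_SYNC_FLAG_DEMO(g_sync_needed = true; std::cout << "running inside the guard\n");
  std::cout << std::boolalpha << g_sync_needed << "\n";  // prints false: the flag was restored
  return 0;
}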

onnxruntime/core/providers/cpu/element_wise_ranged_transform.h

Lines changed: 2 additions & 2 deletions
Both hunks are whitespace-only: the visible tokens around GSL_SUPPRESS(r.11) are identical before and after, and the difference is the alignment of whitespace within the macro continuation lines.
@@ -56,7 +56,7 @@ ElementWiseRangedTransform<T>::~ElementWiseRangedTransform() {
   Status Init(const onnxruntime::NodeAttributes& attributes) {      \
     return (GetFloatParam(#X, attributes, X));                      \
   }                                                                 \
-  GSL_SUPPRESS(r.11)                                                \
+  GSL_SUPPRESS(r.11)                                                \
   ElementWiseRangedTransform<T>* Copy() const final {               \
     using T1 = typename std::remove_pointer<decltype(this)>::type; \
     using T2 = typename std::remove_const<T1>::type;                \
@@ -71,7 +71,7 @@ ElementWiseRangedTransform<T>::~ElementWiseRangedTransform() {
     ORT_RETURN_IF_ERROR(GetFloatParam(#Y, attributes, Y));          \
     return Status::OK();                                            \
   }                                                                 \
-  GSL_SUPPRESS(r.11)                                                \
+  GSL_SUPPRESS(r.11)                                                \
   ElementWiseRangedTransform<T>* Copy() const final {               \
     using T1 = typename std::remove_pointer<decltype(this)>::type; \
     using T2 = typename std::remove_const<T1>::type;                \

onnxruntime/core/providers/cuda/cuda_profiler.h

Lines changed: 3 additions & 3 deletions
@@ -33,9 +33,9 @@ class CudaProfiler final : public EpProfiler {
   ORT_DISALLOW_COPY_ASSIGNMENT_AND_MOVE(CudaProfiler);
   ~CudaProfiler() {}
   bool StartProfiling(TimePoint) override { return true; }
-  void EndProfiling(TimePoint, Events&) override {};
-  void Start(uint64_t) override{};
-  void Stop(uint64_t) override{};
+  void EndProfiling(TimePoint, Events&) override {}
+  void Start(uint64_t) override {}
+  void Stop(uint64_t) override {}
 };
 
 #endif

onnxruntime/core/providers/cuda/nn/pool.cc

Lines changed: 5 additions & 4 deletions
@@ -287,13 +287,14 @@ Status Pool<T, MaxPool<8>, Layout>::ComputeInternal(OpKernelContext* context) co
   }
 
   Tensor* I = context->Output(1, TensorShape(i_dims));
+  constexpr bool pool_template_arg = Layout == LAYOUT_NHWC;
   if (nullptr != I || !this->pool_attrs_.default_dilations) {
     auto i_data = nullptr == I ? nullptr : I->MutableData<int64_t>();
-    MaxPoolWithIndex<CudaT, Layout == LAYOUT_NHWC>(this->Stream(context), x_shape, TensorShape(y_dims), kernel_shape,
-                                                   strides, pads, this->pool_attrs_.dilations,
-                                                   this->pool_attrs_.storage_order, x_data, y_data, i_data);
+    MaxPoolWithIndex<CudaT, pool_template_arg>(this->Stream(context), x_shape, TensorShape(y_dims), kernel_shape,
+                                               strides, pads, this->pool_attrs_.dilations,
+                                               this->pool_attrs_.storage_order, x_data, y_data, i_data);
   } else {
-    ORT_RETURN_IF_ERROR((Pool<T, MaxPool<1>, Layout == LAYOUT_NHWC>::ComputeInternal(context)));
+    ORT_RETURN_IF_ERROR((Pool<T, MaxPool<1>, pool_template_arg>::ComputeInternal(context)));
   }
   return Status::OK();
 }
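This small refactor appears to accompany the formatter bump: hoisting Layout == LAYOUT_NHWC into a named constexpr bool keeps the template argument list short, so the reformatted call does not need extra line breaks, while the instantiated template stays the same. A minimal sketch of the pattern with hypothetical names (RunPoolDemo, ComputeDemo, a stand-in Layout enum), not the CUDA code from this file:

#include <cstdint>
#include <iostream>

// Stand-in layout enum for this sketch.
enum Layout : int { LAYOUT_NCHW = 0, LAYOUT_NHWC = 1 };

// Hypothetical kernel wrapper, standing in for MaxPoolWithIndex.
template <typename T, bool IsNhwc>
void RunPoolDemo(int64_t n) {
  std::cout << "n=" << n << " nhwc=" << (IsNhwc ? 1 : 0) << "\n";
}

template <typename T, Layout LayoutKind>
void ComputeDemo(int64_t n) {
  // Hoist the comparison into a named constexpr value; the template
  // argument list stays short and the generated code is unchanged.
  constexpr bool pool_template_arg = LayoutKind == LAYOUT_NHWC;
  RunPoolDemo<T, pool_template_arg>(n);
}

int main() {
  ComputeDemo<float, LAYOUT_NHWC>(4);  // prints: n=4 nhwc=1
  return 0;
}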

onnxruntime/core/providers/cuda/tensor/space_depth_ops.cc

Lines changed: 12 additions & 12 deletions
@@ -171,13 +171,13 @@ Status SpaceToDepth<Layout>::ComputeInternal(OpKernelContext* context) const {
   int64_t output_depth = -1;
   int64_t output_height = -1;
   int64_t output_width = -1;
-
+  constexpr bool template_arg = Layout == LAYOUT_NHWC;
   ORT_RETURN_IF_ERROR(
-      InputValidationsAndOutputDimsCalc<Layout == LAYOUT_NHWC>(input,
-                                                               batch,
-                                                               input_depth, input_height, input_width,
-                                                               output_depth, output_height, output_width,
-                                                               true));
+      InputValidationsAndOutputDimsCalc<template_arg>(input,
+                                                      batch,
+                                                      input_depth, input_height, input_width,
+                                                      output_depth, output_height, output_width,
+                                                      true));
 
   // We use the "actual" output shape to construct the output tensor
   Tensor& output = (Layout == LAYOUT_NCHW)
@@ -223,13 +223,13 @@ Status DepthToSpace<Layout>::ComputeInternal(OpKernelContext* context) const {
   int64_t output_depth = -1;
   int64_t output_height = -1;
   int64_t output_width = -1;
-
+  constexpr bool template_arg = Layout == LAYOUT_NHWC;
   ORT_RETURN_IF_ERROR(
-      InputValidationsAndOutputDimsCalc<Layout == LAYOUT_NHWC>(input,
-                                                               batch,
-                                                               input_depth, input_height, input_width,
-                                                               output_depth, output_height, output_width,
-                                                               false));
+      InputValidationsAndOutputDimsCalc<template_arg>(input,
+                                                      batch,
+                                                      input_depth, input_height, input_width,
+                                                      output_depth, output_height, output_width,
+                                                      false));
 
   // We use the "actual" output shape to construct the output tensor
   Tensor& output = (Layout == LAYOUT_NCHW)
