Skip to content

Commit 4fad420

Browse files
committed
Rename AddLayer* to AddLayer_* and WriteDaqLayer* to WriteDaqLayer_*
1 parent eb620ee commit 4fad420

7 files changed

Lines changed: 306 additions & 306 deletions

File tree

dnnlibrary/DaqReader.cpp

Lines changed: 36 additions & 37 deletions
Original file line number | Diff line number | Diff line change
@@ -2,19 +2,18 @@
22
// Created by daquexian on 8/13/18.
33
//
44

5+
#include <common/internal_vars.h>
56
#include <dnnlibrary/DaqReader.h>
6-
7+
#include <dnnlibrary/android_log_helper.h>
8+
#include <dnnlibrary/flatbuffers_helper.h>
79
#include <fcntl.h>
10+
#include <glog/logging.h>
811
#include <sys/mman.h>
912
#include <unistd.h>
13+
1014
#include <fstream>
1115
#include <iostream>
1216

13-
#include <common/internal_vars.h>
14-
#include <dnnlibrary/android_log_helper.h>
15-
#include <dnnlibrary/flatbuffers_helper.h>
16-
#include <glog/logging.h>
17-
1817
namespace dnn {
1918
void ReadDaqImpl(const uint8_t *buf, ModelBuilder &builder);
2019

@@ -220,10 +219,10 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
220219
const dnn::optional<std::string> bias_right_type =
221220
(bias == "") ? dnn::nullopt : dnn::make_optional(bias);
222221

223-
builder.AddLayerCONV_2D(input, weight, bias_right_type,
224-
padding_left, padding_right,
225-
padding_top, padding_bottom, stride_x,
226-
stride_y, fuse, output, quant_info);
222+
builder.AddLayer_CONV_2D(input, weight, bias_right_type,
223+
padding_left, padding_right,
224+
padding_top, padding_bottom, stride_x,
225+
stride_y, fuse, output, quant_info);
227226
break;
228227
}
229228
case DNN::LayerType::AVERAGE_POOL_2D: {
@@ -232,7 +231,7 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
232231
stride_x, stride_y, kernel_width,
233232
kernel_height, fuse, output);
234233

235-
builder.AddLayerAVERAGE_POOL_2D(
234+
builder.AddLayer_AVERAGE_POOL_2D(
236235
input, padding_left, padding_right, padding_top,
237236
padding_bottom, stride_x, stride_y, kernel_width,
238237
kernel_height, fuse, output, quant_info);
@@ -244,7 +243,7 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
244243
stride_x, stride_y, kernel_width,
245244
kernel_height, fuse, output);
246245

247-
builder.AddLayerMAX_POOL_2D(
246+
builder.AddLayer_MAX_POOL_2D(
248247
input, padding_left, padding_right, padding_top,
249248
padding_bottom, stride_x, stride_y, kernel_width,
250249
kernel_height, fuse, output, quant_info);
@@ -253,13 +252,13 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
253252
case DNN::LayerType::RELU: {
254253
UNPACK_LAYER_QUANT(RELU, input, output);
255254

256-
builder.AddLayerRELU(input, output);
255+
builder.AddLayer_RELU(input, output);
257256
break;
258257
}
259258
case DNN::LayerType::SOFTMAX: {
260259
UNPACK_LAYER_QUANT(SOFTMAX, input, beta, output);
261260

262-
builder.AddLayerSOFTMAX(input, beta, output);
261+
builder.AddLayer_SOFTMAX(input, beta, output);
263262
break;
264263
}
265264
case DNN::LayerType::FULLY_CONNECTED: {
@@ -268,20 +267,20 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
268267
const dnn::optional<std::string> bias_right_type =
269268
(bias == "") ? dnn::nullopt : dnn::make_optional(bias);
270269

271-
builder.AddLayerFULLY_CONNECTED(input, weight, bias_right_type,
272-
fuse, output, quant_info);
270+
builder.AddLayer_FULLY_CONNECTED(input, weight, bias_right_type,
271+
fuse, output, quant_info);
273272
break;
274273
}
275274
case DNN::LayerType::ADD: {
276275
UNPACK_LAYER_QUANT(ADD, input1, input2, fuse, output);
277276

278-
builder.AddLayerADD(input1, input2, fuse, output, quant_info);
277+
builder.AddLayer_ADD(input1, input2, fuse, output, quant_info);
279278
break;
280279
}
281280
case DNN::LayerType::CONCATENATION: {
282281
UNPACK_LAYER_QUANT(CONCATENATION, inputs, axis, output);
283282

284-
builder.AddLayerCONCATENATION(inputs, axis, output);
283+
builder.AddLayer_CONCATENATION(inputs, axis, output);
285284
break;
286285
}
287286
case DNN::LayerType::DEPTHWISE_CONV_2D: {
@@ -292,7 +291,7 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
292291
const dnn::optional<std::string> bias_right_type =
293292
(bias == "") ? dnn::nullopt : dnn::make_optional(bias);
294293

295-
builder.AddLayerDEPTHWISE_CONV_2D(
294+
builder.AddLayer_DEPTHWISE_CONV_2D(
296295
input, weight, bias_right_type, padding_left, padding_right,
297296
padding_top, padding_bottom, stride_x, stride_y,
298297
depth_multiplier, fuse, output, quant_info);
@@ -302,99 +301,99 @@ void AddLayers(const DNN::Model &model, ModelBuilder &builder) {
302301
UNPACK_LAYER_QUANT(BATCH_TO_SPACE_ND, input, block_sizes,
303302
output);
304303

305-
builder.AddLayerBATCH_TO_SPACE_ND(input, block_sizes, output);
304+
builder.AddLayer_BATCH_TO_SPACE_ND(input, block_sizes, output);
306305
break;
307306
}
308307
case DNN::LayerType::SPACE_TO_BATCH_ND: {
309308
UNPACK_LAYER_QUANT(SPACE_TO_BATCH_ND, input, block_sizes, pads,
310309
output);
311310

312-
builder.AddLayerSPACE_TO_BATCH_ND(input, block_sizes, pads,
313-
output);
311+
builder.AddLayer_SPACE_TO_BATCH_ND(input, block_sizes, pads,
312+
output);
314313
break;
315314
}
316315
case DNN::LayerType::STRIDED_SLICE: {
317316
UNPACK_LAYER_QUANT(STRIDED_SLICE, input, starts, ends, strides,
318317
begin_mask, end_mask, shrink_axis_mask,
319318
output);
320319

321-
builder.AddLayerSTRIDED_SLICE(input, starts, ends, strides,
322-
begin_mask, end_mask,
323-
shrink_axis_mask, output);
320+
builder.AddLayer_STRIDED_SLICE(input, starts, ends, strides,
321+
begin_mask, end_mask,
322+
shrink_axis_mask, output);
324323
break;
325324
}
326325
case DNN::LayerType::MUL: {
327326
UNPACK_LAYER_QUANT(MUL, input1, input2, fuse, output);
328327

329-
builder.AddLayerMUL(input1, input2, fuse, output, quant_info);
328+
builder.AddLayer_MUL(input1, input2, fuse, output, quant_info);
330329
break;
331330
}
332331
case DNN::LayerType::DEQUANTIZE: {
333332
UNPACK_LAYER_QUANT(DEQUANTIZE, input, output);
334333

335-
builder.AddLayerDEQUANTIZE(input, output);
334+
builder.AddLayer_DEQUANTIZE(input, output);
336335
break;
337336
}
338337
case DNN::LayerType::LOCAL_RESPONSE_NORMALIZATION: {
339338
UNPACK_LAYER_QUANT(LOCAL_RESPONSE_NORMALIZATION, input, radius,
340339
bias, alpha, beta, output);
341340

342-
builder.AddLayerLOCAL_RESPONSE_NORMALIZATION(
341+
builder.AddLayer_LOCAL_RESPONSE_NORMALIZATION(
343342
input, radius, bias, alpha, beta, output);
344343
break;
345344
}
346345
case DNN::LayerType::TANH: {
347346
UNPACK_LAYER_QUANT(TANH, input, output);
348347

349-
builder.AddLayerTANH(input, output);
348+
builder.AddLayer_TANH(input, output);
350349
break;
351350
}
352351
case DNN::LayerType::FLOOR: {
353352
UNPACK_LAYER_QUANT(FLOOR, input, output);
354353

355-
builder.AddLayerFLOOR(input, output);
354+
builder.AddLayer_FLOOR(input, output);
356355
break;
357356
}
358357
case DNN::LayerType::LOGISTIC: {
359358
UNPACK_LAYER_QUANT(LOGISTIC, input, output);
360359

361-
builder.AddLayerLOGISTIC(input, output);
360+
builder.AddLayer_LOGISTIC(input, output);
362361
break;
363362
}
364363
case DNN::LayerType::PRELU: {
365364
UNPACK_LAYER_QUANT(PRELU, input, alpha, output);
366365

367-
builder.AddLayerPRELU(input, alpha, output);
366+
builder.AddLayer_PRELU(input, alpha, output);
368367
break;
369368
}
370369
case DNN::LayerType::POW: {
371370
UNPACK_LAYER_QUANT(POW, input, exp, output);
372371

373-
builder.AddLayerPOW(input, exp, output);
372+
builder.AddLayer_POW(input, exp, output);
374373
break;
375374
}
376375
case DNN::LayerType::NEG: {
377376
UNPACK_LAYER_QUANT(NEG, input, output);
378377

379-
builder.AddLayerNEG(input, output);
378+
builder.AddLayer_NEG(input, output);
380379
break;
381380
}
382381
case DNN::LayerType::MINIMUM: {
383382
UNPACK_LAYER_QUANT(MINIMUM, input1, input2, output);
384383

385-
builder.AddLayerMINIMUM(input1, input2, output);
384+
builder.AddLayer_MINIMUM(input1, input2, output);
386385
break;
387386
}
388387
case DNN::LayerType::MAXIMUM: {
389388
UNPACK_LAYER_QUANT(MAXIMUM, input1, input2, output);
390389

391-
builder.AddLayerMAXIMUM(input1, input2, output);
390+
builder.AddLayer_MAXIMUM(input1, input2, output);
392391
break;
393392
}
394393
case DNN::LayerType::LOG: {
395394
UNPACK_LAYER_QUANT(LOG, input, output);
396395

397-
builder.AddLayerLOG(input, output);
396+
builder.AddLayer_LOG(input, output);
398397
break;
399398
}
400399
// auto generated layer reader end

0 commit comments

Comments (0)