1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #include <fstream>
16 
17 #include "nncore_utils.h"
18 #include "nncore_const.h"
19 
20 namespace OHOS {
21 namespace NeuralNetworkRuntime {
22 namespace Test {
TransformUInt32Array(const std::vector<uint32_t>& vector)23 OH_NN_UInt32Array TransformUInt32Array(const std::vector<uint32_t>& vector)
24 {
25     uint32_t* data = (vector.empty()) ? nullptr : const_cast<uint32_t*>(vector.data());
26     return {data, vector.size()};
27 }
28 
createTensorDesc(const int32_t* shape, size_t shapeNum, OH_NN_DataType dataType, OH_NN_Format format)29 NN_TensorDesc* createTensorDesc(const int32_t* shape, size_t shapeNum, OH_NN_DataType dataType, OH_NN_Format format)
30 {
31     NN_TensorDesc* tensorDescTmp = OH_NNTensorDesc_Create();
32     if (tensorDescTmp == nullptr) {
33         LOGE("[NNRtTest]OH_NNTensorDesc_Create failed!");
34         return nullptr;
35     }
36 
37     OH_NN_ReturnCode ret = OH_NNTensorDesc_SetDataType(tensorDescTmp, dataType);
38     if (ret != OH_NN_SUCCESS) {
39         LOGE("[NNRtTest]OH_NNTensorDesc_SetDataType failed!ret = %d\n", ret);
40         return nullptr;
41     }
42 
43     if (shape != nullptr) {
44         ret = OH_NNTensorDesc_SetShape(tensorDescTmp, shape, shapeNum);
45         if (ret != OH_NN_SUCCESS) {
46             LOGE("[NNRtTest]OH_NNTensorDesc_SetShape failed!ret = %d\n", ret);
47             return nullptr;
48         }
49     }
50 
51     ret = OH_NNTensorDesc_SetFormat(tensorDescTmp, format);
52     if (ret != OH_NN_SUCCESS) {
53         LOGE("[NNRtTest]OH_NNTensorDesc_SetShape failed!ret = %d\n", ret);
54         return nullptr;
55     }
56 
57     return tensorDescTmp;
58 }
59 
MultiModelBuildEndStep(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs)60 int MultiModelBuildEndStep(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs)
61 {
62     int ret = 0;
63     auto graphInputs = TransformUInt32Array(graphArgs.graphInput);
64     auto graphOutputs = TransformUInt32Array(graphArgs.graphOutput);
65     ret = OH_NNModel_SpecifyInputsAndOutputs(model, &graphInputs, &graphOutputs);
66     if (ret != OH_NN_SUCCESS) {
67         LOGE("[NNRtTest] OH_NNModel_SpecifyInputsAndOutputs failed! ret=%d\n", ret);
68         return ret;
69     }
70     ret = OH_NNModel_Finish(model);
71     if (ret != OH_NN_SUCCESS) {
72         LOGE("[NNRtTest] OH_NNModel_Finish failed! ret=%d\n", ret);
73         return ret;
74     }
75     return ret;
76 }
77 
BuildMultiOpGraph(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs)78 int BuildMultiOpGraph(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs)
79 {
80     int ret = 0;
81     int opCnt = 0;
82     for (size_t j = 0; j < graphArgs.operationTypes.size(); j++) {
83         for (size_t i = 0; i < graphArgs.operands[j].size(); i++) {
84             const OHNNOperandTest &operandTem = graphArgs.operands[j][i];
85             NN_TensorDesc* tensorDesc = createTensorDesc(operandTem.shape.data(),
86                                                          (uint32_t) operandTem.shape.size(),
87                                                          operandTem.dataType, operandTem.format);
88             ret = OH_NNModel_AddTensorToModel(model, tensorDesc);
89             if (ret != OH_NN_SUCCESS) {
90                 LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
91                 return ret;
92             }
93             ret = OH_NNModel_SetTensorType(model, i, operandTem.type);
94             if (ret != OH_NN_SUCCESS) {
95                 LOGE("[NNRtTest] OH_NNBackend_SetModelTensorType failed! ret=%d\n", ret);
96                 return ret;
97             }
98             if (std::find(graphArgs.paramIndices[j].begin(), graphArgs.paramIndices[j].end(), opCnt) !=
99                 graphArgs.paramIndices[j].end()) {
100                 ret = OH_NNModel_SetTensorData(model, opCnt, operandTem.data, operandTem.length);
101                 opCnt += 1;
102             }
103             if (ret != OH_NN_SUCCESS) {
104                 LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
105                 return ret;
106             }
107             OH_NNTensorDesc_Destroy(&tensorDesc);
108         }
109         auto paramIndices = TransformUInt32Array(graphArgs.paramIndices[j]);
110         auto inputIndices = TransformUInt32Array(graphArgs.inputIndices[j]);
111         auto outputIndices = TransformUInt32Array(graphArgs.outputIndices[j]);
112         ret = OH_NNModel_AddOperation(model, graphArgs.operationTypes[j], &paramIndices, &inputIndices,
113         &outputIndices);
114         if (ret != OH_NN_SUCCESS) {
115             LOGE("[NNRtTest] OH_NNModel_AddOperation failed! ret=%d\n", ret);
116             return ret;
117         }
118     }
119     ret = MultiModelBuildEndStep(model, graphArgs);
120     return ret;
121 }
122 
SingleModelBuildEndStep(OH_NNModel *model, const OHNNGraphArgs &graphArgs)123 int SingleModelBuildEndStep(OH_NNModel *model, const OHNNGraphArgs &graphArgs)
124 {
125     int ret = 0;
126     auto paramIndices = TransformUInt32Array(graphArgs.paramIndices);
127     auto inputIndices = TransformUInt32Array(graphArgs.inputIndices);
128     auto outputIndices = TransformUInt32Array(graphArgs.outputIndices);
129     if (graphArgs.addOperation) {
130         ret = OH_NNModel_AddOperation(model, graphArgs.operationType, &paramIndices, &inputIndices,
131                                       &outputIndices);
132         if (ret != OH_NN_SUCCESS) {
133             LOGE("[NNRtTest] OH_NNModel_AddOperation failed! ret=%d\n", ret);
134             return ret;
135         }
136     }
137     if (graphArgs.specifyIO) {
138         ret = OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices);
139         if (ret != OH_NN_SUCCESS) {
140             LOGE("[NNRtTest] OH_NNModel_SpecifyInputsAndOutputs failed! ret=%d\n", ret);
141             return ret;
142         }
143     }
144     if (graphArgs.build) {
145         ret = OH_NNModel_Finish(model);
146         if (ret != OH_NN_SUCCESS) {
147             LOGE("[NNRtTest] OH_NNModel_Finish failed! ret=%d\n", ret);
148             return ret;
149         }
150     }
151     return ret;
152 }
153 
BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs)154 int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs)
155 {
156     int ret = 0;
157     for (size_t i = 0; i < graphArgs.operands.size(); i++) {
158         const OHNNOperandTest &operandTem = graphArgs.operands[i];
159         NN_TensorDesc* tensorDesc = createTensorDesc(operandTem.shape.data(),
160                                                      (uint32_t) operandTem.shape.size(),
161                                                      operandTem.dataType, operandTem.format);
162         ret = OH_NNModel_AddTensorToModel(model, tensorDesc);
163         if (ret != OH_NN_SUCCESS) {
164             LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
165             return ret;
166         }
167         ret = OH_NNModel_SetTensorType(model, i, operandTem.type);
168         if (ret != OH_NN_SUCCESS) {
169             LOGE("[NNRtTest] OH_NNBackend_SetModelTensorType failed! ret=%d\n", ret);
170             return ret;
171         }
172         if (std::find(graphArgs.paramIndices.begin(), graphArgs.paramIndices.end(), i) !=
173             graphArgs.paramIndices.end()) {
174             ret = OH_NNModel_SetTensorData(model, i, operandTem.data, operandTem.length);
175             if (ret != OH_NN_SUCCESS) {
176                 LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
177                 return ret;
178             }
179         }
180         OH_NNTensorDesc_Destroy(&tensorDesc);
181     }
182     ret = SingleModelBuildEndStep(model, graphArgs);
183     return ret;
184 }
185 
BuildSingleOpGraphWithQuantParams(OH_NNModel *model, const OHNNGraphArgs &graphArgs)186 int BuildSingleOpGraphWithQuantParams(OH_NNModel *model, const OHNNGraphArgs &graphArgs)
187 {
188     int ret = 0;
189     for (size_t i = 0; i < graphArgs.operands.size(); i++) {
190         const OHNNOperandTest &operandTem = graphArgs.operands[i];
191         NN_TensorDesc* tensorDesc = createTensorDesc(operandTem.shape.data(),
192                                                      (uint32_t) operandTem.shape.size(),
193                                                      operandTem.dataType, operandTem.format);
194         ret = OH_NNModel_AddTensorToModel(model, tensorDesc);
195         if (ret != OH_NN_SUCCESS) {
196             LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
197             return ret;
198         }
199 
200         NN_QuantParam* quantParam = OH_NNQuantParam_Create();
201         double scales = 0.2;
202         int32_t zeroPoints = 0;
203         uint32_t numBits = 8;
204         ret = OH_NNQuantParam_SetScales(quantParam, &scales, 1);
205         ret = OH_NNQuantParam_SetZeroPoints(quantParam, &zeroPoints, 1);
206         ret = OH_NNQuantParam_SetNumBits(quantParam, &numBits, 1);
207         ret = OH_NNModel_SetTensorQuantParams(model, i, quantParam);
208         ret = OH_NNQuantParam_Destroy(&quantParam);
209         ret = OH_NNModel_SetTensorType(model, i, operandTem.type);
210         if (ret != OH_NN_SUCCESS) {
211             LOGE("[NNRtTest] OH_NNBackend_SetModelTensorType failed! ret=%d\n", ret);
212             return ret;
213         }
214         if (std::find(graphArgs.paramIndices.begin(), graphArgs.paramIndices.end(), i) !=
215             graphArgs.paramIndices.end()) {
216             ret = OH_NNModel_SetTensorData(model, i, operandTem.data, operandTem.length);
217             if (ret != OH_NN_SUCCESS) {
218                 LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
219                 return ret;
220             }
221         }
222         OH_NNTensorDesc_Destroy(&tensorDesc);
223     }
224     ret = SingleModelBuildEndStep(model, graphArgs);
225     return ret;
226 }
227 
GetDeviceID(size_t *deviceId)228 OH_NN_ReturnCode GetDeviceID(size_t *deviceId)
229 {
230     OH_NN_ReturnCode ret = OH_NN_FAILED;
231     const size_t *devicesID{nullptr};
232     uint32_t devicesCount{0};
233     ret = OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount);
234     if (ret != OH_NN_SUCCESS) {
235         LOGE("[NNRtTest] OH_NNDevice_GetAllDevicesID failed! ret=%d\n", ret);
236         return ret;
237     }
238     if (devicesCount <= NO_DEVICE_COUNT) {
239         LOGE("[NNRtTest] devicesCount <= 0  devicesCount=%d\n", devicesCount);
240         return OH_NN_FAILED;
241     }
242 
243     const char *name = nullptr;
244     std::string deviceName{"Device-CPU_TestVendor_v2_0"};
245     for (uint32_t i = 0; i < devicesCount; i++) {
246         name = nullptr;
247         ret = OH_NNDevice_GetName(devicesID[i], &name);
248         if (ret != OH_NN_SUCCESS) {
249             LOGE("[NNRtTest] OH_NNDevice_GetName failed! ret=%d\n", ret);
250             return ret;
251         }
252 
253         std::string sName(name);
254         if (deviceName == sName) {
255             *deviceId = devicesID[i];
256             return OH_NN_SUCCESS;
257         }
258     }
259     return OH_NN_FAILED;
260 }
261 
SetDevice(OH_NNCompilation *compilation)262 OH_NN_ReturnCode SetDevice(OH_NNCompilation *compilation)
263 {
264     OH_NN_ReturnCode ret = OH_NN_FAILED;
265     const size_t *devicesID{nullptr};
266     uint32_t devicesCount{0};
267     ret = OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount);
268     if (ret != OH_NN_SUCCESS) {
269         LOGE("[NNRtTest] OH_NNDevice_GetAllDevicesID failed! ret=%d\n", ret);
270         return ret;
271     }
272     if (devicesCount <= NO_DEVICE_COUNT) {
273         LOGE("[NNRtTest] devicesCount <= 0  devicesCount=%d\n", devicesCount);
274         return OH_NN_FAILED;
275     }
276 
277     const char *name = nullptr;
278     std::string deviceName{"Device-CPU_TestVendor_v2_0"};
279     for (uint32_t i = 0; i < devicesCount; i++) {
280         name = nullptr;
281         ret = OH_NNDevice_GetName(devicesID[i], &name);
282         if (ret != OH_NN_SUCCESS) {
283             LOGE("[NNRtTest] OH_NNDevice_GetName failed! ret=%d\n", ret);
284             return ret;
285         }
286 
287         std::string sName(name);
288         if (deviceName == sName) {
289             ret = OH_NNCompilation_SetDevice(compilation, devicesID[i]);
290             if (ret != OH_NN_SUCCESS) {
291                 LOGE("[NNRtTest] OH_NNCompilation_SetDevice failed! ret=%d\n", ret);
292                 return ret;
293             }
294             return OH_NN_SUCCESS;
295         }
296     }
297     return OH_NN_FAILED;
298 }
299 
CompileGraphMock(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam)300 int CompileGraphMock(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam)
301 {
302     int ret = 0;
303     ret = SetDevice(compilation);
304     if (ret != OH_NN_SUCCESS) {
305         LOGE("[NNRtTest] OH_NNCompilation_SetDevice failed! ret=%d\n", ret);
306         return ret;
307     }
308     // set cache
309     if (!compileParam.cacheDir.empty()) {
310         ret = OH_NNCompilation_SetCache(compilation, compileParam.cacheDir.c_str(),
311         compileParam.cacheVersion);
312         if (ret != OH_NN_SUCCESS) {
313             LOGE("[NNRtTest] OH_NNCompilation_SetCache failed! ret=%d\n", ret);
314             return ret;
315         }
316     }
317     // set performance
318     if (compileParam.performanceMode != OH_NN_PERFORMANCE_NONE) {
319         ret = OH_NNCompilation_SetPerformanceMode(compilation, compileParam.performanceMode);
320         if (ret != OH_NN_SUCCESS) {
321             LOGE("[NNRtTest] OH_NNCompilation_SetPerformanceMode failed! ret=%d\n", ret);
322             return ret;
323         }
324     }
325     // set priority
326     if (compileParam.priority != OH_NN_PRIORITY_NONE) {
327         ret = OH_NNCompilation_SetPriority(compilation, compileParam.priority);
328         if (ret != OH_NN_SUCCESS) {
329             LOGE("[NNRtTest] OH_NNCompilation_SetPriority failed! ret=%d\n", ret);
330             return ret;
331         }
332     }
333     // enable fp16
334     if (compileParam.enableFp16) {
335         ret = OH_NNCompilation_EnableFloat16(compilation, compileParam.enableFp16);
336         if (ret != OH_NN_SUCCESS) {
337             LOGE("[NNRtTest] OH_NNCompilation_EnableFloat16 failed! ret=%d\n", ret);
338             return ret;
339         }
340     }
341     // build
342     ret = OH_NNCompilation_Build(compilation);
343     return ret;
344 }
345 
// Releases whichever of the three runtime handles are non-null.
// Each OH_*_Destroy call resets its pointer argument to nullptr; the
// gtest ASSERTs verify that contract. Note only the local parameter
// copies are nulled — the caller's own handles are left dangling.
void Free(OH_NNModel *model, OH_NNCompilation *compilation, OH_NNExecutor *executor)
{
    if (model != nullptr) {
        OH_NNModel_Destroy(&model);
        ASSERT_EQ(nullptr, model);
    }
    if (compilation != nullptr) {
        OH_NNCompilation_Destroy(&compilation);
        ASSERT_EQ(nullptr, compilation);
    }
    if (executor != nullptr) {
        OH_NNExecutor_Destroy(&executor);
        ASSERT_EQ(nullptr, executor);
    }
}
361 
CheckPath(const std::string &path)362 PathType CheckPath(const std::string &path)
363 {
364     if (path.empty()) {
365         LOGI("CheckPath: path is null");
366         return PathType::NOT_FOUND;
367     }
368     struct stat buf{};
369     if (stat(path.c_str(), &buf) == 0) {
370         if (buf.st_mode & S_IFDIR) {
371             return PathType::DIR;
372         } else if (buf.st_mode & S_IFREG) {
373             return PathType::FILE;
374         } else {
375             return PathType::UNKNOWN;
376         }
377     }
378     LOGI("%s not found", path.c_str());
379     return PathType::NOT_FOUND;
380 }
381 
DeleteFile(const std::string &path)382 bool DeleteFile(const std::string &path)
383 {
384     if (path.empty()) {
385         LOGI("DeleteFile: path is null");
386         return false;
387     }
388     if (CheckPath(path) == PathType::NOT_FOUND) {
389         LOGI("not found: %s", path.c_str());
390         return true;
391     }
392     if (remove(path.c_str()) == 0) {
393         LOGI("deleted: %s", path.c_str());
394         return true;
395     }
396     LOGI("delete failed: %s", path.c_str());
397     return false;
398 }
399 
/* Copies the entire content of srcPath to dstPath in binary mode, truncating
 * any existing destination. Failures are silent, matching std::fstream's
 * non-throwing default behavior (a missing source yields an empty copy). */
void CopyFile(const std::string &srcPath, const std::string &dstPath)
{
    std::ifstream source(srcPath, std::ios::binary);
    std::ofstream(dstPath, std::ios::binary) << source.rdbuf();
}
407 
/* Joins two path segments with '/' unless str1 already ends in a path
 * separator ('/' or '\'). Either segment may be empty, in which case the
 * other is returned unchanged. */
std::string ConcatPath(const std::string &str1, const std::string &str2)
{
    if (str2.empty()) {
        return str1;
    }
    if (str1.empty()) {
        return str2;
    }
    const char last = str1.back();
    return (last == '\\' || last == '/') ? (str1 + str2) : (str1 + '/' + str2);
}
425 
/* Recursively deletes the directory tree rooted at path: subdirectories are
 * removed depth-first, then the files of this level, then the (now empty)
 * directory itself via DeleteFile/remove(). */
void DeleteFolder(const std::string &path)
{
    if (path.empty()) {
        LOGI("DeletePath: path is null");
        return;
    }

    DIR *dir = opendir(path.c_str());
    // opendir fails for missing paths, non-directories or lacking permission
    if (dir == nullptr) {
        LOGE("[NNRtTest] Can not open dir. Check path or permission! path: %s", path.c_str());
        return;
    }
    struct dirent *file;
    // collect this level's files; recurse into subdirectories immediately
    std::vector <std::string> pathList;
    while ((file = readdir(dir)) != nullptr) {
        // skip "." and ".."
        if (strcmp(file->d_name, ".") == 0 || strcmp(file->d_name, "..") == 0) {
            continue;
        }
        if (file->d_type == DT_DIR) {
            std::string filePath = path + "/" + file->d_name;
            DeleteFolder(filePath); // recurse into the subdirectory
        } else {
            pathList.emplace_back(ConcatPath(path, file->d_name));
        }
    }
    closedir(dir);
    // the directory itself is deleted last, after its contents
    pathList.emplace_back(path);
    LOGI("[Common] Delete folder %s", path.c_str());
    for (auto &i : pathList) {
        DeleteFile(i);
    }
}
461 
CreateFolder(const std::string &path)462 bool CreateFolder(const std::string &path)
463 {
464     if (path.empty()) {
465         LOGI("CreateFolder: path is empty");
466         return false;
467     }
468     LOGI("CreateFolder:%s", path.c_str());
469     mode_t mode = 0700;
470     for (size_t i = 1; i < path.size() - 1; i++) {
471         if (path[i] != '/') {
472             continue;
473         }
474         PathType ret = CheckPath(path.substr(0, i));
475         switch (ret) {
476             case PathType::DIR:
477                 continue;
478             case PathType::NOT_FOUND:
479                 LOGI("mkdir: %s", path.substr(0, i).c_str());
480                 mkdir(path.substr(0, i).c_str(), mode);
481                 break;
482             default:
483                 LOGI("error: %s", path.substr(0, i).c_str());
484                 return false;
485         }
486     }
487     mkdir(path.c_str(), mode);
488     return CheckPath(path) == PathType::DIR;
489 }
490 
CheckOutput(const float* output, const float* expect)491 bool CheckOutput(const float* output, const float* expect)
492 {
493     if (output == nullptr || expect == nullptr) {
494         LOGE("[NNRtTest] output or expect is nullptr\n");
495         return false;
496     }
497     for (int i = 0; i < ELEMENT_COUNT; i++) {
498         if (std::abs(float(output[i]) - float(expect[i])) > 1e-8) {
499             for (int j = 0; j < ELEMENT_COUNT; j++) {
500                 LOGE("[NNRtTest] output %d not match: expect:%f, actual:%f\n", j, float(expect[j]), float(output[j]));
501             }
502             return false;
503         }
504     }
505     return true;
506 }
507 
// Create a model with fixed (static) tensor shapes
ConstructAddModel(OH_NNModel **model)509 void ConstructAddModel(OH_NNModel **model)
510 {
511     *model = OH_NNModel_Construct();
512     ASSERT_NE(nullptr, model);
513     AddModel addModel;
514     OHNNGraphArgs graphArgs = addModel.graphArgs;
515     ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(*model, graphArgs));
516 }
517 
// Create a compilation from the fixed-shape model
// Builds the fixed-shape Add model and wraps it in a new compilation.
// Both out-parameters are set on success; the caller owns and must destroy
// them. Uses gtest ASSERTs internally.
void ConstructCompilation(OH_NNCompilation **compilation, OH_NNModel **model)
{
    ConstructAddModel(model);
    *compilation = OH_NNCompilation_Construct(*model);
    ASSERT_NE(nullptr, *compilation);
}
525 
// Create an executor from the fixed-shape compilation
// Creates an executor for the fixed-shape Add model: builds the model and
// compilation, compiles with high performance/priority, then constructs the
// executor. The intermediate model and compilation are destroyed before
// returning; the caller owns *executor.
void CreateExecutor(OH_NNExecutor **executor)
{
    OH_NNCompilation *compilation = nullptr;
    OH_NNModel *model = nullptr;
    ConstructCompilation(&compilation, &model);
    OHNNCompileParam compileParam{
        .performanceMode = OH_NN_PERFORMANCE_HIGH,
        .priority = OH_NN_PRIORITY_HIGH,
    };
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, *executor);
    OH_NNModel_Destroy(&model);
    OH_NNCompilation_Destroy(&compilation);
}
542 
// Creates an executor for the dynamic-shape AvgPool model. Mirrors
// CreateExecutor, but builds the model inline from AvgPoolDynamicModel.
// The intermediate model and compilation are destroyed before returning;
// the caller owns *executor. Uses gtest ASSERTs internally.
void CreateDynamicExecutor(OH_NNExecutor **executor)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    AvgPoolDynamicModel avgModel;
    OHNNGraphArgs graphArgs = avgModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));

    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);

    OHNNCompileParam compileParam{
        .performanceMode = OH_NN_PERFORMANCE_HIGH,
        .priority = OH_NN_PRIORITY_HIGH,
    };
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, *executor);
    OH_NNModel_Destroy(&model);
    OH_NNCompilation_Destroy(&compilation);
}
564 
GetExecutorInputOutputTensorDesc(OH_NNExecutor* executor, std::vector<NN_TensorDesc*>& inputTensorDescs, size_t& inputCount, std::vector<NN_TensorDesc*>& outputTensorDescs, size_t& outputCount)565 void GetExecutorInputOutputTensorDesc(OH_NNExecutor* executor,
566                                       std::vector<NN_TensorDesc*>& inputTensorDescs, size_t& inputCount,
567                                       std::vector<NN_TensorDesc*>& outputTensorDescs, size_t& outputCount)
568 {
569     OH_NN_ReturnCode ret = OH_NNExecutor_GetInputCount(executor, &inputCount);
570     ASSERT_EQ(OH_NN_SUCCESS, ret);
571     NN_TensorDesc* tensorDescTmp = nullptr;
572     for (size_t i = 0; i < inputCount; ++i) {
573         tensorDescTmp = OH_NNExecutor_CreateInputTensorDesc(executor, i);
574         ASSERT_NE(nullptr, tensorDescTmp);
575         inputTensorDescs.emplace_back(tensorDescTmp);
576     }
577 
578     ret = OH_NNExecutor_GetOutputCount(executor, &outputCount);
579     ASSERT_EQ(OH_NN_SUCCESS, ret);
580     for (size_t i = 0; i < outputCount; ++i) {
581         tensorDescTmp = OH_NNExecutor_CreateOutputTensorDesc(executor, i);
582         ASSERT_NE(nullptr, tensorDescTmp);
583         outputTensorDescs.emplace_back(tensorDescTmp);
584     }
585 }
586 
GetExecutorInputOutputTensorByDesc(OH_NNExecutor* executor, std::vector<NN_Tensor*>& inputTensors, const std::vector<NN_TensorDesc*>& inputTensorDescs, std::vector<NN_Tensor*>& outputTensors, const std::vector<NN_TensorDesc*>& outputTensorDescs)587 void GetExecutorInputOutputTensorByDesc(OH_NNExecutor* executor,
588     std::vector<NN_Tensor*>& inputTensors, const std::vector<NN_TensorDesc*>& inputTensorDescs,
589     std::vector<NN_Tensor*>& outputTensors, const std::vector<NN_TensorDesc*>& outputTensorDescs)
590 {
591     size_t deviceID = 0;
592     if (OH_NN_SUCCESS != GetDeviceID(&deviceID)) {
593         LOGE("Get deviceid failed.");
594         return;
595     }
596     NN_Tensor* tensor = nullptr;
597     for (size_t i = 0; i < inputTensorDescs.size(); ++i) {
598         tensor = nullptr;
599         tensor = OH_NNTensor_Create(deviceID, inputTensorDescs[i]);
600         ASSERT_NE(nullptr, tensor);
601         inputTensors.emplace_back(tensor);
602     }
603 
604     for (size_t i = 0; i < outputTensorDescs.size(); ++i) {
605         tensor = nullptr;
606         tensor = OH_NNTensor_Create(deviceID, outputTensorDescs[i]);
607         ASSERT_NE(nullptr, tensor);
608         outputTensors.emplace_back(tensor);
609     }
610 }
611 
GetExecutorInputOutputTensor(OH_NNExecutor* executor, std::vector<NN_Tensor*>& inputTensors, size_t& inputCount, std::vector<NN_Tensor*>& outputTensors, size_t& outputCount)612 void GetExecutorInputOutputTensor(OH_NNExecutor* executor, std::vector<NN_Tensor*>& inputTensors, size_t& inputCount,
613                                   std::vector<NN_Tensor*>& outputTensors, size_t& outputCount)
614 {
615     std::vector<NN_TensorDesc*> inputTensorDescs;
616     std::vector<NN_TensorDesc*> outputTensorDescs;
617     OH_NN_ReturnCode ret = OH_NNExecutor_GetInputCount(executor, &inputCount);
618     ASSERT_EQ(OH_NN_SUCCESS, ret);
619     NN_TensorDesc* tensorDescTmp = nullptr;
620     for (size_t i = 0; i < inputCount; ++i) {
621         tensorDescTmp = OH_NNExecutor_CreateInputTensorDesc(executor, i);
622         ASSERT_NE(nullptr, tensorDescTmp);
623         inputTensorDescs.emplace_back(tensorDescTmp);
624     }
625 
626     ret = OH_NNExecutor_GetOutputCount(executor, &outputCount);
627     ASSERT_EQ(OH_NN_SUCCESS, ret);
628     for (size_t i = 0; i < outputCount; ++i) {
629         tensorDescTmp = OH_NNExecutor_CreateOutputTensorDesc(executor, i);
630         ASSERT_NE(nullptr, tensorDescTmp);
631         outputTensorDescs.emplace_back(tensorDescTmp);
632     }
633 
634     size_t deviceID = 0;
635     if (OH_NN_SUCCESS != GetDeviceID(&deviceID)) {
636         LOGE("Get deviceid failed.");
637         return;
638     }
639     NN_Tensor* tensor = nullptr;
640     for (size_t i = 0; i < inputTensorDescs.size(); ++i) {
641         tensor = nullptr;
642         tensor = OH_NNTensor_Create(deviceID, inputTensorDescs[i]);
643         ASSERT_NE(nullptr, tensor);
644         inputTensors.emplace_back(tensor);
645     }
646 
647     for (size_t i = 0; i < outputTensorDescs.size(); ++i) {
648         tensor = nullptr;
649         tensor = OH_NNTensor_Create(deviceID, outputTensorDescs[i]);
650         ASSERT_NE(nullptr, tensor);
651         outputTensors.emplace_back(tensor);
652     }
653 
654     DestroyTensorDesc(inputTensorDescs, outputTensorDescs);
655 }
656 
DestroyTensorDesc( std::vector<NN_TensorDesc*>& inputTensorDescs, std::vector<NN_TensorDesc*>& outputTensorDescs)657 OH_NN_ReturnCode DestroyTensorDesc(
658     std::vector<NN_TensorDesc*>& inputTensorDescs, std::vector<NN_TensorDesc*>& outputTensorDescs)
659 {
660     // 销毁输入输出tensordesc
661     OH_NN_ReturnCode returnCode {OH_NN_FAILED};
662     for (size_t i = 0; i < inputTensorDescs.size(); ++i) {
663         returnCode = OH_NNTensorDesc_Destroy(&inputTensorDescs[i]);
664         if (returnCode != OH_NN_SUCCESS) {
665             LOGE("End2EndTest::OH_NNTensorDesc_Destroy failed.");
666             return returnCode;
667         }
668     }
669     for (size_t i = 0; i < outputTensorDescs.size(); ++i) {
670         returnCode = OH_NNTensorDesc_Destroy(&outputTensorDescs[i]);
671         if (returnCode != OH_NN_SUCCESS) {
672             LOGE("End2EndTest::OH_NNTensorDesc_Destroy failed.");
673             return returnCode;
674         }
675     }
676 
677     return OH_NN_SUCCESS;
678 }
679 
DestroyTensor( std::vector<NN_Tensor*>& inputTensors, std::vector<NN_Tensor*>& outputTensors)680 OH_NN_ReturnCode DestroyTensor(
681     std::vector<NN_Tensor*>& inputTensors, std::vector<NN_Tensor*>& outputTensors)
682 {
683     // 清理输入输出Tensor
684     OH_NN_ReturnCode returnCode {OH_NN_FAILED};
685     for (size_t i = 0; i < inputTensors.size(); ++i) {
686         returnCode = OH_NNTensor_Destroy(&inputTensors[i]);
687         if (returnCode != OH_NN_SUCCESS) {
688             LOGE("End2EndTest::OH_NNTensor_Destroy failed.");
689             return returnCode;
690         }
691     }
692     for (size_t i = 0; i < outputTensors.size(); ++i) {
693         returnCode = OH_NNTensor_Destroy(&outputTensors[i]);
694         if (returnCode != OH_NN_SUCCESS) {
695             LOGE("End2EndTest::OH_NNTensor_Destroy failed.");
696             return returnCode;
697         }
698     }
699 
700     return OH_NN_SUCCESS;
701 }
702 } // namespace Test
703 } // namespace NeuralNetworkRuntime
704 } // namespace OHOS