/foundation/ai/neural_network_runtime/example/drivers/nnrt/v2_0/hdi_cpu_service/src/ |
H A D | prepared_model_service.cpp | 243 uint8_t* modelBuffer = m_builder.GetBufferPointer(); in Compile() local 244 if (modelBuffer == nullptr) { in Compile() 250 mindspore::Status msRet = m_model->Build(modelBuffer, modelSize, mindspore::kMindIR, m_context); in Compile() 269 NNRT_ReturnCode PreparedModelService::Compile(const void* modelBuffer, size_t length) in Compile() argument 271 if (modelBuffer == nullptr || length == 0) { in Compile() 277 mindspore::Status msRet = m_model->Build(modelBuffer, length, mindspore::kMindIR, m_context); in Compile()
|
H A D | nnrt_device_service.cpp | 208 void* modelBuffer = parser.GetBufferPtr(); in PrepareModelFromModelCache() local 209 ret = service->Compile(modelBuffer, modelCache[0].dataSize); in PrepareModelFromModelCache()
|
/foundation/ai/neural_network_runtime/example/drivers/nnrt/v1_0/hdi_cpu_service/src/ |
H A D | prepared_model_service.cpp | 203 uint8_t* modelBuffer = m_builder.GetBufferPointer(); in Compile() local 204 if (modelBuffer == nullptr) { in Compile() 210 mindspore::Status msRet = m_model->Build(modelBuffer, modelSize, mindspore::kMindIR, m_context); in Compile() 224 int32_t PreparedModelService::Compile(const void* modelBuffer, size_t length) in Compile() argument 226 if (modelBuffer == nullptr || length == 0) { in Compile() 232 mindspore::Status msRet = m_model->Build(modelBuffer, length, mindspore::kMindIR, m_context); in Compile()
|
H A D | nnrt_device_service.cpp | 167 void* modelBuffer = parser.GetBufferPtr(); in PrepareModelFromModelCache() local 175 ret = service->Compile(modelBuffer, modelCache[0].dataSize); in PrepareModelFromModelCache()
|
/foundation/ai/neural_network_runtime/frameworks/native/neural_network_runtime/ |
H A D | nncompiled_cache.cpp | 114 OHOS::NeuralNetworkRuntime::Buffer modelBuffer; in Restore() local 115 ret = ReadCacheModelFile(cacheModelPath, modelBuffer); in Restore() 121 if (GetCrc16(static_cast<char*>(modelBuffer.data), modelBuffer.length) != in Restore() 128 caches.emplace_back(std::move(modelBuffer)); in Restore()
|
H A D | hdi_device_v2_1.cpp | 528 Buffer modelBuffer {nullptr, 0}; in AllocateDeviceBufferForOfflineModel() 529 modelBuffer.data = newModelBuffer; in AllocateDeviceBufferForOfflineModel() 530 modelBuffer.length = offlineModelSize; in AllocateDeviceBufferForOfflineModel() 531 deviceBuffers.emplace_back(modelBuffer); in AllocateDeviceBufferForOfflineModel()
|
H A D | hdi_device_v2_0.cpp | 533 Buffer modelBuffer {nullptr, 0}; in AllocateDeviceBufferForOfflineModel() 534 modelBuffer.data = newModelBuffer; in AllocateDeviceBufferForOfflineModel() 535 modelBuffer.length = offlineModelSize; in AllocateDeviceBufferForOfflineModel() 536 deviceBuffers.emplace_back(modelBuffer); in AllocateDeviceBufferForOfflineModel()
|
/foundation/ai/neural_network_runtime/test/fuzztest/ |
H A D | nnrt_device_impl.h | 90 int32_t PrepareOfflineModel(const std::vector<SharedBuffer>& modelBuffer, const ModelConfig& config,
in PrepareOfflineModel() argument
|
/foundation/ai/neural_network_runtime/example/drivers/nnrt/v1_0/hdi_cpu_service/include/ |
H A D | prepared_model_service.h | 42 int32_t Compile(const void* modelBuffer, size_t length);
|
/foundation/ai/neural_network_runtime/example/drivers/nnrt/v2_0/hdi_cpu_service/include/ |
H A D | prepared_model_service.h | 42 NNRT_ReturnCode Compile(const void* modelBuffer, size_t length);
|
H A D | nnrt_device_service.h | 56 int32_t PrepareOfflineModel(const std::vector<SharedBuffer>& modelBuffer, const ModelConfig& config,
|
/foundation/ai/neural_network_runtime/interfaces/kits/c/neural_network_runtime/ |
H A D | neural_network_core.h | 102 * <b>modelBuffer</b> pointer inside, instead of copying its data. 103 You should not release <b>modelBuffer</b> before the {@link OH_NNCompilation} instance is destroyed. \n 105 * @param modelBuffer Offline model file buffer. 111 OH_NNCompilation *OH_NNCompilation_ConstructWithOfflineModelBuffer(const void *modelBuffer, size_t modelSize);
|
/foundation/ai/neural_network_runtime/frameworks/native/neural_network_core/ |
H A D | neural_network_core.cpp | 153 NNRT_API OH_NNCompilation *OH_NNCompilation_ConstructWithOfflineModelBuffer(const void *modelBuffer, size_t modelSize) in OH_NNCompilation_ConstructWithOfflineModelBuffer() argument 155 if (modelBuffer == nullptr) { in OH_NNCompilation_ConstructWithOfflineModelBuffer() 156 LOGE("OH_NNCompilation_ConstructWithOfflineModelBuffer failed, modelBuffer is nullptr."); in OH_NNCompilation_ConstructWithOfflineModelBuffer() 171 compilation->offlineModelBuffer.first = const_cast<void*>(modelBuffer); in OH_NNCompilation_ConstructWithOfflineModelBuffer()
|