Home
last modified time | relevance | path

Searched refs:modelBuffer (Results 1 - 13 of 13) sorted by relevance

/foundation/ai/neural_network_runtime/example/drivers/nnrt/v2_0/hdi_cpu_service/src/
H A Dprepared_model_service.cpp243 uint8_t* modelBuffer = m_builder.GetBufferPointer(); in Compile() local
244 if (modelBuffer == nullptr) { in Compile()
250 mindspore::Status msRet = m_model->Build(modelBuffer, modelSize, mindspore::kMindIR, m_context); in Compile()
269 NNRT_ReturnCode PreparedModelService::Compile(const void* modelBuffer, size_t length) in Compile() argument
271 if (modelBuffer == nullptr || length == 0) { in Compile()
277 mindspore::Status msRet = m_model->Build(modelBuffer, length, mindspore::kMindIR, m_context); in Compile()
H A Dnnrt_device_service.cpp208 void* modelBuffer = parser.GetBufferPtr(); in PrepareModelFromModelCache() local
209 ret = service->Compile(modelBuffer, modelCache[0].dataSize); in PrepareModelFromModelCache()
/foundation/ai/neural_network_runtime/example/drivers/nnrt/v1_0/hdi_cpu_service/src/
H A Dprepared_model_service.cpp203 uint8_t* modelBuffer = m_builder.GetBufferPointer(); in Compile() local
204 if (modelBuffer == nullptr) { in Compile()
210 mindspore::Status msRet = m_model->Build(modelBuffer, modelSize, mindspore::kMindIR, m_context); in Compile()
224 int32_t PreparedModelService::Compile(const void* modelBuffer, size_t length) in Compile() argument
226 if (modelBuffer == nullptr || length == 0) { in Compile()
232 mindspore::Status msRet = m_model->Build(modelBuffer, length, mindspore::kMindIR, m_context); in Compile()
H A Dnnrt_device_service.cpp167 void* modelBuffer = parser.GetBufferPtr(); in PrepareModelFromModelCache() local
175 ret = service->Compile(modelBuffer, modelCache[0].dataSize); in PrepareModelFromModelCache()
/foundation/ai/neural_network_runtime/frameworks/native/neural_network_runtime/
H A Dnncompiled_cache.cpp114 OHOS::NeuralNetworkRuntime::Buffer modelBuffer; in Restore() local
115 ret = ReadCacheModelFile(cacheModelPath, modelBuffer); in Restore()
121 if (GetCrc16(static_cast<char*>(modelBuffer.data), modelBuffer.length) != in Restore()
128 caches.emplace_back(std::move(modelBuffer)); in Restore()
H A Dhdi_device_v2_1.cpp528 Buffer modelBuffer {nullptr, 0}; in AllocateDeviceBufferForOfflineModel()
529 modelBuffer.data = newModelBuffer; in AllocateDeviceBufferForOfflineModel()
530 modelBuffer.length = offlineModelSize; in AllocateDeviceBufferForOfflineModel()
531 deviceBuffers.emplace_back(modelBuffer); in AllocateDeviceBufferForOfflineModel()
H A Dhdi_device_v2_0.cpp533 Buffer modelBuffer {nullptr, 0}; in AllocateDeviceBufferForOfflineModel()
534 modelBuffer.data = newModelBuffer; in AllocateDeviceBufferForOfflineModel()
535 modelBuffer.length = offlineModelSize; in AllocateDeviceBufferForOfflineModel()
536 deviceBuffers.emplace_back(modelBuffer); in AllocateDeviceBufferForOfflineModel()
/foundation/ai/neural_network_runtime/test/fuzztest/
H A Dnnrt_device_impl.h90 int32_t PrepareOfflineModel(const std::vector<SharedBuffer>& modelBuffer, const ModelConfig& config, in PrepareOfflineModel() argument
/foundation/ai/neural_network_runtime/example/drivers/nnrt/v1_0/hdi_cpu_service/include/
H A Dprepared_model_service.h42 int32_t Compile(const void* modelBuffer, size_t length);
/foundation/ai/neural_network_runtime/example/drivers/nnrt/v2_0/hdi_cpu_service/include/
H A Dprepared_model_service.h42 NNRT_ReturnCode Compile(const void* modelBuffer, size_t length);
H A Dnnrt_device_service.h56 int32_t PrepareOfflineModel(const std::vector<SharedBuffer>& modelBuffer, const ModelConfig& config,
/foundation/ai/neural_network_runtime/interfaces/kits/c/neural_network_runtime/
H A Dneural_network_core.h102 * <b>modelBuffer</b> pointer inside, instead of copying its data.
103 * You should not release <b>modelBuffer</b> before the {@link OH_NNCompilation} instance is destroyed. \n
105 * @param modelBuffer Offline model file buffer.
111 OH_NNCompilation *OH_NNCompilation_ConstructWithOfflineModelBuffer(const void *modelBuffer, size_t modelSize);
/foundation/ai/neural_network_runtime/frameworks/native/neural_network_core/
H A Dneural_network_core.cpp153 NNRT_API OH_NNCompilation *OH_NNCompilation_ConstructWithOfflineModelBuffer(const void *modelBuffer, size_t modelSize) in OH_NNCompilation_ConstructWithOfflineModelBuffer() argument
155 if (modelBuffer == nullptr) { in OH_NNCompilation_ConstructWithOfflineModelBuffer()
156 LOGE("OH_NNCompilation_ConstructWithOfflineModelBuffer failed, modelBuffer is nullptr."); in OH_NNCompilation_ConstructWithOfflineModelBuffer()
171 compilation->offlineModelBuffer.first = const_cast<void*>(modelBuffer); in OH_NNCompilation_ConstructWithOfflineModelBuffer()

Completed in 14 milliseconds