Lines Matching refs:model

28  * @file model.h
70 * @brief Create a model object.
77 * @brief Destroy the model object.
78 * @param model Model object handle address.
81 OH_AI_API void OH_AI_ModelDestroy(OH_AI_ModelHandle *model);
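
A minimal lifecycle sketch, assuming the OpenHarmony NDK header layout (<mindspore/model.h>): the handle returned by OH_AI_ModelCreate is later released by passing its address to OH_AI_ModelDestroy.

    #include <mindspore/model.h>

    void model_lifecycle(void) {
      OH_AI_ModelHandle model = OH_AI_ModelCreate();   /* create an empty model object */
      if (model == NULL) {
        return;                                        /* creation failed */
      }
      /* ... build and run the model here ... */
      OH_AI_ModelDestroy(&model);                      /* pass the handle address to release it */
    }
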
84 * @brief Build the model from a model file buffer so that it can run on a device.
85 * @param model Model object handle.
86 * @param model_data Define the buffer read from a model file.
87 * @param data_size Define the number of bytes in the model file buffer.
88 * @param model_type Define the type of the model file.
93 OH_AI_API OH_AI_Status OH_AI_ModelBuild(OH_AI_ModelHandle model, const void *model_data, size_t data_size,
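
A sketch of building from an in-memory buffer. The declaration above is truncated; this assumes it continues with an OH_AI_ModelType and an OH_AI_ContextHandle parameter, as in the MindSpore Lite C API.

    #include <stdio.h>
    #include <stdlib.h>
    #include <mindspore/model.h>
    #include <mindspore/context.h>

    /* Read a model file into memory and build the model from the buffer. */
    OH_AI_Status build_from_buffer(OH_AI_ModelHandle model, OH_AI_ContextHandle context, const char *path) {
      FILE *fp = fopen(path, "rb");
      if (fp == NULL) return OH_AI_STATUS_LITE_ERROR;
      fseek(fp, 0, SEEK_END);
      long size = ftell(fp);
      rewind(fp);
      void *buf = malloc((size_t)size);
      if (buf == NULL || fread(buf, 1, (size_t)size, fp) != (size_t)size) {
        fclose(fp);
        free(buf);
        return OH_AI_STATUS_LITE_ERROR;
      }
      fclose(fp);
      OH_AI_Status ret = OH_AI_ModelBuild(model, buf, (size_t)size, OH_AI_MODELTYPE_MINDIR, context);
      free(buf);   /* the local copy is no longer needed once the build returns */
      return ret;
    }
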
97 * @brief Load and build the model from a model file path so that it can run on a device.
98 * @param model Model object handle.
99 * @param model_path Define the model file path.
100 * @param model_type Define the type of the model file.
105 OH_AI_API OH_AI_Status OH_AI_ModelBuildFromFile(OH_AI_ModelHandle model, const char *model_path,
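
A sketch of the file-path variant, again assuming the truncated declaration continues with OH_AI_ModelType and OH_AI_ContextHandle parameters; the CPU device type is used as an example backend.

    #include <mindspore/model.h>
    #include <mindspore/context.h>

    /* Build a model directly from a file path on the CPU backend. */
    OH_AI_ModelHandle load_model(const char *path) {
      OH_AI_ContextHandle ctx = OH_AI_ContextCreate();
      OH_AI_DeviceInfoHandle cpu = OH_AI_DeviceInfoCreate(OH_AI_DEVICETYPE_CPU);
      OH_AI_ContextAddDeviceInfo(ctx, cpu);            /* run on the CPU backend */
      OH_AI_ModelHandle model = OH_AI_ModelCreate();
      if (OH_AI_ModelBuildFromFile(model, path, OH_AI_MODELTYPE_MINDIR, ctx) != OH_AI_STATUS_SUCCESS) {
        OH_AI_ModelDestroy(&model);                    /* build failed, release the handle */
        return NULL;
      }
      return model;                                    /* ctx lifetime follows the API docs; not destroyed here */
    }
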
110 * @param model Model object handle.
117 OH_AI_API OH_AI_Status OH_AI_ModelResize(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
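
A resize sketch for a single-input model. The fields of OH_AI_ShapeInfo (shape_num, shape) and the tail of the OH_AI_ModelResize signature (a shape-info array plus its length) are assumptions, since they are not shown in the matched lines.

    #include <mindspore/model.h>

    /* Resize the first (and only) input to batch size 4 in NHWC layout. */
    OH_AI_Status resize_to_batch4(OH_AI_ModelHandle model) {
      OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
      OH_AI_ShapeInfo shape_info = { .shape_num = 4, .shape = {4, 224, 224, 3} };
      return OH_AI_ModelResize(model, inputs, &shape_info, 1);
    }
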
121 * @brief Run model inference.
122 * @param model Model object handle.
130 OH_AI_API OH_AI_Status OH_AI_ModelPredict(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray inputs,
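
A minimal inference sketch: copy caller data into the first input tensor and run one prediction. It assumes the truncated declaration continues with an output-array address and two optional kernel callbacks (passed as NULL here).

    #include <string.h>
    #include <mindspore/model.h>
    #include <mindspore/tensor.h>

    OH_AI_Status run_inference(OH_AI_ModelHandle model, const float *data, size_t bytes) {
      OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
      if (inputs.handle_num == 0 || bytes > OH_AI_TensorGetDataSize(inputs.handle_list[0])) {
        return OH_AI_STATUS_LITE_ERROR;                                /* no input or caller data too large */
      }
      void *in = OH_AI_TensorGetMutableData(inputs.handle_list[0]);    /* writable first input buffer */
      memcpy(in, data, bytes);
      OH_AI_TensorHandleArray outputs;
      return OH_AI_ModelPredict(model, inputs, &outputs, NULL, NULL);  /* outputs are owned by the model */
    }
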
135 * @brief Obtains all input tensor handles of the model.
136 * @param model Model object handle.
140 OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetInputs(const OH_AI_ModelHandle model);
143 * @brief Obtains all output tensor handles of the model.
144 * @param model Model object handle.
148 OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetOutputs(const OH_AI_ModelHandle model);
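
A small sketch that walks the input and output handle arrays and prints each tensor's name and byte size, using the handle_num/handle_list fields of OH_AI_TensorHandleArray.

    #include <stdio.h>
    #include <mindspore/model.h>
    #include <mindspore/tensor.h>

    void dump_io(const OH_AI_ModelHandle model) {
      OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
      for (size_t i = 0; i < inputs.handle_num; ++i) {
        printf("input  %zu: %s (%zu bytes)\n", i,
               OH_AI_TensorGetName(inputs.handle_list[i]),
               OH_AI_TensorGetDataSize(inputs.handle_list[i]));
      }
      OH_AI_TensorHandleArray outputs = OH_AI_ModelGetOutputs(model);
      for (size_t i = 0; i < outputs.handle_num; ++i) {
        printf("output %zu: %s (%zu bytes)\n", i,
               OH_AI_TensorGetName(outputs.handle_list[i]),
               OH_AI_TensorGetDataSize(outputs.handle_list[i]));
      }
    }
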
151 * @brief Obtains the input tensor handle of the model by name.
152 * @param model Model object handle.
157 OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetInputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);
160 * @brief Obtains the output tensor handle of the model by name.
161 * @param model Model object handle.
166 OH_AI_API OH_AI_TensorHandle OH_AI_ModelGetOutputByTensorName(const OH_AI_ModelHandle model, const char *tensor_name);
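
Looking tensors up by name instead of by index; "data" and "prob" are placeholder names, since the real names depend on the model.

    #include <mindspore/model.h>

    /* Returns non-zero only if both named tensors exist in the model. */
    int check_named_tensors(const OH_AI_ModelHandle model) {
      OH_AI_TensorHandle in  = OH_AI_ModelGetInputByTensorName(model, "data");
      OH_AI_TensorHandle out = OH_AI_ModelGetOutputByTensorName(model, "prob");
      return (in != NULL && out != NULL);   /* NULL means no tensor with that name */
    }
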
217 * @brief Build the train model from a model buffer so that it can run on a device. Only valid for Lite Train.
218 * @param model Model object handle.
219 * @param model_data Define the buffer read from a model file.
220 * @param data_size Define the number of bytes in the model file buffer.
221 * @param model_type Define the type of the model file.
227 OH_AI_API OH_AI_Status OH_AI_TrainModelBuild(OH_AI_ModelHandle model, const void *model_data, size_t data_size,
232 * @brief Load and build the train model from a model file so that it can run on a device. Only valid for Lite Train.
233 * @param model Model object handle.
234 * @param model_path Define the model file path.
235 * @param model_type Define the type of the model file.
241 OH_AI_API OH_AI_Status OH_AI_TrainModelBuildFromFile(OH_AI_ModelHandle model, const char *model_path,
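
A train-build sketch covering the file-path variant (the buffer variant is analogous). It assumes the truncated declarations continue with a context handle and an OH_AI_TrainCfgHandle, as in the Lite Train C API.

    #include <mindspore/model.h>
    #include <mindspore/context.h>

    OH_AI_ModelHandle load_train_model(const char *path, OH_AI_ContextHandle ctx) {
      OH_AI_TrainCfgHandle cfg = OH_AI_TrainCfgCreate();   /* default training configuration */
      OH_AI_ModelHandle model = OH_AI_ModelCreate();
      if (OH_AI_TrainModelBuildFromFile(model, path, OH_AI_MODELTYPE_MINDIR, ctx, cfg) !=
          OH_AI_STATUS_SUCCESS) {
        OH_AI_ModelDestroy(&model);
        return NULL;
      }
      return model;
    }
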
247 * @brief Train the model by a single step. Only valid for Lite Train.
248 * @param model Model object handle.
254 OH_AI_API OH_AI_Status OH_AI_RunStep(OH_AI_ModelHandle model, const OH_AI_KernelCallBack before,
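
A sketch of a training loop around OH_AI_RunStep with no kernel callbacks; fill_batch is a hypothetical helper (commented out) that would copy batch data into the model inputs before each step.

    #include <stdbool.h>
    #include <mindspore/model.h>

    OH_AI_Status train_epoch(OH_AI_ModelHandle model, int steps) {
      OH_AI_ModelSetTrainMode(model, true);                  /* switch to train mode */
      for (int i = 0; i < steps; ++i) {
        /* fill_batch(OH_AI_ModelGetInputs(model), i);          hypothetical: load batch i into the inputs */
        OH_AI_Status ret = OH_AI_RunStep(model, NULL, NULL);
        if (ret != OH_AI_STATUS_SUCCESS) return ret;
      }
      return OH_AI_STATUS_SUCCESS;
    }
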
263 OH_AI_API OH_AI_Status OH_AI_ModelSetLearningRate(OH_AI_ModelHandle model, float learning_rate);
267 * @param model Model object handle.
271 OH_AI_API float OH_AI_ModelGetLearningRate(OH_AI_ModelHandle model);
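
A sketch pairing the getter and setter to decay the current learning rate by a factor of ten.

    #include <stdio.h>
    #include <mindspore/model.h>

    void decay_learning_rate(OH_AI_ModelHandle model) {
      float lr = OH_AI_ModelGetLearningRate(model);
      if (OH_AI_ModelSetLearningRate(model, lr * 0.1f) == OH_AI_STATUS_SUCCESS) {
        printf("learning rate: %f -> %f\n", lr, lr * 0.1f);
      }
    }
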
274 * @brief Obtains all weight tensors of the model. Only valid for Lite Train.
275 * @param model Model object handle.
279 OH_AI_API OH_AI_TensorHandleArray OH_AI_ModelGetWeights(OH_AI_ModelHandle model);
282 * @brief Update the weight tensors of the model. Only valid for Lite Train.
287 OH_AI_API OH_AI_Status OH_AI_ModelUpdateWeights(OH_AI_ModelHandle model, const OH_AI_TensorHandleArray new_weights);
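
A sketch of the weights round trip. For simplicity it pushes the same handles back; a real caller would first write new values into the tensors (or supply new tensors) before calling OH_AI_ModelUpdateWeights.

    #include <mindspore/model.h>
    #include <mindspore/tensor.h>

    OH_AI_Status touch_weights(OH_AI_ModelHandle model) {
      OH_AI_TensorHandleArray weights = OH_AI_ModelGetWeights(model);
      for (size_t i = 0; i < weights.handle_num; ++i) {
        (void)OH_AI_TensorGetMutableData(weights.handle_list[i]);   /* writable weight data would be edited here */
      }
      return OH_AI_ModelUpdateWeights(model, weights);
    }
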
290 * @brief Get the model running mode.
291 * @param model Model object handle.
295 OH_AI_API bool OH_AI_ModelGetTrainMode(OH_AI_ModelHandle model);
298 * @brief Set the model running mode. Only valid for Lite Train.
299 * @param model Model object handle.
300 * @param train True indicates that the model runs in train mode; otherwise, it runs in eval mode.
304 OH_AI_API OH_AI_Status OH_AI_ModelSetTrainMode(OH_AI_ModelHandle model, bool train);
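
A sketch of toggling the running mode: read the current mode, drop to eval mode for an evaluation pass, then restore the previous mode.

    #include <stdbool.h>
    #include <mindspore/model.h>

    void eval_pass(OH_AI_ModelHandle model) {
      bool was_training = OH_AI_ModelGetTrainMode(model);
      OH_AI_ModelSetTrainMode(model, false);            /* eval mode */
      /* ... run the evaluation (e.g. OH_AI_ModelPredict) here ... */
      OH_AI_ModelSetTrainMode(model, was_training);     /* restore the previous mode */
    }
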
308 * @param model Model object handle.
315 OH_AI_API OH_AI_Status OH_AI_ModelSetupVirtualBatch(OH_AI_ModelHandle model, int virtual_batch_multiplier, float lr,
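
A virtual-batch sketch: accumulate gradients over four mini-batches before each weight update. The truncated declaration is assumed to end with a momentum parameter, and negative lr/momentum values are assumed to mean "keep the current value".

    #include <mindspore/model.h>

    OH_AI_Status enable_virtual_batch(OH_AI_ModelHandle model) {
      return OH_AI_ModelSetupVirtualBatch(model, 4, -1.0f, -1.0f);
    }
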
319 * @brief Export the trained model to a file. Only valid for Lite Train.
320 * @param model Model object handle.
321 * @param model_type The model file type.
322 * @param model_file The path of the exported model file.
324 * @param export_inference_only Whether to export an inference-only model.
325 * @param output_tensor_name The name of the output tensor of the exported inference model; if empty (default),
326 * the complete inference model is exported.
331 OH_AI_API OH_AI_Status OH_AI_ExportModel(OH_AI_ModelHandle model, OH_AI_ModelType model_type, const char *model_file,
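
An export sketch that writes an inference-only model file. It assumes the truncated declaration continues with a quantization type (OH_AI_NO_QUANT here), the inference-only flag, and an optional output-tensor-name list (NULL with count 0 exports the complete inference graph).

    #include <stdbool.h>
    #include <mindspore/model.h>
    #include <mindspore/types.h>

    OH_AI_Status export_for_inference(OH_AI_ModelHandle model, const char *path) {
      return OH_AI_ExportModel(model, OH_AI_MODELTYPE_MINDIR, path,
                               OH_AI_NO_QUANT, true, NULL, 0);
    }
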
336 * @brief Export the trained model to a buffer. Only valid for Lite Train.
337 * @param model Model object handle.
338 * @param model_type The model file type.
339 * @param model_data The buffer that holds the exported model.
340 * @param data_size The size of the exported model buffer.
342 * @param export_inference_only Whether to export an inference-only model.
343 * @param output_tensor_name The name of the output tensor of the exported inference model; if empty (default),
344 * the complete inference model is exported.
349 OH_AI_API OH_AI_Status OH_AI_ExportModelBuffer(OH_AI_ModelHandle model, OH_AI_ModelType model_type, char **model_data,
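
A buffer-export sketch, assuming the truncated declaration mirrors OH_AI_ExportModel with the buffer pointer and size returned through out-parameters; buffer ownership follows the API documentation.

    #include <stdbool.h>
    #include <stddef.h>
    #include <mindspore/model.h>
    #include <mindspore/types.h>

    OH_AI_Status export_to_buffer(OH_AI_ModelHandle model, char **data, size_t *size) {
      return OH_AI_ExportModelBuffer(model, OH_AI_MODELTYPE_MINDIR, data, size,
                                     OH_AI_NO_QUANT, true, NULL, 0);
    }
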
354 * @brief Export the model's weights, which can be used in Micro only. Only valid for Lite Train.
355 * @param model Model object handle.
356 * @param model_type The model file type.
358 * @param is_inference Whether to export weights of an inference model. Currently, only `true` is supported.
365 OH_AI_API OH_AI_Status OH_AI_ExportWeightsCollaborateWithMicro(OH_AI_ModelHandle model, OH_AI_ModelType model_type,
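
A sketch of dumping weights for Micro code generation. The tail of the truncated declaration is assumed: a weight-file path, the is_inference flag (only true is supported), an fp16 flag, and an optional list of changeable-weight names.

    #include <stdbool.h>
    #include <mindspore/model.h>

    OH_AI_Status export_micro_weights(OH_AI_ModelHandle model, const char *weight_file) {
      return OH_AI_ExportWeightsCollaborateWithMicro(model, OH_AI_MODELTYPE_MINDIR, weight_file,
                                                     true, false, NULL, 0);
    }
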