1From 318fa79ab19e529b32b73a976f852a7428a71109 Mon Sep 17 00:00:00 2001
2From: zhangyanhui <zhangyanhui17@huawei.com>
3Date: Sat, 25 May 2024 10:19:16 +0800
4Subject: [PATCH] auto-apply 0003-add-js-api.patch
5
6---
7 include/js_api/@ohos.ai.mindSporeLite.d.ts    |  867 ++++++
8 include/js_api/common_napi.h                  |  196 ++
9 include/js_api/ms_errors.h                    |   39 +
10 include/js_api/ms_info.h                      |   69 +
11 include/js_api/ms_parameters_napi.h           |   24 +
12 include/js_api/mslite_model_callback_napi.h   |   38 +
13 include/js_api/mslite_model_napi.h            |  186 ++
14 include/js_api/mstensor_napi.h                |   49 +
15 include/js_api/native_module_ohos_ms.h        |   22 +
16 include/js_api/nnrt_device_desc_napi.h        |   45 +
17 mindspore/lite/BUILD.gn                       |    1 +
18 mindspore/lite/src/litert/js_api/BUILD.gn     |   56 +
19 .../lite/src/litert/js_api/common_napi.cc     |  303 ++
20 .../src/litert/js_api/mslite_model_napi.cc    | 2653 +++++++++++++++++
21 .../lite/src/litert/js_api/mstensor_napi.cc   |  416 +++
22 .../litert/js_api/native_module_ohos_ms.cc    |   48 +
23 .../src/litert/js_api/nnrt_device_desc.cc     |  216 ++
24 17 files changed, 5228 insertions(+)
25 create mode 100644 include/js_api/@ohos.ai.mindSporeLite.d.ts
26 create mode 100644 include/js_api/common_napi.h
27 create mode 100644 include/js_api/ms_errors.h
28 create mode 100644 include/js_api/ms_info.h
29 create mode 100644 include/js_api/ms_parameters_napi.h
30 create mode 100644 include/js_api/mslite_model_callback_napi.h
31 create mode 100644 include/js_api/mslite_model_napi.h
32 create mode 100644 include/js_api/mstensor_napi.h
33 create mode 100644 include/js_api/native_module_ohos_ms.h
34 create mode 100644 include/js_api/nnrt_device_desc_napi.h
35 create mode 100644 mindspore/lite/src/litert/js_api/BUILD.gn
36 create mode 100644 mindspore/lite/src/litert/js_api/common_napi.cc
37 create mode 100644 mindspore/lite/src/litert/js_api/mslite_model_napi.cc
38 create mode 100644 mindspore/lite/src/litert/js_api/mstensor_napi.cc
39 create mode 100644 mindspore/lite/src/litert/js_api/native_module_ohos_ms.cc
40 create mode 100644 mindspore/lite/src/litert/js_api/nnrt_device_desc.cc
41
42diff --git a/include/js_api/@ohos.ai.mindSporeLite.d.ts b/include/js_api/@ohos.ai.mindSporeLite.d.ts
43new file mode 100644
44index 00000000..6b9aa822
45--- /dev/null
46+++ b/include/js_api/@ohos.ai.mindSporeLite.d.ts
47@@ -0,0 +1,867 @@
48+/*
49+ * Copyright (c) 2023 Huawei Device Co., Ltd.
50+ * Licensed under the Apache License, Version 2.0 (the "License");
51+ * you may not use this file except in compliance with the License.
52+ * You may obtain a copy of the License at
53+ *
54+ * http://www.apache.org/licenses/LICENSE-2.0
55+ *
56+ * Unless required by applicable law or agreed to in writing, software
57+ * distributed under the License is distributed on an "AS IS" BASIS,
58+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
59+ * See the License for the specific language governing permissions and
60+ * limitations under the License.
61+ */
62+
63+import { Callback } from './@ohos.base';
64+
65+/**
66+ * @namespace mindSporeLite
67+ * @syscap SystemCapability.AI.MindSporeLite
68+ * @stagemodelonly
69+ * @since 10
70+ */
71+declare namespace mindSporeLite {
72+  /**
73+   * Create a Model instance from file path
74+   * @param { string } model - model indicates model path to be loaded
75+   * @param { Context } context - context indicates model context information
76+   * @returns { Promise<Model> } the promise returned by the function.
77+   * @syscap SystemCapability.AI.MindSporeLite
78+   * @stagemodelonly
79+   * @since 10
80+   */
81+  function loadModelFromFile(
82+    model: string,
83+    context?: Context): Promise<Model>;
84+
85+  /**
86+   * Create a Model instance from file path.
87+   * @param { string } model - model indicates model path to be loaded
88+   * @param { Callback<Model> } callback - the callback of model
89+   * @syscap SystemCapability.AI.MindSporeLite
90+   * @stagemodelonly
91+   * @since 10
92+   */
93+  function loadModelFromFile(
94+    model: string, callback: Callback<Model>): void;
95+
96+  /**
97+   * Create a Model instance from file path.
98+   * @param { string } model - model indicates model path to be loaded
99+   * @param { Context } context - context indicates model context information
100+   * @param { Callback<Model> } callback - the callback of model
101+   * @syscap SystemCapability.AI.MindSporeLite
102+   * @stagemodelonly
103+   * @since 10
104+   */
105+  function loadModelFromFile(
106+    model: string,
107+    context: Context, callback: Callback<Model>): void;
108+
109+  /**
110+   * Create a Model instance from buffer
111+   * @param { ArrayBuffer } model - model indicates model buffer to be loaded
112+   * @param { Context } context - context indicates model context information
113+   * @returns { Promise<Model> } the promise returned by the function.
114+   * @syscap SystemCapability.AI.MindSporeLite
115+   * @stagemodelonly
116+   * @since 10
117+   */
118+  function loadModelFromBuffer(
119+    model: ArrayBuffer,
120+    context?: Context): Promise<Model>;
121+
122+  /**
123+   * Create a Model instance from buffer
124+   * @param { ArrayBuffer } model - model indicates model buffer to be loaded
125+   * @param { Callback<Model> } callback - the callback of model
126+   * @syscap SystemCapability.AI.MindSporeLite
127+   * @stagemodelonly
128+   * @since 10
129+   */
130+  function loadModelFromBuffer(
131+    model: ArrayBuffer, callback: Callback<Model>): void;
132+
133+  /**
134+   * Create a Model instance from buffer
135+   * @param { ArrayBuffer } model - model indicates model buffer to be loaded
136+   * @param { Context } context - context indicates model context information
137+   * @param { Callback<Model> } callback - the callback of model
138+   * @syscap SystemCapability.AI.MindSporeLite
139+   * @stagemodelonly
140+   * @since 10
141+   */
142+  function loadModelFromBuffer(
143+    model: ArrayBuffer,
144+    context: Context, callback: Callback<Model>): void;
145+
146+  /**
147+   * Creates a Model instance from file description
148+   * @param { number } model - model indicates model file description to be loaded
149+   * @param { Context } context - context indicates model context information
150+   * @returns { Promise<Model> } the promise returned by the function.
151+   * @syscap SystemCapability.AI.MindSporeLite
152+   * @stagemodelonly
153+   * @since 10
154+   */
155+  function loadModelFromFd(
156+    model: number,
157+    context?: Context): Promise<Model>;
158+
159+  /**
160+   * Create a Model instance from file description
161+   * @param { number } model - model indicates model file description to be loaded
162+   * @param { Callback<Model> } callback - the callback of model
163+   * @syscap SystemCapability.AI.MindSporeLite
164+   * @stagemodelonly
165+   * @since 10
166+   */
167+  function loadModelFromFd(
168+    model: number, callback: Callback<Model>): void;
169+
170+  /**
171+   * Create a Model instance from file description
172+   * @param { number } model - model indicates model file description to be loaded
173+   * @param { Context } context - context indicates model context information
174+   * @param { Callback<Model> } callback - the callback of model
175+   * @syscap SystemCapability.AI.MindSporeLite
176+   * @stagemodelonly
177+   * @since 10
178+   */
179+  function loadModelFromFd(
180+    model: number,
181+    context: Context, callback: Callback<Model>): void;
182+
183+  /**
184+   * Load train model from file
185+   * @param { string } model - model file path
186+   * @param { ?TrainCfg } trainCfg - model train configuration
187+   * @param { ?Context } context - model build context
188+   * @returns { Promise<Model> } the promise of the built model
189+   * @syscap SystemCapability.AI.MindSporeLite
190+   * @stagemodelonly
191+   * @since 11
192+   */
193+  function loadTrainModelFromFile(
194+    model: string,
195+    trainCfg?: TrainCfg,
196+    context?: Context): Promise<Model>;
197+
198+  /**
199+   * Load train model from buffer
200+   * @param { ArrayBuffer } model - model buffer
201+   * @param { ?TrainCfg } trainCfg - model train configuration
202+   * @param { ?Context } context - model build context
203+   * @returns { Promise<Model> } the promise of the built model
204+   * @syscap SystemCapability.AI.MindSporeLite
205+   * @stagemodelonly
206+   * @since 11
207+   */
208+  function loadTrainModelFromBuffer(
209+    model: ArrayBuffer,
210+    trainCfg?: TrainCfg,
211+    context?: Context): Promise<Model>;
212+
213+  /**
214+   * Load train model from file description
215+   * @param { number } model - model file description
216+   * @param { ?TrainCfg } trainCfg - model train configuration
217+   * @param { ?Context } context - model build context
218+   * @returns { Promise<Model> } the promise of the built model
219+   * @syscap SystemCapability.AI.MindSporeLite
220+   * @stagemodelonly
221+   * @since 11
222+   */
223+  function loadTrainModelFromFd(
224+    model: number,
225+    trainCfg?: TrainCfg,
226+    context?: Context): Promise<Model>;
227+
228+  /**
229+   * Provides model management functions, including get inputs, predict and resize.
230+   * @typedef Model
231+   * @syscap SystemCapability.AI.MindSporeLite
232+   * @stagemodelonly
233+   * @since 10
234+   */
235+  interface Model {
236+    /**
237+     * The learning rate of the training model
238+     * @type {?number}
239+     * @syscap SystemCapability.AI.MindSporeLite
240+     * @since 11
241+     */
242+    learningRate?: number,
243+
244+    /**
245+     * The running mode of the model
246+     * @type {?boolean}
247+     * @syscap SystemCapability.AI.MindSporeLite
248+     * @since 11
249+     */
250+    trainMode?: boolean,
251+
252+    /**
253+     * Get model input tensors.
254+     * @returns { MSTensor[] } the MSTensor array of the inputs.
255+     * @syscap SystemCapability.AI.MindSporeLite
256+     * @stagemodelonly
257+     * @since 10
258+     */
259+    getInputs(): MSTensor[];
260+
261+    /**
262+     * Infer model
263+     * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
264+     * @param { Callback<MSTensor[]> } callback - the callback of MSTensor array.
265+     * @syscap SystemCapability.AI.MindSporeLite
266+     * @stagemodelonly
267+     * @since 10
268+     */
269+    predict(inputs: MSTensor[], callback: Callback<MSTensor[]>): void;
270+
271+    /**
272+     * Infer model
273+     * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
274+     * @returns { Promise<MSTensor[]> } the promise returned by the function.
275+     * @syscap SystemCapability.AI.MindSporeLite
276+     * @stagemodelonly
277+     * @since 10
278+     */
279+    predict(inputs: MSTensor[]): Promise<MSTensor[]>;
280+
281+    /**
282+     * resize model input
283+     * @param { MSTensor[] } inputs - indicates the MSTensor array of the inputs.
284+     * @param { Array<Array<number>> } dims - indicates the target new shape array
285+     * @returns { boolean } the boolean result if the resize operation is successful
286+     * @syscap SystemCapability.AI.MindSporeLite
287+     * @stagemodelonly
288+     * @since 10
289+     */
290+    resize(inputs: MSTensor[], dims: Array<Array<number>>): boolean;
291+
292+    /**
293+     * Train model by step
294+     * @syscap SystemCapability.AI.MindSporeLite
295+     * @returns { boolean } the boolean result if the runStep operation is successful
296+     * @stagemodelonly
297+     * @since 11
298+     */
299+    runStep(): boolean;
300+
301+    /**
302+     * Obtain all weights of the model
303+     * @syscap SystemCapability.AI.MindSporeLite
304+     * @returns { MSTensor[] } the weight tensors of the model
305+     * @stagemodelonly
306+     * @since 11
307+     */
308+    getWeights(): MSTensor[];
309+
310+    /**
311+     * Update weights of the model
312+     * @param { MSTensor[] } weights - indicates the MSTensor array of the inputs
313+     * @returns { boolean } the boolean result if updating weights operation is successful
314+     * @syscap SystemCapability.AI.MindSporeLite
315+     * @stagemodelonly
316+     * @since 11
317+     */
318+    updateWeights(weights: MSTensor[]): boolean;
319+
320+    /**
321+     * Setup training with virtual batches
322+     * @param { number } virtualBatchMultiplier - virtual batch multiplier, use any number < 1 to disable
323+     * @param { number } lr - learning rate to use for virtual batch, -1 for internal configuration
324+     * @param { number } momentum - batch norm momentum to use for virtual batch, -1 for internal configuration
325+     * @returns { boolean } the boolean result if the operation is successful
326+     * @syscap SystemCapability.AI.MindSporeLite
327+     * @stagemodelonly
328+     * @since 11
329+     */
330+    setupVirtualBatch(virtualBatchMultiplier: number, lr: number, momentum: number): boolean;
331+
332+    /**
333+     * Export train model to file
334+     * @param { string } modelFile - model file path.
335+     * @param { ?QuantizationType } quantizationType - the quantization type, default NO_QUANT.
336+     * @param { ?boolean } exportInferenceOnly - whether to export an inference-only model, default true.
337+     * @param { ?string[] } outputTensorName - the set of name of output tensor the exported inference model,
338+     * @returns { boolean } - the boolean result if the operation is successful
339+     * @syscap SystemCapability.AI.MindSporeLite
340+     * @stagemodelonly
341+     * @since 11
342+     */
343+    exportModel(
344+      modelFile: string,
345+      quantizationType?: QuantizationType,
346+      exportInferenceOnly?: boolean,
347+      outputTensorName?: string[]): boolean;
348+
349+    /**
350+     * Export model's weights, which can be used in micro only. Only valid for Lite Train
351+     * @param { string } weightFile - weight file path
352+     * @param { ?boolean } isInference - whether to export weights from the inference model; only `true` is supported for now, default true
353+     * @param { ?boolean } enableFp16 - float-weight is whether to be saved in float16 format, default false
354+     * @param { ?string[] } changeableWeightsName - changeable weights name
355+     * @returns { boolean } the boolean result if the operation is successful
356+     * @syscap SystemCapability.AI.MindSporeLite
357+     * @stagemodelonly
358+     * @since 11
359+     */
360+    exportWeightsCollaborateWithMicro(
361+      weightFile: string,
362+      isInference?: boolean,
363+      enableFp16?: boolean,
364+      changeableWeightsName?: string[]): boolean;
365+  }
366+
367+  export enum QuantizationType {
368+    /**
369+     * No quantization.
370+     * @syscap SystemCapability.AI.MindSporeLite
371+     * @since 11
372+     */
373+    NO_QUANT = 0,
374+    /**
375+     * Weight quantization.
376+     * @syscap SystemCapability.AI.MindSporeLite
377+     * @since 11
378+     */
379+    WEIGHT_QUANT = 1,
380+    /**
381+     * Full quantization.
382+     * @syscap SystemCapability.AI.MindSporeLite
383+     * @since 11
384+     */
385+    FULL_QUANT = 2,
386+  }
387+  export enum OptimizationLevel {
388+    /**
389+     * Do not change
390+     * @syscap SystemCapability.AI.MindSporeLite
391+     * @since 11
392+     */
393+    O0 = 0,
394+    /**
395+     * Cast network to float16, keep batchnorm and loss in float32
396+     * @syscap SystemCapability.AI.MindSporeLite
397+     * @since 11
398+     */
399+    O2 = 2,
400+    /**
401+     * Cast network to float16, including batchnorm
402+     * @syscap SystemCapability.AI.MindSporeLite
403+     * @since 11
404+     */
405+    O3 = 3,
406+    /**
407+     * Choose optimization based on device
408+     * @syscap SystemCapability.AI.MindSporeLite
409+     * @since 11
410+     */
411+    AUTO = 4,
412+  }
413+
414+  interface TrainCfg {
415+    /**
416+     * Array of loss name
417+     * @type {?string[]}
418+     * @syscap SystemCapability.AI.MindSporeLite
419+     * @since 11
420+     */
421+    lossName?: string[],
422+    /**
423+     * Train optimization level
424+     * @type {?OptimizationLevel}
425+     * @syscap SystemCapability.AI.MindSporeLite
426+     * @since 11
427+     */
428+    optimizationLevel?: OptimizationLevel,
429+  }
430+
431+  /**
432+   * Provides the device configurations
433+   * @typedef Context
434+   * @syscap SystemCapability.AI.MindSporeLite
435+   * @stagemodelonly
436+   * @since 10
437+   */
438+  interface Context {
439+    /**
440+     * The target device
441+     * @type {?string[]}
442+     * @syscap SystemCapability.AI.MindSporeLite
443+     * @since 10
444+     */
445+    target?: string[];
446+    /**
447+     * The cpu device information
448+     * @type {?CpuDevice}
449+     * @syscap SystemCapability.AI.MindSporeLite
450+     * @since 10
451+     */
452+    cpu?: CpuDevice;
453+    /**
454+     * The NNRT device information
455+     * @type {?NNRTDevice}
456+     * @since 10
457+     */
458+    nnrt?: NNRTDevice;
459+  }
460+
461+  /**
462+   * Provides the CPU device info
463+   * @typedef CpuDevice
464+   * @syscap SystemCapability.AI.MindSporeLite
465+   * @stagemodelonly
466+   * @since 10
467+   */
468+  interface CpuDevice {
469+    /**
470+     * The thread num
471+     * @type {?number}
472+     * @syscap SystemCapability.AI.MindSporeLite
473+     * @since 10
474+     */
475+    threadNum?: number;
476+    /**
477+     * The thread affinity mode
478+     * @type {?ThreadAffinityMode}
479+     * @syscap SystemCapability.AI.MindSporeLite
480+     * @since 10
481+     */
482+    threadAffinityMode?: ThreadAffinityMode;
483+    /**
484+     * The thread affinity core list
485+     * @type {?number[]}
486+     * @syscap SystemCapability.AI.MindSporeLite
487+     * @since 10
488+     */
489+    threadAffinityCoreList?: number[];
490+    /**
491+     * The precision mode
492+     * @type {?string}
493+     * @syscap SystemCapability.AI.MindSporeLite
494+     * @since 10
495+     */
496+    precisionMode?: string;
497+  }
498+
499+  export enum PerformanceMode {
500+    /**
501+     * No performance mode preference
502+     * @syscap SystemCapability.AI.MindSporeLite
503+     * @since 11
504+     */
505+    PERFORMANCE_NONE = 0,
506+    /**
507+     * Low power consumption mode
508+     * @syscap SystemCapability.AI.MindSporeLite
509+     * @since 11
510+     */
511+    PERFORMANCE_LOW = 1,
512+    /**
513+     * Medium performance mode
514+     * @syscap SystemCapability.AI.MindSporeLite
515+     * @since 11
516+     */
517+    PERFORMANCE_MEDIUM = 2,
518+    /**
519+     * High performance mode
520+     * @syscap SystemCapability.AI.MindSporeLite
521+     * @since 11
522+     */
523+    PERFORMANCE_HIGH = 3,
524+    /**
525+     * Ultimate performance mode
526+     * @syscap SystemCapability.AI.MindSporeLite
527+     * @since 11
528+     */
529+    PERFORMANCE_EXTREME = 4,
530+  }
531+
532+  export enum Priority {
533+    /**
534+     * No priority preference
535+     * @syscap SystemCapability.AI.MindSporeLite
536+     * @since 11
537+     */
538+    PRIORITY_NONE = 0,
539+    /**
540+     * Low priority
541+     * @syscap SystemCapability.AI.MindSporeLite
542+     * @since 11
543+     */
544+    PRIORITY_LOW = 1,
545+    /**
546+     * Medium priority
547+     * @syscap SystemCapability.AI.MindSporeLite
548+     * @since 11
549+     */
550+    PRIORITY_MEDIUM = 2,
551+    /**
552+     * High priority
553+     * @syscap SystemCapability.AI.MindSporeLite
554+     * @since 11
555+     */
556+    PRIORITY_HIGH = 3,
557+  }
558+
559+  interface Extension {
560+    /**
561+     * Extension name
562+     * @type {?string}
563+     * @syscap SystemCapability.AI.MindSporeLite
564+     * @since 11
565+     */
566+    name: String,
567+    /**
568+     * Extension array buffer
569+     * @type {?ArrayBuffer}
570+     * @syscap SystemCapability.AI.MindSporeLite
571+     * @since 11
572+     */
573+    value: ArrayBuffer
574+  }
575+
576+  export enum NNRTDeviceType {
577+    /**
578+     * Devices that are not CPU, GPU, or dedicated accelerator
579+     * @syscap SystemCapability.AI.MindSporeLite
580+     * @since 11
581+     */
582+    NNRTDEVICE_OTHERS = 0,
583+
584+    /**
585+     * CPU device
586+     * @syscap SystemCapability.AI.MindSporeLite
587+     * @since 11
588+     */
589+    NNRTDEVICE_CPU = 1,
590+
591+    /**
592+     * GPU device
593+     * @syscap SystemCapability.AI.MindSporeLite
594+     * @since 11
595+     */
596+    NNRTDEVICE_GPU = 2,
597+
598+    /**
599+     * Dedicated hardware accelerator
600+     * @syscap SystemCapability.AI.MindSporeLite
601+     * @since 11
602+     */
603+    NNRTDEVICE_ACCELERATOR = 3,
604+  }
605+
606+  interface NNRTDeviceDesc {
607+    /**
608+     * Get device id
609+     * @returns { number } the number of device id
610+     * @syscap SystemCapability.AI.MindSporeLite
611+     * @stagemodelonly
612+     * @since 11
613+     */
614+    deviceID() : number;
615+    /**
616+     * Get device type.
617+     * @returns { NNRTDeviceType } the device type
618+     * @syscap SystemCapability.AI.MindSporeLite
619+     * @stagemodelonly
620+     * @since 11
621+     */
622+    deviceType() : NNRTDeviceType;
623+    /**
624+     * Get device name.
625+     * @returns { string } device name
626+     * @syscap SystemCapability.AI.MindSporeLite
627+     * @stagemodelonly
628+     * @since 11
629+     */
630+    deviceName() : string;
631+  }
632+
633+  /**
634+   * Obtain all device descriptions in NNRT.
635+   * @syscap SystemCapability.AI.MindSporeLite
636+   * @returns { NNRTDeviceDesc[] } the array of NNRTDeviceDesc
637+   * @since 11
638+   */
639+  function getAllNNRTDeviceDescs() : NNRTDeviceDesc[];
640+
641+  /**
642+   * Provides the NNRT device info
643+   * @typedef NNRTDevice
644+   * @syscap SystemCapability.AI.MindSporeLite
645+   * @stagemodelonly
646+   * @since 10
647+   */
648+
649+  interface NNRTDevice {
650+    /**
651+     * NNRT device id.
652+     * @type {?number}
653+     * @syscap SystemCapability.AI.MindSporeLite
654+     * @since 11
655+     */
656+    deviceID?: number,
657+    /**
658+     * NNRT device performance mode.
659+     * @type {?PerformanceMode}
660+     * @syscap SystemCapability.AI.MindSporeLite
661+     * @since 11
662+     */
663+    performanceMode?: PerformanceMode,
664+    /**
665+     * NNRT device priority.
666+     * @type {?Priority}
667+     * @syscap SystemCapability.AI.MindSporeLite
668+     * @since 11
669+     */
670+    priority?: Priority,
671+    /**
672+     * NNRT device extension array.
673+     * @type {?Extension[]}
674+     * @syscap SystemCapability.AI.MindSporeLite
675+     * @since 11
676+     */
677+    extensions?: Extension[],
678+  }
679+
680+  /**
681+   * Enum that provides the CPU thread affinity mode
682+   * @enum {number}
683+   * @syscap SystemCapability.AI.MindSporeLite
684+   * @stagemodelonly
685+   * @since 10
686+   */
687+  export enum ThreadAffinityMode {
688+    /**
689+     * Thread affinity mode is no bind.
690+     * @syscap SystemCapability.AI.MindSporeLite
691+     * @since 10
692+     */
693+    NO_AFFINITIES = 0,
694+
695+    /**
696+     * Thread affinity mode is big cores first
697+     * @syscap SystemCapability.AI.MindSporeLite
698+     * @since 10
699+     */
700+    BIG_CORES_FIRST = 1,
701+
702+    /**
703+     * Thread affinity mode is little cores first
704+     * @syscap SystemCapability.AI.MindSporeLite
705+     * @since 10
706+     */
707+    LITTLE_CORES_FIRST = 2,
708+  }
709+
710+  /**
711+   * Provides MSTensor definition
712+   * @typedef MSTensor
713+   * @syscap SystemCapability.AI.MindSporeLite
714+   * @stagemodelonly
715+   * @since 10
716+   */
717+  interface MSTensor {
718+    /**
719+     * The name of the tensor.
720+     * @type {string}
721+     * @syscap SystemCapability.AI.MindSporeLite
722+     * @since 10
723+     */
724+    name: string;
725+    /**
726+     * The shape of the tensor.
727+     * @type {number[]}
728+     * @syscap SystemCapability.AI.MindSporeLite
729+     * @since 10
730+     */
731+    shape: number[];
732+    /**
733+     * The number of elements in the tensor.
734+     * @type {number}
735+     * @syscap SystemCapability.AI.MindSporeLite
736+     * @since 10
737+     */
738+    elementNum: number;
739+    /**
740+     * The data size of the tensor.
741+     * @type {number}
742+     * @syscap SystemCapability.AI.MindSporeLite
743+     * @since 10
744+     */
745+    dataSize: number;
746+    /**
747+     * The data type of the tensor.
748+     * @type {DataType}
749+     * @syscap SystemCapability.AI.MindSporeLite
750+     * @since 10
751+     */
752+    dtype: DataType;
753+    /**
754+     * The format of the tensor.
755+     * @type {Format}
756+     * @syscap SystemCapability.AI.MindSporeLite
757+     * @since 10
758+     */
759+    format: Format;
760+
761+    /**
762+     * Get MSTensor data
763+     * @returns { ArrayBuffer } the data of tensor
764+     * @syscap SystemCapability.AI.MindSporeLite
765+     * @stagemodelonly
766+     * @since 10
767+     */
768+    getData(): ArrayBuffer;
769+
770+    /**
771+     * Set MSTensor data
772+     * @param { ArrayBuffer } inputArray - indicates the buffer of tensor
773+     * @syscap SystemCapability.AI.MindSporeLite
774+     * @stagemodelonly
775+     * @since 10
776+     */
777+    setData(inputArray: ArrayBuffer): void;
778+  }
779+
780+  /**
781+   * Enum that provides the MSTensor data type
782+   * @enum {number}
783+   * @syscap SystemCapability.AI.MindSporeLite
784+   * @stagemodelonly
785+   * @since 10
786+   */
787+  export enum DataType {
788+    /**
789+     * data type is unknown
790+     * @syscap SystemCapability.AI.MindSporeLite
791+     * @since 10
792+     */
793+    TYPE_UNKNOWN = 0,
794+    /**
795+     * data type is int8
796+     * @syscap SystemCapability.AI.MindSporeLite
797+     * @since 10
798+     */
799+    NUMBER_TYPE_INT8 = 32,
800+    /**
801+     * data type is int16
802+     * @syscap SystemCapability.AI.MindSporeLite
803+     * @since 10
804+     */
805+    NUMBER_TYPE_INT16 = 33,
806+    /**
807+     * data type is int32
808+     * @syscap SystemCapability.AI.MindSporeLite
809+     * @since 10
810+     */
811+    NUMBER_TYPE_INT32 = 34,
812+    /**
813+     * data type is int64
814+     * @syscap SystemCapability.AI.MindSporeLite
815+     * @since 10
816+     */
817+    NUMBER_TYPE_INT64 = 35,
818+    /**
819+     * data type is uint8
820+     * @syscap SystemCapability.AI.MindSporeLite
821+     * @since 10
822+     */
823+    NUMBER_TYPE_UINT8 = 37,
824+    /**
825+     * data type is uint16
826+     * @syscap SystemCapability.AI.MindSporeLite
827+     * @since 10
828+     */
829+    NUMBER_TYPE_UINT16 = 38,
830+    /**
831+     * data type is uint32
832+     * @syscap SystemCapability.AI.MindSporeLite
833+     * @since 10
834+     */
835+    NUMBER_TYPE_UINT32 = 39,
836+    /**
837+     * data type is uint64
838+     * @syscap SystemCapability.AI.MindSporeLite
839+     * @since 10
840+     */
841+    NUMBER_TYPE_UINT64 = 40,
842+    /**
843+     * data type is float16
844+     * @syscap SystemCapability.AI.MindSporeLite
845+     * @since 10
846+     */
847+    NUMBER_TYPE_FLOAT16 = 42,
848+    /**
849+     * data type is float32
850+     * @syscap SystemCapability.AI.MindSporeLite
851+     * @since 10
852+     */
853+    NUMBER_TYPE_FLOAT32 = 43,
854+    /**
855+     * data type is float64
856+     * @syscap SystemCapability.AI.MindSporeLite
857+     * @since 10
858+     */
859+    NUMBER_TYPE_FLOAT64 = 44,
860+  }
861+
862+  /**
863+   * Enum that provides the MSTensor format
864+   * @enum {number}
865+   * @syscap SystemCapability.AI.MindSporeLite
866+   * @stagemodelonly
867+   * @since 10
868+   */
869+  export enum Format {
870+    /**
871+     * data format is default
872+     * @syscap SystemCapability.AI.MindSporeLite
873+     * @since 10
874+     */
875+    DEFAULT_FORMAT = -1,
876+    /**
877+     * data format is NCHW
878+     * @syscap SystemCapability.AI.MindSporeLite
879+     * @since 10
880+     */
881+    NCHW = 0,
882+    /**
883+     * data format is NHWC
884+     * @syscap SystemCapability.AI.MindSporeLite
885+     * @since 10
886+     */
887+    NHWC = 1,
888+    /**
889+     * data format is NHWC4
890+     * @syscap SystemCapability.AI.MindSporeLite
891+     * @since 10
892+     */
893+    NHWC4 = 2,
894+    /**
895+     * data format is HWKC
896+     * @syscap SystemCapability.AI.MindSporeLite
897+     * @since 10
898+     */
899+    HWKC = 3,
900+    /**
901+     * data format is HWCK
902+     * @syscap SystemCapability.AI.MindSporeLite
903+     * @since 10
904+     */
905+    HWCK = 4,
906+    /**
907+     * data format is KCHW
908+     * @syscap SystemCapability.AI.MindSporeLite
909+     * @since 10
910+     */
911+    KCHW = 5,
912+  }
913+}
914+export default mindSporeLite;
915diff --git a/include/js_api/common_napi.h b/include/js_api/common_napi.h
916new file mode 100644
917index 00000000..b857ac85
918--- /dev/null
919+++ b/include/js_api/common_napi.h
920@@ -0,0 +1,196 @@
921+/**
922+ * Copyright (C) 2023 Huawei Device Co., Ltd.
923+ * Licensed under the Apache License, Version 2.0 (the "License");
924+ * you may not use this file except in compliance with the License.
925+ * You may obtain a copy of the License at
926+ *
927+ *     http://www.apache.org/licenses/LICENSE-2.0
928+ *
929+ * Unless required by applicable law or agreed to in writing, software
930+ * distributed under the License is distributed on an "AS IS" BASIS,
931+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
932+ * See the License for the specific language governing permissions and
933+ * limitations under the License.
934+ */
935+
936+#ifndef MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
937+#define MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
938+
939+#include <string>
940+#include <fstream>
941+#include "napi/native_api.h"
942+#include "napi/native_node_api.h"
943+#include "ms_errors.h"
944+#include "include/api/types.h"
945+
946+namespace mindspore {
947+
948+class CommonNapi {
949+ public:
950+  CommonNapi() = delete;
951+  ~CommonNapi() = delete;
952+
953+  static std::string getMessageByCode(int32_t &code);
954+  static int32_t GetPropertyInt32(napi_env env, napi_value config_obj, const std::string &type, int32_t &result);
955+  static int32_t GetPropertyString(napi_env env, napi_value config_obj, const std::string &type, std::string &result);
956+  static int32_t GetPropertyInt32Array(napi_env env, napi_value config_obj, const std::string &type,
957+                                       std::vector<int32_t> &result);
958+  static int32_t GetPropertyBigIntUint64(napi_env env, napi_value config_obj, const std::string &type,
959+                                         uint64_t &result);
960+  static int32_t GetPropertyStringArray(napi_env env, napi_value config_obj, const std::string &type,
961+                                        std::vector<std::string> &result);
962+  static int32_t GetStringArray(napi_env env, napi_value value, std::vector<std::string> &result);
963+  static void WriteTensorData(MSTensor tensor, std::string file_path);
964+  static void WriteOutputsData(const std::vector<MSTensor> outputs, std::string file_path);
965+};
966+
967+struct MSLiteAsyncContext {
968+  explicit MSLiteAsyncContext(napi_env env);
969+  virtual ~MSLiteAsyncContext();
970+  int status = SUCCESS;
971+  std::string errMessage = "";
972+};
973+
974+enum ContextThreadAffinityMode : int32_t {
975+  CONTEXT_AFFINITY_MODE = 0,
976+  CONTEXT_BIG_CORES_FIRST,
977+  CONTEXT_LITTLE_CORES_FIRST
978+};
979+
980+enum TensorFormat : int32_t {
981+  TENSOR_DEFAULT_FORMAT = -1,
982+  TENSOR_NCHW,
983+  TENSOR_NHWC,
984+  TENSOR_NHWC4,
985+  TENSOR_HWKC,
986+  TENSOR_HWCK,
987+  TENSOR_KCHW
988+};
989+
990+enum TensorDataType : int32_t {
991+  TENSOR_UNKNOWN = 0,
992+  TENSOR_INT8 = 32,
993+  TENSOR_INT16 = 33,
994+  TENSOR_INT32 = 34,
995+  TENSOR_INT64 = 35,
996+  TENSOR_UINT8 = 37,
997+  TENSOR_UINT16 = 38,
998+  TENSOR_UINT32 = 39,
999+  TENSOR_UINT64 = 40,
1000+  TENSOR_FLOAT16 = 42,
1001+  TENSOR_FLOAT32 = 43,
1002+  TENSOR_FLOAT64 = 44
1003+};
1004+
1005+enum ModelMode : int32_t {
1006+  kBuffer = 0,
1007+  kPath,
1008+  kFD,
1009+  // add new type here
1010+  kInvalidModelMode = 10,
1011+};
1012+
1013+enum ContextQuantizationType : int32_t {
1014+  NO_QUANT = 0,
1015+  WEIGHT_QUANT = 1,
1016+  FULL_QUANT = 2,
1017+};
1018+
1019+enum ContextOptimizationLevel : int32_t {
1020+  O0 = 0,
1021+  O2 = 2,
1022+  O3 = 3,
1023+  AUTO = 4,
1024+};
1025+
1026+enum ContextPerformanceMode : int32_t {
1027+  PERFORMANCE_NONE = 0,
1028+  PERFORMANCE_LOW = 1,
1029+  PERFORMANCE_MEDIUM = 2,
1030+  PERFORMANCE_HIGH = 3,
1031+  PERFORMANCE_EXTREME = 4,
1032+};
1033+
1034+enum ContextPriority : int32_t {
1035+  PRIORITY_NONE = 0,
1036+  PRIORITY_LOW = 1,
1037+  PRIORITY_MEDIUM = 2,
1038+  PRIORITY_HIGH = 3,
1039+};
1040+
1041+enum ContextNnrtDeviceType : int32_t {
1042+  NNRTDEVICE_OTHERS = 0,
1043+  NNRTDEVICE_CPU = 1,
1044+  NNRTDEVICE_GPU = 2,
1045+  NNRTDEVICE_ACCELERATOR = 3,
1046+};
1047+
1048+struct ModelInfo {
1049+  std::string model_path = "";
1050+  char *model_buffer_data = nullptr;
1051+  size_t model_buffer_total = 0;
1052+  int32_t model_fd = 0;
1053+  ModelMode mode = kBuffer;
1054+  bool train_model = false;
1055+};
1056+
1057+struct CpuDevice {
1058+  int thread_num;
1059+  int thread_affinity_mode;
1060+  std::vector<int32_t> thread_affinity_cores;
1061+  std::string precision_mode;
1062+  CpuDevice(){};
1063+  CpuDevice(int thread_num, int affinity_mode, std::vector<int32_t> affinity_cores, std::string precision)
1064+      : thread_num(thread_num),
1065+        thread_affinity_mode(affinity_mode),
1066+        thread_affinity_cores(affinity_cores),
1067+        precision_mode(precision){};
1068+};
1069+
1070+struct NnrtDeviceDesc {
1071+  std::string name;
1072+  ContextNnrtDeviceType type;
1073+  size_t id;
1074+};
1075+
1076+struct NNRTDevice {
1077+  size_t device_id;
1078+  int performance_mode{-1};
1079+  int priority{-1};
1080+  NNRTDevice(){};
1081+  NNRTDevice(int device_id, int performance_mode, int priority)
1082+      : device_id(device_id), performance_mode(performance_mode), priority(priority){};
1083+};
1084+
1085+struct TrainConfig {
1086+  std::vector<std::string> loss_names;
1087+  int optimization_level = O0;  // ContextOptimizationLevel; AUTO selects level automatically
1088+};
1089+
1090+struct ContextInfo {
1091+  std::vector<std::string> target;
1092+  CpuDevice cpu_device;
1093+  NNRTDevice nnrt_device;
1094+  TrainConfig train_cfg;
1095+};
1096+
1097+const int32_t NAPI_ERR_INPUT_INVALID = 401;
1098+const int32_t NAPI_ERR_INVALID_PARAM = 1000101;
1099+const int32_t NAPI_ERR_NO_MEMORY = 1000102;
1100+const int32_t NAPI_ERR_ILLEGAL_STATE = 1000103;
1101+const int32_t NAPI_ERR_UNSUPPORTED = 1000104;
1102+const int32_t NAPI_ERR_TIMEOUT = 1000105;
1103+const int32_t NAPI_ERR_STREAM_LIMIT = 1000201;
1104+const int32_t NAPI_ERR_SYSTEM = 1000301;
1105+
1106+const std::string NAPI_ERROR_INVALID_PARAM_INFO = "input parameter value error";
1107+const std::string NAPI_ERR_INPUT_INVALID_INFO = "input parameter type or number mismatch";
1108+const std::string NAPI_ERR_INVALID_PARAM_INFO = "invalid parameter";
1109+const std::string NAPI_ERR_NO_MEMORY_INFO = "allocate memory failed";
1110+const std::string NAPI_ERR_ILLEGAL_STATE_INFO = "Operation not permit at current state";
1111+const std::string NAPI_ERR_UNSUPPORTED_INFO = "unsupported option";
1112+const std::string NAPI_ERR_TIMEOUT_INFO = "time out";
1113+const std::string NAPI_ERR_STREAM_LIMIT_INFO = "stream number limited";
1114+const std::string NAPI_ERR_SYSTEM_INFO = "system error";
1115+}  // namespace mindspore
1116+#endif  // MINDSPORE_INCLUDE_JS_API_COMMON_NAPI_H
1117\ No newline at end of file
1118diff --git a/include/js_api/ms_errors.h b/include/js_api/ms_errors.h
1119new file mode 100644
1120index 00000000..0d030241
1121--- /dev/null
1122+++ b/include/js_api/ms_errors.h
1123@@ -0,0 +1,39 @@
1124+/**
1125+ * Copyright (C) 2023 Huawei Device Co., Ltd.
1126+ * Licensed under the Apache License, Version 2.0 (the "License");
1127+ * you may not use this file except in compliance with the License.
1128+ * You may obtain a copy of the License at
1129+ *
1130+ *     http://www.apache.org/licenses/LICENSE-2.0
1131+ *
1132+ * Unless required by applicable law or agreed to in writing, software
1133+ * distributed under the License is distributed on an "AS IS" BASIS,
1134+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1135+ * See the License for the specific language governing permissions and
1136+ * limitations under the License.
1137+ */
1138+#ifndef MINDSPORE_INCLUDE_JS_API_MS_ERRORS_H
1139+#define MINDSPORE_INCLUDE_JS_API_MS_ERRORS_H
1140+
1141+namespace mindspore {
1142+const int32_t BASE_MSLITE_ERR_OFFSET = 1000199;
1143+
1144+/** Success */
1145+const int32_t SUCCESS = 0;
1146+
1147+/** Fail */
1148+const int32_t ERROR = BASE_MSLITE_ERR_OFFSET;
1149+
1150+/** Status error */
1151+const int32_t ERR_ILLEGAL_STATE = BASE_MSLITE_ERR_OFFSET - 1;
1152+
1153+/** Invalid parameter */
1154+const int32_t ERR_INVALID_PARAM = BASE_MSLITE_ERR_OFFSET - 2;
1155+
1156+/** Not existed parameter */
1157+const int32_t ERR_NOT_EXISTED_PARAM = BASE_MSLITE_ERR_OFFSET - 3;
1158+
1159+/** Invalid operation */
1160+const int32_t ERR_INVALID_OPERATION = BASE_MSLITE_ERR_OFFSET - 4;
1161+}  // namespace mindspore
1162+#endif  // MINDSPORE_INCLUDE_JS_API_MS_ERRORS_H
1163\ No newline at end of file
1164diff --git a/include/js_api/ms_info.h b/include/js_api/ms_info.h
1165new file mode 100644
1166index 00000000..6f563231
1167--- /dev/null
1168+++ b/include/js_api/ms_info.h
1169@@ -0,0 +1,69 @@
1170+/**
1171+ * Copyright (C) 2023 Huawei Device Co., Ltd.
1172+ * Licensed under the Apache License, Version 2.0 (the "License");
1173+ * you may not use this file except in compliance with the License.
1174+ * You may obtain a copy of the License at
1175+ *
1176+ *     http://www.apache.org/licenses/LICENSE-2.0
1177+ *
1178+ * Unless required by applicable law or agreed to in writing, software
1179+ * distributed under the License is distributed on an "AS IS" BASIS,
1180+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1181+ * See the License for the specific language governing permissions and
1182+ * limitations under the License.
1183+ */
1184+#ifndef MINDSPORE_INCLUDE_JS_API_MS_INFO_H
1185+#define MINDSPORE_INCLUDE_JS_API_MS_INFO_H
1186+
1187+namespace mindspore {
1188+enum InterruptType {
1189+  INTERRUPT_TYPE_BEGIN = 1,
1190+  INTERRUPT_TYPE_END = 2,
1191+};
1192+
1193+enum InterruptHint {
1194+  INTERRUPT_HINT_NONE = 0,
1195+  INTERRUPT_HINT_RESUME,
1196+  INTERRUPT_HINT_PAUSE,
1197+  INTERRUPT_HINT_STOP,
1198+  INTERRUPT_HINT_DUCK,
1199+  INTERRUPT_HINT_UNDUCK
1200+};
1201+
1202+enum InterruptForceType {
1203+  /**
1204+   * Force type, system change audio state.
1205+   */
1206+  INTERRUPT_FORCE = 0,
1207+  /**
1208+   * Share type, application change audio state.
1209+   */
1210+  INTERRUPT_SHARE
1211+};
1212+
1213+struct InterruptEvent {
1214+  /**
1215+   * Interrupt event type, begin or end
1216+   */
1217+  InterruptType eventType;
1218+  /**
1219+   * Interrupt force type, force or share
1220+   */
1221+  InterruptForceType forceType;
1222+  /**
1223+   * Interrupt hint type. In force type, the audio state already changed,
1224+   * but in share mode, only provide a hint for application to decide.
1225+   */
1226+  InterruptHint hintType;
1227+};
1228+
1229+// Used internally only by AudioFramework
1230+struct InterruptEventInternal {
1231+  InterruptType eventType;
1232+  InterruptForceType forceType;
1233+  InterruptHint hintType;
1234+  float duckVolume;
1235+};
1236+
1237+}  // namespace mindspore
1238+#endif  // MINDSPORE_INCLUDE_JS_API_MS_INFO_H
1239\ No newline at end of file
1240diff --git a/include/js_api/ms_parameters_napi.h b/include/js_api/ms_parameters_napi.h
1241new file mode 100644
1242index 00000000..9585255f
1243--- /dev/null
1244+++ b/include/js_api/ms_parameters_napi.h
1245@@ -0,0 +1,24 @@
1246+/**
1247+ * Copyright (C) 2023 Huawei Device Co., Ltd.
1248+ * Licensed under the Apache License, Version 2.0 (the "License");
1249+ * you may not use this file except in compliance with the License.
1250+ * You may obtain a copy of the License at
1251+ *
1252+ *     http://www.apache.org/licenses/LICENSE-2.0
1253+ *
1254+ * Unless required by applicable law or agreed to in writing, software
1255+ * distributed under the License is distributed on an "AS IS" BASIS,
1256+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1257+ * See the License for the specific language governing permissions and
1258+ * limitations under the License.
1259+ */
1260+#ifndef MINDSPORE_INCLUDE_JS_API_MS_PARAMETERS_NAPI_H
1261+#define MINDSPORE_INCLUDE_JS_API_MS_PARAMETERS_NAPI_H
1262+
1263+#include <iostream>
1264+
1265+namespace mindspore {
1266+
1267+static const std::int32_t REFERENCE_CREATION_COUNT = 1;
1268+}
1269+#endif  // MINDSPORE_INCLUDE_JS_API_MS_PARAMETERS_NAPI_H
1270\ No newline at end of file
1271diff --git a/include/js_api/mslite_model_callback_napi.h b/include/js_api/mslite_model_callback_napi.h
1272new file mode 100644
1273index 00000000..3b3ee595
1274--- /dev/null
1275+++ b/include/js_api/mslite_model_callback_napi.h
1276@@ -0,0 +1,38 @@
1277+/**
1278+ * Copyright (C) 2023 Huawei Device Co., Ltd.
1279+ * Licensed under the Apache License, Version 2.0 (the "License");
1280+ * you may not use this file except in compliance with the License.
1281+ * You may obtain a copy of the License at
1282+ *
1283+ *     http://www.apache.org/licenses/LICENSE-2.0
1284+ *
1285+ * Unless required by applicable law or agreed to in writing, software
1286+ * distributed under the License is distributed on an "AS IS" BASIS,
1287+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1288+ * See the License for the specific language governing permissions and
1289+ * limitations under the License.
1290+ */
1291+#ifndef MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_CALLBACK_NAPI_H
1292+#define MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_CALLBACK_NAPI_H
1293+
1294+#include <queue>
1295+#include <uv.h>
1296+#include "mslite_model_napi.h"
1297+#include "ms_info.h"
1298+#include "common_napi.h"
1299+
1300+namespace mindspore {
1301+enum class AsyncWorkType : int32_t {
1302+  ASYNC_WORK_PREPARE = 0,
1303+  ASYNC_WORK_PLAY,
1304+  ASYNC_WORK_PAUSE,
1305+  ASYNC_WORK_STOP,
1306+  ASYNC_WORK_RESET,
1307+  ASYNC_WORK_SEEK,
1308+  ASYNC_WORK_SPEED,
1309+  ASYNC_WORK_VOLUME,
1310+  ASYNC_WORK_BITRATE,
1311+  ASYNC_WORK_INVALID,
1312+};
1313+}  // namespace mindspore
1314+#endif  // MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_CALLBACK_NAPI_H
1315\ No newline at end of file
1316diff --git a/include/js_api/mslite_model_napi.h b/include/js_api/mslite_model_napi.h
1317new file mode 100644
1318index 00000000..1f075ba4
1319--- /dev/null
1320+++ b/include/js_api/mslite_model_napi.h
1321@@ -0,0 +1,186 @@
1322+/**
1323+ * Copyright 2023 Huawei Technologies Co., Ltd
1324+ *
1325+ * Licensed under the Apache License, Version 2.0 (the "License");
1326+ * you may not use this file except in compliance with the License.
1327+ * You may obtain a copy of the License at
1328+ *
1329+ * http://www.apache.org/licenses/LICENSE-2.0
1330+ *
1331+ * Unless required by applicable law or agreed to in writing, software
1332+ * distributed under the License is distributed on an "AS IS" BASIS,
1333+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1334+ * See the License for the specific language governing permissions and
1335+ * limitations under the License.
1336+ */
1337+#ifndef MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_NAPI_H
1338+#define MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_NAPI_H
1339+
1340+#include <memory>
1341+#include <mutex>
1342+#include "include/api/model.h"
1343+#include "include/api/context.h"
1344+#include "include/api/serialization.h"
1345+#include "include/api/cell.h"
1346+#include "common_napi.h"
1347+#include "mslite_model_callback_napi.h"
1348+#include "napi/native_api.h"
1349+#include "napi/native_node_api.h"
1350+#include "include/js_api/common_napi.h"
1351+
1352+namespace mindspore {
1353+static const std::map<std::string, TensorFormat> tensorFormatMap = {
1354+    {"DEFAULT_FORMAT", TENSOR_DEFAULT_FORMAT},
1355+    {"NCHW", TENSOR_NCHW},
1356+    {"NHWC", TENSOR_NHWC},
1357+    {"NHWC4", TENSOR_NHWC4},
1358+    {"HWKC", TENSOR_HWKC},
1359+    {"HWCK", TENSOR_HWCK},
1360+    {"KCHW", TENSOR_KCHW}
1361+};
1362+static const std::map<std::string, TensorDataType> tensorDataTypeMap = {
1363+    {"TYPE_UNKNOWN", TENSOR_UNKNOWN},
1364+    {"NUMBER_TYPE_INT8", TENSOR_INT8},
1365+    {"NUMBER_TYPE_INT16", TENSOR_INT16},
1366+    {"NUMBER_TYPE_INT32", TENSOR_INT32},
1367+    {"NUMBER_TYPE_INT64", TENSOR_INT64},
1368+    {"NUMBER_TYPE_UINT8", TENSOR_UINT8},
1369+    {"NUMBER_TYPE_UINT16", TENSOR_UINT16},
1370+    {"NUMBER_TYPE_UINT32", TENSOR_UINT32},
1371+    {"NUMBER_TYPE_UINT64", TENSOR_UINT64},
1372+    {"NUMBER_TYPE_FLOAT16", TENSOR_FLOAT16},
1373+    {"NUMBER_TYPE_FLOAT32", TENSOR_FLOAT32},
1374+    {"NUMBER_TYPE_FLOAT64", TENSOR_FLOAT64}
1375+};
1376+static const std::map<std::string, ContextThreadAffinityMode> contextThreadAffinityModeMap = {
1377+    {"NO_AFFINITIES", CONTEXT_AFFINITY_MODE},
1378+    {"BIG_CORES_FIRST", CONTEXT_BIG_CORES_FIRST},
1379+    {"LITTLE_CORES_FIRST", CONTEXT_LITTLE_CORES_FIRST},
1380+};
1381+
1382+static const std::map<std::string, ContextQuantizationType> contextQuantizationTypeMap = {
1383+  {"NO_QUANT", NO_QUANT},
1384+  {"WEIGHT_QUANT", WEIGHT_QUANT},
1385+  {"FULL_QUANT", FULL_QUANT},
1386+};
1387+
1388+static const std::map<std::string, ContextOptimizationLevel> contextOptimizationLevelTypeMap = {
1389+  {"O0", O0},
1390+  {"O2", O2},
1391+  {"O3", O3},
1392+  {"AUTO", AUTO},
1393+};
1394+
1395+static const std::map<std::string, ContextPerformanceMode> contextPerformanceModeTypeMap = {
1396+  {"PERFORMANCE_NONE", PERFORMANCE_NONE},
1397+  {"PERFORMANCE_LOW", PERFORMANCE_LOW},
1398+  {"PERFORMANCE_MEDIUM", PERFORMANCE_MEDIUM},
1399+  {"PERFORMANCE_HIGH", PERFORMANCE_HIGH},
1400+  {"PERFORMANCE_EXTREME", PERFORMANCE_EXTREME}
1401+};
1402+
1403+static const std::map<std::string, ContextPriority> contextPriorityTypeMap = {
1404+  {"PRIORITY_NONE", PRIORITY_NONE},
1405+  {"PRIORITY_LOW", PRIORITY_LOW},
1406+  {"PRIORITY_MEDIUM", PRIORITY_MEDIUM},
1407+  {"PRIORITY_HIGH", PRIORITY_HIGH},
1408+};
1409+
1410+static const std::map<std::string, ContextNnrtDeviceType> contextNnrtDeviceTypeTypeMap = {
1411+  {"NNRTDEVICE_OTHERS", NNRTDEVICE_OTHERS},
1412+  {"NNRTDEVICE_CPU", NNRTDEVICE_CPU},
1413+  {"NNRTDEVICE_GPU", NNRTDEVICE_GPU},
1414+  {"NNRTDEVICE_ACCELERATOR", NNRTDEVICE_ACCELERATOR},
1415+};
1416+
1417+class MSLiteModelNapi {
1418+ public:
1419+  MSLiteModelNapi();
1420+  ~MSLiteModelNapi();
1421+
1422+  static napi_value Init(napi_env env, napi_value exports);
1423+  std::shared_ptr<mindspore::Model> native_model_ = nullptr;
1424+
1425+ private:
1426+  struct MSLiteModelAsyncContext {
1427+    napi_async_work work;
1428+    napi_deferred deferred = nullptr;
1429+    napi_ref callbackRef = nullptr;
1430+    int32_t status = SUCCESS;
1431+    MSLiteModelNapi *lite_model = nullptr;
1432+    ModelInfo model_info;
1433+    ContextInfo context;
1434+
1435+    MSLiteModelAsyncContext() {
1436+      // setting context default value
1437+      context.target.push_back("cpu");
1438+      context.cpu_device.thread_num = 2;
1439+      context.cpu_device.thread_affinity_mode = 0;
1440+      context.cpu_device.precision_mode = "enforce_fp32";
1441+    }
1442+  };
1443+  static napi_value Constructor(napi_env env, napi_callback_info info);
1444+  static void Finalize(napi_env env, void *nativeObject, void *finalize);
1445+  static napi_value LoadMSLiteModelFromFile(napi_env env, napi_callback_info info);
1446+  static napi_value LoadMSLiteModelFromBuffer(napi_env env, napi_callback_info info);
1447+  static napi_value LoadMSLiteModelFromFd(napi_env env, napi_callback_info info);
1448+  static napi_value LoadMSLiteTrainModelFromFile(napi_env env, napi_callback_info info);
1449+  static napi_value LoadMSLiteTrainModelFromBuffer(napi_env env, napi_callback_info info);
1450+  static napi_value LoadMSLiteTrainModelFromFd(napi_env env, napi_callback_info info);
1451+  static napi_value GetInputs(napi_env env, napi_callback_info info);
1452+  static napi_value Resize(napi_env env, napi_callback_info info);
1453+  static napi_value PredictAsync(napi_env env, napi_callback_info info);
1454+  static napi_value RunStep(napi_env env, napi_callback_info info);
1455+  static napi_value GetWeights(napi_env env, napi_callback_info info);
1456+  static napi_value UpdateWeights(napi_env env, napi_callback_info info);
1457+  static napi_value SetupVirtualBatch(napi_env env, napi_callback_info info);
1458+  static napi_value ExportModel(napi_env env, napi_callback_info info);
1459+  static napi_value ExportWeightsCollaborateWithMicro(napi_env env, napi_callback_info info);
1460+  static napi_value GetTrainMode(napi_env env, napi_callback_info info);
1461+  static napi_value SetTrainMode(napi_env env, napi_callback_info info);
1462+  static napi_value GetLearningRate(napi_env env, napi_callback_info info);
1463+  static napi_value SetLearningRate(napi_env env, napi_callback_info info);
1464+  static int32_t ParseModelInfo(napi_env env, napi_value root, ModelInfo &model_info);
1465+  static int32_t ParseContextInfo(napi_env env, napi_value root, ContextInfo &info);
1466+  static int32_t ParseTrainCfgInfo(napi_env env, napi_value root, TrainConfig &cfg);
1467+  static void GetMSLiteModelAsyncCallbackComplete(napi_env env, napi_status status, void *data);
1468+  static void PredictAsyncCallbackComplete(napi_env env, napi_status status, void *data);
1469+  static napi_value CreateMSLiteModelWrapper(napi_env env, MSLiteModelAsyncContext *async_context);
1470+  static void CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContext *&asyncContext, const napi_value &valueParam);
1471+  static std::shared_ptr<mindspore::Model> CreateModel(ModelInfo *model_info_ptr, ContextInfo *contex_ptr);
1472+  static std::shared_ptr<mindspore::Model> CreateTrainModel(ModelInfo *model_info_ptr, ContextInfo *contex_ptr);
1473+  static int32_t GetCpuDeviceInfo(napi_env env, napi_value args, ContextInfo &context);
1474+  static int32_t GetNNRTDeviceInfo(napi_env env, napi_value args, ContextInfo &context);
1475+  static int32_t GetDeviceInfoContext(ContextInfo *context_info_ptr,
1476+                                      std::vector<std::shared_ptr<DeviceInfoContext>> &device_infos);
1477+  static int32_t SetTensorData(napi_env env, napi_value thisVar, napi_value argv,
1478+                               MSLiteModelAsyncContext *async_context);
1479+  static napi_status AddNamedProperty(napi_env env, napi_value object, const std::string name, int32_t enumValue);
1480+  static napi_value GetAllNnrtDeviceDescs(napi_env env, napi_callback_info info);
1481+  static napi_value CreateFormatObject(napi_env env);
1482+  static napi_value CreateDataTypeObject(napi_env env);
1483+  static napi_value CreateThreadAffinityModeObject(napi_env env);
1484+  static napi_value CreateQuantizationTypeObject(napi_env env);
1485+  static napi_value CreateOptimizationLevelObject(napi_env env);
1486+  static napi_value CreatePerformanceModeObject(napi_env env);
1487+  static napi_value CreatePriorityObject(napi_env env);
1488+  static napi_value CreateNnrtDeviceTypeObject(napi_env env);
1489+
1490+
1491+  static thread_local napi_ref constructor_;
1492+  napi_env env_ = nullptr;
1493+  static napi_ref tensorFormat_;
1494+  static napi_ref tensorDataType_;
1495+  static napi_ref contextThreadAffinityMode_;
1496+  static napi_ref contextQuantizationType_;
1497+  static napi_ref contextOptimizationLevel_;
1498+  static napi_ref contextPerformanceMode_;
1499+  static napi_ref contextPriority_;
1500+  static napi_ref contextNnrtDeviceType_;
1501+
1502+  static ModelInfo *model_info_;
1503+  static ContextInfo *context_;
1504+  static std::mutex create_mutex_;
1505+};
1506+}  // namespace mindspore
1507+#endif  // MINDSPORE_INCLUDE_JS_API_MSLITE_MODEL_NAPI_H
1508\ No newline at end of file
1509diff --git a/include/js_api/mstensor_napi.h b/include/js_api/mstensor_napi.h
1510new file mode 100644
1511index 00000000..e2b181b8
1512--- /dev/null
1513+++ b/include/js_api/mstensor_napi.h
1514@@ -0,0 +1,49 @@
1515+/**
1516+ * Copyright 2022 Huawei Technologies Co., Ltd
1517+ *
1518+ * Licensed under the Apache License, Version 2.0 (the "License");
1519+ * you may not use this file except in compliance with the License.
1520+ * You may obtain a copy of the License at
1521+ *
1522+ * http://www.apache.org/licenses/LICENSE-2.0
1523+ *
1524+ * Unless required by applicable law or agreed to in writing, software
1525+ * distributed under the License is distributed on an "AS IS" BASIS,
1526+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1527+ * See the License for the specific language governing permissions and
1528+ * limitations under the License.
1529+ */
1530+#ifndef MINDSPORE_INCLUDE_JS_API_MSTENSOR_NAPI_H
1531+#define MINDSPORE_INCLUDE_JS_API_MSTENSOR_NAPI_H
1532+
1533+#include "include/api/types.h"
1534+#include "napi/native_api.h"
1535+#include "napi/native_node_api.h"
1536+
1537+namespace mindspore {
1538+class MSTensorNapi {
1539+ public:
1540+  static napi_value NewInstance(napi_env env, mindspore::MSTensor tensor);
1541+  MSTensorNapi();
1542+  ~MSTensorNapi();
1543+ private:
1544+  static napi_value Constructor(napi_env env, napi_callback_info info);
1545+  static void Finalize(napi_env env, void *nativeObject, void *finalize);
1546+  static napi_value GetConstructor(napi_env env);
1547+
1548+  static napi_value GetName(napi_env env, napi_callback_info info);
1549+  static napi_value GetShape(napi_env env, napi_callback_info info);
1550+  static napi_value GetElementNum(napi_env env, napi_callback_info info);
1551+  static napi_value GetDtype(napi_env env, napi_callback_info info);
1552+  static napi_value GetFormat(napi_env env, napi_callback_info info);
1553+  static napi_value GetDataSize(napi_env env, napi_callback_info info);
1554+  static napi_value GetDataBuffer(napi_env env, napi_callback_info info);
1555+  static napi_value SetData(napi_env env, napi_callback_info info);
1556+
1557+  static thread_local napi_ref constructor_;
1558+  napi_env env_ = nullptr;
1559+
1560+  std::unique_ptr<MSTensor> nativeMSTensor_ = nullptr;
1561+};
1562+}  // namespace mindspore
1563+#endif  // MINDSPORE_INCLUDE_JS_API_MSTENSOR_NAPI_H
1564\ No newline at end of file
1565diff --git a/include/js_api/native_module_ohos_ms.h b/include/js_api/native_module_ohos_ms.h
1566new file mode 100644
1567index 00000000..202e8384
1568--- /dev/null
1569+++ b/include/js_api/native_module_ohos_ms.h
1570@@ -0,0 +1,22 @@
1571+/**
1572+ * Copyright 2023 Huawei Technologies Co., Ltd
1573+ *
1574+ * Licensed under the Apache License, Version 2.0 (the "License");
1575+ * you may not use this file except in compliance with the License.
1576+ * You may obtain a copy of the License at
1577+ *
1578+ * http://www.apache.org/licenses/LICENSE-2.0
1579+ *
1580+ * Unless required by applicable law or agreed to in writing, software
1581+ * distributed under the License is distributed on an "AS IS" BASIS,
1582+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1583+ * See the License for the specific language governing permissions and
1584+ * limitations under the License.
1585+ */
1586+#ifndef MINDSPORE_INCLUDE_JS_API_NATIVE_MODULE_OHOS_MS_H
1587+#define MINDSPORE_INCLUDE_JS_API_NATIVE_MODULE_OHOS_MS_H
1588+
1589+#include "mslite_model_napi.h"
1590+#include "mstensor_napi.h"
1591+
1592+#endif  // MINDSPORE_INCLUDE_JS_API_NATIVE_MODULE_OHOS_MS_H
1593\ No newline at end of file
1594diff --git a/include/js_api/nnrt_device_desc_napi.h b/include/js_api/nnrt_device_desc_napi.h
1595new file mode 100644
1596index 00000000..a1c92de5
1597--- /dev/null
1598+++ b/include/js_api/nnrt_device_desc_napi.h
1599@@ -0,0 +1,45 @@
1600+/**
1601+* Copyright 2022 Huawei Technologies Co., Ltd
1602+*
1603+* Licensed under the Apache License, Version 2.0 (the "License");
1604+* you may not use this file except in compliance with the License.
1605+* You may obtain a copy of the License at
1606+*
1607+* http://www.apache.org/licenses/LICENSE-2.0
1608+*
1609+* Unless required by applicable law or agreed to in writing, software
1610+* distributed under the License is distributed on an "AS IS" BASIS,
1611+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1612+* See the License for the specific language governing permissions and
1613+* limitations under the License.
1614+*/
1615+#ifndef MINDSPORE_INCLUDE_JS_API_NNRT_DEVICE_DESC_NAPI_H
1616+#define MINDSPORE_INCLUDE_JS_API_NNRT_DEVICE_DESC_NAPI_H
1617+
1618+#include "include/api/types.h"
1619+#include "napi/native_api.h"
1620+#include "napi/native_node_api.h"
1621+#include "include/js_api/common_napi.h"
1622+
1623+namespace mindspore {
1624+class NnrtDeviceDescNapi {
1625+public:
1626+ static napi_value NewInstance(napi_env env, NnrtDeviceDesc decs);
1627+ NnrtDeviceDescNapi();
1628+ ~NnrtDeviceDescNapi();
1629+private:
1630+ static napi_value Constructor(napi_env env, napi_callback_info info);
1631+ static void Finalize(napi_env env, void *nativeObject, void *finalize);
1632+ static napi_value GetConstructor(napi_env env);
1633+
1634+ static napi_value GetDeviceName(napi_env env, napi_callback_info info);
1635+ static napi_value GetDeviceType(napi_env env, napi_callback_info info);
1636+ static napi_value GetDeviceID(napi_env env, napi_callback_info info);
1637+
1638+ static thread_local napi_ref constructor_;
1639+ napi_env env_ = nullptr;
1640+
1641+ std::unique_ptr<NnrtDeviceDesc> nativeNnrtDeviceDesc_ = nullptr;
1642+};
1643+}  // namespace mindspore
1644+#endif  // MINDSPORE_INCLUDE_JS_API_NNRT_DEVICE_DESC_NAPI_H
1645\ No newline at end of file
1646diff --git a/mindspore/lite/BUILD.gn b/mindspore/lite/BUILD.gn
1647index 8a492bf9..7e824c01 100644
1648--- a/mindspore/lite/BUILD.gn
1649+++ b/mindspore/lite/BUILD.gn
1650@@ -76,6 +76,7 @@ ohos_group("mindspore") {
1651     ":mindspore_lib",
1652     ":mindspore_train_lib",
1653     "mindir:mindir_lib",
1654+    "src/litert/js_api:mindsporelite_napi"
1655   ]
1656 }
1657 
1658diff --git a/mindspore/lite/src/litert/js_api/BUILD.gn b/mindspore/lite/src/litert/js_api/BUILD.gn
1659new file mode 100644
1660index 00000000..f12e4db4
1661--- /dev/null
1662+++ b/mindspore/lite/src/litert/js_api/BUILD.gn
1663@@ -0,0 +1,56 @@
1664+# Copyright (c) 2023 Huawei Device Co., Ltd.
1665+# Licensed under the Apache License, Version 2.0 (the "License");
1666+# you may not use this file except in compliance with the License.
1667+# You may obtain a copy of the License at
1668+#
1669+#     http://www.apache.org/licenses/LICENSE-2.0
1670+#
1671+# Unless required by applicable law or agreed to in writing, software
1672+# distributed under the License is distributed on an "AS IS" BASIS,
1673+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1674+# See the License for the specific language governing permissions and
1675+# limitations under the License.
1676+
1677+import("//build/ohos.gni")
1678+import("//build/ohos/ace/ace.gni")
1679+
1680+ohos_shared_library("mindsporelite_napi") {
1681+  include_dirs = [
1682+    "//third_party/mindspore/mindspore-src/source/",
1683+    "//third_party/mindspore/mindspore-src/source/include/api",
1684+    "//third_party/mindspore/mindspore-src/source/mindspore/core",
1685+    "//third_party/mindspore/mindspore-src/source/mindspore/lite",
1686+    "//third_party/libuv/include",
1687+
1688+    "//foundation/arkui/napi",
1689+    "//foundation/arkui/napi/interfaces/inner_api",
1690+    "//foundation/arkui/napi/interfaces/kits",
1691+    "//third_party/libuv/include",
1692+    "//third_party/node/src",
1693+  ]
1694+
1695+  sources = [
1696+    "mslite_model_napi.cc",
1697+    "mstensor_napi.cc",
1698+    "native_module_ohos_ms.cc",
1699+    "common_napi.cc",
1700+    "nnrt_device_desc.cc"
1701+  ]
1702+
1703+  deps = [
1704+    "../../../:mindspore_lib",
1705+    "../../../:mindspore_ndk"
1706+  ]
1707+  external_deps = [
1708+    "ability_runtime:abilitykit_native",
1709+    "ability_runtime:napi_base_context",
1710+    "c_utils:utils",
1711+    "hilog:libhilog",
1712+    "napi:ace_napi",
1713+    "resource_management:global_resmgr",
1714+  ]
1715+
1716+  relative_install_dir = "module/ai"
1717+  part_name = "mindspore"
1718+  subsystem_name = "thirdparty"
1719+}
1720diff --git a/mindspore/lite/src/litert/js_api/common_napi.cc b/mindspore/lite/src/litert/js_api/common_napi.cc
1721new file mode 100644
1722index 00000000..85d69ff6
1723--- /dev/null
1724+++ b/mindspore/lite/src/litert/js_api/common_napi.cc
1725@@ -0,0 +1,303 @@
1726+/*
1727+ * Copyright (C) 2023 Huawei Device Co., Ltd.
1728+ * Licensed under the Apache License, Version 2.0 (the "License");
1729+ * you may not use this file except in compliance with the License.
1730+ * You may obtain a copy of the License at
1731+ *
1732+ *     http://www.apache.org/licenses/LICENSE-2.0
1733+ *
1734+ * Unless required by applicable law or agreed to in writing, software
1735+ * distributed under the License is distributed on an "AS IS" BASIS,
1736+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1737+ * See the License for the specific language governing permissions and
1738+ * limitations under the License.
1739+ */
1740+
1741+#include "include/js_api/common_napi.h"
1742+#include <climits>
1743+#include "src/common/log.h"
1744+
1745+namespace mindspore {
1746+
1747+namespace {
1748+const int SIZE = 100;
1749+}
1750+
1751+std::string CommonNapi::getMessageByCode(int32_t &code) {
1752+  std::string err_message;
1753+  switch (code) {
1754+    case NAPI_ERR_INVALID_PARAM:
1755+      err_message = NAPI_ERR_INVALID_PARAM_INFO;
1756+      break;
1757+    case NAPI_ERR_NO_MEMORY:
1758+      err_message = NAPI_ERR_NO_MEMORY_INFO;
1759+      break;
1760+    case NAPI_ERR_ILLEGAL_STATE:
1761+      err_message = NAPI_ERR_ILLEGAL_STATE_INFO;
1762+      break;
1763+    case NAPI_ERR_UNSUPPORTED:
1764+      err_message = NAPI_ERR_UNSUPPORTED_INFO;
1765+      break;
1766+    case NAPI_ERR_TIMEOUT:
1767+      err_message = NAPI_ERR_TIMEOUT_INFO;
1768+      break;
1769+    case NAPI_ERR_STREAM_LIMIT:
1770+      err_message = NAPI_ERR_STREAM_LIMIT_INFO;
1771+      break;
1772+    case NAPI_ERR_SYSTEM:
1773+      err_message = NAPI_ERR_SYSTEM_INFO;
1774+      break;
1775+    case NAPI_ERR_INPUT_INVALID:
1776+      err_message = NAPI_ERR_INPUT_INVALID_INFO;
1777+      break;
1778+    default:
1779+      err_message = NAPI_ERR_SYSTEM_INFO;
1780+      code = NAPI_ERR_SYSTEM;
1781+      break;
1782+  }
1783+  return err_message;
1784+}
1785+
1786+int32_t CommonNapi::GetPropertyInt32(napi_env env, napi_value config_obj, const std::string &type, int32_t &result) {
1787+  napi_value item = nullptr;
1788+  bool exist = false;
1789+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
1790+
1791+  if (status != napi_ok || !exist) {
1792+    MS_LOG(WARNING) << "can not find " << type.c_str() << " will set default value";
1793+    return ERR_NOT_EXISTED_PARAM;
1794+  }
1795+
1796+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
1797+    MS_LOG(WARNING) << "fail to get property: " << type.c_str();
1798+    return ERR_INVALID_PARAM;
1799+  }
1800+
1801+  if (napi_get_value_int32(env, item, &result) != napi_ok) {
1802+    MS_LOG(WARNING) << "fail to get property value " << type.c_str();
1803+    return ERR_INVALID_PARAM;
1804+  }
1805+  return SUCCESS;
1806+}
1807+
1808+int32_t CommonNapi::GetPropertyString(napi_env env, napi_value config_obj, const std::string &type,
1809+                                      std::string &result) {
1810+  napi_value item = nullptr;
1811+  bool exist = false;
1812+  char buffer[SIZE];
1813+  size_t length = 0;
1814+
1815+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
1816+
1817+  if (status != napi_ok || !exist) {
1818+    MS_LOG(WARNING) << "can not find " << type.c_str() << "will set default value";
1819+    return ERR_NOT_EXISTED_PARAM;
1820+  }
1821+
1822+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
1823+    MS_LOG(WARNING) << "fail to get property: " << type.c_str();
1824+    return ERR_INVALID_PARAM;
1825+  }
1826+
1827+  if (napi_get_value_string_utf8(env, item, buffer, SIZE, &length) != napi_ok) {
1828+    MS_LOG(WARNING) << "fail to get property value " << type.c_str();
1829+    return ERR_INVALID_PARAM;
1830+  }
1831+  result = std::string(buffer);
1832+  return SUCCESS;
1833+}
1834+
1835+int32_t CommonNapi::GetPropertyBigIntUint64(napi_env env, napi_value config_obj, const std::string &type,
1836+                                uint64_t &result) {
1837+  napi_value item = nullptr;
1838+  bool exist = false;
1839+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
1840+
1841+  if (status != napi_ok || !exist) {
1842+    MS_LOG(WARNING) << "can not find " << type.c_str() << " will set default value";
1843+    return ERR_NOT_EXISTED_PARAM;
1844+  }
1845+
1846+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
1847+    MS_LOG(WARNING) << "fail to get property: " << type.c_str();
1848+    return ERR_INVALID_PARAM;
1849+  }
1850+
1851+  bool lossless = false;
1852+  if (napi_get_value_bigint_uint64(env, item, &result, &lossless) != napi_ok) {
1853+    MS_LOG(WARNING) << "fail to get property value " << type.c_str();
1854+    return ERR_INVALID_PARAM;
1855+  }
1856+
1857+  if (!lossless) {
1858+    MS_LOG(WARNING) << "get uint64_t loss precision !";
1859+    return ERR_INVALID_PARAM;
1860+  }
1861+  return SUCCESS;
1862+}
1863+
1864+int32_t CommonNapi::GetPropertyInt32Array(napi_env env, napi_value config_obj, const std::string &type,
1865+                                          std::vector<int32_t> &result) {
1866+  napi_value item = nullptr;
1867+  bool exist = false;
1868+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
1869+  if (status != napi_ok || !exist) {
1870+    MS_LOG(WARNING) << "can not find " << type.c_str() << "will set default value";
1871+    return ERR_NOT_EXISTED_PARAM;
1872+  }
1873+
1874+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
1875+    MS_LOG(WARNING) << "fail to get property: " << type.c_str();
1876+    return ERR_INVALID_PARAM;
1877+  }
1878+
1879+  uint32_t array_length = 0;
1880+  status = napi_get_array_length(env, item, &array_length);
1881+  if (status != napi_ok || array_length < 0) {
1882+    MS_LOG(WARNING) << "can not get array length.";
1883+    return ERR_INVALID_PARAM;
1884+  }
1885+
1886+  if (array_length == 0) {
1887+    return SUCCESS;
1888+  }
1889+
1890+  for (size_t i = 0; i < array_length; i++) {
1891+    int32_t int_value = {0};
1892+    napi_value element = nullptr;
1893+    status = napi_get_element(env, item, i, &element);
1894+    if (status != napi_ok) {
1895+      MS_LOG(WARNING) << "can not get element";
1896+      return ERR_INVALID_PARAM;
1897+    }
1898+
1899+    if (napi_get_value_int32(env, element, &int_value) != napi_ok) {
1900+      MS_LOG(WARNING) << "get " << type.c_str() << " property value fail";
1901+      return ERR_INVALID_PARAM;
1902+    }
1903+    result.push_back(int_value);
1904+  }
1905+
1906+  return SUCCESS;
1907+}
1908+
1909+int32_t CommonNapi::GetPropertyStringArray(napi_env env, napi_value config_obj, const std::string &type,
1910+                                           std::vector<std::string> &result) {
1911+  napi_value item = nullptr;
1912+  bool exist = false;
1913+  napi_status status = napi_has_named_property(env, config_obj, type.c_str(), &exist);
1914+
1915+  if (status != napi_ok || !exist) {
1916+    MS_LOG(WARNING) << "can not find " << type.c_str() << "will set default value";
1917+    return ERR_NOT_EXISTED_PARAM;
1918+  }
1919+
1920+  if (napi_get_named_property(env, config_obj, type.c_str(), &item) != napi_ok) {
1921+    MS_LOG(WARNING) << "fail to get property: " << type.c_str();
1922+    return ERR_INVALID_PARAM;
1923+  }
1924+
1925+  uint32_t array_length = 0;
1926+  status = napi_get_array_length(env, item, &array_length);
1927+  if (status != napi_ok || array_length <= 0) {
1928+    MS_LOG(WARNING) << "can not get array length";
1929+    return ERR_INVALID_PARAM;
1930+  }
1931+
1932+  for (size_t i = 0; i < array_length; i++) {
1933+    char buffer[SIZE];
1934+    size_t length = 0;
1935+
1936+    napi_value element = nullptr;
1937+    status = napi_get_element(env, item, i, &element);
1938+    if (status != napi_ok) {
1939+      MS_LOG(WARNING) << "can not get element";
1940+      return ERR_INVALID_PARAM;
1941+    }
1942+
1943+    if (napi_get_value_string_utf8(env, element, buffer, SIZE, &length) != napi_ok) {
1944+      MS_LOG(WARNING) << "fail to get property value " << type.c_str();
1945+      return ERR_INVALID_PARAM;
1946+    }
1947+    result.push_back(std::string(buffer));
1948+  }
1949+
1950+  return SUCCESS;
1951+}
1952+
1953+int32_t CommonNapi::GetStringArray(napi_env env, napi_value value, std::vector<std::string> &result) {
1954+  uint32_t array_length = 0;
1955+  auto status = napi_get_array_length(env, value, &array_length);
1956+  if (status != napi_ok || array_length <= 0) {
1957+    MS_LOG(WARNING) << "can not get array length";
1958+    return ERR_INVALID_PARAM;
1959+  }
1960+
1961+  for (size_t i = 0; i < array_length; i++) {
1962+    char buffer[SIZE];
1963+    size_t length = 0;
1964+
1965+    napi_value element = nullptr;
1966+    status = napi_get_element(env, value, i, &element);
1967+    if (status != napi_ok) {
1968+      MS_LOG(WARNING) << "can not get element";
1969+      return ERR_INVALID_PARAM;
1970+    }
1971+
1972+    if (napi_get_value_string_utf8(env, element, buffer, SIZE, &length) != napi_ok) {
1973+      MS_LOG(WARNING) << "fail to get string_utf8 value";
1974+      return ERR_INVALID_PARAM;
1975+    }
1976+    result.push_back(std::string(buffer));
1977+  }
1978+
1979+  return SUCCESS;
1980+}
1981+
1982+void CommonNapi::WriteTensorData(MSTensor tensor, std::string file_path) {
1983+  std::ofstream out_file;
1984+  out_file.open(file_path, std::ios::out | std::ios::app);
1985+  if (!out_file.is_open()) {
1986+    MS_LOG(ERROR) << "output file open failed";
1987+    return;
1988+  }
1989+  auto out_data = reinterpret_cast<const float *>(tensor.Data().get());
1990+  out_file << tensor.Name() << " ";
1991+  for (auto dim : tensor.Shape()) {
1992+    out_file << dim << " ";
1993+  }
1994+  out_file << std::endl;
1995+  for (int i = 0; i < tensor.ElementNum(); i++) {
1996+    out_file << out_data[i] << " ";
1997+  }
1998+  out_file << std::endl;
1999+  out_file.close();
2000+}
2001+
2002+void CommonNapi::WriteOutputsData(const std::vector<MSTensor> outputs, std::string file_path) {
2003+  std::ofstream out_file;
2004+  out_file.open(file_path, std::ios::out | std::ios::app);
2005+  if (!out_file.is_open()) {
2006+    MS_LOG(ERROR) << "output file open failed";
2007+    return;
2008+  }
2009+  for (auto tensor : outputs) {
2010+    MS_LOG(INFO) << "tensor name is: " << tensor.Name().c_str()
2011+                 << "tensor size is: " << static_cast<int>(tensor.DataSize())
2012+                 << "tensor elements num is: " << static_cast<int>(tensor.ElementNum());
2013+    // dtype float
2014+    auto out_data = reinterpret_cast<const float *>(tensor.Data().get());
2015+    out_file << tensor.Name() << " ";
2016+    for (auto dim : tensor.Shape()) {
2017+      out_file << dim << " ";
2018+    }
2019+    out_file << std::endl;
2020+    for (int i = 0; i < tensor.ElementNum(); i++) {
2021+      out_file << out_data[i] << " ";
2022+    }
2023+    out_file << std::endl;
2024+  }
2025+  out_file.close();
2026+}
2027+
2028+}  // namespace mindspore
2029\ No newline at end of file
2030diff --git a/mindspore/lite/src/litert/js_api/mslite_model_napi.cc b/mindspore/lite/src/litert/js_api/mslite_model_napi.cc
2031new file mode 100644
2032index 00000000..cfc71762
2033--- /dev/null
2034+++ b/mindspore/lite/src/litert/js_api/mslite_model_napi.cc
2035@@ -0,0 +1,2653 @@
2036+/**
2037+ * Copyright 2023 Huawei Technologies Co., Ltd
2038+ *
2039+ * Licensed under the Apache License, Version 2.0 (the "License");
2040+ * you may not use this file except in compliance with the License.
2041+ * You may obtain a copy of the License at
2042+ *
2043+ * http://www.apache.org/licenses/LICENSE-2.0
2044+ *
2045+ * Unless required by applicable law or agreed to in writing, software
2046+ * distributed under the License is distributed on an "AS IS" BASIS,
2047+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2048+ * See the License for the specific language governing permissions and
2049+ * limitations under the License.
2050+ */
2051+#include "include/js_api/mslite_model_napi.h"
2052+#include <climits>
2053+#include <algorithm>
2054+#include <random>
2055+#include <cstring>
2056+#include <memory>
2057+#include <map>
2058+#include <vector>
2059+#include <unistd.h>
2060+#include <fcntl.h>
2061+#include <sys/mman.h>
2062+#include <sys/stat.h>
2063+#include "include/js_api/mstensor_napi.h"
2064+#include "include/js_api/common_napi.h"
2065+#include "include/js_api/ms_parameters_napi.h"
2066+#include "include/js_api/ms_errors.h"
2067+#include "include/js_api/mslite_model_callback_napi.h"
2068+#include "src/common/log.h"
2069+#include "mindspore/lite/src/common/log.h"
2070+#include "include/c_api/model_c.h"
2071+#include "include/c_api/context_c.h"
2072+#include "include/c_api/types_c.h"
2073+#include "include/js_api/nnrt_device_desc_napi.h"
2074+
2075+namespace mindspore {
// Per-thread reference to the JS class constructor created in Init().
thread_local napi_ref MSLiteModelNapi::constructor_ = nullptr;
// Static scratch slots used to hand parsed model/context info to the napi
// Constructor callback. NOTE(review): shared mutable state — presumably
// serialized by create_mutex_; confirm in the load entry points.
ModelInfo *MSLiteModelNapi::model_info_ = nullptr;
ContextInfo *MSLiteModelNapi::context_ = nullptr;
std::mutex MSLiteModelNapi::create_mutex_;
// Persistent references keeping the enum-constant JS objects (Format,
// DataType, ...) alive for the lifetime of the module.
napi_ref MSLiteModelNapi::tensorFormat_ = nullptr;
napi_ref MSLiteModelNapi::tensorDataType_ = nullptr;
napi_ref MSLiteModelNapi::contextThreadAffinityMode_ = nullptr;
napi_ref MSLiteModelNapi::contextQuantizationType_ = nullptr;
napi_ref MSLiteModelNapi::contextOptimizationLevel_ = nullptr;
napi_ref MSLiteModelNapi::contextPerformanceMode_ = nullptr;
napi_ref MSLiteModelNapi::contextPriority_ = nullptr;
napi_ref MSLiteModelNapi::contextNnrtDeviceType_ = nullptr;

// Boilerplate: fetches up to `num` JS callback arguments into the local
// variables argc/argv/thisVar/data for the enclosing napi callback.
#define GET_PARAMS(env, info, num) \
  size_t argc = num;               \
  napi_value argv[num] = {0};      \
  napi_value thisVar = nullptr;    \
  void *data;                      \
  napi_get_cb_info(env, info, &argc, argv, &thisVar, &data)

namespace {
// Expected argument counts for the various JS entry points.
const int ARGS_ONE = 1;
const int ARGS_TWO = 2;
const int ARGS_THREE = 3;
const int ARGS_FOUR = 4;

// Positional indices into argv.
const int PARAM0 = 0;
const int PARAM1 = 1;
const int PARAM2 = 2;
const int PARAM3 = 3;
const int PARAM4 = 4;
// Sentinel meaning "option not supplied by the JS caller".
const int UNSET_VALUE = -1;

// Capacity of fixed stack buffers used when reading JS strings.
const int SIZE = 100;

// Name under which the class is exported to JS.
const std::string CLASS_NAME = "Model";

// Maps the JS-visible device-name string to the internal DeviceType enum.
const std::unordered_map<std::string, DeviceType> kDeviceTypes{
  {"cpu", kCPU},
  {"nnrt", kNNRt},
  {"gpu", kGPU},
};
}  // namespace
2119+
// Instances start unbound; native_model_ is attached later during model load.
MSLiteModelNapi::MSLiteModelNapi() : native_model_(nullptr), env_(nullptr) {
  MS_LOG(INFO) << "MSLiteModelNapi Instances create.";
}

// Only drops the handles; NOTE(review): presumably native_model_ is a smart
// pointer (or owned elsewhere) so no explicit delete is needed — confirm in
// the header. Wrapper teardown itself happens in Finalize().
MSLiteModelNapi::~MSLiteModelNapi() {
  native_model_ = nullptr;
  env_ = nullptr;
  MS_LOG(INFO) << "MSLiteModelNapi Instances destroy.";
}
2129+
2130+void MSLiteModelNapi::Finalize(napi_env env, void *nativeObject, void *finalize) {
2131+  (void)env;
2132+  (void)finalize;
2133+  if (nativeObject != nullptr) {
2134+    // delete nativeObject
2135+    auto obj = static_cast<MSLiteModelNapi *>(nativeObject);
2136+    delete obj;
2137+    obj = nullptr;
2138+  }
2139+  MS_LOG(INFO) << "Finalize success";
2140+}
2141+
// Registers the "Model" class with the JS runtime: prototype methods and
// accessors, a persistent constructor reference, and the static loader
// functions plus enum-constant objects attached directly to `exports`.
// Returns `exports` on success, nullptr on any napi failure.
napi_value MSLiteModelNapi::Init(napi_env env, napi_value exports) {
  // Instance methods/accessors placed on Model.prototype.
  napi_property_descriptor properties[] = {
    DECLARE_NAPI_FUNCTION("getInputs", GetInputs),
    DECLARE_NAPI_FUNCTION("resize", Resize),
    DECLARE_NAPI_FUNCTION("predict", PredictAsync),
    DECLARE_NAPI_FUNCTION("runStep", RunStep),
    DECLARE_NAPI_FUNCTION("getWeights", GetWeights),
    DECLARE_NAPI_FUNCTION("updateWeights", UpdateWeights),
    DECLARE_NAPI_FUNCTION("setupVirtualBatch", SetupVirtualBatch),
    DECLARE_NAPI_FUNCTION("exportModel", ExportModel),
    DECLARE_NAPI_FUNCTION("exportWeightsCollaborateWithMicro", ExportWeightsCollaborateWithMicro),
    DECLARE_NAPI_GETTER_SETTER("trainMode", GetTrainMode, SetTrainMode),
    DECLARE_NAPI_GETTER_SETTER("learningRate", GetLearningRate, SetLearningRate),
    };

  // Static loader functions and enum-like constant objects on the module.
  napi_property_descriptor staticProperty[] = {
    DECLARE_NAPI_STATIC_FUNCTION("loadModelFromFile", LoadMSLiteModelFromFile),
    DECLARE_NAPI_STATIC_FUNCTION("loadModelFromBuffer", LoadMSLiteModelFromBuffer),
    DECLARE_NAPI_STATIC_FUNCTION("loadModelFromFd", LoadMSLiteModelFromFd),
    DECLARE_NAPI_STATIC_FUNCTION("loadTrainModelFromFile", LoadMSLiteTrainModelFromFile),
    DECLARE_NAPI_STATIC_FUNCTION("loadTrainModelFromBuffer", LoadMSLiteTrainModelFromBuffer),
    DECLARE_NAPI_STATIC_FUNCTION("loadTrainModelFromFd", LoadMSLiteTrainModelFromFd),
    DECLARE_NAPI_STATIC_FUNCTION("getAllNNRTDeviceDescriptions", GetAllNnrtDeviceDescs),
    DECLARE_NAPI_PROPERTY("Format", CreateFormatObject(env)),
    DECLARE_NAPI_PROPERTY("DataType", CreateDataTypeObject(env)),
    DECLARE_NAPI_PROPERTY("ThreadAffinityMode", CreateThreadAffinityModeObject(env)),
    DECLARE_NAPI_PROPERTY("QuantizationType", CreateQuantizationTypeObject(env)),
    DECLARE_NAPI_PROPERTY("OptimizationLevel", CreateOptimizationLevelObject(env)),
    DECLARE_NAPI_PROPERTY("PerformanceMode", CreatePerformanceModeObject(env)),
    DECLARE_NAPI_PROPERTY("Priority", CreatePriorityObject(env)),
    DECLARE_NAPI_PROPERTY("NNRTDeviceType", CreateNnrtDeviceTypeObject(env)),
  };

  // Define the class, then keep a persistent reference so Constructor can be
  // re-instantiated from native code later.
  napi_value constructor = nullptr;
  napi_status status = napi_define_class(env, CLASS_NAME.c_str(), NAPI_AUTO_LENGTH, Constructor, nullptr,
                                         sizeof(properties) / sizeof(properties[0]), properties, &constructor);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "Failed to define MSLiteModel class";
    return nullptr;
  }

  status = napi_create_reference(env, constructor, REFERENCE_CREATION_COUNT, &constructor_);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "Failed to create reference of constructor";
    return nullptr;
  }

  status = napi_set_named_property(env, exports, CLASS_NAME.c_str(), constructor);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "Failed to set constructor";
    return nullptr;
  }

  status = napi_define_properties(env, exports, sizeof(staticProperty) / sizeof(staticProperty[0]), staticProperty);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "Failed to define static function";
    return nullptr;
  }

  MS_LOG(INFO) << "init success";
  return exports;
}
2204+
2205+napi_value MSLiteModelNapi::CreateFormatObject(napi_env env)
2206+{
2207+  napi_value result = nullptr;
2208+  napi_status status;
2209+  std::string propName;
2210+  int32_t refCount = 1;
2211+
2212+  status = napi_create_object(env, &result);
2213+  if (status == napi_ok) {
2214+    for (auto &iter : tensorFormatMap) {
2215+      propName = iter.first;
2216+      status = AddNamedProperty(env, result, propName, iter.second);
2217+      if (status != napi_ok) {
2218+        MS_LOG(ERROR) << "Failed to add named prop in CreateFormatObject.";
2219+        break;
2220+      }
2221+      propName.clear();
2222+    }
2223+    if (status == napi_ok) {
2224+      status = napi_create_reference(env, result, refCount, &tensorFormat_);
2225+      if (status == napi_ok) {
2226+        return result;
2227+      }
2228+    }
2229+  }
2230+  MS_LOG(ERROR) << "CreateFormatObject is Failed!";
2231+  napi_get_undefined(env, &result);
2232+  return result;
2233+}
2234+
2235+napi_value MSLiteModelNapi::CreateDataTypeObject(napi_env env) {
2236+  napi_value result = nullptr;
2237+  napi_status status;
2238+  std::string propName;
2239+  int32_t refCount = 1;
2240+
2241+  status = napi_create_object(env, &result);
2242+  if (status == napi_ok) {
2243+    for (auto &iter : tensorDataTypeMap) {
2244+      propName = iter.first;
2245+      status = AddNamedProperty(env, result, propName, iter.second);
2246+      if (status != napi_ok) {
2247+        MS_LOG(ERROR) << "Failed to add named prop in CreateDataTypeObject.";
2248+        break;
2249+      }
2250+      propName.clear();
2251+    }
2252+    if (status == napi_ok) {
2253+      status = napi_create_reference(env, result, refCount, &tensorDataType_);
2254+      if (status == napi_ok) {
2255+        return result;
2256+      }
2257+    }
2258+  }
2259+  MS_LOG(ERROR) << "CreateDataTypeObject is Failed!";
2260+  napi_get_undefined(env, &result);
2261+  return result;
2262+}
2263+
2264+napi_value MSLiteModelNapi::CreateThreadAffinityModeObject(napi_env env) {
2265+  napi_value result = nullptr;
2266+  napi_status status;
2267+  std::string propName;
2268+  int32_t refCount = 1;
2269+
2270+  status = napi_create_object(env, &result);
2271+  if (status == napi_ok) {
2272+    for (auto &iter : contextThreadAffinityModeMap) {
2273+      propName = iter.first;
2274+      status = AddNamedProperty(env, result, propName, iter.second);
2275+      if (status != napi_ok) {
2276+        MS_LOG(ERROR) << "Failed to add named prop in CreateThreadAffinityModeObject.";
2277+        break;
2278+      }
2279+      propName.clear();
2280+    }
2281+    if (status == napi_ok) {
2282+      status = napi_create_reference(env, result, refCount, &contextThreadAffinityMode_);
2283+      if (status == napi_ok) {
2284+        return result;
2285+      }
2286+    }
2287+  }
2288+  MS_LOG(ERROR) << "CreateThreadAffinityModeObject is Failed!";
2289+  napi_get_undefined(env, &result);
2290+  return result;
2291+}
2292+
2293+napi_value MSLiteModelNapi::CreateQuantizationTypeObject(napi_env env) {
2294+  napi_value result = nullptr;
2295+  napi_status status;
2296+  std::string propName;
2297+  int32_t refCount = 1;
2298+
2299+  status = napi_create_object(env, &result);
2300+  if (status == napi_ok) {
2301+    for (auto &iter : contextQuantizationTypeMap) {
2302+      propName = iter.first;
2303+      status = AddNamedProperty(env, result, propName, iter.second);
2304+      if (status != napi_ok) {
2305+        MS_LOG(ERROR) << "Failed to add named prop in CreateQuantizationTypeObject.";
2306+        break;
2307+      }
2308+      propName.clear();
2309+    }
2310+    if (status == napi_ok) {
2311+      status = napi_create_reference(env, result, refCount, &contextQuantizationType_);
2312+      if (status == napi_ok) {
2313+        return result;
2314+      }
2315+    }
2316+  }
2317+  MS_LOG(ERROR) << "CreateQuantizationTypeObject is Failed!";
2318+  napi_get_undefined(env, &result);
2319+  return result;
2320+}
2321+
2322+napi_value MSLiteModelNapi::CreateOptimizationLevelObject(napi_env env) {
2323+  napi_value result = nullptr;
2324+  napi_status status;
2325+  std::string propName;
2326+  int32_t refCount = 1;
2327+
2328+  status = napi_create_object(env, &result);
2329+  if (status == napi_ok) {
2330+    for (auto &iter : contextOptimizationLevelTypeMap) {
2331+      propName = iter.first;
2332+      status = AddNamedProperty(env, result, propName, iter.second);
2333+      if (status != napi_ok) {
2334+        MS_LOG(ERROR) << "Failed to add named prop in CreateOptimizationLevelObject.";
2335+        break;
2336+      }
2337+      propName.clear();
2338+    }
2339+    if (status == napi_ok) {
2340+      status = napi_create_reference(env, result, refCount, &contextOptimizationLevel_);
2341+      if (status == napi_ok) {
2342+        return result;
2343+      }
2344+    }
2345+  }
2346+  MS_LOG(ERROR) << "CreateOptimizationLevelObject is Failed!";
2347+  napi_get_undefined(env, &result);
2348+  return result;
2349+}
2350+
2351+napi_value MSLiteModelNapi::CreatePerformanceModeObject(napi_env env) {
2352+  napi_value result = nullptr;
2353+  napi_status status;
2354+  std::string propName;
2355+  int32_t refCount = 1;
2356+
2357+  status = napi_create_object(env, &result);
2358+  if (status == napi_ok) {
2359+    for (auto &iter : contextPerformanceModeTypeMap) {
2360+      propName = iter.first;
2361+      status = AddNamedProperty(env, result, propName, iter.second);
2362+      if (status != napi_ok) {
2363+        MS_LOG(ERROR) << "Failed to add named prop in CreatePerformanceModeObject.";
2364+        break;
2365+      }
2366+      propName.clear();
2367+    }
2368+    if (status == napi_ok) {
2369+      status = napi_create_reference(env, result, refCount, &contextPerformanceMode_);
2370+      if (status == napi_ok) {
2371+        return result;
2372+      }
2373+    }
2374+  }
2375+  MS_LOG(ERROR) << "CreatePerformanceModeObject is Failed!";
2376+  napi_get_undefined(env, &result);
2377+  return result;
2378+}
2379+
2380+napi_value MSLiteModelNapi::CreatePriorityObject(napi_env env) {
2381+  napi_value result = nullptr;
2382+  napi_status status;
2383+  std::string propName;
2384+  int32_t refCount = 1;
2385+
2386+  status = napi_create_object(env, &result);
2387+  if (status == napi_ok) {
2388+    for (auto &iter : contextPriorityTypeMap) {
2389+      propName = iter.first;
2390+      status = AddNamedProperty(env, result, propName, iter.second);
2391+      if (status != napi_ok) {
2392+        MS_LOG(ERROR) << "Failed to add named prop in CreatePriorityObject.";
2393+        break;
2394+      }
2395+      propName.clear();
2396+    }
2397+    if (status == napi_ok) {
2398+      status = napi_create_reference(env, result, refCount, &contextPriority_);
2399+      if (status == napi_ok) {
2400+        return result;
2401+      }
2402+    }
2403+  }
2404+  MS_LOG(ERROR) << "CreatePriorityObject is Failed!";
2405+  napi_get_undefined(env, &result);
2406+  return result;
2407+}
2408+
2409+napi_value MSLiteModelNapi::CreateNnrtDeviceTypeObject(napi_env env) {
2410+  napi_value result = nullptr;
2411+  napi_status status;
2412+  std::string propName;
2413+  int32_t refCount = 1;
2414+
2415+  status = napi_create_object(env, &result);
2416+  if (status == napi_ok) {
2417+    for (auto &iter : contextNnrtDeviceTypeTypeMap) {
2418+      propName = iter.first;
2419+      status = AddNamedProperty(env, result, propName, iter.second);
2420+      if (status != napi_ok) {
2421+        MS_LOG(ERROR) << "Failed to add named prop in CreateNnrtDeviceTypeObject.";
2422+        break;
2423+      }
2424+      propName.clear();
2425+    }
2426+    if (status == napi_ok) {
2427+      status = napi_create_reference(env, result, refCount, &contextNnrtDeviceType_);
2428+      if (status == napi_ok) {
2429+        return result;
2430+      }
2431+    }
2432+  }
2433+  MS_LOG(ERROR) << "CreateNnrtDeviceTypeObject is Failed!";
2434+  napi_get_undefined(env, &result);
2435+  return result;
2436+}
2437+
2438+napi_status MSLiteModelNapi::AddNamedProperty(napi_env env, napi_value object, const std::string name,
2439+                                              int32_t enumValue) {
2440+  napi_status status;
2441+  napi_value enumNapiValue;
2442+
2443+  status = napi_create_int32(env, enumValue, &enumNapiValue);
2444+  if (status == napi_ok) {
2445+    status = napi_set_named_property(env, object, name.c_str(), enumNapiValue);
2446+  }
2447+  return status;
2448+}
2449+
2450+napi_value MSLiteModelNapi::GetAllNnrtDeviceDescs(napi_env env, napi_callback_info info) {
2451+  size_t num;
2452+  napi_value jsResult = nullptr;
2453+  NNRTDeviceDesc *devices = OH_AI_GetAllNNRTDeviceDescs(&num);
2454+  if (devices == nullptr) {
2455+    MS_LOG(ERROR) << "Get all nnrt devices error, may nnrt is not supported.";
2456+    OH_AI_DestroyAllNNRTDeviceDescs(&devices);
2457+    return jsResult;
2458+  }
2459+
2460+  MS_LOG(INFO) << "all nnrt devices size: " << num;
2461+  napi_create_array_with_length(env, num, &jsResult);
2462+  for (size_t i = 0; i < num; i++) {
2463+    NnrtDeviceDesc nnrt_device;
2464+    NNRTDeviceDesc *nnrt_device_desc = OH_AI_GetElementOfNNRTDeviceDescs(devices, i);
2465+    nnrt_device.name.assign(OH_AI_GetNameFromNNRTDeviceDesc(nnrt_device_desc));
2466+    size_t id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(nnrt_device_desc);
2467+    nnrt_device.id = id;
2468+    nnrt_device.type = static_cast<ContextNnrtDeviceType>(OH_AI_GetTypeFromNNRTDeviceDesc(nnrt_device_desc));
2469+    auto status = napi_set_element(env, jsResult, i, NnrtDeviceDescNapi::NewInstance(env, nnrt_device));
2470+    if (status != napi_ok) {
2471+      MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
2472+      OH_AI_DestroyAllNNRTDeviceDescs(&devices);
2473+      return jsResult;
2474+    }
2475+  }
2476+  MS_LOG(INFO) << "get All nnrt devices success!";
2477+  OH_AI_DestroyAllNNRTDeviceDescs(&devices);
2478+  return jsResult;
2479+}
2480+
2481+std::shared_ptr<mindspore::Model> MSLiteModelNapi::CreateModel(ModelInfo *model_info_ptr,
2482+                                                               ContextInfo *context_info_ptr) {
2483+  if (context_info_ptr == nullptr) {
2484+    MS_LOG(ERROR) << "context_info_ptr is nullptr.";
2485+    return nullptr;
2486+  }
2487+  // create and init context
2488+  std::string s;
2489+  for (const auto &device_name : context_info_ptr->target) {
2490+    s += device_name + " ";
2491+  }
2492+  MS_LOG(DEBUG) << "target device: " << s.c_str();
2493+
2494+  auto context = std::make_shared<mindspore::Context>();
2495+  if (context == nullptr) {
2496+    MS_LOG(ERROR) << "Failed to new context.";
2497+    return nullptr;
2498+  }
2499+
2500+  auto &device_infos = context->MutableDeviceInfo();
2501+  if (context_info_ptr->target.empty()) {
2502+    MS_LOG(ERROR) << "context is empty.";
2503+    return nullptr;
2504+  }
2505+  if (GetDeviceInfoContext(context_info_ptr, device_infos) != SUCCESS) {
2506+    MS_LOG(ERROR) << "Create context failed.";
2507+    return nullptr;
2508+  }
2509+  context->SetThreadNum(context_info_ptr->cpu_device.thread_num);
2510+  MS_LOG(DEBUG) << "current thread num is : " << context->GetThreadNum();
2511+
2512+  switch (model_info_ptr->mode) {
2513+    case kBuffer: {
2514+      MS_LOG(DEBUG) << "input model buffer, model_buffer_total: " << model_info_ptr->model_buffer_total;
2515+      if (model_info_ptr->model_buffer_data == nullptr || model_info_ptr->model_buffer_total <= 0) {
2516+        MS_LOG(ERROR) << "Failed to build model.";
2517+        return nullptr;
2518+      }
2519+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
2520+      if (model_ptr == nullptr) {
2521+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
2522+        return nullptr;
2523+      }
2524+      auto ret = model_ptr->Build(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total,
2525+                                  mindspore::kMindIR, context);
2526+      if (ret == mindspore::kSuccess) {
2527+        MS_LOG(INFO) << "Build model from buffer success.";
2528+        return model_ptr;
2529+      }
2530+      break;
2531+    }
2532+    case kPath: {
2533+      MS_LOG(DEBUG) << "input model path, model_buffer_total: " << model_info_ptr->model_path.c_str();
2534+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
2535+      if (model_ptr == nullptr) {
2536+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
2537+        return nullptr;
2538+      }
2539+      auto ret = model_ptr->Build(model_info_ptr->model_path, mindspore::kMindIR, context);
2540+      if (ret == mindspore::kSuccess) {
2541+        MS_LOG(INFO) << "Build model from path success.";
2542+        return model_ptr;
2543+      }
2544+      return nullptr;
2545+    }
2546+    case kFD: {
2547+      MS_LOG(DEBUG) << "input model fd:" << model_info_ptr->model_fd
2548+                    << ", model_buffer_total: " << model_info_ptr->model_buffer_total;
2549+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
2550+      if (model_ptr == nullptr) {
2551+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
2552+        return nullptr;
2553+      }
2554+      auto ret = model_ptr->Build(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total,
2555+                                  mindspore::kMindIR, context);
2556+
2557+      (void)munmap(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total);
2558+      if (ret == mindspore::kSuccess) {
2559+        MS_LOG(INFO) << "Build model from fd success.";
2560+        return model_ptr;
2561+      }
2562+
2563+      break;
2564+    }
2565+    default: {
2566+      MS_LOG(ERROR) << "Invalid model mode.";
2567+    }
2568+  }
2569+  MS_LOG(ERROR) << "Build model failed.";
2570+  return nullptr;
2571+}
2572+
2573+std::shared_ptr<mindspore::Model> MSLiteModelNapi::CreateTrainModel(ModelInfo *model_info_ptr,
2574+                                                                    ContextInfo *context_info_ptr) {
2575+  // create and init context
2576+  std::string s;
2577+  for (const auto &device_name : context_info_ptr->target) {
2578+    s += device_name + " ";
2579+  }
2580+  MS_LOG(DEBUG) << "target device: " << s.c_str();
2581+
2582+  auto context = std::make_shared<mindspore::Context>();
2583+  if (context == nullptr) {
2584+    MS_LOG(ERROR) << "Failed to new context.";
2585+    return nullptr;
2586+  }
2587+
2588+  auto &device_infos = context->MutableDeviceInfo();
2589+  if (context_info_ptr->target.empty()) {
2590+    MS_LOG(ERROR) << "context is empty.";
2591+    return nullptr;
2592+  }
2593+  if (GetDeviceInfoContext(context_info_ptr, device_infos) != SUCCESS) {
2594+    MS_LOG(ERROR) << "Create context failed.";
2595+    return nullptr;
2596+  }
2597+
2598+  auto train_cfg = std::make_shared<TrainCfg>();
2599+  std::vector<std::string> loss_names;
2600+  for (const auto &name : train_cfg->GetLossName()) {
2601+      loss_names.push_back(name);
2602+  }
2603+  for (const auto &name : context_info_ptr->train_cfg.loss_names) {
2604+      loss_names.push_back(name);
2605+  }
2606+  train_cfg->SetLossName(loss_names);
2607+  train_cfg->optimization_level_ = static_cast<OptimizationLevel>(context_info_ptr->train_cfg.optimization_level);
2608+
2609+  switch (model_info_ptr->mode) {
2610+    case kBuffer: {
2611+      MS_LOG(DEBUG) << "input model buffer, model_buffer_total: " << model_info_ptr->model_buffer_total;
2612+      if (model_info_ptr->model_buffer_data == nullptr || model_info_ptr->model_buffer_total <= 0) {
2613+        MS_LOG(ERROR) << "Failed to build model.";
2614+        return nullptr;
2615+      }
2616+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
2617+      if (model_ptr == nullptr) {
2618+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
2619+        return nullptr;
2620+      }
2621+      mindspore::Graph graph;
2622+      auto status = mindspore::Serialization::Load(model_info_ptr->model_buffer_data,
2623+                                                   model_info_ptr->model_buffer_total, mindspore::kMindIR, &graph);
2624+      if (status != mindspore::kSuccess) {
2625+        MS_LOG(ERROR) << "load ms file failed.";
2626+        return nullptr;
2627+      }
2628+      auto ret = model_ptr->Build(static_cast<mindspore::GraphCell>(graph), context, train_cfg);
2629+      if (ret == mindspore::kSuccess) {
2630+        MS_LOG(INFO) << "Build model from buffer success.";
2631+        return model_ptr;
2632+      }
2633+      break;
2634+    }
2635+    case kPath: {
2636+      MS_LOG(DEBUG) << "input model path, model_buffer_total: " << model_info_ptr->model_path.c_str();
2637+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
2638+      if (model_ptr == nullptr) {
2639+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
2640+        return nullptr;
2641+      }
2642+
2643+      mindspore::Graph graph;
2644+      auto status = mindspore::Serialization::Load(model_info_ptr->model_path, mindspore::kMindIR, &graph);
2645+      if (status != mindspore::kSuccess) {
2646+        MS_LOG(ERROR) << "load ms file failed.";
2647+        return nullptr;
2648+      }
2649+      auto ret = model_ptr->Build(static_cast<mindspore::GraphCell>(graph), context, train_cfg);
2650+      if (ret == mindspore::kSuccess) {
2651+        MS_LOG(INFO) << "Build model from path success.";
2652+        return model_ptr;
2653+      }
2654+      return nullptr;
2655+    }
2656+    case kFD: {
2657+      MS_LOG(DEBUG) << "input model fd:" << model_info_ptr->model_fd
2658+                    << ", model_buffer_total: " << model_info_ptr->model_buffer_total;
2659+      std::shared_ptr<mindspore::Model> model_ptr = std::make_shared<mindspore::Model>();
2660+      if (model_ptr == nullptr) {
2661+        MS_LOG(ERROR) << "Failed to new mindspore::model.";
2662+        return nullptr;
2663+      }
2664+
2665+      mindspore::Graph graph;
2666+      auto status = mindspore::Serialization::Load(model_info_ptr->model_buffer_data,
2667+                                                   model_info_ptr->model_buffer_total, mindspore::kMindIR, &graph);
2668+      if (status != mindspore::kSuccess) {
2669+        MS_LOG(ERROR) << "load ms file failed.";
2670+        return nullptr;
2671+      }
2672+      auto ret = model_ptr->Build(static_cast<mindspore::GraphCell>(graph), context, train_cfg);
2673+      (void)munmap(model_info_ptr->model_buffer_data, model_info_ptr->model_buffer_total);
2674+      if (ret == mindspore::kSuccess) {
2675+        MS_LOG(INFO) << "Build model from fd success.";
2676+        return model_ptr;
2677+      }
2678+
2679+      break;
2680+    }
2681+    default: {
2682+      MS_LOG(ERROR) << "Invalid model mode.";
2683+    }
2684+  }
2685+  MS_LOG(ERROR) << "Build model failed.";
2686+  return nullptr;
2687+}
2688+
2689+int32_t MSLiteModelNapi::GetDeviceInfoContext(ContextInfo *context_ptr,
2690+                                              std::vector<std::shared_ptr<DeviceInfoContext>> &device_infos) {
2691+  for (auto device_name : context_ptr->target) {
2692+    if (kDeviceTypes.find(device_name) == kDeviceTypes.end()) {
2693+      MS_LOG(ERROR) << "Invalid device: " << device_name.c_str();
2694+      return ERR_INVALID_OPERATION;
2695+    }
2696+
2697+    auto device_type = kDeviceTypes.at(device_name);
2698+    switch (device_type) {
2699+      case kCPU: {
2700+        auto cpu_device = std::make_shared<mindspore::CPUDeviceInfo>();
2701+        if (cpu_device == nullptr) {
2702+          MS_LOG(ERROR) << "Failed to new CPU deviceInfo.";
2703+          return ERR_INVALID_OPERATION;
2704+        }
2705+        bool is_fp16 = (context_ptr->cpu_device.precision_mode.compare("preferred_fp16") == 0) ? true : false;
2706+        cpu_device->SetEnableFP16(is_fp16);
2707+        device_infos.push_back(cpu_device);
2708+        break;
2709+      }
2710+      case kNNRt: {
2711+        auto nnrt_device = std::make_shared<mindspore::NNRTDeviceInfo>();
2712+        if (nnrt_device == nullptr) {
2713+          MS_LOG(ERROR) << "Failed to new NNRT deviceInfo.";
2714+          return ERR_INVALID_OPERATION;
2715+        }
2716+        nnrt_device->SetDeviceID(context_ptr->nnrt_device.device_id);
2717+        if (context_ptr->nnrt_device.performance_mode != UNSET_VALUE) {
2718+          nnrt_device->SetPerformanceMode(context_ptr->nnrt_device.performance_mode);
2719+        }
2720+        if (context_ptr->nnrt_device.priority != UNSET_VALUE) {
2721+          nnrt_device->SetPriority(context_ptr->nnrt_device.priority);
2722+        }
2723+        // ignore extensions
2724+        device_infos.push_back(nnrt_device);
2725+        break;
2726+      }
2727+      default: {
2728+        MS_LOG(ERROR) << "invalid device.";
2729+        return ERR_INVALID_OPERATION;
2730+      }
2731+    }
2732+  }
2733+  return SUCCESS;
2734+}
2735+
// N-API constructor callback for the JS model class. Builds the native
// mindspore::Model from the statically staged model_info_/context_ (set by
// CreateMSLiteModelWrapper just before napi_new_instance) and wraps the
// native object into the JS `this` instance. Returns `this` on success,
// undefined on failure.
napi_value MSLiteModelNapi::Constructor(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  napi_get_undefined(env, &result);
  GET_PARAMS(env, info, ARGS_TWO);

  std::unique_ptr<MSLiteModelNapi> model_napi = std::make_unique<MSLiteModelNapi>();
  if (model_napi == nullptr) {
    MS_LOG(ERROR) << "No memory";
    return result;
  }

  model_napi->env_ = env;
  // model_info_/context_ are static members staged by the caller.
  // NOTE(review): this relies on the caller holding create_mutex_ so the
  // staged pointers are not overwritten concurrently — confirm.
  if (model_info_->train_model) {
    model_napi->native_model_ = CreateTrainModel(model_info_, context_);
  } else {
    model_napi->native_model_ = CreateModel(model_info_, context_);
  }
  if (model_napi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "Failed to create model.";
    return result;
  }

  // Attach the native object to the JS instance; Finalize releases it when
  // the wrapper is garbage-collected.
  status =
    napi_wrap(env, thisVar, reinterpret_cast<void *>(model_napi.get()), MSLiteModelNapi::Finalize, nullptr, nullptr);
  if (status == napi_ok) {
    // Ownership transferred to the JS runtime via napi_wrap.
    model_napi.release();
    return thisVar;
  }
  return result;
}
2767+
2768+int32_t MSLiteModelNapi::ParseModelInfo(napi_env env, napi_value root, ModelInfo &model_info) {
2769+  napi_valuetype valueType;
2770+  napi_status status = napi_typeof(env, root, &valueType);
2771+  if (status != napi_ok) {
2772+    MS_LOG(ERROR) << "napi_typeof error.";
2773+    return ERR_INVALID_PARAM;
2774+  }
2775+  if ((valueType != napi_object) && (valueType != napi_string) && (valueType != napi_number)) {
2776+    MS_LOG(ERROR) << "model is invaild.";
2777+    return ERR_INVALID_PARAM;
2778+  }
2779+
2780+  bool is_model_buffer = false;
2781+  napi_is_arraybuffer(env, root, &is_model_buffer);
2782+  if (is_model_buffer) {
2783+    // copy buffer
2784+    char *array_buffer_data;
2785+    size_t array_buffer_total;
2786+    status = napi_get_arraybuffer_info(env, root, reinterpret_cast<void **>(&array_buffer_data), &array_buffer_total);
2787+    if ((status != napi_ok) || (array_buffer_total <= 0)) {
2788+      MS_LOG(ERROR) << "Parse model buffer failed.";
2789+      return ERR_INVALID_PARAM;
2790+    }
2791+
2792+    // shallow copy
2793+    model_info.model_buffer_data = array_buffer_data;
2794+    model_info.model_buffer_total = array_buffer_total;
2795+    model_info.mode = kBuffer;
2796+  } else if (valueType == napi_number) {
2797+    int32_t fd;
2798+    status = napi_get_value_int32(env, root, &fd);
2799+    if ((status != napi_ok) || (fd <= 0)) {
2800+      MS_LOG(ERROR) << "Parse model FD failed.";
2801+      return ERR_INVALID_PARAM;
2802+    }
2803+
2804+    int size = lseek(fd, 0, SEEK_END);
2805+    (void)lseek(fd, 0, SEEK_SET);
2806+    auto mmap_buffers = mmap(NULL, size, PROT_READ, MAP_SHARED, fd, 0);
2807+    if (mmap_buffers == NULL) {
2808+      MS_LOG(ERROR) << "mmap_buffers is NULL.";
2809+      return ERR_INVALID_PARAM;
2810+    }
2811+    model_info.model_fd = fd;
2812+    model_info.model_buffer_data = static_cast<char *>(mmap_buffers);
2813+    model_info.model_buffer_total = size;
2814+    model_info.mode = kFD;
2815+  } else {
2816+    char char_buf[SIZE];
2817+    size_t buf_length = 0;
2818+    status = napi_get_value_string_utf8(env, root, char_buf, SIZE, &buf_length);
2819+    if ((status != napi_ok) || (buf_length <= 0)) {
2820+      MS_LOG(ERROR) << "Parse model file failed.";
2821+      return ERR_INVALID_PARAM;
2822+    }
2823+    model_info.model_path.assign(char_buf, char_buf + buf_length);
2824+    model_info.mode = kPath;
2825+    MS_LOG(DEBUG) << "model_path: " << model_info.model_path.c_str();
2826+  }
2827+  return SUCCESS;
2828+}
2829+
2830+int32_t MSLiteModelNapi::ParseContextInfo(napi_env env, napi_value args, ContextInfo &context) {
2831+  napi_valuetype valueType;
2832+  napi_status status = napi_typeof(env, args, &valueType);
2833+  if ((status != napi_ok) || (valueType != napi_object)) {
2834+    MS_LOG(ERROR) << "context is invaild.";
2835+    return ERR_NOT_EXISTED_PARAM;
2836+  }
2837+
2838+  std::vector<std::string> str_values;
2839+  auto ret = CommonNapi::GetPropertyStringArray(env, args, "target", str_values);
2840+  if (ret != SUCCESS) {
2841+    MS_LOG(ERROR) << "Get context target failed.";
2842+    return ret;
2843+  }
2844+  context.target.assign(str_values.begin(), str_values.end());
2845+
2846+  ret = GetCpuDeviceInfo(env, args, context);
2847+  if (ret != ERR_NOT_EXISTED_PARAM && ret != SUCCESS) {
2848+    MS_LOG(ERROR) << "Get context CpuDeviceInfo failed.";
2849+    return ret;
2850+  }
2851+
2852+  ret = GetNNRTDeviceInfo(env, args, context);
2853+  if (ret != ERR_NOT_EXISTED_PARAM && ret != SUCCESS) {
2854+    MS_LOG(ERROR) << "Get context NnrtDeviceInfo failed.";
2855+    return ret;
2856+  }
2857+  return SUCCESS;
2858+}
2859+
2860+int32_t MSLiteModelNapi::ParseTrainCfgInfo(napi_env env, napi_value root, TrainConfig &cfg) {
2861+  napi_valuetype valueType;
2862+  napi_status status = napi_typeof(env, root, &valueType);
2863+  if ((status != napi_ok) || (valueType != napi_object)) {
2864+    MS_LOG(ERROR) << "TrainCfg is invaild.";
2865+    return ERR_NOT_EXISTED_PARAM;
2866+  }
2867+  std::vector<std::string> str_values;
2868+  auto ret = CommonNapi::GetPropertyStringArray(env, root, "lossName", str_values);
2869+  if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
2870+    MS_LOG(ERROR) << "Get lossName failed.";
2871+    return ret;
2872+  }
2873+  cfg.loss_names.assign(str_values.begin(), str_values.end());
2874+
2875+  int32_t int_value = 0;
2876+  ret = CommonNapi::GetPropertyInt32(env, root, "optimizationLevel", int_value);
2877+  if (ret != SUCCESS && ret != ERR_NOT_EXISTED_PARAM) {
2878+    MS_LOG(ERROR) << "Get optimization level failed";
2879+    return ret;
2880+  } else {
2881+    cfg.optimization_level = int_value;
2882+  }
2883+  return SUCCESS;
2884+}
2885+
2886+napi_value MSLiteModelNapi::CreateMSLiteModelWrapper(napi_env env, MSLiteModelAsyncContext *async_context) {
2887+  std::lock_guard<std::mutex> lock(create_mutex_);
2888+  napi_status status;
2889+  napi_value result = nullptr;
2890+  napi_value constructor;
2891+  napi_get_undefined(env, &result);
2892+
2893+  status = napi_get_reference_value(env, constructor_, &constructor);
2894+  if (status != napi_ok) {
2895+    MS_LOG(ERROR) << "get reference failed.";
2896+    return result;
2897+  }
2898+  model_info_ = &(async_context->model_info);
2899+  context_ = &(async_context->context);
2900+  status = napi_new_instance(env, constructor, 0, nullptr, &result);
2901+  if (status == napi_ok) {
2902+    return result;
2903+  }
2904+
2905+  return result;
2906+}
2907+
2908+void MSLiteModelNapi::GetMSLiteModelAsyncCallbackComplete(napi_env env, napi_status status, void *data) {
2909+  napi_value valueParam = nullptr;
2910+  auto async_context = static_cast<MSLiteModelAsyncContext *>(data);
2911+
2912+  if (async_context != nullptr) {
2913+    if (!async_context->status) {
2914+      valueParam = CreateMSLiteModelWrapper(env, async_context);
2915+    }
2916+    CommonCallbackRoutine(env, async_context, valueParam);
2917+  } else {
2918+    MS_LOG(ERROR) << "GetMSLiteModelAsyncCallbackComplete asyncContext is Null!";
2919+  }
2920+}
2921+
// Delivers the async result to JS and destroys the async context.
// Promise mode (deferred != nullptr): resolves with valueParam on success or
// rejects with an Error built from the status code. Callback mode: invokes
// the stored callback with a single argument and releases the reference.
// Always deletes the async work and frees asyncContext (nulls the caller's
// pointer via the reference parameter).
void MSLiteModelNapi::CommonCallbackRoutine(napi_env env, MSLiteModelAsyncContext *&asyncContext,
                                            const napi_value &valueParam) {
  napi_value result[ARGS_ONE] = {0};
  napi_value retVal;
  napi_value error = nullptr;

  if (!asyncContext->status) {
    // Success: pass the created wrapper through.
    result[PARAM0] = valueParam;
  } else {
    // Failure: build an Error whose code is the numeric status and whose
    // message comes from the shared error-code table.
    napi_value message = nullptr;
    std::string messageValue = CommonNapi::getMessageByCode(asyncContext->status);
    napi_create_string_utf8(env, messageValue.c_str(), NAPI_AUTO_LENGTH, &message);

    napi_value code = nullptr;
    napi_create_string_utf8(env, (std::to_string(asyncContext->status)).c_str(), NAPI_AUTO_LENGTH, &code);

    napi_create_error(env, code, message, &error);
    napi_get_undefined(env, &result[PARAM0]);
  }

  if (asyncContext->deferred != nullptr) {
    // Promise mode.
    if (!asyncContext->status) {
      napi_resolve_deferred(env, asyncContext->deferred, result[PARAM0]);
    } else {
      napi_reject_deferred(env, asyncContext->deferred, error);
    }
  } else {
    // Callback mode.
    // NOTE(review): on failure the callback receives undefined rather than
    // the error object — confirm this matches the d.ts contract.
    napi_value callback = nullptr;
    napi_get_reference_value(env, asyncContext->callbackRef, &callback);
    napi_call_function(env, nullptr, callback, ARGS_ONE, result, &retVal);
    napi_delete_reference(env, asyncContext->callbackRef);
  }
  napi_delete_async_work(env, asyncContext->work);

  delete asyncContext;
  asyncContext = nullptr;
}
2959+
// JS API entry: loadModelFromFile(model, context?, callback?).
// Parses up to three arguments (model source, optional context, optional
// callback), then queues an async work that builds the model. Returns a
// promise when no callback is given, undefined in callback mode, or nullptr
// on setup failure.
napi_value MSLiteModelNapi::LoadMSLiteModelFromFile(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  const int32_t refCount = 1;
  GET_PARAMS(env, info, ARGS_THREE);
  napi_valuetype valueType = napi_undefined;

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();

  int32_t ret;
  for (size_t i = PARAM0; i < argc; i++) {
    if (i == PARAM0) {
      // First argument: the model source (path/buffer/fd).
      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing model failed.";
        return result;
      }
    } else if (i == PARAM1) {
      // Second argument: either the callback (context omitted) or the context.
      napi_typeof(env, argv[i], &valueType);
      if (valueType == napi_function) {
        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      } else {
        ret = ParseContextInfo(env, argv[i], asyncContext->context);
        if (ret != SUCCESS) {
          MS_LOG(ERROR) << "Parsing context failed.";
          return result;
        }
      }
    } else if (i == PARAM2) {
      // Third argument: the callback, when both model and context were given.
      napi_typeof(env, argv[i], &valueType);
      if (valueType == napi_function) {
        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      }
      break;
    } else {
      MS_LOG(ERROR) << "Invalid input params.";
      return result;
    }
  }

  // Promise mode when no callback was captured, callback mode otherwise.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "LoadMSLiteModelFromFile", NAPI_AUTO_LENGTH, &resource);
  // The execute callback only marks success; the model itself is built in
  // GetMSLiteModelAsyncCallbackComplete on the main thread.
  status = napi_create_async_work(
    env, nullptr, resource,
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership passes to the completion callback, which frees the context.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3035+
// JS API entry: loadTrainModelFromFile(model, trainCfg, context).
// Positional-only variant: argument 0 is the model source, 1 the TrainCfg,
// 2 the context. No callback argument is parsed here, so callbackRef stays
// null and the function always returns a promise (or nullptr on failure).
napi_value MSLiteModelNapi::LoadMSLiteTrainModelFromFile(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  GET_PARAMS(env, info, ARGS_THREE);

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();

  // Marks this load as a training model so the constructor takes the
  // CreateTrainModel path.
  asyncContext->model_info.train_model = true;
  int32_t ret;
  for (size_t i = PARAM0; i < argc; i++) {
    if (i == PARAM0) {
      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing model failed.";
        return result;
      }
    } else if (i == PARAM1) {
      ret = ParseTrainCfgInfo(env, argv[i], asyncContext->context.train_cfg);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing TrainCfg failed.";
        return result;
      }
    } else if (i == PARAM2) {
      ret = ParseContextInfo(env, argv[i], asyncContext->context);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing context failed.";
        return result;
      }
    } else {
      MS_LOG(ERROR) << "Invalid input params.";
      return result;
    }
  }

  // callbackRef is never set above, so this always takes the promise branch.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "LoadMSLiteTrainModelFromFile", NAPI_AUTO_LENGTH, &resource);
  // Execute phase only marks success; the model is built in the completion
  // callback on the main thread.
  status = napi_create_async_work(
    env, nullptr, resource,
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership passes to the completion callback.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3105+
// JS API entry: loadTrainModelFromBuffer(model, trainCfg, context).
// Same positional layout as LoadMSLiteTrainModelFromFile; the model source
// here is expected to be an ArrayBuffer (ParseModelInfo detects the form).
// Always promise-mode, since no callback argument is parsed.
napi_value MSLiteModelNapi::LoadMSLiteTrainModelFromBuffer(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  GET_PARAMS(env, info, ARGS_THREE);

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();

  // Route the constructor to CreateTrainModel.
  asyncContext->model_info.train_model = true;
  int32_t ret;
  for (size_t i = PARAM0; i < argc; i++) {
    if (i == PARAM0) {
      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing model failed.";
        return result;
      }
    } else if (i == PARAM1) {
      ret = ParseTrainCfgInfo(env, argv[i], asyncContext->context.train_cfg);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing TrainCfg failed.";
        return result;
      }
    } else if (i == PARAM2) {
      ret = ParseContextInfo(env, argv[i], asyncContext->context);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing context failed.";
        return result;
      }
    } else {
      MS_LOG(ERROR) << "Invalid input params.";
      return result;
    }
  }

  // callbackRef is never set, so this always creates a promise.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "LoadMSLiteTrainModelFromBuffer", NAPI_AUTO_LENGTH, &resource);
  // Execute phase only marks success; the heavy work happens in the
  // completion callback.
  status = napi_create_async_work(
    env, nullptr, resource,
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership passes to the completion callback.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3175+
// JS API entry: loadTrainModelFromFd(model, trainCfg, context).
// Same positional layout as the other train-model loaders; the model source
// here is expected to be a numeric fd, which ParseModelInfo mmaps (and
// CreateTrainModel later unmaps). Always promise-mode.
napi_value MSLiteModelNapi::LoadMSLiteTrainModelFromFd(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  GET_PARAMS(env, info, ARGS_THREE);

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();

  // Route the constructor to CreateTrainModel.
  asyncContext->model_info.train_model = true;
  int32_t ret;
  for (size_t i = PARAM0; i < argc; i++) {
    if (i == PARAM0) {
      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing model failed.";
        return result;
      }
    } else if (i == PARAM1) {
      ret = ParseTrainCfgInfo(env, argv[i], asyncContext->context.train_cfg);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing TrainCfg failed.";
        return result;
      }
    } else if (i == PARAM2) {
      ret = ParseContextInfo(env, argv[i], asyncContext->context);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing context failed.";
        return result;
      }
    } else {
      MS_LOG(ERROR) << "Invalid input params.";
      return result;
    }
  }

  // callbackRef is never set, so this always creates a promise.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "LoadMSLiteTrainModelFromFd", NAPI_AUTO_LENGTH, &resource);
  // Execute phase only marks success; the heavy work happens in the
  // completion callback.
  status = napi_create_async_work(
    env, nullptr, resource,
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership passes to the completion callback.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3245+
// JS API entry: loadModelFromBuffer(model, context?, callback?).
// Mirrors LoadMSLiteModelFromFile: argument 0 is the model (here expected as
// an ArrayBuffer), argument 1 is either a context or the callback, and
// argument 2 the callback. Returns a promise without a callback, undefined
// with one, or nullptr on setup failure.
napi_value MSLiteModelNapi::LoadMSLiteModelFromBuffer(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  const int32_t refCount = 1;
  GET_PARAMS(env, info, ARGS_THREE);
  napi_valuetype valueType = napi_undefined;

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();

  int32_t ret;
  for (size_t i = PARAM0; i < argc; i++) {
    if (i == PARAM0) {
      // First argument: the model source.
      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing model failed.";
        return result;
      }
    } else if (i == PARAM1) {
      // Second argument: callback (context omitted) or context object.
      napi_typeof(env, argv[i], &valueType);
      if (valueType == napi_function) {
        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      } else {
        ret = ParseContextInfo(env, argv[i], asyncContext->context);
        if (ret != SUCCESS) {
          MS_LOG(ERROR) << "Parsing context failed.";
          return result;
        }
      }
    } else if (i == PARAM2) {
      // Third argument: the callback.
      napi_typeof(env, argv[i], &valueType);
      if (valueType == napi_function) {
        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      }
      break;
    } else {
      MS_LOG(ERROR) << "Invalid input params.";
      return result;
    }
  }

  // Promise mode when no callback was captured, callback mode otherwise.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "LoadMSLiteModelFromBuffer", NAPI_AUTO_LENGTH, &resource);
  // Execute phase only marks success; the model is built in the completion
  // callback on the main thread.
  status = napi_create_async_work(
    env, nullptr, resource,
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership passes to the completion callback.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3321+
// JS API entry: loadModelFromFd(model, context?, callback?).
// Mirrors LoadMSLiteModelFromFile: argument 0 is the model (here expected as
// a numeric fd, mmap'd by ParseModelInfo), argument 1 is either a context or
// the callback, and argument 2 the callback. Returns a promise without a
// callback, undefined with one, or nullptr on setup failure.
napi_value MSLiteModelNapi::LoadMSLiteModelFromFd(napi_env env, napi_callback_info info) {
  napi_status status;
  napi_value result = nullptr;
  const int32_t refCount = 1;
  GET_PARAMS(env, info, ARGS_THREE);
  napi_valuetype valueType = napi_undefined;

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();

  int32_t ret;
  for (size_t i = PARAM0; i < argc; i++) {
    if (i == PARAM0) {
      // First argument: the model source.
      ret = ParseModelInfo(env, argv[i], asyncContext->model_info);
      if (ret != SUCCESS) {
        MS_LOG(ERROR) << "Parsing model failed.";
        return result;
      }
    } else if (i == PARAM1) {
      // Second argument: callback (context omitted) or context object.
      napi_typeof(env, argv[i], &valueType);
      if (valueType == napi_function) {
        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      } else {
        ret = ParseContextInfo(env, argv[i], asyncContext->context);
        if (ret != SUCCESS) {
          MS_LOG(ERROR) << "Parsing context failed.";
          return result;
        }
      }
    } else if (i == PARAM2) {
      // Third argument: the callback.
      napi_typeof(env, argv[i], &valueType);
      if (valueType == napi_function) {
        napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      }
      break;
    } else {
      MS_LOG(ERROR) << "Invalid input params.";
      return result;
    }
  }

  // Promise mode when no callback was captured, callback mode otherwise.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "LoadMSLiteModelFromFd", NAPI_AUTO_LENGTH, &resource);
  // Execute phase only marks success; the model is built in the completion
  // callback on the main thread.
  status = napi_create_async_work(
    env, nullptr, resource,
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    GetMSLiteModelAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership passes to the completion callback.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3397+
3398+int32_t MSLiteModelNapi::GetCpuDeviceInfo(napi_env env, napi_value args, ContextInfo &context) {
3399+  bool has_cpu_property = false;
3400+  napi_status status = napi_has_named_property(env, args, "cpu", &has_cpu_property);
3401+  if (status != napi_ok) {
3402+    MS_LOG(ERROR) << "can not find cpu property";
3403+    return ERR_INVALID_OPERATION;
3404+  }
3405+  if (!has_cpu_property) {
3406+    return ERR_NOT_EXISTED_PARAM;
3407+  }
3408+
3409+  napi_value config_item = nullptr;
3410+  status = napi_get_named_property(env, args, "cpu", &config_item);
3411+  if (status != napi_ok) {
3412+    MS_LOG(ERROR) << "can not get cpu property";
3413+    return ERR_INVALID_OPERATION;
3414+  }
3415+
3416+  int32_t int_value = 0;
3417+  std::string str_value = "";
3418+  std::vector<int32_t> affinity_cores;
3419+
3420+  if (CommonNapi::GetPropertyInt32(env, config_item, "threadNum", int_value) == SUCCESS) {
3421+    MS_LOG(DEBUG) << "threadNum: " << int_value;
3422+    context.cpu_device.thread_num = int_value;
3423+  } else {
3424+    context.cpu_device.thread_num = PARAM2;
3425+  }
3426+
3427+  if (CommonNapi::GetPropertyInt32(env, config_item, "threadAffinityMode", int_value) == SUCCESS) {
3428+    MS_LOG(DEBUG) << "threadAffinityMode: " << int_value;
3429+    if (int_value > PARAM2 || int_value < PARAM0) {
3430+      MS_LOG(ERROR) << "threadAffinityMode value is set: " << int_value << ", is out of limition";
3431+      return ERR_INVALID_OPERATION;
3432+    }
3433+    context.cpu_device.thread_affinity_mode = int_value;
3434+  } else {
3435+    context.cpu_device.thread_affinity_mode = PARAM0;
3436+  }
3437+
3438+  if (CommonNapi::GetPropertyInt32Array(env, config_item, "threadAffinityCoreList", affinity_cores) == SUCCESS) {
3439+    MS_LOG(DEBUG) << "affinityCores size: " << affinity_cores.size();
3440+    context.cpu_device.thread_affinity_cores.assign(affinity_cores.begin(), affinity_cores.end());
3441+  } else {
3442+    context.cpu_device.thread_affinity_cores = {};
3443+  }
3444+
3445+  if (CommonNapi::GetPropertyString(env, config_item, "precisionMode", str_value) == SUCCESS) {
3446+    MS_LOG(DEBUG) << "precisionMode: " << str_value.c_str();
3447+    context.cpu_device.precision_mode = str_value;
3448+  } else {
3449+    context.cpu_device.precision_mode = "enforce_fp32";
3450+  }
3451+  return SUCCESS;
3452+}
3453+
3454+int32_t MSLiteModelNapi::GetNNRTDeviceInfo(napi_env env, napi_value args, ContextInfo &context) {
3455+  bool has_nnrt_property = false;
3456+  napi_status status = napi_has_named_property(env, args, "nnrt", &has_nnrt_property);
3457+  if (status != napi_ok) {
3458+    MS_LOG(ERROR) << "can not find nnrt property";
3459+    return ERR_ILLEGAL_STATE;
3460+  }
3461+  if (!has_nnrt_property) {
3462+    return ERR_NOT_EXISTED_PARAM;
3463+  }
3464+
3465+  napi_value config_item = nullptr;
3466+  status = napi_get_named_property(env, args, "nnrt", &config_item);
3467+  if (status != napi_ok) {
3468+    MS_LOG(ERROR) << "can not get nnrt property";
3469+    return ERR_INVALID_PARAM;
3470+  }
3471+
3472+  int32_t int_value = 0;
3473+  std::string str_value = "";
3474+  std::vector<int32_t> affinity_cores;
3475+
3476+  uint64_t device_id;
3477+  auto ret = CommonNapi::GetPropertyBigIntUint64(env, config_item, "deviceID", device_id);
3478+  if (ret == SUCCESS) {
3479+    MS_LOG(DEBUG) << "deviceID: " << device_id;
3480+    context.nnrt_device.device_id = static_cast<size_t>(device_id);
3481+  } else if (ret == ERR_NOT_EXISTED_PARAM) {
3482+    size_t num = 0;
3483+    auto *desc = OH_AI_GetAllNNRTDeviceDescs(&num);
3484+    if (desc == nullptr || num == 0) {
3485+      MS_LOG(WARNING) << "Failed to get nnrt device id, skip adding nnrt device info.";
3486+      return ERR_NOT_EXISTED_PARAM;
3487+    }
3488+    auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
3489+    OH_AI_DestroyAllNNRTDeviceDescs(&desc);
3490+    MS_LOG(INFO) << "set nnrt device id to " << id;
3491+    context.nnrt_device.device_id = id;
3492+  } else {
3493+    return ERR_INVALID_PARAM;
3494+  }
3495+
3496+  ret = CommonNapi::GetPropertyInt32(env, config_item, "performanceMode", int_value);
3497+  if (ret == SUCCESS) {
3498+    MS_LOG(DEBUG) << "performanceMode: " << int_value;
3499+    if (int_value > PARAM4 || int_value < PARAM0) {
3500+      MS_LOG(ERROR) << "performanceMode value is set to: " << int_value << ", which is out of range";
3501+      return ERR_INVALID_PARAM;
3502+    }
3503+    context.nnrt_device.performance_mode = int_value;
3504+  } else if (ret == ERR_NOT_EXISTED_PARAM) {
3505+    context.nnrt_device.performance_mode = UNSET_VALUE;
3506+  } else {
3507+    return ERR_INVALID_PARAM;
3508+  }
3509+
3510+  ret = CommonNapi::GetPropertyInt32(env, config_item, "priority", int_value);
3511+  if (ret == SUCCESS) {
3512+    MS_LOG(DEBUG) << "priority: " << int_value;
3513+    if (int_value > PARAM3 || int_value < PARAM0) {
3514+      MS_LOG(ERROR) << "priority value is set to: " << int_value << ", which is out of range";
3515+      return ERR_INVALID_PARAM;
3516+    }
3517+    context.nnrt_device.priority = int_value;
3518+  } else if (ret == ERR_NOT_EXISTED_PARAM) {
3519+    context.nnrt_device.priority = UNSET_VALUE;
3520+  } else {
3521+    return ERR_INVALID_PARAM;
3522+  }
3523+
3524+  // ignore extensions for now
3525+  return SUCCESS;
3526+}
3527+
3528+napi_value MSLiteModelNapi::GetInputs(napi_env env, napi_callback_info info) {
3529+  napi_value undefinedResult = nullptr;
3530+  napi_get_undefined(env, &undefinedResult);
3531+
3532+  size_t argCount = 0;
3533+  napi_value jsThis = nullptr;
3534+  napi_value jsResult = nullptr;
3535+  MSLiteModelNapi *modelNapi = nullptr;
3536+
3537+  napi_status status = napi_get_cb_info(env, info, &argCount, nullptr, &jsThis, nullptr);
3538+  if (status != napi_ok || jsThis == nullptr) {
3539+    MS_LOG(ERROR) << "failed to retrieve details about the callback";
3540+    return undefinedResult;
3541+  }
3542+
3543+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
3544+  if (status != napi_ok || modelNapi == nullptr) {
3545+    MS_LOG(ERROR) << "failed to get model";
3546+    return undefinedResult;
3547+  }
3548+
3549+  if (modelNapi->native_model_ == nullptr) {
3550+    MS_LOG(ERROR) << "model is released(null), please create model again";
3551+    return undefinedResult;
3552+  }
3553+  std::vector<MSTensor> inputs = modelNapi->native_model_->GetInputs();
3554+  std::vector<MSTensor> tensor_inputs;
3555+  for (size_t i = 0; i < inputs.size(); i++) {
3556+    auto tensor = mindspore::MSTensor::CreateTensor(inputs.at(i).Name(), inputs.at(i).DataType(), {}, nullptr, 0);
3557+    if (tensor == nullptr) {
3558+      MS_LOG(ERROR) << "create tensor failed.";
3559+      return undefinedResult;
3560+    }
3561+    tensor->SetShape(inputs.at(i).Shape());
3562+    tensor->SetFormat(inputs.at(i).format());
3563+    tensor->SetDataType(inputs.at(i).DataType());
3564+    tensor_inputs.push_back(*tensor);
3565+    delete tensor;
3566+  }
3567+
3568+  size_t size = inputs.size();
3569+  MS_LOG(INFO) << "inputs size: " << size;
3570+  napi_create_array_with_length(env, size, &jsResult);
3571+  for (size_t i = 0; i < size; i++) {
3572+    status = napi_set_element(env, jsResult, i, MSTensorNapi::NewInstance(env, tensor_inputs[i]));
3573+    if (status != napi_ok) {
3574+      MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
3575+    } 
3576+  }
3577+  MS_LOG(INFO) << "get model inputs success: " << inputs[0].Name().c_str();
3578+  return jsResult;
3579+}
3580+
3581+napi_value MSLiteModelNapi::Resize(napi_env env, napi_callback_info info) {
3582+  napi_value undefinedResult = nullptr;
3583+  bool result = false;
3584+  napi_status status = napi_get_boolean(env, result, &undefinedResult);
3585+  if (status != napi_ok) {
3586+    MS_LOG(ERROR) << "get bool error";
3587+    return undefinedResult;
3588+  }
3589+
3590+  napi_value jsThis = nullptr;
3591+  napi_value jsResult = nullptr;
3592+  MSLiteModelNapi *modelNapi = nullptr;
3593+  napi_value argv[ARGS_TWO] = {0};
3594+  size_t argCount = PARAM2;
3595+  status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
3596+  if (status != napi_ok || jsThis == nullptr) {
3597+    MS_LOG(ERROR) << "failed to retrieve details about the callback";
3598+    return undefinedResult;
3599+  }
3600+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
3601+  if (status != napi_ok || modelNapi == nullptr) {
3602+    MS_LOG(ERROR) << "get model napi error";
3603+    return undefinedResult;
3604+  }
3605+
3606+  if (modelNapi->native_model_ == nullptr) {
3607+    MS_LOG(ERROR) << "model is released(null), please create model again";
3608+    return undefinedResult;
3609+  }
3610+  std::vector<MSTensor> inputs = modelNapi->native_model_->GetInputs();
3611+  std::vector<MSTensor> tensor_inputs;
3612+  std::vector<std::vector<int64_t>> dims;
3613+
3614+  // set inputs data
3615+  uint32_t array_length = 0;
3616+  status = napi_get_array_length(env, argv[PARAM0], &array_length);
3617+  if (status != napi_ok || array_length <= 0) {
3618+    MS_LOG(ERROR) << "get inputs tensor length failed.";
3619+    return undefinedResult;
3620+  }
3621+  if (inputs.size() != array_length) {
3622+    MS_LOG(ERROR) << "array length not equal to model inputs size.";
3623+    return undefinedResult;
3624+  }
3625+  for (size_t i = 0; i < array_length; i++) {
3626+    napi_value element = nullptr;
3627+    status = napi_get_element(env, argv[PARAM0], i, &element);
3628+    if (status != napi_ok) {
3629+      MS_LOG(ERROR) << "can not get element";
3630+      return undefinedResult;
3631+    }
3632+
3633+    std::string property_name = "getData";
3634+    bool exist = false;
3635+    napi_value data_func = nullptr;
3636+
3637+    status = napi_has_named_property(env, element, property_name.c_str(), &exist);
3638+    if (status != napi_ok || !exist) {
3639+      MS_LOG(ERROR) << "can not find target property";
3640+      return undefinedResult;
3641+    }
3642+
3643+    if (status != napi_ok || !exist) {
3644+      MS_LOG(ERROR) << "can not find " << property_name.c_str() << " property.";
3645+      return undefinedResult;
3646+    }
3647+
3648+    if (napi_get_named_property(env, element, property_name.c_str(), &data_func) != napi_ok) {
3649+      MS_LOG(ERROR) << "get " << property_name.c_str() << " property fail.";
3650+      return undefinedResult;
3651+    }
3652+    void *js_data = nullptr;
3653+    size_t length = 0;
3654+    napi_value return_val;
3655+
3656+    status = napi_call_function(env, element, data_func, 0, nullptr, &return_val);
3657+    if (status != napi_ok || return_val == nullptr) {
3658+      MS_LOG(ERROR) << "napi call function error.";
3659+      return undefinedResult;
3660+    }
3661+
3662+    status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
3663+    if (status != napi_ok || js_data == nullptr) {
3664+      MS_LOG(ERROR) << "get js data error.";
3665+      return undefinedResult;
3666+    }
3667+    if (inputs[i].DataSize() != length) {
3668+      MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(inputs[i].DataSize()) << ", but data length got "
3669+                    << static_cast<int>(length);
3670+      return undefinedResult;
3671+    }
3672+
3673+    auto tensor_data = inputs[i].MutableData();
3674+    if (tensor_data == nullptr) {
3675+      MS_LOG(ERROR) << "malloc data for tensor failed.";
3676+      return undefinedResult;
3677+    }
3678+    memcpy(tensor_data, js_data, length);
3679+  }
3680+
3681+  napi_value dim_num = nullptr;
3682+  int64_t dim_ele = 0;
3683+  uint32_t dims_size = 0;
3684+  uint32_t dim_size = 0;
3685+
3686+  status = napi_is_array(env, argv[PARAM1], &result);
3687+  if (status != napi_ok || result == false) {
3688+    MS_LOG(ERROR) << "new dim is not a array";
3689+    return undefinedResult;
3690+  }
3691+
3692+  status = napi_get_array_length(env, argv[PARAM1], &dims_size);
3693+  if (status != napi_ok) {
3694+    MS_LOG(ERROR) << "get new dims size error";
3695+    return undefinedResult;
3696+  }
3697+  for (size_t i = 0; i < dims_size; i++) {
3698+    napi_value dim_element = nullptr;
3699+    status = napi_get_element(env, argv[PARAM1], i, &dim_element);
3700+    if (status != napi_ok) {
3701+      MS_LOG(ERROR) << "can not get element";
3702+      return undefinedResult;
3703+    }
3704+
3705+    status = napi_is_array(env, dim_element, &result);
3706+    if (status != napi_ok || result == false) {
3707+      MS_LOG(ERROR) << "new dim's element is not a array";
3708+      return undefinedResult;
3709+    }
3710+
3711+    status = napi_get_array_length(env, dim_element, &dim_size);
3712+    if (status != napi_ok) {
3713+      MS_LOG(ERROR) << "get new dim size error";
3714+      return undefinedResult;
3715+    }
3716+    std::vector<int64_t> dim(dim_size);
3717+    for (size_t j = 0; j < dim_size; j++) {
3718+      status = napi_get_element(env, dim_element, j, &dim_num);
3719+      if (status != napi_ok) {
3720+        MS_LOG(ERROR) << "get dim num error";
3721+        return undefinedResult;
3722+      }
3723+      status = napi_get_value_int64(env, dim_num, &dim_ele);
3724+      if (status != napi_ok) {
3725+        MS_LOG(ERROR) << "get dim element error";
3726+        return undefinedResult;
3727+      }
3728+      dim[j] = dim_ele;
3729+    }
3730+    dims.push_back(dim);
3731+  }
3732+  if (modelNapi->native_model_->Resize(inputs, dims) != mindspore::kSuccess) {
3733+    MS_LOG(ERROR) << "resize failed";
3734+    return undefinedResult;
3735+  }
3736+  status = napi_get_boolean(env, result, &jsResult);
3737+  if (status != napi_ok) {
3738+    MS_LOG(ERROR) << "get bool error";
3739+    return undefinedResult;
3740+  }
3741+  return jsResult;
3742+}
3743+
// Fill `data` with size / sizeof(T) pseudo-random values of type T drawn from
// `distribution`. A default-constructed std::mt19937 is used (fixed default
// seed), so the output is deterministic and identical across calls.
template <typename T, typename Distribution>
void GenerateRandomData(int size, void *data, Distribution distribution) {
  std::mt19937 engine;
  int elements = size / sizeof(T);
  auto *out = static_cast<T *>(data);
  for (int i = 0; i < elements; ++i) {
    out[i] = static_cast<T>(distribution(engine));
  }
}
3751+
3752+int GenerateInputDataWithRandom(std::vector<mindspore::MSTensor> inputs) {
3753+  for (auto tensor : inputs) {
3754+    auto input_data = tensor.MutableData();
3755+    if (input_data == nullptr) {
3756+      std::cerr << "mallocData for inTensor failed." << std::endl;
3757+      return -1;
3758+    }
3759+    GenerateRandomData<float>(tensor.DataSize(), input_data, std::uniform_real_distribution<float>(0.1f, 1.0f));
3760+  }
3761+  return mindspore::kSuccess;
3762+}
3763+
// model.predict(inputs[, callback]) — async entry point.
// Validates the optional callback, copies the JS input buffers into the native
// model (SetTensorData), then queues an async work item. The heavy lifting and
// promise/callback settlement happen in PredictAsyncCallbackComplete.
// Returns a promise when no callback is supplied, undefined when one is.
napi_value MSLiteModelNapi::PredictAsync(napi_env env, napi_callback_info info) {
  napi_status status = napi_ok;
  napi_value undefinedResult = nullptr;  // NOTE(review): never set via napi_get_undefined; early returns hand nullptr to the engine
  napi_value result = nullptr;
  const int32_t refCount = 1;  // one strong ref keeps the JS callback alive until completion
  napi_valuetype valueType;

  std::unique_ptr<MSLiteModelAsyncContext> asyncContext = std::make_unique<MSLiteModelAsyncContext>();
  if (asyncContext == nullptr) {  // NOTE(review): dead check — make_unique throws instead of returning null
    MS_LOG(ERROR) << "MSLiteModelAsyncContext object create failed.";
    return undefinedResult;
  }

  // GET_PARAMS declares argc, argv[ARGS_TWO] and thisVar in this scope.
  GET_PARAMS(env, info, ARGS_TWO);
  for (size_t i = PARAM0; i < argc; i++) {
    // Only the optional second argument (the callback) needs validation here;
    // argv[PARAM0] is consumed by SetTensorData below.
    if (i == PARAM1) {
      status = napi_typeof(env, argv[i], &valueType);
      if ((status != napi_ok) || (valueType != napi_function)) {
        MS_LOG(ERROR) << "napi_typeof check callback failed.";
        return result;
      }
      status = napi_create_reference(env, argv[i], refCount, &asyncContext->callbackRef);
      if (status != napi_ok) {
        MS_LOG(ERROR) << "failed to create reference of callback";
        return result;
      }
    }
  }

  // Copy input data into the native tensors and stash the model in the context.
  if (SetTensorData(env, thisVar, argv[PARAM0], asyncContext.get()) != SUCCESS) {
    MS_LOG(ERROR) << "Set tensor data failed.";
    return undefinedResult;
  }

  // No callback supplied => promise-style API; otherwise return undefined.
  if (asyncContext->callbackRef == nullptr) {
    status = napi_create_promise(env, &asyncContext->deferred, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create promise failed.";
      return result;
    }
  } else {
    status = napi_get_undefined(env, &result);
    if (status != napi_ok) {
      MS_LOG(ERROR) << "create callback failed.";
      return result;
    }
  }

  napi_value resource = nullptr;
  napi_create_string_utf8(env, "Predict", NAPI_AUTO_LENGTH, &resource);
  status = napi_create_async_work(
    env, nullptr, resource,
    // Execute phase runs off the JS thread; the actual Predict() call is
    // deferred to the complete callback, so this only marks the context OK.
    [](napi_env env, void *data) {
      auto context = static_cast<MSLiteModelAsyncContext *>(data);
      context->status = SUCCESS;
    },
    PredictAsyncCallbackComplete, static_cast<void *>(asyncContext.get()), &asyncContext->work);
  if (status != napi_ok) {
    result = nullptr;
  } else {
    status = napi_queue_async_work(env, asyncContext->work);
    if (status == napi_ok) {
      // Ownership of the context transfers to the queued work; it is released
      // by the completion path.
      asyncContext.release();
    } else {
      result = nullptr;
    }
  }
  return result;
}
3833+
3834+int32_t MSLiteModelNapi::SetTensorData(napi_env env, napi_value thisVar, napi_value argv,
3835+                                       MSLiteModelAsyncContext *async_context) {
3836+  uint32_t array_length = 0;
3837+  napi_status status = napi_get_array_length(env, argv, &array_length);
3838+  if (status != napi_ok || array_length <= 0) {
3839+    MS_LOG(ERROR) << "get inputs tensor length failed.";
3840+    return ERR_INVALID_PARAM;
3841+  }
3842+
3843+  status = napi_unwrap(env, thisVar, reinterpret_cast<void **>(&(async_context->lite_model)));
3844+  if (status != napi_ok || async_context->lite_model == nullptr) {
3845+    MS_LOG(ERROR) << "get model napi error";
3846+    return ERROR;
3847+  }
3848+  auto modelNapi = async_context->lite_model;
3849+  if (modelNapi->native_model_ == nullptr) {
3850+    MS_LOG(ERROR) << "model is released(null), please create model again";
3851+    return ERROR;
3852+  }
3853+
3854+  auto inputs = modelNapi->native_model_->GetInputs();
3855+  if (inputs.size() != array_length) {
3856+    MS_LOG(ERROR) << "array length not equal to model inputs size.";
3857+    return ERR_INVALID_PARAM;
3858+  }
3859+
3860+  for (size_t i = 0; i < array_length; i++) {
3861+    napi_value element = nullptr;
3862+    status = napi_get_element(env, argv, i, &element);
3863+    if (status != napi_ok) {
3864+      MS_LOG(ERROR) << "can not get element";
3865+      return ERROR;
3866+    }
3867+
3868+    std::string property_name = "getData";
3869+    bool exist = false;
3870+    napi_value data_func = nullptr;
3871+
3872+    napi_status status = napi_has_named_property(env, element, property_name.c_str(), &exist);
3873+
3874+    if (status != napi_ok || !exist) {
3875+      MS_LOG(ERROR) << "can not find " << property_name.c_str() << " property.";
3876+      return ERROR;
3877+    }
3878+
3879+    if (napi_get_named_property(env, element, property_name.c_str(), &data_func) != napi_ok) {
3880+      MS_LOG(ERROR) << "get " << property_name.c_str() << " property fail.";
3881+      return ERROR;
3882+    }
3883+    void *js_data = nullptr;
3884+    size_t length = 0;
3885+    napi_value return_val;
3886+
3887+    status = napi_call_function(env, element, data_func, 0, nullptr, &return_val);
3888+    if (status != napi_ok || return_val == nullptr) {
3889+      MS_LOG(ERROR) << "napi call function error.";
3890+      return ERROR;
3891+    }
3892+    status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
3893+    if (status != napi_ok || js_data == nullptr) {
3894+      MS_LOG(ERROR) << "Get js data error.";
3895+      return ERROR;
3896+    }
3897+    if (inputs[i].DataSize() != length) {
3898+      MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(inputs[i].DataSize()) << ", but data length got "
3899+                    << static_cast<int>(length);
3900+      return ERROR;
3901+    }
3902+
3903+    auto tensor_data = inputs[i].MutableData();
3904+    if (tensor_data == nullptr) {
3905+      MS_LOG(ERROR) << "malloc data for tensor failed.";
3906+      return ERROR;
3907+    }
3908+    memcpy(tensor_data, js_data, length);
3909+  }
3910+  return SUCCESS;
3911+}
3912+
// Completion handler for the "Predict" async work queued by PredictAsync.
// Runs on the JS thread: executes the native prediction, wraps the outputs in
// MSTensorNapi objects, and hands the result to CommonCallbackRoutine.
void MSLiteModelNapi::PredictAsyncCallbackComplete(napi_env env, napi_status status, void *data) {
  napi_value valueParam = nullptr;
  auto asyncContext = static_cast<MSLiteModelAsyncContext *>(data);

  if (asyncContext != nullptr) {
    // The execute phase sets context->status to SUCCESS; a falsy status here
    // means "proceed with prediction".
    if (!asyncContext->status) {
      auto modelNapi = asyncContext->lite_model;
      if (modelNapi->native_model_ == nullptr) {
        MS_LOG(ERROR) << "model is released(null), please create model again";
        // NOTE(review): returning here skips CommonCallbackRoutine, so the
        // pending promise/callback is never settled and asyncContext (and its
        // napi work item) leaks — confirm whether an error completion should
        // be issued instead.
        return;
      }
      auto inputs = modelNapi->native_model_->GetInputs();
      std::vector<MSTensor> outputs;

      auto predict_ret = modelNapi->native_model_->Predict(inputs, &outputs);
      if (predict_ret != mindspore::kSuccess) {
        MS_LOG(ERROR) << "model predict failed.";
        // NOTE(review): same unsettled-promise / context-leak concern as above.
        return;
      }

      // Wrap each output tensor for JS consumption.
      napi_create_array_with_length(env, outputs.size(), &valueParam);
      for (size_t i = 0; i < outputs.size(); i++) {
        status = napi_set_element(env, valueParam, i, MSTensorNapi::NewInstance(env, outputs[i]));
        if (status != napi_ok) {
          MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
        }
      }
      MS_LOG(INFO) << "predict model success.";
    }
    // Presumably settles the deferred promise or invokes the stored callback
    // and frees the context (implemented elsewhere) — verify in common_napi.cc.
    CommonCallbackRoutine(env, asyncContext, valueParam);
  } else {
    MS_LOG(ERROR) << "ERROR: PredictAsyncCallbackComplete asyncContext is Null!";
  }
}
3947+
3948+napi_value MSLiteModelNapi::GetWeights(napi_env env, napi_callback_info info) {
3949+  napi_value undefinedResult = nullptr;
3950+  napi_get_undefined(env, &undefinedResult);
3951+
3952+  size_t argCount = 0;
3953+  napi_value jsThis = nullptr;
3954+  napi_value jsResult = nullptr;
3955+  MSLiteModelNapi *modelNapi = nullptr;
3956+
3957+  napi_status status = napi_get_cb_info(env, info, &argCount, nullptr, &jsThis, nullptr);
3958+  if (status != napi_ok || jsThis == nullptr) {
3959+    MS_LOG(ERROR) << "failed to retrieve details about the callback";
3960+    return undefinedResult;
3961+  }
3962+
3963+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
3964+  if (status != napi_ok || modelNapi == nullptr) {
3965+    MS_LOG(ERROR) << "failed to get model";
3966+    return undefinedResult;
3967+  }
3968+
3969+  if (modelNapi->native_model_ == nullptr) {
3970+    MS_LOG(ERROR) << "model is released(null), please create model again";
3971+    return undefinedResult;
3972+  }
3973+  std::vector<MSTensor> weights = modelNapi->native_model_->GetFeatureMaps();
3974+  std::vector<MSTensor> feature_maps;
3975+  for (size_t i = 0; i < weights.size(); i++) {
3976+    auto tensor = mindspore::MSTensor::CreateTensor(weights.at(i).Name(), weights.at(i).DataType(), {}, nullptr, 0);
3977+    if (tensor == nullptr) {
3978+      MS_LOG(ERROR) << "create tensor failed.";
3979+      return undefinedResult;
3980+    }
3981+    tensor->SetShape(weights.at(i).Shape());
3982+    tensor->SetFormat(weights.at(i).format());
3983+    tensor->SetDataType(weights.at(i).DataType());
3984+    tensor->SetData(weights.at(i).MutableData(), false);
3985+    feature_maps.push_back(*tensor);
3986+    delete tensor;
3987+  }
3988+
3989+  size_t size = weights.size();
3990+  MS_LOG(INFO) << "weights size: " << size;
3991+  napi_create_array_with_length(env, size, &jsResult);
3992+  for (size_t i = 0; i < size; i++) {
3993+    status = napi_set_element(env, jsResult, i, MSTensorNapi::NewInstance(env, feature_maps[i]));
3994+    if (status != napi_ok) {
3995+      MS_LOG(ERROR) << "napi_set_element failed! code: " << status;
3996+    }
3997+  }
3998+  MS_LOG(INFO) << "get model weights success";
3999+  return jsResult;
4000+}
4001+
4002+int32_t SetModelInputs(napi_env env, napi_value argv, std::shared_ptr<Model> model) {
4003+  uint32_t array_length = 0;
4004+  napi_status status = napi_get_array_length(env, argv, &array_length);
4005+  if (status != napi_ok || array_length <= 0) {
4006+    MS_LOG(ERROR) << "get inputs tensor length failed.";
4007+    return ERR_INVALID_PARAM;
4008+  }
4009+
4010+  if (model == nullptr) {
4011+    MS_LOG(ERROR) << "model is nullptr";
4012+    return ERR_INVALID_PARAM;
4013+  }
4014+
4015+  auto inputs = model->GetInputs();
4016+  if (inputs.size() != array_length) {
4017+    MS_LOG(ERROR) << "array length not equal to model inputs size.";
4018+    return ERR_INVALID_PARAM;
4019+  }
4020+
4021+  for (size_t i = 0; i < array_length; i++) {
4022+    napi_value element = nullptr;
4023+    status = napi_get_element(env, argv, i, &element);
4024+    if (status != napi_ok) {
4025+      MS_LOG(ERROR) << "can not get element";
4026+      return ERROR;
4027+    }
4028+
4029+    std::string property_name = "getData";
4030+    bool exist = false;
4031+    napi_value data_func = nullptr;
4032+
4033+    napi_status status = napi_has_named_property(env, element, property_name.c_str(), &exist);
4034+
4035+    if (status != napi_ok || !exist) {
4036+      MS_LOG(ERROR) << "can not find " << property_name.c_str() << " property.";
4037+      return ERROR;
4038+    }
4039+
4040+    if (napi_get_named_property(env, element, property_name.c_str(), &data_func) != napi_ok) {
4041+      MS_LOG(ERROR) << "get " << property_name.c_str() << " property fail.";
4042+      return ERROR;
4043+    }
4044+    void *js_data = nullptr;
4045+    size_t length = 0;
4046+    napi_value return_val;
4047+
4048+    status = napi_call_function(env, element, data_func, 0, nullptr, &return_val);
4049+    if (status != napi_ok || return_val == nullptr) {
4050+      MS_LOG(ERROR) << "napi call function error.";
4051+      return ERROR;
4052+    }
4053+    status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
4054+    if (status != napi_ok || js_data == nullptr) {
4055+      MS_LOG(ERROR) << "Get js data error.";
4056+      return ERROR;
4057+    }
4058+    if (inputs[i].DataSize() != length) {
4059+      MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(inputs[i].DataSize()) << ", but data length got "
4060+                    << static_cast<int>(length);
4061+      return ERROR;
4062+    }
4063+
4064+    auto tensor_data = inputs[i].MutableData();
4065+    if (tensor_data == nullptr) {
4066+      MS_LOG(ERROR) << "malloc data for tensor failed.";
4067+      return ERROR;
4068+    }
4069+    memcpy(tensor_data, js_data, length);
4070+  }
4071+  return SUCCESS;
4072+}
4073+
4074+napi_value MSLiteModelNapi::RunStep(napi_env env, napi_callback_info info) {
4075+  napi_value undefinedResult = nullptr;
4076+  bool result = false;
4077+  napi_status status = napi_get_boolean(env, result, &undefinedResult);
4078+  if (status != napi_ok) {
4079+    MS_LOG(ERROR) << "get bool error";
4080+    return undefinedResult;
4081+  }
4082+
4083+  napi_value jsThis = nullptr;
4084+  MSLiteModelNapi *modelNapi = nullptr;
4085+  size_t argCount = PARAM1;
4086+  napi_value argv[ARGS_ONE] = {0};
4087+
4088+  status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
4089+  if (status != napi_ok || jsThis == nullptr) {
4090+    MS_LOG(ERROR) << "failed to retrieve details about the callback";
4091+    return undefinedResult;
4092+  }
4093+
4094+  if (argCount < ARGS_ONE) {
4095+    MS_LOG(ERROR) << "argument num is less than one, please give input tensors";
4096+    return undefinedResult;
4097+  }
4098+
4099+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
4100+  if (status != napi_ok || modelNapi == nullptr) {
4101+    MS_LOG(ERROR) << "get model napi error";
4102+    return undefinedResult;
4103+  }
4104+
4105+  if (SetModelInputs(env, argv[PARAM0], modelNapi->native_model_) != SUCCESS) {
4106+    MS_LOG(ERROR) << "set tensor data failed";
4107+    return undefinedResult;
4108+  }
4109+
4110+  if (modelNapi->native_model_ == nullptr) {
4111+    MS_LOG(ERROR) << "model is released(null), please create model again";
4112+    return undefinedResult;
4113+  }
4114+
4115+  auto ret = modelNapi->native_model_->RunStep();
4116+  if (ret != kSuccess) {
4117+    MS_LOG(ERROR) << "Model run step failed";
4118+    return undefinedResult;
4119+  }
4120+  status = napi_get_boolean(env, true, &undefinedResult);
4121+  if (status != napi_ok) {
4122+    MS_LOG(ERROR) << "create bool true value failed";
4123+    return undefinedResult;
4124+  }
4125+  return undefinedResult;
4126+}
4127+
4128+napi_value MSLiteModelNapi::UpdateWeights(napi_env env, napi_callback_info info) {
4129+  napi_value undefinedResult = nullptr;
4130+  bool result = false;
4131+  napi_status status = napi_get_boolean(env, result, &undefinedResult);
4132+  if (status != napi_ok) {
4133+    MS_LOG(ERROR) << "get bool error";
4134+    return undefinedResult;
4135+  }
4136+
4137+  napi_value jsThis = nullptr;
4138+  napi_value jsResult = nullptr;
4139+  MSLiteModelNapi *modelNapi = nullptr;
4140+  napi_value argv[ARGS_ONE] = {0};
4141+  size_t argCount = PARAM1;
4142+  status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
4143+  if (status != napi_ok || jsThis == nullptr) {
4144+    MS_LOG(ERROR) << "failed to retrieve details about the callback";
4145+    return undefinedResult;
4146+  }
4147+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
4148+  if (status != napi_ok || modelNapi == nullptr) {
4149+    MS_LOG(ERROR) << "get model napi error";
4150+    return undefinedResult;
4151+  }
4152+
4153+  if (modelNapi->native_model_ == nullptr) {
4154+    MS_LOG(ERROR) << "model is released(null), please create model again";
4155+    return undefinedResult;
4156+  }
4157+
4158+  // set inputs data
4159+  uint32_t array_length = 0;
4160+  status = napi_get_array_length(env, argv[PARAM0], &array_length);
4161+  if (status != napi_ok || array_length <= 0) {
4162+    MS_LOG(ERROR) << "get inputs tensor length failed.";
4163+    return undefinedResult;
4164+  }
4165+
4166+  std::vector<MSTensor> weights;
4167+  for (size_t i = 0; i < array_length; i++) {
4168+    napi_value element = nullptr;
4169+    status = napi_get_element(env, argv[PARAM0], i, &element);
4170+    if (status != napi_ok) {
4171+      MS_LOG(ERROR) << "can not get element";
4172+      return undefinedResult;
4173+    }
4174+
4175+    // get tensor name
4176+    std::string tensor_name;
4177+    auto ret = CommonNapi::GetPropertyString(env, element, "name", tensor_name);
4178+    if (ret != SUCCESS) {
4179+      MS_LOG(ERROR) << "get tensor name property failed";
4180+      return undefinedResult;
4181+    }
4182+
4183+    // get tensor format
4184+    int format;
4185+    ret = CommonNapi::GetPropertyInt32(env, element, "format", format);
4186+    if (ret != SUCCESS) {
4187+      MS_LOG(ERROR) << "get format property failed";
4188+      return undefinedResult;
4189+    }
4190+
4191+    // get dtype
4192+    int dtype;
4193+    ret = CommonNapi::GetPropertyInt32(env, element, "dtype", dtype);
4194+    if (ret != SUCCESS) {
4195+      MS_LOG(ERROR) << "get format property failed";
4196+      return undefinedResult;
4197+    }
4198+
4199+    // get data size
4200+    int data_size;
4201+    ret = CommonNapi::GetPropertyInt32(env, element, "dataSize", data_size);
4202+    if (ret != SUCCESS) {
4203+      MS_LOG(ERROR) << "get dataSize property failed";
4204+      return undefinedResult;
4205+    }
4206+
4207+    // get shape
4208+    std::vector<int32_t> shape;
4209+    ret = CommonNapi::GetPropertyInt32Array(env, element, "shape", shape);
4210+    if (ret != SUCCESS) {
4211+      MS_LOG(ERROR) << "get shape property failed";
4212+      return undefinedResult;
4213+    }
4214+
4215+    // get data
4216+    std::string property_name = "getData";
4217+    bool exist = false;
4218+    napi_value data_func = nullptr;
4219+
4220+    status = napi_has_named_property(env, element, property_name.c_str(), &exist);
4221+    if (status != napi_ok || !exist) {
4222+      MS_LOG(ERROR) << "can not find target property";
4223+      return undefinedResult;
4224+    }
4225+
4226+    if (napi_get_named_property(env, element, property_name.c_str(), &data_func) != napi_ok) {
4227+      MS_LOG(ERROR) << "get " << property_name.c_str() << " property fail.";
4228+      return undefinedResult;
4229+    }
4230+    void *js_data = nullptr;
4231+    size_t length = 0;
4232+
4233+    napi_value return_val;
4234+    status = napi_call_function(env, element, data_func, 0, nullptr, &return_val);
4235+    if (status != napi_ok || return_val == nullptr) {
4236+      MS_LOG(ERROR) << "napi call function error.";
4237+      return undefinedResult;
4238+    }
4239+
4240+    status = napi_get_arraybuffer_info(env, return_val, &js_data, &length);
4241+    if (status != napi_ok || js_data == nullptr) {
4242+      MS_LOG(ERROR) << "get js data error.";
4243+      return undefinedResult;
4244+    }
4245+
4246+    std::vector<int64_t> int64_shape;
4247+    int64_shape.reserve(shape.size());
4248+    std::transform(shape.begin(), shape.end(), std::back_inserter(int64_shape), [](int32_t value) {
4249+      return static_cast<int64_t>(value);
4250+    });
4251+    auto tensor = mindspore::MSTensor::CreateTensor(tensor_name, static_cast<mindspore::DataType>(dtype), int64_shape, nullptr, 0);
4252+    if (tensor == nullptr) {
4253+      MS_LOG(ERROR) << "create tensor failed.";
4254+      return undefinedResult;
4255+    }
4256+    tensor->SetFormat(static_cast<mindspore::Format>(format));
4257+    auto tensor_data = tensor->MutableData();
4258+    if (tensor_data == nullptr) {
4259+      MS_LOG(ERROR) << "mutable tensor data failed, get nullptr";
4260+      return undefinedResult;
4261+    }
4262+
4263+    if (tensor->DataSize() != length) {
4264+      MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(tensor->DataSize()) << ", but data length got "
4265+                    << static_cast<int>(length);
4266+      return undefinedResult;
4267+    }
4268+
4269+    memcpy(tensor_data, js_data, length);
4270+    weights.push_back(*tensor);
4271+    delete tensor;
4272+  }
4273+
4274+  if (modelNapi->native_model_->UpdateFeatureMaps(weights) != mindspore::kSuccess) {
4275+    MS_LOG(ERROR) << "UpdateFeatureMaps failed";
4276+    return undefinedResult;
4277+  }
4278+  status = napi_get_boolean(env, true, &jsResult);
4279+  if (status != napi_ok) {
4280+    MS_LOG(ERROR) << "get bool error";
4281+    return undefinedResult;
4282+  }
4283+  return jsResult;
4284+}
4285+
// JS binding: model.exportModel(modelFile: string, quantizationType?: number,
//             exportInferenceOnly?: boolean, outputTensorName?: string[]).
// Serializes the wrapped model to `modelFile` in MindIR format via
// mindspore::Serialization::ExportModel. Returns JS boolean true on success,
// boolean false on any failure.
napi_value MSLiteModelNapi::ExportModel(napi_env env, napi_callback_info info) {
  // Despite its name, `undefinedResult` is a JS boolean `false`, pre-created
  // here so every error path below can return it directly.
  napi_value undefinedResult = nullptr;
  bool result = false;
  napi_status status = napi_get_boolean(env, result, &undefinedResult);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "get bool error";
    return undefinedResult;
  }

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_FOUR] = {0};
  size_t argCount = PARAM4;
  status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  // Recover the C++ wrapper previously bound to the JS `this` object.
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }

  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  // Required arg 0: destination model file path.
  // NOTE(review): argv[PARAM0] is read without checking argCount >= 1; with
  // zero args the slot is a zero-initialized napi_value and the call below
  // fails — confirm that is the intended error path.
  char char_buf[SIZE];
  size_t buf_length = 0;
  status = napi_get_value_string_utf8(env, argv[PARAM0], char_buf, SIZE, &buf_length);
  if ((status != napi_ok) || (buf_length <= 0)) {
    MS_LOG(ERROR) << "Parse model file failed.";
    return undefinedResult;
  }

  std::string model_path;
  model_path.assign(char_buf, char_buf + buf_length);
  MS_LOG(DEBUG) << "model_path: " << model_path.c_str();

  mindspore::QuantizationType quantization_type = kNoQuant;
  int32_t quantization_type_value;
  // Optional arg 1: quantization type (defaults to kNoQuant).
  // NOTE(review): logged at WARNING although the function returns failure —
  // consider ERROR for consistency with the other argument checks.
  if (argCount >= ARGS_TWO) {
    if (napi_get_value_int32(env, argv[PARAM1], &quantization_type_value) != napi_ok) {
      MS_LOG(WARNING) << "fail to get int32_t value from quantizationType";
      return undefinedResult;
    }
    quantization_type = static_cast<mindspore::QuantizationType>(quantization_type_value);
  }

  // Optional arg 2: whether to export the inference-only graph (default true).
  bool export_inference_only = true;
  if (argCount >= ARGS_THREE) {
    if (napi_get_value_bool(env, argv[PARAM2], &export_inference_only) != napi_ok) {
      MS_LOG(WARNING) << "fail to get bool value from exportInferenceOnly";
      return undefinedResult;
    }
  }

  // Optional arg 3: names of the output tensors to keep (default: all).
  std::vector<std::string> output_tensor_name;
  if (argCount >= ARGS_FOUR) {
    auto ret = CommonNapi::GetStringArray(env, argv[PARAM3], output_tensor_name);
    if (ret != SUCCESS) {
      MS_LOG(ERROR) << "Get context target failed.";
      return undefinedResult;
    }
  }

  auto ret = mindspore::Serialization::ExportModel(*(modelNapi->native_model_.get()), static_cast<mindspore::ModelType>(kMindIR),
                                        model_path, static_cast<mindspore::QuantizationType>(quantization_type),
                                        export_inference_only, output_tensor_name);
  if (ret != mindspore::kSuccess) {
    MS_LOG(ERROR) << "Export model failed";
    return undefinedResult;
  }

  status = napi_get_boolean(env, true, &jsResult);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "get bool error";
    return undefinedResult;
  }
  MS_LOG(DEBUG) << "Export Model Success";
  return jsResult;
}
4375+
// JS binding: model.exportWeightsCollaborateWithMicro(weightFile: string,
//             isInference?: boolean, enableFp16?: boolean,
//             changeableWeightsName?: string[]).
// Exports only the model weights (for Micro-codegen collaboration) to
// `weightFile`. Returns JS boolean true on success, boolean false on failure.
napi_value MSLiteModelNapi::ExportWeightsCollaborateWithMicro(napi_env env, napi_callback_info info) {
  // Pre-created JS boolean `false`, returned by every error path.
  napi_value undefinedResult = nullptr;
  bool result = false;
  napi_status status = napi_get_boolean(env, result, &undefinedResult);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "get bool error";
    return undefinedResult;
  }

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_FOUR] = {0};
  size_t argCount = PARAM4;
  status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  // Recover the C++ wrapper bound to the JS `this` object.
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }

  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  // Required arg 0: destination weight file path.
  char char_buf[SIZE];
  size_t buf_length = 0;
  status = napi_get_value_string_utf8(env, argv[PARAM0], char_buf, SIZE, &buf_length);
  if ((status != napi_ok) || (buf_length <= 0)) {
    MS_LOG(ERROR) << "Parse model file failed.";
    return undefinedResult;
  }

  std::string weight_file;
  weight_file.assign(char_buf, char_buf + buf_length);
  MS_LOG(DEBUG) << "weight_file: " << weight_file.c_str();

  // Optional arg 1: export weights of the inference graph (default true).
  bool is_inference = true;
  if (argCount >= ARGS_TWO) {
    if (napi_get_value_bool(env, argv[PARAM1], &is_inference) != napi_ok) {
      MS_LOG(WARNING) << "fail to get bool value from isInference";
      return undefinedResult;
    }
  }

  // Optional arg 2: save weights in fp16 (default false).
  bool enable_fp16 = false;
  if (argCount >= ARGS_THREE) {
    if (napi_get_value_bool(env, argv[PARAM2], &enable_fp16) != napi_ok) {
      MS_LOG(WARNING) << "fail to get bool value from enableFp16";
      return undefinedResult;
    }
  }

  // Optional arg 3: names of weights whose shapes may change at runtime.
  std::vector<std::string> changeable_weights_name;
  if (argCount >= ARGS_FOUR) {
    auto ret = CommonNapi::GetStringArray(env, argv[PARAM3], changeable_weights_name);
    if (ret != SUCCESS) {
      MS_LOG(ERROR) << "failed to get string array from changeableWeightsName";
      return undefinedResult;
    }
  }

  auto ret = mindspore::Serialization::ExportWeightsCollaborateWithMicro(*(modelNapi->native_model_.get()), static_cast<mindspore::ModelType>(kMindIR),
                                                              weight_file, is_inference, enable_fp16, changeable_weights_name);

  if (ret != mindspore::kSuccess) {
    MS_LOG(ERROR) << "ExportWeightsCollaborateWithMicro failed";
    return undefinedResult;
  }

  status = napi_get_boolean(env, true, &jsResult);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "get bool error";
    return undefinedResult;
  }
  MS_LOG(DEBUG) << "ExportWeightsCollaborateWithMicro Success";
  return jsResult;
}
4463+
// JS binding: model.setupVirtualBatch(virtualBatchMultiplier: number,
//             lr?: number, momentum?: number).
// Configures virtual batching on the native model; lr/momentum default to
// -1.0 which the runtime interprets as "keep current value".
// Returns JS boolean true on success, boolean false on failure.
napi_value MSLiteModelNapi::SetupVirtualBatch(napi_env env, napi_callback_info info) {
  // Pre-created JS boolean `false`, returned by every error path.
  napi_value undefinedResult = nullptr;
  bool result = false;
  napi_status status = napi_get_boolean(env, result, &undefinedResult);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "get bool error";
    return undefinedResult;
  }

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_THREE] = {0};
  size_t argCount = ARGS_THREE;
  status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }

  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  // Required arg 0: virtual batch multiplier.
  int virtual_batch_multiplier;
  if (napi_get_value_int32(env, argv[PARAM0], &virtual_batch_multiplier) != napi_ok) {
    MS_LOG(WARNING) << "fail to get int32 value from virtualBatchMultiplier";
    return undefinedResult;
  }

  // Optional arg 1: learning rate.
  double lr = -1.0f;
  if (argCount >= ARGS_TWO) {
    if (napi_get_value_double(env, argv[PARAM1], &lr) != napi_ok) {
      MS_LOG(WARNING) << "fail to get double value from lr";
      return undefinedResult;
    }
  }

  // Optional arg 2: momentum.
  double momentum = -1.0f;
  if (argCount >= ARGS_THREE) {
    if (napi_get_value_double(env, argv[PARAM2], &momentum) != napi_ok) {
      MS_LOG(WARNING) << "fail to get double value from momentum";
      return undefinedResult;
    }
  }


  auto ret = modelNapi->native_model_->SetupVirtualBatch(virtual_batch_multiplier, static_cast<float>(lr), static_cast<float>(momentum));

  if (ret != mindspore::kSuccess) {
    MS_LOG(ERROR) << "SetupVirtualBatch failed";
    return undefinedResult;
  }

  status = napi_get_boolean(env, true, &jsResult);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "get bool error";
    return undefinedResult;
  }
  return jsResult;
}
// JS getter: model.trainMode. Returns the native model's current train mode
// as a JS boolean, or undefined (nullptr) on failure.
napi_value MSLiteModelNapi::GetTrainMode(napi_env env, napi_callback_info info) {
  napi_value undefinedResult = nullptr;

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_ONE] = {0};
  size_t argCount = ARGS_ONE;
  auto status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  // Recover the C++ wrapper bound to the JS `this` object.
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }
  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  auto train_mode = modelNapi->native_model_->GetTrainMode();

  status = napi_get_boolean(env, train_mode, &jsResult);
  if (status != napi_ok) {
    MS_LOG(WARNING) << "create bool value error";
    return undefinedResult;
  }
  return jsResult;
}
// JS setter: model.trainMode = <boolean>. Only valid for train models;
// returns JS boolean true on success, undefined (nullptr) on failure.
napi_value MSLiteModelNapi::SetTrainMode(napi_env env, napi_callback_info info) {
  napi_value undefinedResult = nullptr;

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_ONE] = {0};
  size_t argCount = ARGS_ONE;
  auto status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }
  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  bool train_mode;
  if (napi_get_value_bool(env, argv[PARAM0], &train_mode) != napi_ok) {
    MS_LOG(WARNING) << "failed to get bool value from input train mode.";
    return undefinedResult;
  }
  // NOTE(review): model_info_ is accessed without a null check and without a
  // modelNapi-> qualifier — presumably a static/class-level member set during
  // model creation; confirm it is always initialized before this is callable.
  if (!model_info_->train_model) {
    MS_LOG(WARNING) << "current model is not train model, unable to set train or eval mode";
    return undefinedResult;
  }
  if (modelNapi->native_model_->SetTrainMode(train_mode) != kSuccess) {
    MS_LOG(ERROR) << "set train mode failed";
    return undefinedResult;
  }

  status = napi_get_boolean(env, true, &jsResult);
  if (status != napi_ok) {
    MS_LOG(WARNING) << "create bool value error";
    return undefinedResult;
  }
  return jsResult;
}
// JS getter: model.learningRate. Returns the native model's learning rate as
// a JS number, or undefined (nullptr) on failure.
napi_value MSLiteModelNapi::GetLearningRate(napi_env env, napi_callback_info info) {
  napi_value undefinedResult = nullptr;

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_ONE] = {0};
  size_t argCount = ARGS_ONE;
  auto status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  // Recover the C++ wrapper bound to the JS `this` object.
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }
  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  auto lr = modelNapi->native_model_->GetLearningRate();

  status = napi_create_double(env, lr, &jsResult);
  if (status != napi_ok) {
    MS_LOG(WARNING) << "create double value error";
    return undefinedResult;
  }
  return jsResult;
}
// JS setter: model.learningRate = <number>. Only valid for train models;
// returns JS boolean true on success, undefined (nullptr) on failure.
napi_value MSLiteModelNapi::SetLearningRate(napi_env env, napi_callback_info info) {
  napi_value undefinedResult = nullptr;

  napi_value jsThis = nullptr;
  napi_value jsResult = nullptr;
  MSLiteModelNapi *modelNapi = nullptr;
  napi_value argv[ARGS_ONE] = {0};
  size_t argCount = ARGS_ONE;
  auto status = napi_get_cb_info(env, info, &argCount, argv, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "failed to retrieve details about the callback";
    return undefinedResult;
  }
  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&modelNapi));
  if (status != napi_ok || modelNapi == nullptr) {
    MS_LOG(ERROR) << "get model napi error";
    return undefinedResult;
  }
  if (modelNapi->native_model_ == nullptr) {
    MS_LOG(ERROR) << "model is released(null), please create model again";
    return undefinedResult;
  }

  // NOTE(review): model_info_ is dereferenced without a null check — confirm
  // it is always initialized before any setter can run.
  if (!model_info_->train_model) {
    MS_LOG(WARNING) << "current model is not train model, unable to set learning rate";
    return undefinedResult;
  }

  double lr;
  if (napi_get_value_double(env, argv[PARAM0], &lr) != napi_ok) {
    MS_LOG(WARNING) << "failed to get double value.";
    return undefinedResult;
  }

  if (modelNapi->native_model_->SetLearningRate(static_cast<float>(lr)) != kSuccess) {
    MS_LOG(ERROR) << "set learning rate failed";
    return undefinedResult;
  }

  status = napi_get_boolean(env, true, &jsResult);
  if (status != napi_ok) {
    MS_LOG(WARNING) << "create bool value error";
    return undefinedResult;
  }
  return jsResult;
}
4688+}  // namespace mindspore
4689diff --git a/mindspore/lite/src/litert/js_api/mstensor_napi.cc b/mindspore/lite/src/litert/js_api/mstensor_napi.cc
4690new file mode 100644
4691index 00000000..975d3d94
4692--- /dev/null
4693+++ b/mindspore/lite/src/litert/js_api/mstensor_napi.cc
4694@@ -0,0 +1,416 @@
4695+/**
4696+ * Copyright 2023 Huawei Technologies Co., Ltd
4697+ *
4698+ * Licensed under the Apache License, Version 2.0 (the "License");
4699+ * you may not use this file except in compliance with the License.
4700+ * You may obtain a copy of the License at
4701+ *
4702+ * http://www.apache.org/licenses/LICENSE-2.0
4703+ *
4704+ * Unless required by applicable law or agreed to in writing, software
4705+ * distributed under the License is distributed on an "AS IS" BASIS,
4706+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
4707+ * See the License for the specific language governing permissions and
4708+ * limitations under the License.
4709+ */
4710+
4711+#include "include/js_api/mstensor_napi.h"
4712+#include <climits>
4713+#include <string.h>
4714+#include <map>
4715+#include "src/common/log.h"
4716+
4717+namespace mindspore {
// Per-thread reference to the JS class constructor; created lazily in
// GetConstructor() and reused for every NewInstance() call on that thread.
thread_local napi_ref MSTensorNapi::constructor_ = nullptr;
// Name under which the class is exposed to JavaScript.
const std::string CLASS_NAME = "MSTensor";

// Boilerplate for callbacks that need their JS arguments: declares and fills
// argc/argv/thisVar/data from the callback info.
#define GET_PARAMS(env, info, num) \
  size_t argc = num;               \
  napi_value argv[num] = {0};      \
  napi_value thisVar = nullptr;    \
  void *data;                      \
  napi_get_cb_info(env, info, &argc, argv, &thisVar, &data)

// Maps JS dtype names to the matching N-API typed-array element type.
const std::unordered_map<std::string, napi_typedarray_type> kDTypeMap{
  {"int32", napi_int32_array},
  {"float32", napi_float32_array},
  {"int8", napi_int8_array},
  {"uint8", napi_uint8_array},
};

namespace {
// Expected argument count for MSTensor.setData(buffer, ...).
const int ARGS_TWO = 2;
}
4738+
// Default constructor: the native tensor handle is attached later, in NewInstance().
MSTensorNapi::MSTensorNapi() { MS_LOG(DEBUG) << "MSLITE MSTensorNapi Instances create."; }
4740+
4741+MSTensorNapi::~MSTensorNapi() {
4742+  if (nativeMSTensor_ != nullptr) {
4743+    nativeMSTensor_ = nullptr;
4744+  }
4745+  MS_LOG(INFO) << "MSLITE MSTensorNapi Instances destroy.";
4746+}
4747+
// N-API class constructor callback: allocates the C++ wrapper and binds it to
// the new JS object via napi_wrap. The native tensor itself is attached later
// by NewInstance(). Returns the JS object, or nullptr on failure.
napi_value MSTensorNapi::Constructor(napi_env env, napi_callback_info info) {
  napi_value jsThis = nullptr;
  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
  if (status != napi_ok || jsThis == nullptr) {
    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
    return nullptr;
  }

  std::unique_ptr<MSTensorNapi> tensorNapi = std::make_unique<MSTensorNapi>();
  if (tensorNapi == nullptr) {
    MS_LOG(ERROR) << "No memory";
    return nullptr;
  }

  tensorNapi->env_ = env;
  // On success, ownership of the wrapper transfers to the JS object; it is
  // deleted by Finalize() when the object is garbage-collected.
  status = napi_wrap(env, jsThis, reinterpret_cast<void *>(tensorNapi.get()), MSTensorNapi::Finalize, nullptr, nullptr);
  if (status == napi_ok) {
    tensorNapi.release();
    return jsThis;
  }

  MS_LOG(ERROR) << "Constructor fail.";
  return nullptr;
}
4772+
4773+void MSTensorNapi::Finalize(napi_env env, void *nativeObject, void *finalize) {
4774+  (void)env;
4775+  (void)finalize;
4776+  if (nativeObject != nullptr) {
4777+    delete reinterpret_cast<MSTensorNapi *>(nativeObject);
4778+  }
4779+  MS_LOG(INFO) << "Finalize success.";
4780+}
4781+
// Creates a JS MSTensor object wrapping a copy of `tensor`.
// Returns the JS instance (possibly with a null native handle if attaching
// failed — the errors below are logged but the instance is still returned),
// or nullptr if the class constructor could not be obtained/invoked.
napi_value MSTensorNapi::NewInstance(napi_env env, mindspore::MSTensor tensor) {
  napi_value cons = GetConstructor(env);
  if (cons == nullptr) {
    MS_LOG(ERROR) << "NewInstance GetConstructor is nullptr!";
    return nullptr;
  }
  napi_value instance;
  napi_status status = napi_new_instance(env, cons, 0, nullptr, &instance);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "NewInstance napi_new_instance failed! code: " << status;
    return nullptr;
  }

  MSTensorNapi *proxy = nullptr;
  status = napi_unwrap(env, instance, reinterpret_cast<void **>(&proxy));
  if (proxy == nullptr) {
    MS_LOG(ERROR) << "NewInstance native instance is nullptr! code: " << status;
    return instance;
  }
  // MSTensor needs no fresh allocation here: it is a handle obtained directly
  // from Model.getInputs(), so a shallow copy into the wrapper suffices.
  proxy->nativeMSTensor_ = std::make_unique<mindspore::MSTensor>(tensor);
  if (proxy->nativeMSTensor_ == nullptr) {
    MS_LOG(ERROR) << "NewInstance native tensor unique ptr is nullptr!";
    return instance;
  }
  return instance;
}
4809+
// Returns the JS class constructor for MSTensor, defining the class (getters
// name/shape/elementNum/dtype/format/dataSize and methods getData/setData) on
// first use and caching it in the thread-local `constructor_` reference.
napi_value MSTensorNapi::GetConstructor(napi_env env) {
  napi_value cons;
  if (constructor_ != nullptr) {
    napi_get_reference_value(env, constructor_, &cons);
    return cons;
  }

  MS_LOG(INFO) << "Get msTensorNapi constructor.";
  napi_property_descriptor properties[] = {
    DECLARE_NAPI_GETTER("name", GetName),
    DECLARE_NAPI_GETTER("shape", GetShape),
    DECLARE_NAPI_GETTER("elementNum", GetElementNum),
    DECLARE_NAPI_GETTER("dtype", GetDtype),
    DECLARE_NAPI_GETTER("format", GetFormat),
    DECLARE_NAPI_GETTER("dataSize", GetDataSize),

    DECLARE_NAPI_FUNCTION("getData", GetDataBuffer),
    DECLARE_NAPI_FUNCTION("setData", SetData),
  };

  napi_status status = napi_define_class(env, CLASS_NAME.c_str(), NAPI_AUTO_LENGTH, Constructor, nullptr,
                                         sizeof(properties) / sizeof(napi_property_descriptor), properties, &cons);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "MSLITE Failed to define MSTensor class";
    return nullptr;
  }

  // Keep a strong reference (refcount 1) so the class survives GC.
  status = napi_create_reference(env, cons, 1, &constructor_);
  if (status != napi_ok) {
    MS_LOG(ERROR) << "MSLITE Failed to create reference of constructor";
    return nullptr;
  }

  return cons;
}
4845+
4846+napi_value MSTensorNapi::GetName(napi_env env, napi_callback_info info) {
4847+  napi_value undefinedResult = nullptr;
4848+  napi_get_undefined(env, &undefinedResult);
4849+  napi_value jsThis = nullptr;
4850+  napi_value jsResult = nullptr;
4851+  MSTensorNapi *tensor = nullptr;
4852+
4853+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
4854+  if (status != napi_ok || jsThis == nullptr) {
4855+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
4856+    return undefinedResult;
4857+  }
4858+
4859+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
4860+  if (status != napi_ok || tensor == nullptr) {
4861+    MS_LOG(ERROR) << "get tensor napi error";
4862+    return undefinedResult;
4863+  }
4864+
4865+  status = napi_create_string_utf8(env, tensor->nativeMSTensor_->Name().c_str(), NAPI_AUTO_LENGTH, &jsResult);
4866+  if (status != napi_ok) {
4867+    MS_LOG(ERROR) << "napi_create_string_utf8 error";
4868+    return undefinedResult;
4869+  }
4870+
4871+  MS_LOG(INFO) << "GetName success.";
4872+  return jsResult;
4873+}
4874+
4875+napi_value MSTensorNapi::GetShape(napi_env env, napi_callback_info info) {
4876+  napi_value undefinedResult = nullptr;
4877+  napi_get_undefined(env, &undefinedResult);
4878+  napi_value jsThis = nullptr;
4879+  napi_value jsResult = nullptr;
4880+  MSTensorNapi *tensor = nullptr;
4881+
4882+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
4883+  if (status != napi_ok || jsThis == nullptr) {
4884+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
4885+    return undefinedResult;
4886+  }
4887+
4888+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
4889+  if (status != napi_ok || tensor == nullptr) {
4890+    MS_LOG(ERROR) << "get tensor napi error";
4891+    return undefinedResult;
4892+  }
4893+
4894+  // return array
4895+  auto shape = tensor->nativeMSTensor_->Shape();
4896+  size_t size = shape.size();
4897+  napi_create_array_with_length(env, size, &jsResult);
4898+  for (size_t i = 0; i < size; i++) {
4899+    napi_value num;
4900+    status = napi_create_int32(env, shape.at(i), &num);
4901+    if (status != napi_ok) {
4902+      MS_LOG(ERROR) << "napi_create_int32 error";
4903+      return undefinedResult;
4904+    }
4905+    napi_set_element(env, jsResult, i, num);
4906+  }
4907+
4908+  MS_LOG(INFO) << "GetShape success.";
4909+  return jsResult;
4910+}
4911+
4912+napi_value MSTensorNapi::GetElementNum(napi_env env, napi_callback_info info) {
4913+  napi_value undefinedResult = nullptr;
4914+  napi_get_undefined(env, &undefinedResult);
4915+  napi_value jsThis = nullptr;
4916+  napi_value jsResult = nullptr;
4917+  MSTensorNapi *tensor = nullptr;
4918+
4919+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
4920+  if (status != napi_ok || jsThis == nullptr) {
4921+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
4922+    return undefinedResult;
4923+  }
4924+
4925+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
4926+  if (status != napi_ok || tensor == nullptr) {
4927+    MS_LOG(ERROR) << "get tensor napi error";
4928+    return undefinedResult;
4929+  }
4930+
4931+  status = napi_create_int32(env, tensor->nativeMSTensor_->ElementNum(), &jsResult);
4932+  if (status != napi_ok) {
4933+    MS_LOG(ERROR) << "napi_create_int32 error";
4934+    return undefinedResult;
4935+  }
4936+
4937+  MS_LOG(INFO) << "GetElementNum success.";
4938+  return jsResult;
4939+}
4940+
4941+napi_value MSTensorNapi::GetDtype(napi_env env, napi_callback_info info) {
4942+  napi_value undefinedResult = nullptr;
4943+  napi_get_undefined(env, &undefinedResult);
4944+  napi_value jsThis = nullptr;
4945+  napi_value jsResult = nullptr;
4946+  MSTensorNapi *tensor = nullptr;
4947+
4948+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
4949+  if (status != napi_ok || jsThis == nullptr) {
4950+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
4951+    return undefinedResult;
4952+  }
4953+
4954+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
4955+  if (status != napi_ok || tensor == nullptr) {
4956+    MS_LOG(ERROR) << "get tensor napi error";
4957+    return undefinedResult;
4958+  }
4959+
4960+  status = napi_create_int32(env, static_cast<int32_t>(tensor->nativeMSTensor_->DataType()), &jsResult);
4961+  if (status != napi_ok) {
4962+    MS_LOG(ERROR) << "napi_create_int32 error";
4963+    return undefinedResult;
4964+  }
4965+
4966+  MS_LOG(INFO) << "GetDtype success.";
4967+  return jsResult;
4968+}
4969+
4970+napi_value MSTensorNapi::GetFormat(napi_env env, napi_callback_info info) {
4971+  napi_value undefinedResult = nullptr;
4972+  napi_get_undefined(env, &undefinedResult);
4973+  napi_value jsThis = nullptr;
4974+  napi_value jsResult = nullptr;
4975+  MSTensorNapi *tensor = nullptr;
4976+
4977+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
4978+  if (status != napi_ok || jsThis == nullptr) {
4979+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
4980+    return undefinedResult;
4981+  }
4982+
4983+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
4984+  if (status != napi_ok || tensor == nullptr) {
4985+    MS_LOG(ERROR) << "get tensor napi error";
4986+    return undefinedResult;
4987+  }
4988+
4989+  status = napi_create_int32(env, static_cast<int32_t>(tensor->nativeMSTensor_->format()), &jsResult);
4990+  if (status != napi_ok) {
4991+    MS_LOG(ERROR) << "napi_create_int32 error";
4992+    return undefinedResult;
4993+  }
4994+
4995+  MS_LOG(INFO) << "GetFormat success.";
4996+  return jsResult;
4997+}
4998+
4999+napi_value MSTensorNapi::GetDataSize(napi_env env, napi_callback_info info) {
5000+  napi_value undefinedResult = nullptr;
5001+  napi_get_undefined(env, &undefinedResult);
5002+  napi_value jsThis = nullptr;
5003+  napi_value jsResult = nullptr;
5004+  MSTensorNapi *tensor = nullptr;
5005+
5006+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
5007+  if (status != napi_ok || jsThis == nullptr) {
5008+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
5009+    return undefinedResult;
5010+  }
5011+
5012+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
5013+  if (status != napi_ok || tensor == nullptr) {
5014+    MS_LOG(ERROR) << "get tensor napi error";
5015+    return undefinedResult;
5016+  }
5017+
5018+  status = napi_create_int32(env, tensor->nativeMSTensor_->DataSize(), &jsResult);
5019+  if (status != napi_ok) {
5020+    MS_LOG(ERROR) << "napi_create_int32 error";
5021+    return undefinedResult;
5022+  }
5023+
5024+  MS_LOG(INFO) << "GetDataSize success.";
5025+  return jsResult;
5026+}
5027+
5028+napi_value MSTensorNapi::GetDataBuffer(napi_env env, napi_callback_info info) {
5029+  napi_value undefinedResult = nullptr;
5030+  napi_get_undefined(env, &undefinedResult);
5031+
5032+  napi_value jsThis = nullptr;
5033+  napi_value jsResult = nullptr;
5034+  MSTensorNapi *tensor = nullptr;
5035+
5036+  napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
5037+  if (status != napi_ok || jsThis == nullptr) {
5038+    MS_LOG(ERROR) << "Failed to retrieve details about the callback";
5039+    return undefinedResult;
5040+  }
5041+
5042+  status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&tensor));
5043+  if (status != napi_ok || tensor == nullptr) {
5044+    MS_LOG(ERROR) << "get tensor napi error";
5045+    return undefinedResult;
5046+  }
5047+
5048+  size_t byte_length = tensor->nativeMSTensor_->DataSize();
5049+  auto tensor_data = tensor->nativeMSTensor_->MutableData();
5050+  if (tensor_data == nullptr) {
5051+    MS_LOG(ERROR) << "tensor_data is null.";
5052+    return undefinedResult;
5053+  }
5054+
5055+  void *data = nullptr;
5056+  status = napi_create_arraybuffer(env, byte_length, &data, &jsResult);
5057+  if (status != napi_ok) {
5058+    MS_LOG(ERROR) << "napi_create_arraybuffer error";
5059+    return undefinedResult;
5060+  }
5061+  if (data == nullptr || jsResult == nullptr) {
5062+    MS_LOG(ERROR) << "napi_create_arraybuffer error";
5063+    return undefinedResult;
5064+  }
5065+
5066+  memcpy(data, tensor_data, byte_length);
5067+  MS_LOG(INFO) << "GetDataBuffer success.";
5068+  return jsResult;
5069+}
5070+
5071+napi_value MSTensorNapi::SetData(napi_env env, napi_callback_info info) {
5072+  napi_value undefinedResult = nullptr;
5073+  napi_get_undefined(env, &undefinedResult);
5074+  MSTensorNapi *tensor = nullptr;
5075+
5076+  GET_PARAMS(env, info, ARGS_TWO);
5077+
5078+  napi_status status = napi_unwrap(env, thisVar, reinterpret_cast<void **>(&tensor));
5079+  if (status != napi_ok || tensor == nullptr) {
5080+    MS_LOG(ERROR) << "get tensor napi error";
5081+    return undefinedResult;
5082+  }
5083+
5084+  // convert napi_value to c++ type data
5085+  void *js_data = nullptr;
5086+  size_t length = 0;
5087+  status = napi_get_arraybuffer_info(env, argv[0], &js_data, &length);
5088+  if (status != napi_ok || js_data == nullptr) {
5089+    MS_LOG(ERROR) << "Get js data error.";
5090+    return undefinedResult;
5091+  }
5092+
5093+  if (tensor->nativeMSTensor_->DataSize() != length) {
5094+    MS_LOG(ERROR) << "tensor size is: " << static_cast<int>(tensor->nativeMSTensor_->DataSize())
5095+                  << "but data length got " << length;
5096+    return undefinedResult;
5097+  }
5098+
5099+  // memcpy
5100+  auto tensor_data = tensor->nativeMSTensor_->MutableData();
5101+  if (tensor_data == nullptr) {
5102+    MS_LOG(ERROR) << "malloc data for tensor failed.";
5103+    return undefinedResult;
5104+  }
5105+  memcpy(tensor_data, js_data, length);
5106+
5107+  MS_LOG(INFO) << "SetFloatData success.";
5108+  return undefinedResult;
5109+}
5110+}  // namespace mindspore
5111\ No newline at end of file
5112diff --git a/mindspore/lite/src/litert/js_api/native_module_ohos_ms.cc b/mindspore/lite/src/litert/js_api/native_module_ohos_ms.cc
5113new file mode 100644
5114index 00000000..7a381c1e
5115--- /dev/null
5116+++ b/mindspore/lite/src/litert/js_api/native_module_ohos_ms.cc
5117@@ -0,0 +1,48 @@
5118+/**
5119+ * Copyright 2023 Huawei Technologies Co., Ltd
5120+ *
5121+ * Licensed under the Apache License, Version 2.0 (the "License");
5122+ * you may not use this file except in compliance with the License.
5123+ * You may obtain a copy of the License at
5124+ *
5125+ * http://www.apache.org/licenses/LICENSE-2.0
5126+ *
5127+ * Unless required by applicable law or agreed to in writing, software
5128+ * distributed under the License is distributed on an "AS IS" BASIS,
5129+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
5130+ * See the License for the specific language governing permissions and
5131+ * limitations under the License.
5132+ */
5133+
5134+#include "include/js_api/native_module_ohos_ms.h"
5135+#include "src/common/log_adapter.h"
5136+
/*
 * napi module registration callback: attaches the classes and functions of
 * the ohos.ai.mindSporeLite module onto the module's `exports` object.
 */
static napi_value Export(napi_env env, napi_value exports) {
  MS_LOG(INFO) << "Export() is called.";

  // MSLiteModelNapi::Init defines the model class and its helpers on
  // `exports`; its return value is intentionally ignored here.
  mindspore::MSLiteModelNapi::Init(env, exports);
  return exports;
}
5147+
5148+/*
5149+ * module define
5150+ */
5151+static napi_module g_module = {.nm_version = 1,
5152+                               .nm_flags = 0,
5153+                               .nm_filename = nullptr,
5154+                               .nm_register_func = Export,
5155+                               .nm_modname = "ai.mindSporeLite",
5156+                               .nm_priv = ((void *)0),
5157+                               .reserved = {0}};
5158+
/*
 * Registers g_module with the napi runtime at library load time;
 * __attribute__((constructor)) runs this automatically on dlopen.
 */
extern "C" __attribute__((constructor)) void RegisterModule(void) {
  MS_LOG(INFO) << "RegisterModule() is called";
  napi_module_register(&g_module);
}
5166diff --git a/mindspore/lite/src/litert/js_api/nnrt_device_desc.cc b/mindspore/lite/src/litert/js_api/nnrt_device_desc.cc
5167new file mode 100644
5168index 00000000..145897db
5169--- /dev/null
5170+++ b/mindspore/lite/src/litert/js_api/nnrt_device_desc.cc
5171@@ -0,0 +1,216 @@
5172+/**
5173+* Copyright 2023 Huawei Technologies Co., Ltd
5174+*
5175+* Licensed under the Apache License, Version 2.0 (the "License");
5176+* you may not use this file except in compliance with the License.
5177+* You may obtain a copy of the License at
5178+*
5179+* http://www.apache.org/licenses/LICENSE-2.0
5180+*
5181+* Unless required by applicable law or agreed to in writing, software
5182+* distributed under the License is distributed on an "AS IS" BASIS,
5183+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
5184+* See the License for the specific language governing permissions and
5185+* limitations under the License.
5186+*/
5187+
5188+#include <climits>
5189+#include <string.h>
5190+#include <map>
5191+#include "src/common/log.h"
5192+#include "include/js_api/nnrt_device_desc_napi.h"
5193+
5194+namespace mindspore {
// Cached reference to the JS class constructor, created lazily in
// GetConstructor(); thread_local because napi references are bound to the
// env/thread they were created on.
thread_local napi_ref NnrtDeviceDescNapi::constructor_ = nullptr;
// Name under which the class is exposed to JS.
const std::string CLASS_NAME = "NNRTDeviceDescription";
5197+
5198+NnrtDeviceDescNapi::NnrtDeviceDescNapi() { MS_LOG(DEBUG) << "MSLITE NNRTDeviceDescNapi Instances create."; }
5199+
5200+NnrtDeviceDescNapi::~NnrtDeviceDescNapi() {
5201+ if (nativeNnrtDeviceDesc_ != nullptr) {
5202+   nativeNnrtDeviceDesc_ = nullptr;
5203+ }
5204+ MS_LOG(INFO) << "MSLITE MSTensorNapi Instances destroy.";
5205+}
5206+
5207+napi_value NnrtDeviceDescNapi::Constructor(napi_env env, napi_callback_info info) {
5208+ napi_value jsThis = nullptr;
5209+ napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
5210+ if (status != napi_ok || jsThis == nullptr) {
5211+   MS_LOG(ERROR) << "Failed to retrieve details about the callback";
5212+   return nullptr;
5213+ }
5214+
5215+ std::unique_ptr<NnrtDeviceDescNapi> nnrt_device_desc = std::make_unique<NnrtDeviceDescNapi>();
5216+ if (nnrt_device_desc == nullptr) {
5217+   MS_LOG(ERROR) << "No memory";
5218+   return nullptr;
5219+ }
5220+
5221+ nnrt_device_desc->env_ = env;
5222+ status = napi_wrap(env, jsThis, reinterpret_cast<void *>(nnrt_device_desc.get()), NnrtDeviceDescNapi::Finalize, nullptr, nullptr);
5223+ if (status == napi_ok) {
5224+   nnrt_device_desc.release();
5225+   return jsThis;
5226+ }
5227+
5228+ MS_LOG(ERROR) << "Constructor fail.";
5229+ return nullptr;
5230+}
5231+
5232+void NnrtDeviceDescNapi::Finalize(napi_env env, void *nativeObject, void *finalize) {
5233+ (void)env;
5234+ (void)finalize;
5235+ if (nativeObject != nullptr) {
5236+   delete reinterpret_cast<NnrtDeviceDesc *>(nativeObject);
5237+ }
5238+ MS_LOG(INFO) << "Finalize success.";
5239+}
5240+
5241+napi_value NnrtDeviceDescNapi::NewInstance(napi_env env, NnrtDeviceDesc desc) {
5242+ napi_value cons = GetConstructor(env);
5243+ if (cons == nullptr) {
5244+   MS_LOG(ERROR) << "NewInstance GetConstructor is nullptr!";
5245+   return nullptr;
5246+ }
5247+ napi_value instance;
5248+ napi_status status = napi_new_instance(env, cons, 0, nullptr, &instance);
5249+ if (status != napi_ok) {
5250+   MS_LOG(ERROR) << "NewInstance napi_new_instance failed! code: " << status;
5251+   return nullptr;
5252+ }
5253+
5254+ NnrtDeviceDescNapi *proxy = nullptr;
5255+ status = napi_unwrap(env, instance, reinterpret_cast<void **>(&proxy));
5256+ if (proxy == nullptr) {
5257+   MS_LOG(ERROR) << "NewInstance native instance is nullptr! code: " << status;
5258+   return instance;
5259+ }
5260+
5261+ proxy->nativeNnrtDeviceDesc_ = std::make_unique<NnrtDeviceDesc>(desc);
5262+ if (proxy->nativeNnrtDeviceDesc_ == nullptr) {
5263+   MS_LOG(ERROR) << "NewInstance native nnrt deivce desc unique ptr is nullptr!";
5264+   return instance;
5265+ }
5266+ return instance;
5267+}
5268+
5269+napi_value NnrtDeviceDescNapi::GetConstructor(napi_env env) {
5270+ napi_value cons;
5271+ if (constructor_ != nullptr) {
5272+   napi_get_reference_value(env, constructor_, &cons);
5273+   return cons;
5274+ }
5275+
5276+ MS_LOG(INFO) << "Get NnrtDeviceDesc constructor.";
5277+ napi_property_descriptor properties[] = {
5278+   DECLARE_NAPI_FUNCTION("deviceID", GetDeviceID),
5279+   DECLARE_NAPI_FUNCTION("deviceType", GetDeviceType),
5280+   DECLARE_NAPI_FUNCTION("deviceName", GetDeviceName),
5281+ };
5282+
5283+ napi_status status = napi_define_class(env, CLASS_NAME.c_str(), NAPI_AUTO_LENGTH, Constructor, nullptr,
5284+                                        sizeof(properties) / sizeof(napi_property_descriptor), properties, &cons);
5285+ if (status != napi_ok) {
5286+   MS_LOG(ERROR) << "MSLITE Failed to define NnrtDeviceDesc class";
5287+   return nullptr;
5288+ }
5289+
5290+ status = napi_create_reference(env, cons, 1, &constructor_);
5291+ if (status != napi_ok) {
5292+   MS_LOG(ERROR) << "MSLITE Failed to create reference of constructor";
5293+   return nullptr;
5294+ }
5295+
5296+ return cons;
5297+}
5298+
5299+napi_value NnrtDeviceDescNapi::GetDeviceName(napi_env env, napi_callback_info info) {
5300+ napi_value undefinedResult = nullptr;
5301+ napi_get_undefined(env, &undefinedResult);
5302+ napi_value jsThis = nullptr;
5303+ napi_value jsResult = nullptr;
5304+ NnrtDeviceDescNapi *desc = nullptr;
5305+
5306+ napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
5307+ if (status != napi_ok || jsThis == nullptr) {
5308+   MS_LOG(ERROR) << "Failed to retrieve details about the callback";
5309+   return undefinedResult;
5310+ }
5311+
5312+ status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&desc));
5313+ if (status != napi_ok || desc == nullptr) {
5314+   MS_LOG(ERROR) << "get tensor napi error";
5315+   return undefinedResult;
5316+ }
5317+
5318+ status = napi_create_string_utf8(env, desc->nativeNnrtDeviceDesc_->name.c_str(), NAPI_AUTO_LENGTH, &jsResult);
5319+ if (status != napi_ok) {
5320+   MS_LOG(ERROR) << "napi_create_string_utf8 error";
5321+   return undefinedResult;
5322+ }
5323+
5324+ MS_LOG(INFO) << "GetName success.";
5325+ return jsResult;
5326+}
5327+
5328+napi_value NnrtDeviceDescNapi::GetDeviceID(napi_env env, napi_callback_info info) {
5329+ napi_value undefinedResult = nullptr;
5330+ napi_get_undefined(env, &undefinedResult);
5331+ napi_value jsThis = nullptr;
5332+ napi_value jsResult = nullptr;
5333+ NnrtDeviceDescNapi *desc = nullptr;
5334+
5335+ napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
5336+ if (status != napi_ok || jsThis == nullptr) {
5337+   MS_LOG(ERROR) << "Failed to retrieve details about the callback";
5338+   return undefinedResult;
5339+ }
5340+
5341+ status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&desc));
5342+ if (status != napi_ok || desc == nullptr) {
5343+   MS_LOG(ERROR) << "get tensor napi error";
5344+   return undefinedResult;
5345+ }
5346+
5347+ auto id = static_cast<uint64_t>(desc->nativeNnrtDeviceDesc_->id);
5348+ status = napi_create_bigint_uint64(env, id, &jsResult);
5349+ if (status != napi_ok) {
5350+   MS_LOG(ERROR) << "napi_create_int32 error";
5351+   return undefinedResult;
5352+ }
5353+
5354+ MS_LOG(INFO) << "GetShape success.";
5355+ return jsResult;
5356+}
5357+
5358+napi_value NnrtDeviceDescNapi::GetDeviceType(napi_env env, napi_callback_info info) {
5359+ napi_value undefinedResult = nullptr;
5360+ napi_get_undefined(env, &undefinedResult);
5361+ napi_value jsThis = nullptr;
5362+ napi_value jsResult = nullptr;
5363+ NnrtDeviceDescNapi *desc = nullptr;
5364+
5365+ napi_status status = napi_get_cb_info(env, info, nullptr, nullptr, &jsThis, nullptr);
5366+ if (status != napi_ok || jsThis == nullptr) {
5367+   MS_LOG(ERROR) << "Failed to retrieve details about the callback";
5368+   return undefinedResult;
5369+ }
5370+
5371+ status = napi_unwrap(env, jsThis, reinterpret_cast<void **>(&desc));
5372+ if (status != napi_ok || desc == nullptr) {
5373+   MS_LOG(ERROR) << "get nnrt device type napi error";
5374+   return undefinedResult;
5375+ }
5376+
5377+ status = napi_create_int32(env, desc->nativeNnrtDeviceDesc_->type, &jsResult);
5378+
5379+ if (status != napi_ok) {
5380+   MS_LOG(ERROR) << "napi_create_int32 error";
5381+   return undefinedResult;
5382+ }
5383+
5384+ MS_LOG(INFO) << "GetDeviceType success.";
5385+ return jsResult;
5386+}
5387+}  // namespace mindspore
5388\ No newline at end of file
5389-- 
53902.25.1
5391
5392