diff --git a/zh-cn/application-dev/ai/mindspore-guidelines-based-js.md b/zh-cn/application-dev/ai/mindspore-guidelines-based-js.md
index f69b448cde9d9aee637712a64dfb085d2238c828..f8aa9aee0df565321f5e41ccbde9a7885eb74106 100644
--- a/zh-cn/application-dev/ai/mindspore-guidelines-based-js.md
+++ b/zh-cn/application-dev/ai/mindspore-guidelines-based-js.md
@@ -93,14 +93,14 @@ build() {
    ```shell
    Launching com.example.myapptfjs
    $ hdc uninstall com.example.myapptfjs
-   $ hdc install -r "D:\TVOS\JSAPI\MyAppTfjs\entry\build\default\outputs\default\entry-default-signed.hap"
+   $ hdc install -r "path/to/xxx.hap"
    $ hdc shell aa start -a EntryAbility -b com.example.myapptfjs
    ```
 
 2. Use hdc to connect to the device and push mnet.caffemodel.ms to the sandbox directory on the device. mnet_caffemodel_nhwc.bin is stored in the rawfile directory of the local project.
 
    ```shell
-   hdc -t 7001005458323933328a00bcdf423800 file send .\mnet.caffemodel.ms /data/app/el2/100/base/com.example.myapptfjs/haps/entry/files/
+   hdc -t your_device_id file send .\mnet.caffemodel.ms /data/app/el2/100/base/com.example.myapptfjs/haps/entry/files/
    ```
 
 3. Tap Test_MSLiteModel_predict on the device screen to trigger the test case. The following result is displayed in the HiLog output:
diff --git a/zh-cn/application-dev/reference/apis/js-apis-mindSporeLite.md b/zh-cn/application-dev/reference/apis/js-apis-mindSporeLite.md
index 48007f8c0d90495a27bc49ef01c3e0229c37f500..be0f9c022b2b792aefc1b019f9040b145121b914 100644
--- a/zh-cn/application-dev/reference/apis/js-apis-mindSporeLite.md
+++ b/zh-cn/application-dev/reference/apis/js-apis-mindSporeLite.md
@@ -185,13 +185,13 @@ let modelName = '/path/to/xxx.ms';
 let syscontext = globalThis.context;
 syscontext.resourceManager.getRawFileContent(modelName).then((buffer) => {
   let modelBuffer = buffer;
+  mindSporeLite.loadModelFromBuffer(modelBuffer.buffer, (result) => {
+    const modelInputs = result.getInputs();
+    console.log(modelInputs[0].name);
+  })
 }).catch(error => {
   console.error('Failed to get buffer, error code: ${error.code},message:${error.message}.');
 })
-mindSporeLite.loadModelFromBuffer(modelBuffer.buffer, (result) => {
-  const modelInputs = result.getInputs();
-  console.log(modelInputs[0].name);
-})
 ```
 
 ## mindSporeLite.loadModelFromBuffer
@@ -217,15 +217,15 @@ let modelName = '/path/to/xxx.ms';
 let syscontext = globalThis.context;
 syscontext.resourceManager.getRawFileContent(modelName).then((error,buffer) => {
   let modelBuffer = buffer;
+  let context: mindSporeLite.Context = {};
+  context = {'target': ['cpu']};
+  mindSporeLite.loadModelFromBuffer(modelBuffer.buffer, context, (result) => {
+    const modelInputs = result.getInputs();
+    console.log(modelInputs[0].name);
+  })
 }).catch(error => {
   console.error('Failed to get buffer, error code: ${error.code},message:${error.message}.');
 })
-let context: mindSporeLite.Context = {};
-context = {'target': ['cpu']};
-mindSporeLite.loadModelFromBuffer(modelBuffer.buffer, context, (result) => {
-  const modelInputs = result.getInputs();
-  console.log(modelInputs[0].name);
-})
 ```
 
 ## mindSporeLite.loadModelFromBuffer
@@ -256,13 +256,13 @@ let modelName = '/path/to/xxx.ms';
 let syscontext = globalThis.context;
 syscontext.resourceManager.getRawFileContent(modelName).then((buffer) => {
   let modelBuffer = buffer;
+  mindSporeLite.loadModelFromBuffer(modelBuffer.buffer).then((result) => {
+    const modelInputs = result.getInputs();
+    console.log(modelInputs[0].name);
+  })
 }).catch(error => {
   console.error('Failed to get buffer, error code: ${error.code},message:${error.message}.');
 })
-mindSporeLite.loadModelFromBuffer(modelBuffer.buffer).then((result) => {
-  const modelInputs = result.getInputs();
-  console.log(modelInputs[0].name);
-})
 ```
 
 ## mindSporeLite.loadModelFromFd
@@ -402,7 +402,7 @@ predict(inputs: MSTensor[], callback: Callback<MSTensor[]>): void
 import resourceManager from '@ohos.resourceManager'
 let inputName = 'input_data.bin';
 let syscontext = globalThis.context;
-syscontext.resourceManager.getRawFileContent(inputName).then((buffer) => {
+syscontext.resourceManager.getRawFileContent(inputName).then(async (buffer) => {
   let inputBuffer = buffer;
   let model_file = '/path/to/xxx.ms';
   let mindSporeLiteModel = await mindSporeLite.loadModelFromFile(model_file);
@@ -442,7 +442,7 @@ predict(inputs: MSTensor[]): Promise<MSTensor[]>
 import resourceManager from '@ohos.resourceManager'
 let inputName = 'input_data.bin';
 let syscontext = globalThis.context;
-syscontext.resourceManager.getRawFileContent(inputName).then((buffer) => {
+syscontext.resourceManager.getRawFileContent(inputName).then(async (buffer) => {
   let inputBuffer = buffer;
   let model_file = '/path/to/xxx.ms';
   let mindSporeLiteModel = await mindSporeLite.loadModelFromFile(model_file);
@@ -543,7 +543,7 @@ getData(): ArrayBuffer
 import resourceManager from '@ohos.resourceManager'
 let inputName = 'input_data.bin';
 let syscontext = globalThis.context;
-syscontext.resourceManager.getRawFileContent(inputName).then((buffer) => {
+syscontext.resourceManager.getRawFileContent(inputName).then(async (buffer) => {
   let inputBuffer = buffer;
   let model_file = '/path/to/xxx.ms';
   let mindSporeLiteModel = await mindSporeLite.loadModelFromFile(model_file);
@@ -578,13 +578,13 @@ setData(inputArray: ArrayBuffer): void
 import resourceManager from '@ohos.resourceManager'
 let inputName = 'input_data.bin';
 let syscontext = globalThis.context;
-syscontext.resourceManager.getRawFileContent(inputName).then((buffer) => {
+syscontext.resourceManager.getRawFileContent(inputName).then(async (buffer) => {
   inputBuffer = buffer;
+  let model_file = '/path/to/xxx.ms';
+  let mindSporeLiteModel = await mindSporeLite.loadModelFromFile(model_file);
+  const modelInputs = mindSporeLiteModel.getInputs();
+  modelInputs[0].setData(inputBuffer.buffer);
 })
-let model_file = '/path/to/xxx.ms';
-let mindSporeLiteModel = await mindSporeLite.loadModelFromFile(model_file);
-const modelInputs = mindSporeLiteModel.getInputs();
-modelInputs[0].setData(inputBuffer.buffer);
 ```
 
 ## DataType
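Every hunk above applies the same scoping fix: code that uses the rawfile buffer must run inside the `getRawFileContent()` callback, and the callback must be declared `async` wherever `await` is used. A minimal consolidated sketch of the corrected pattern is shown below; the import path `'@ohos.ai.mindSporeLite'` is assumed from the API file name and is not itself part of this diff.

```ts
// Sketch only: consolidated form of the fix applied across the hunks above.
// Assumption: '@ohos.ai.mindSporeLite' is the module documented in js-apis-mindSporeLite.md.
import mindSporeLite from '@ohos.ai.mindSporeLite';

let modelName = '/path/to/xxx.ms';
let syscontext = globalThis.context;
syscontext.resourceManager.getRawFileContent(modelName).then(async (buffer) => {
  // The rawfile buffer is only in scope here, so load the model inside this
  // callback instead of after the promise chain (the bug the patch removes).
  // Using await requires marking the callback async.
  let mindSporeLiteModel = await mindSporeLite.loadModelFromBuffer(buffer.buffer);
  const modelInputs = mindSporeLiteModel.getInputs();
  console.log(modelInputs[0].name);
}).catch((error) => {
  console.error(`Failed to get buffer, error code: ${error.code}, message: ${error.message}.`);
});
```

The `predict()`, `getData()`, and `setData()` hunks enforce the same rule by adding `async` to their `getRawFileContent()` callbacks so the `await mindSporeLite.loadModelFromFile(...)` calls stay inside that scope.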