From d783b24ba7d8a03dabfa2139fcbf40c216e0ea3d Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: 星期四, 16 三月 2023 19:34:52 +0800
Subject: [PATCH] Merge pull request #199 from alibaba-damo-academy/dev_xw

---
 funasr/export/README.md |   60 ++++++++++++++++++++++++++++++++++++++++++++++--------------
 1 files changed, 46 insertions(+), 14 deletions(-)

diff --git a/funasr/export/README.md b/funasr/export/README.md
index 39a7265..33ab22e 100644
--- a/funasr/export/README.md
+++ b/funasr/export/README.md
@@ -1,21 +1,53 @@
 
-environment: ubuntu20.04-py37-torch1.11.0-tf1.15.5-1.2.0
+## Environments
+    torch >= 1.11.0
+    modelscope >= 1.2.0
 
-Export onnx files from modelscope
-```python
-from funasr.export.export_model import ASRModelExportParaformer
+## Install modelscope and funasr
 
-output_dir = "../export"
-export_model = ASRModelExportParaformer(cache_dir=output_dir, onnx=True)
-export_model.export_from_modelscope('damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch')
+The installation is the same as [funasr](../../README.md)
+
+## Export model
+   `Tips`: torch>=1.11.0
+
+   ```shell
+   python -m funasr.export.export_model \
+       --model-name [model_name] \
+       --export-dir [export_dir] \
+       --type [onnx, torch] \
+       --quantize \
+       --fallback-num [fallback_num]
+   ```
+   `model-name`: the model to export. It could be a model from modelscope, or a local finetuned model (named: model.pb).
+
+   `export-dir`: the directory to which the model is exported.
+
+   `type`: `onnx` or `torch`, export onnx format model or torchscript format model.
+
+   `quantize`: `true`, export quantized model at the same time; `false`, export fp32 model only.
+
+   `fallback-num`: specify the number of fallback layers to perform automatic mixed precision quantization.
+
+
+## For example
+### Export onnx format model
+Export model from modelscope
+```shell
+python -m funasr.export.export_model --model-name damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch --export-dir ./export --type onnx
+```
+Export model from local path, the model's name must be `model.pb`.
+```shell
+python -m funasr.export.export_model --model-name /mnt/workspace/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch --export-dir ./export --type onnx
 ```
 
+### Export torchscript format model
+Export model from modelscope
+```shell
+python -m funasr.export.export_model --model-name damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch --export-dir ./export --type torch
+```
 
-Export onnx files from local path
-```python
-from funasr.export.export_model import ASRModelExportParaformer
+Export model from local path, the model's name must be `model.pb`.
+```shell
+python -m funasr.export.export_model --model-name /mnt/workspace/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch --export-dir ./export --type torch
+```
 
-output_dir = "../export"
-export_model = ASRModelExportParaformer(cache_dir=output_dir, onnx=True)
-export_model.export_from_local('/root/cache/export/damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch')
-```
\ No newline at end of file

--
Gitblit v1.9.1