
add quant tutorials

jiangjiajun 5 years ago
parent
commit
b7ada18f51

+ 1 - 1
paddlex/__init__.py

@@ -14,7 +14,7 @@
 
 from __future__ import absolute_import
 
-__version__ = '1.2.2'
+__version__ = '1.2.3'
 
 import os
 if 'FLAGS_eager_delete_tensor_gb' not in os.environ:

+ 4 - 2
paddlex/cv/models/base.py

@@ -151,10 +151,11 @@ class BaseAPI:
         is_use_cache_file = True
         if cache_dir is None:
             is_use_cache_file = False
+        quant_prog = self.test_prog.clone(for_test=True)
         post_training_quantization = PaddleXPostTrainingQuantization(
             executor=self.exe,
             dataset=dataset,
-            program=self.test_prog,
+            program=quant_prog,
             inputs=self.test_inputs,
             outputs=self.test_outputs,
             batch_size=batch_size,
@@ -366,6 +367,7 @@ class BaseAPI:
     def export_inference_model(self, save_dir):
         test_input_names = [var.name for var in list(self.test_inputs.values())]
         test_outputs = list(self.test_outputs.values())
+        save_prog = self.test_prog.clone(for_test=True)
         with fluid.scope_guard(self.scope):
             fluid.io.save_inference_model(
                 dirname=save_dir,
@@ -373,7 +375,7 @@ class BaseAPI:
                 params_filename='__params__',
                 feeded_var_names=test_input_names,
                 target_vars=test_outputs,
-                main_program=self.test_prog)
+                main_program=save_prog)
         model_info = self.get_model_info()
         model_info['status'] = 'Infer'
 

+ 1 - 1
setup.py

@@ -19,7 +19,7 @@ long_description = "PaddlePaddle Entire Process Development Toolkit"
 
 setuptools.setup(
     name="paddlex",
-    version='1.2.2',
+    version='1.2.3',
     author="paddlex",
     author_email="paddlex@baidu.com",
     description=long_description,

+ 16 - 0
tutorials/slim/quant/image_classification/README.md

@@ -0,0 +1,16 @@
+# Image Classification Model Quantization
+
+This directory provides a quantization example for the MobileNetV2 model; just run the commands below.
+
+## Step 1: Quantize the model
+```
+python mobilenetv2_quant.py
+```
+Running this script automatically downloads the model and the dataset.
+
+## Step 2: Export as a PaddleLite model
+
+```
+python paddlelite_export.py
+```
+Before running this script, paddlelite must be installed; in your Python environment, simply run `pip install paddlelite`.

+ 27 - 0
tutorials/slim/quant/image_classification/mobilenetv2_quant.py

@@ -0,0 +1,31 @@
+import paddlex as pdx
+import os
+os.environ['CUDA_VISIBLE_DEVICES'] = '0'
+
+# Download the pretrained model
+url = 'https://bj.bcebos.com/paddlex/models/mobilenetv2_vegetables.tar.gz'
+pdx.utils.download_and_decompress(url, path='.')
+
+# Download the corresponding training dataset
+url = 'https://bj.bcebos.com/paddlex/datasets/vegetables_cls.tar.gz'
+pdx.utils.download_and_decompress(url, path='.')
+
+# Load the model
+model = pdx.load_model('mobilenetv2_vegetables')
+
+# Export the unquantized model in deployment format, for later comparison
+model.export_inference_model('server_mobilenet')
+
+# Load the dataset used for quantization
+dataset = pdx.datasets.ImageNet(
+                data_dir='vegetables_cls',
+                file_list='vegetables_cls/train_list.txt',
+                label_list='vegetables_cls/labels.txt',
+                transforms=model.test_transforms)
+
+# Start quantization
+pdx.slim.export_quant_model(model, dataset, save_dir='./quant_mobilenet', cache_dir='./tmp')
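With both the normal export (`server_mobilenet`) and the quantized export (`quant_mobilenet`) on disk, a quick sanity check is to compare their sizes. The helper below is not part of the tutorial script, and the absolute numbers depend on how the quantized weights are stored; much of the size saving may only show up after the Paddle-Lite conversion in the next step.

```
# Optional check (hypothetical helper, not part of the tutorial scripts):
# compare the on-disk size of the normal and quantized exports.
import os

def dir_size_mb(path):
    total = sum(
        os.path.getsize(os.path.join(root, f))
        for root, _, files in os.walk(path)
        for f in files)
    return total / (1024.0 * 1024.0)

print('server_mobilenet: %.2f MB' % dir_size_mb('server_mobilenet'))
print('quant_mobilenet : %.2f MB' % dir_size_mb('quant_mobilenet'))
```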

+ 18 - 0
tutorials/slim/quant/image_classification/paddlelite_export.py

@@ -0,0 +1,18 @@
+# paddlelite must be installed first (pip install paddlelite)
+import paddlelite.lite as lite
+
+model_filename = 'server_mobilenet/__model__'
+params_filename = 'server_mobilenet/__params__'
+export_filename = 'mobilenetv2'
+
+opt = lite.Opt()
+# Convert the unquantized model to a Paddle-Lite model
+opt.run_optimize("", model_filename, params_filename, 'naive_buffer', 'arm', export_filename)
+
+
+quant_model_filename = 'quant_mobilenet/__model__'
+quant_params_filename = 'quant_mobilenet/__params__'
+quant_export_filename = 'mobilenetv2_quant'
+
+# Convert the quantized model to a Paddle-Lite model
+opt.run_optimize("", quant_model_filename, quant_params_filename, 'naive_buffer', 'arm', quant_export_filename)
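Assuming the opt tool follows its usual naming and writes `<optimize_out>.nb` when `naive_buffer` is selected (worth confirming for the installed paddlelite version), the two conversions above should produce `mobilenetv2.nb` and `mobilenetv2_quant.nb` next to the script:

```
# Optional check. Assumption: with 'naive_buffer', opt writes '<export_filename>.nb'.
import os

for name in ('mobilenetv2.nb', 'mobilenetv2_quant.nb'):
    if os.path.exists(name):
        print('%s: %.2f MB' % (name, os.path.getsize(name) / (1024.0 * 1024.0)))
    else:
        print('%s not found; check the output naming for your paddlelite version' % name)
```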