
Merge pull request #92 from SunAhong1993/syf

remove download
Jason, 5 years ago
commit 87ddc84a7c

+ 1 - 1
paddlex/interpret/__init__.py

@@ -16,4 +16,4 @@ from __future__ import absolute_import
 from . import visualize
 
 lime = visualize.lime
-normlime = visualize.normlime
+normlime = visualize.normlime

+ 8 - 11
paddlex/interpret/core/_session_preparation.py

@@ -28,17 +28,6 @@ def gen_user_home():
     return os.path.expanduser('~')
 
 
-root_path = gen_user_home()
-root_path = osp.join(root_path, '.paddlex')
-h_pre_models = osp.join(root_path, "pre_models")
-if not osp.exists(h_pre_models):
-    if not osp.exists(root_path):
-        os.makedirs(root_path)
-    url = "https://bj.bcebos.com/paddlex/interpret/pre_models.tar.gz"
-    pdx.utils.download_and_decompress(url, path=root_path)
-h_pre_models_kmeans = osp.join(h_pre_models, "kmeans_model.pkl")
-
-
 def paddle_get_fc_weights(var_name="fc_0.w_0"):
     fc_weights = fluid.global_scope().find_var(var_name).get_tensor()
     return np.array(fc_weights)
@@ -50,6 +39,14 @@ def paddle_resize(extracted_features, outsize):
 
 
 def compute_features_for_kmeans(data_content):
+    root_path = gen_user_home()
+    root_path = osp.join(root_path, '.paddlex')
+    h_pre_models = osp.join(root_path, "pre_models")
+    if not osp.exists(h_pre_models):
+        if not osp.exists(root_path):
+            os.makedirs(root_path)
+        url = "https://bj.bcebos.com/paddlex/interpret/pre_models.tar.gz"
+        pdx.utils.download_and_decompress(url, path=root_path)
     def conv_bn_layer(input,
                       num_filters,
                       filter_size,
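
This hunk moves the pre_models download out of module scope, so importing paddlex.interpret no longer hits the network; the archive is fetched only when compute_features_for_kmeans actually runs. A minimal standalone sketch of the same download-on-first-use pattern, assuming paddlex is importable as pdx and that pdx.utils.download_and_decompress behaves as shown in the diff (the helper name ensure_pre_models is hypothetical; the actual change inlines this logic in each caller):

    import os
    import os.path as osp

    import paddlex as pdx  # assumed alias; the diff uses pdx.utils directly


    def ensure_pre_models():
        # Hypothetical helper: download and unpack the pre_models archive on first use.
        root_path = osp.join(os.path.expanduser('~'), '.paddlex')
        h_pre_models = osp.join(root_path, "pre_models")
        if not osp.exists(h_pre_models):
            if not osp.exists(root_path):
                os.makedirs(root_path)
            url = "https://bj.bcebos.com/paddlex/interpret/pre_models.tar.gz"
            pdx.utils.download_and_decompress(url, path=root_path)
        return h_pre_models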

+ 11 - 1
paddlex/interpret/core/interpretation_algorithms.py

@@ -13,11 +13,12 @@
 #limitations under the License.
 
 import os
+import os.path as osp
 import numpy as np
 import time
 
 from . import lime_base
-from ._session_preparation import paddle_get_fc_weights, compute_features_for_kmeans, h_pre_models_kmeans
+from ._session_preparation import paddle_get_fc_weights, compute_features_for_kmeans, gen_user_home
 from .normlime_base import combine_normlime_and_lime, get_feature_for_kmeans, load_kmeans_model
 from paddlex.interpret.as_data_reader.readers import read_image
 
@@ -215,6 +216,15 @@ class LIME(object):
 class NormLIME(object):
     def __init__(self, predict_fn, label_names, num_samples=3000, batch_size=50,
                  kmeans_model_for_normlime=None, normlime_weights=None):
+        root_path = gen_user_home()
+        root_path = osp.join(root_path, '.paddlex')
+        h_pre_models = osp.join(root_path, "pre_models")
+        if not osp.exists(h_pre_models):
+            if not osp.exists(root_path):
+                os.makedirs(root_path)
+            url = "https://bj.bcebos.com/paddlex/interpret/pre_models.tar.gz"
+            pdx.utils.download_and_decompress(url, path=root_path)
+        h_pre_models_kmeans = osp.join(h_pre_models, "kmeans_model.pkl")
         if kmeans_model_for_normlime is None:
             try:
                 self.kmeans_model = load_kmeans_model(h_pre_models_kmeans)
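
With this hunk, the kmeans model path is resolved inside NormLIME.__init__ rather than at import time, so the download is triggered only when an interpreter is instantiated and ~/.paddlex/pre_models is missing. Note that the added block calls pdx.utils.download_and_decompress, so it assumes paddlex is already bound to pdx elsewhere in the module (this hunk only adds the os.path alias). A hedged usage sketch, where predict_fn and the label names are placeholders:

    from paddlex.interpret.core.interpretation_algorithms import NormLIME

    def predict_fn(images):
        # Placeholder: run the trained model on a batch of images and
        # return its output probabilities as a numpy array.
        ...

    normlime = NormLIME(predict_fn, label_names=['cat', 'dog'])
    # kmeans_model_for_normlime is None by default, so __init__ loads
    # ~/.paddlex/pre_models/kmeans_model.pkl, downloading pre_models first if needed.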

+ 11 - 1
paddlex/interpret/core/normlime_base.py

@@ -13,13 +13,14 @@
 #limitations under the License.
 
 import os
+import os.path as osp
 import numpy as np
 import glob
 
 from paddlex.interpret.as_data_reader.readers import read_image
 import paddlex.utils.logging as logging
 from . import lime_base
-from ._session_preparation import compute_features_for_kmeans, h_pre_models_kmeans
+from ._session_preparation import compute_features_for_kmeans, gen_user_home
 
 
 def load_kmeans_model(fname):
@@ -103,6 +104,15 @@ def save_one_lime_predict_and_kmean_labels(lime_all_weights, image_pred_labels,
 
 
 def precompute_lime_weights(list_data_, predict_fn, num_samples, batch_size, save_dir):
+    root_path = gen_user_home()
+    root_path = osp.join(root_path, '.paddlex')
+    h_pre_models = osp.join(root_path, "pre_models")
+    if not osp.exists(h_pre_models):
+        if not osp.exists(root_path):
+            os.makedirs(root_path)
+        url = "https://bj.bcebos.com/paddlex/interpret/pre_models.tar.gz"
+        pdx.utils.download_and_decompress(url, path=root_path)
+    h_pre_models_kmeans = osp.join(h_pre_models, "kmeans_model.pkl")
     kmeans_model = load_kmeans_model(h_pre_models_kmeans)
 
     for data_index, each_data_ in enumerate(list_data_):
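
precompute_lime_weights gets the same treatment: the kmeans model is located (and, if necessary, downloaded) only when the precomputation actually runs. A hedged usage sketch; the image list, predict_fn, and the hyperparameter values are placeholders chosen to match the NormLIME defaults shown above:

    from paddlex.interpret.core.normlime_base import precompute_lime_weights

    def predict_fn(images):
        # Placeholder: batched forward pass returning class probabilities.
        ...

    image_list = ['data/img_0001.jpg', 'data/img_0002.jpg']  # hypothetical paths
    precompute_lime_weights(image_list, predict_fn, num_samples=3000,
                            batch_size=50, save_dir='./normlime_weights')
    # On the first call, kmeans_model.pkl is fetched into ~/.paddlex/pre_models
    # before the per-image LIME weights are computed and written to save_dir.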