|
| 1 | +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. |
| 2 | +# |
| 3 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | +# you may not use this file except in compliance with the License. |
| 5 | +# You may obtain a copy of the License at |
| 6 | +# |
| 7 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | +# |
| 9 | +# Unless required by applicable law or agreed to in writing, software |
| 10 | +# distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 12 | +# See the License for the specific language governing permissions and |
| 13 | +# limitations under the License. |
| 14 | + |
| 15 | +""" |
| 16 | +Download script, download dataset and pretrain models. |
| 17 | +""" |
| 18 | + |
| 19 | +from __future__ import absolute_import |
| 20 | +from __future__ import division |
| 21 | +from __future__ import print_function |
| 22 | + |
| 23 | +import io |
| 24 | +import os |
| 25 | +import sys |
| 26 | +import time |
| 27 | +import hashlib |
| 28 | +import tarfile |
| 29 | +import requests |
| 30 | + |
| 31 | + |
def usage():
    """Print the command-line help text for this download script."""
    print("\n".join([
        "",
        "Download datasets and pretrained models for EmotionDetection task.",
        "Usage:",
        " 1. python download.py dataset",
        " 2. python download.py model",
        "",
    ]))
| 38 | + |
| 39 | + |
def md5file(fname):
    """Return the hex MD5 digest of the file at *fname*, read in 4 KB chunks."""
    digest = hashlib.md5()
    with io.open(fname, "rb") as stream:
        # Stream the file instead of loading it whole; downloads can be large.
        chunk = stream.read(4096)
        while chunk:
            digest.update(chunk)
            chunk = stream.read(4096)
    return digest.hexdigest()
| 46 | + |
| 47 | + |
def extract(fname, dir_path):
    """
    Extract tar.gz file

    Unpacks every member of the gzip-compressed tarball *fname* into
    *dir_path*, printing each member name as it is written.

    Raises whatever ``tarfile`` raises on a corrupt or unreadable archive.
    """
    # The previous version wrapped this in `try/except Exception as e: raise e`,
    # which is a no-op that also leaked the tar handle on failure. A `with`
    # block closes the archive on every path and lets exceptions propagate
    # with their original traceback.
    with tarfile.open(fname, "r:gz") as tar:
        for file_name in tar.getnames():
            tar.extract(file_name, dir_path)
            print(file_name)
| 61 | + |
| 62 | + |
def download(url, filename, md5sum):
    """
    Download file and check md5

    Fetches *url* into *filename* in 4 KB chunks, printing a progress bar,
    and keeps retrying (up to 3 attempts) until the file on disk matches
    *md5sum*.

    Raises:
        RuntimeError: if the checksum still does not match after the
            retry limit is exhausted.
    """
    retry = 0
    retry_limit = 3
    chunk_size = 4096
    # Loop condition doubles as the integrity check: a partial or corrupt
    # file fails the md5 comparison and triggers another attempt.
    while not (os.path.exists(filename) and md5file(filename) == md5sum):
        if retry < retry_limit:
            retry += 1
        else:
            raise RuntimeError("Cannot download dataset ({0}) with retry {1} times.".
                               format(url, retry_limit))
        try:
            start = time.time()
            size = 0
            res = requests.get(url, stream=True)
            # Check the status before touching headers: error responses may
            # lack 'content-length' (the old code raised KeyError there) and
            # must not be written to disk as if they were the payload.
            if res.status_code != 200:
                print("[Error   ]: HTTP status %d for %s" % (res.status_code, url))
                continue
            # 'content-length' can be absent on chunked transfers; fall back
            # to 0 and skip the size-based progress display in that case.
            filesize = int(res.headers.get('content-length', 0))
            if filesize:
                print("[Filesize]: %0.2f MB" % (filesize / 1024 / 1024))
            # save by chunk
            with io.open(filename, "wb") as fout:
                for chunk in res.iter_content(chunk_size=chunk_size):
                    if chunk:
                        fout.write(chunk)
                        size += len(chunk)
                        if filesize:
                            pr = '>' * int(size * 50 / filesize)
                            print('\r[Process ]: %s%.2f%%' % (pr, float(size / filesize * 100)), end='')
            end = time.time()
            print("\n[CostTime]: %.2f s" % (end - start))
        except Exception as e:
            # Best-effort: log and retry; the counter above bounds the loop.
            print(e)
| 95 | + |
| 96 | + |
def download_dataset(dir_path):
    """Fetch the EmotionDetection dataset archive into *dir_path* and unpack it."""
    base_url = "https://baidu-nlp.bj.bcebos.com/"
    dataset_name = "emotion_detection-dataset-1.0.0.tar.gz"
    dataset_md5 = "512d256add5f9ebae2c101b74ab053e9"
    url = base_url + dataset_name
    file_path = os.path.join(dir_path, dataset_name)

    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    # Download the archive, unpack it in place, then drop the tarball.
    print("Downloading dataset: %s" % url)
    download(url, file_path, dataset_md5)
    print("Extracting dataset: %s" % file_path)
    extract(file_path, dir_path)
    os.remove(file_path)
| 113 | + |
| 114 | + |
def download_model(dir_path):
    """Fetch and unpack the pretrained TextCNN and ERNIE archives into *dir_path*."""
    base_url = "https://baidu-nlp.bj.bcebos.com/"
    # archive name -> expected md5 checksum
    models = {
        "emotion_detection_textcnn-1.0.0.tar.gz": "b7ee648fcd108835c880a5f5fce0d8ab",
        "emotion_detection_ernie_finetune-1.0.0.tar.gz": "dfeb68ddbbc87f466d3bb93e7d11c03a",
    }

    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    for name, md5sum in models.items():
        url = base_url + name
        model_path = os.path.join(dir_path, name)
        # Download each archive, unpack it, then drop the tarball.
        print("Downloading model: %s" % url)
        download(url, model_path, md5sum)
        print("Extracting model: %s" % model_path)
        extract(model_path, dir_path)
        os.remove(model_path)
| 138 | + |
| 139 | + |
if __name__ == '__main__':
    # Exactly one positional argument selects what to download.
    if len(sys.argv) != 2:
        usage()
        sys.exit(1)

    target = sys.argv[1]
    here = os.path.dirname(__file__)
    if target == "dataset":
        download_dataset(os.path.join(here, './'))
    elif target == "model":
        download_model(os.path.join(here, './pretrain_models'))
    else:
        usage()
| 153 | + |
0 commit comments