diff --git a/ernie/file_utils.py b/ernie/file_utils.py
index 03e2784f78c9e0c94e51b430daf9688d730233ff..89ed138c82ac7bf2355c50709621f6561592bbda 100644
--- a/ernie/file_utils.py
+++ b/ernie/file_utils.py
@@ -50,7 +50,6 @@ def _fetch_from_remote(url,
         cached_dir_model.mkdir()
         tmpfile = cached_dir_model / 'tmp'
         with tmpfile.open('wb') as f:
-            #url = 'https://ernie.bj.bcebos.com/ERNIE_stable.tgz'
             r = requests.get(url, stream=True)
             total_len = int(r.headers.get('content-length'))
             for chunk in tqdm(
diff --git a/ernie/modeling_ernie.py b/ernie/modeling_ernie.py
index 65e76b5765124346382f9e0c8e772ed4db472f1f..e0d5a9f91f4be5979600bcfad9932179ba13bbd9 100644
--- a/ernie/modeling_ernie.py
+++ b/ernie/modeling_ernie.py
@@ -272,7 +272,7 @@ class PretrainedModel(object):
         pretrain_dir = Path(pretrain_dir_or_url)
         if not pretrain_dir.exists():
-            raise ValueError('pretrain dir not found: %s' % pretrain_dir)
+            raise ValueError('pretrain dir not found: %s, optional: %s' % (pretrain_dir, cls.resource_map.keys()))
         state_dict_path = pretrain_dir / 'saved_weights.pdparams'
         config_path = pretrain_dir / 'ernie_config.json'
diff --git a/ernie/tokenizing_ernie.py b/ernie/tokenizing_ernie.py
index 7b866d84c281f5421890bfa2ba21dc5643817a1a..a9984ddd38ac8abd38e75864bf69135539488a0f 100644
--- a/ernie/tokenizing_ernie.py
+++ b/ernie/tokenizing_ernie.py
@@ -107,7 +107,7 @@ class ErnieTokenizer(object):
                              (pretrain_dir_or_url, repr(cls.resource_map)))
         pretrain_dir = Path(pretrain_dir_or_url)
         if not pretrain_dir.exists():
-            raise ValueError('pretrain dir not found: %s' % pretrain_dir)
+            raise ValueError('pretrain dir not found: %s, optional: %s' % (pretrain_dir, cls.resource_map.keys()))
         vocab_path = pretrain_dir / 'vocab.txt'
         if not vocab_path.exists():
             raise ValueError('no vocab file in pretrain dir: %s' %
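
For reviewers, a minimal sketch of the behavior this patch changes, assuming the patched checkout is importable and that ErnieModel is a PretrainedModel subclass exported by ernie.modeling_ernie; the directory name 'no-such-dir' and the resource key shown in the comment are hypothetical placeholders:

    from ernie.modeling_ernie import ErnieModel

    try:
        # A name that is neither a key in cls.resource_map nor an existing
        # local directory falls through to the ValueError patched above.
        ErnieModel.from_pretrained('no-such-dir')
    except ValueError as e:
        # Before: "pretrain dir not found: no-such-dir"
        # After, the valid pretrained names are appended, roughly:
        # "pretrain dir not found: no-such-dir, optional: dict_keys(['ernie-1.0', ...])"
        print(e)

The same message change applies to ErnieTokenizer.from_pretrained, since both raise from the identical pattern; listing cls.resource_map.keys() in the error tells the user which built-in model names are available instead of only echoing the bad path.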