diff --git a/README.md b/README.md
index a0f973e0..8a72ee2a 100644
--- a/README.md
+++ b/README.md
@@ -51,7 +51,7 @@ from fastNLP.transformers.torch import BertTokenizer
 # as long as the file still exists, the cached file will be read automatically instead of re-running the preprocessing code.
 @cache_results('caches/cache.pkl')
 def prepare_data():
-    # the SST2 data will be downloaded automatically, and the documentation shows that the returned dataset should contain the two fields "raw_words" and "target"
+    # the data will be downloaded automatically, and the documentation shows that the returned dataset should contain the two fields "raw_words" and "target"
     data_bundle = ChnSentiCorpLoader().load()
     # use the tokenizer to tokenize the data
     tokenizer = BertTokenizer.from_pretrained('hfl/chinese-bert-wwm')
@@ -130,7 +130,7 @@ evaluator.run()
 from fastNLP.io import ChnSentiCorpLoader
 from functools import partial
 
-# the SST2 data will be downloaded automatically, and the documentation shows that the returned dataset should contain the two fields "raw_words" and "target"
+# the data will be downloaded automatically, and the documentation shows that the returned dataset should contain the two fields "raw_words" and "target"
 data_bundle = ChnSentiCorpLoader().load()
 
 # use the tokenizer to tokenize the data
diff --git a/fastNLP/core/callbacks/topk_saver.py b/fastNLP/core/callbacks/topk_saver.py
index 1a4de8d5..21a8961f 100644
--- a/fastNLP/core/callbacks/topk_saver.py
+++ b/fastNLP/core/callbacks/topk_saver.py
@@ -50,6 +50,8 @@ class Saver:
         self.save_fn_name = 'save_checkpoint' if save_object == 'trainer' else 'save_model'
 
         self.timestamp_path = self.folder.joinpath(os.environ[FASTNLP_LAUNCH_TIME])
+        # log the folder in which checkpoints are saved for this run; it is generated from the launch time, so print it to keep the user from being confused about where checkpoints go
+        logger.info(f"The checkpoint will be saved in this folder for this time: {self.timestamp_path}.")
 
     def save(self, trainer, folder_name):
         """
diff --git a/fastNLP/core/drivers/torch_driver/torch_driver.py b/fastNLP/core/drivers/torch_driver/torch_driver.py
index 3307e3c9..db011403 100644
--- a/fastNLP/core/drivers/torch_driver/torch_driver.py
+++ b/fastNLP/core/drivers/torch_driver/torch_driver.py
@@ -199,7 +199,8 @@ class TorchDriver(Driver):
                                      f"`only_state_dict=False`")
         if not isinstance(res, dict):
             res = res.state_dict()
-        model.load_state_dict(res)
+        _strict = kwargs.get("strict", True)
+        model.load_state_dict(res, _strict)
 
     @rank_zero_call
     def save_checkpoint(self, folder: Path, states: Dict, dataloader, only_state_dict: bool = True, should_save_model: bool = True, **kwargs):
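
The torch_driver.py hunk above forwards a `strict` flag from `**kwargs` to `torch.nn.Module.load_state_dict`, so a caller can opt out of exact key matching when restoring a model. Below is a minimal sketch of the underlying PyTorch behaviour this relies on; the two `nn.Sequential` models and their key names are hypothetical illustrations, not code from this repository:

```python
from torch import nn

# Hypothetical models: the checkpoint comes from a backbone-only model, while
# the model being restored has an extra classification head on top.
backbone = nn.Sequential(nn.Linear(8, 16))
model = nn.Sequential(nn.Linear(8, 16), nn.Linear(16, 2))

state = backbone.state_dict()  # lacks the parameters of the second layer

# strict=True (the previous, hard-coded behaviour) rejects the partial state dict.
try:
    model.load_state_dict(state, strict=True)
except RuntimeError as err:
    print("strict=True raised:", err)

# strict=False loads the overlapping parameters and reports what was skipped.
result = model.load_state_dict(state, strict=False)
print("missing keys:", result.missing_keys)        # ['1.weight', '1.bias']
print("unexpected keys:", result.unexpected_keys)  # []
```

Because `kwargs.get("strict", True)` defaults to `True`, callers that do not pass the flag keep exactly the behaviour of the old `model.load_state_dict(res)` line.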