diff --git a/0-eval_pretrain.py b/0-eval_pretrain.py
index 32f1dda..edb3ad8 100644
--- a/0-eval_pretrain.py
+++ b/0-eval_pretrain.py
@@ -16,8 +16,7 @@ def count_parameters(model):
 
 
 def init_model(lm_config):
-    tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer',
-                                              trust_remote_code=True, use_fast=False)
+    tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer')
     model_from = 1  # 1: load from local weights, 2: load via transformers
     if model_from == 1: