add missing trust_remote_code for finetune code
hanhainebula committed Nov 20, 2024
1 parent 3fdf27c commit 5e9b6b8
Showing 5 changed files with 5 additions and 0 deletions.
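Each of the five hunks applies the same one-line change: the trust_remote_code value parsed into the model arguments is now forwarded to the config-loading call inside the runner's load_tokenizer_and_model. A minimal sketch of the pattern follows; the helper name load_config and the call to transformers' AutoConfig.from_pretrained are assumptions for illustration, since the hunks only show the keyword arguments being passed.

# Sketch only: load_config and the AutoConfig.from_pretrained call are assumed,
# not taken verbatim from the repository; the hunks show only the keyword arguments.
from transformers import AutoConfig

def load_config(model_args, num_labels: int = 1):
    # Forwarding trust_remote_code lets fine-tuning load models whose
    # configuration or modeling code is hosted on the Hugging Face Hub.
    return AutoConfig.from_pretrained(
        model_args.model_name_or_path,
        num_labels=num_labels,
        cache_dir=model_args.cache_dir,
        token=model_args.token,
        trust_remote_code=model_args.trust_remote_code,
    )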
@@ -70,6 +70,7 @@ def load_tokenizer_and_model(self) -> Tuple[PreTrainedTokenizer, AbsEmbedderMode
 num_labels=num_labels,
 cache_dir=self.model_args.cache_dir,
 token=self.model_args.token,
+trust_remote_code=self.model_args.trust_remote_code,
 )
 logger.info('Config: %s', config)

FlagEmbedding/finetune/embedder/decoder_only/icl/runner.py (1 addition, 0 deletions)
@@ -74,6 +74,7 @@ def load_tokenizer_and_model(self) -> Tuple[PreTrainedTokenizer, AbsEmbedderMode
 num_labels=num_labels,
 cache_dir=self.model_args.cache_dir,
 token=self.model_args.token,
+trust_remote_code=self.model_args.trust_remote_code,
 )
 logger.info('Config: %s', config)

@@ -41,6 +41,7 @@ def load_tokenizer_and_model(self) -> Tuple[PreTrainedTokenizer, AbsEmbedderMode
 num_labels=num_labels,
 cache_dir=self.model_args.cache_dir,
 token=self.model_args.token,
+trust_remote_code=self.model_args.trust_remote_code,
 )
 logger.info('Config: %s', config)

FlagEmbedding/finetune/embedder/encoder_only/m3/runner.py (1 addition, 0 deletions)
@@ -116,6 +116,7 @@ def load_tokenizer_and_model(self) -> Tuple[PreTrainedTokenizer, AbsEmbedderMode
 num_labels=num_labels,
 cache_dir=self.model_args.cache_dir,
 token=self.model_args.token,
+trust_remote_code=self.model_args.trust_remote_code,
 )
 logger.info('Config: %s', config)

@@ -35,6 +35,7 @@ def load_tokenizer_and_model(self) -> Tuple[PreTrainedTokenizer, AbsRerankerMode
 num_labels=num_labels,
 cache_dir=self.model_args.cache_dir,
 token=self.model_args.token,
+trust_remote_code=self.model_args.trust_remote_code,
 )
 logger.info('Config: %s', config)

