Skip to content

Commit

Permalink
fix: the tokenizer in preprocessors can be None
Browse files Browse the repository at this point in the history
  • Loading branch information
aria-hacker committed Oct 6, 2024
1 parent 54b3fb3 commit 7f68992
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions aria/model/processing_aria.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def __init__(
else:
self.tokenizer = tokenizer

if self.tokenizer.pad_token is None:
if self.tokenizer is not None and self.tokenizer.pad_token is None:
self.tokenizer.pad_token = self.tokenizer.unk_token

self.image_token = image_token
Expand Down Expand Up @@ -241,7 +241,8 @@ def from_pretrained(
**cls._extract_kwargs(AutoTokenizer.from_pretrained, **kwargs),
)
chat_template = tokenizer.chat_template
except Exception:
except Exception as e:
logger.warning(f"Failed to load tokenizer from {tokenizer_path}: {e}")
tokenizer = None
chat_template = None
return cls(
Expand Down

0 comments on commit 7f68992

Please sign in to comment.