From ccd9fa90719784b7594f2a59007c4ecd8a96347c Mon Sep 17 00:00:00 2001
From: am9zZWY <46693545+am9zZWY@users.noreply.github.com>
Date: Wed, 17 Jul 2024 15:20:09 +0200
Subject: [PATCH] Add main for tokenizer

---
 engine/tokenizer.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/engine/tokenizer.py b/engine/tokenizer.py
index 925996d..7fc5f19 100644
--- a/engine/tokenizer.py
+++ b/engine/tokenizer.py
@@ -257,7 +257,9 @@ async def process(self, data, link):
     "I'm 6'2\" tall and I weigh 180 lbs. I'm 25 years old.",
 ]
 
-for sentence in test_sentences:
-    print(f"Original: {sentence}")
-    print(f"Tokenized: {process_text(sentence)}")
-    print()
+if __name__ == "__main__":
+
+    for sentence in test_sentences:
+        print(f"Original: {sentence}")
+        print(f"Tokenized: {process_text(sentence)}")
+        print()
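
A note on the effect of the guard (not part of the patch; the import path and the
process_text name are read off the diff, and treating process_text as a module-level
function in engine/tokenizer.py is an assumption):

    # Hypothetical usage sketch, assuming process_text is importable from
    # engine/tokenizer.py as the diff suggests. With the __main__ guard in
    # place, importing the module no longer runs the demo loop; the prints
    # only fire when the file is executed directly.
    from engine.tokenizer import process_text

    print(process_text("I'm 6'2\" tall and I weigh 180 lbs."))

Running the file directly (e.g. python -m engine.tokenizer) still prints the
Original/Tokenized pairs for every entry in test_sentences.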