From c7f74e410f0851ce643d915e1cf79361ace4b837 Mon Sep 17 00:00:00 2001
From: AmirHussein96
Date: Fri, 5 Apr 2024 12:46:41 -0400
Subject: [PATCH] remove pretrained_ctc.py

---
 egs/librispeech/ASR/zipformer/export.py       | 20 +++++++++++--------
 egs/seame/ASR/zipformer_hat/pretrained_ctc.py |  1 -
 2 files changed, 12 insertions(+), 9 deletions(-)
 delete mode 120000 egs/seame/ASR/zipformer_hat/pretrained_ctc.py

diff --git a/egs/librispeech/ASR/zipformer/export.py b/egs/librispeech/ASR/zipformer/export.py
index 2b8d1aaf36..0b3f07c4ba 100755
--- a/egs/librispeech/ASR/zipformer/export.py
+++ b/egs/librispeech/ASR/zipformer/export.py
@@ -29,13 +29,17 @@
 
 (1) Export to torchscript model using torch.jit.script()
 
-- For non-streaming model:
-
-./zipformer/export.py \
-  --exp-dir ./zipformer/exp \
-  --tokens data/lang_bpe_500/tokens.txt \
-  --epoch 30 \
-  --avg 9 \
+- For non-streaming model:
+
+./zipformer_hat_seame/export.py \
+  --exp-dir ./zipformer_hat/exp \
+  --tokens data_seame/lang_bpe_4000/tokens.txt \
+  --epoch 20 \
+  --avg 5 \
+  --num-encoder-layers 2,2,2,2,2,2 \
+  --feedforward-dim 512,768,1024,1024,1024,768 \
+  --encoder-dim 192,256,256,256,256,256 \
+  --encoder-unmasked-dim 192,192,192,192,192,192 \
   --jit 1
 
 It will generate a file `jit_script.pt` in the given `exp_dir`. You can later
@@ -234,7 +238,7 @@ def get_parser():
     parser.add_argument(
         "--tokens",
         type=str,
-        default="data/lang_bpe_500/tokens.txt",
+        default="data_libri/lang_bpe_500/tokens.txt",
         help="Path to the tokens.txt",
     )
 
diff --git a/egs/seame/ASR/zipformer_hat/pretrained_ctc.py b/egs/seame/ASR/zipformer_hat/pretrained_ctc.py
deleted file mode 120000
index fb9bdf1fa2..0000000000
--- a/egs/seame/ASR/zipformer_hat/pretrained_ctc.py
+++ /dev/null
@@ -1 +0,0 @@
-../zipformer/pretrained_ctc.py
\ No newline at end of file