{
  "_name_or_path": "openai/whisper-small",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "architectures": [
    "WhisperForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "begin_suppress_tokens": [
    220,
    50257
  ],
  "bos_token_id": 50257,
  "d_model": 768,
  "decoder_attention_heads": 12,
  "decoder_ffn_dim": 3072,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "decoder_start_token_id": 50258,
  "dropout": 0.0,
  "encoder_attention_heads": 12,
  "encoder_ffn_dim": 3072,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 50257,
  "forced_decoder_ids": [
    [
      1,
      50259
    ],
    [
      2,
      50359
    ],
    [
      3,
      50363
    ]
  ],
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": 448,
  "max_source_positions": 1500,
  "max_target_positions": 448,
  "model_type": "whisper",
  "num_hidden_layers": 12,
  "num_mel_bins": 80,
  "pad_token_id": 50257,
  "scale_embedding": false,
  "suppress_tokens": [
    1,
    2,
    7,
    8,
    9,
    10,
    14,
    25,
    26,
    27,
    28,
    29,
    31,
    58,
    59,
    60,
    61,
    62,
    63,
    90,
    91,
    92,
    93,
    359,
    503,
    522,
    542,
    873,
    893,
    902,
    918,
    922,
    931,
    1350,
    1853,
    1982,
    2460,
    2627,
    3246,
    3253,
    3268,
    3536,
    3846,
    3961,
    4183,
    4667,
    6585,
    6647,
    7273,
    9061,
    9383,
    10428,
    10929,
    11938,
    12033,
    12331,
    12562,
    13793,
    14157,
    14635,
    15265,
    15618,
    16553,
    16604,
    18362,
    18956,
    20075,
    21675,
    22520,
    26130,
    26161,
    26435,
    28279,
    29464,
    31650,
    32302,
    32470,
    36865,
    42863,
    47425,
    49870,
    50254,
    50258,
    50360,
    50361,
    50362
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.27.0.dev0",
  "use_cache": true,
  "vocab_size": 51865
}