config.json
{
  "_name_or_path": "facebook/hubert-large-ls960-ft",
  "activation_dropout": 0.1,
  "apply_spec_augment": true,
  "architectures": [
    "HubertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": 1,
  "classifier_proj_size": 256,
  "conv_bias": true,
  "conv_dim": [
    512,
    512,
    512,
    512,
    512,
    512,
    512
  ],
  "conv_kernel": [
    10,
    3,
    3,
    3,
    3,
    2,
    2
  ],
  "conv_stride": [
    5,
    2,
    2,
    2,
    2,
    2,
    2
  ],
  "ctc_loss_reduction": "sum",
  "ctc_zero_infinity": false,
  "diversity_loss_weight": 0.1,
  "do_stable_layer_norm": true,
  "eos_token_id": 2,
  "feat_extract_activation": "gelu",
  "feat_extract_dropout": 0.0,
  "feat_extract_norm": "layer",
  "feat_proj_dropout": 0.1,
  "feat_proj_layer_norm": true,
  "final_dropout": 0.1,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout": 0.1,
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "neutral",
    "1": "angry",
    "2": "positive",
    "3": "sad",
    "4": "other"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "neutral": 0,
    "angry": 1,
    "positive": 2,
    "sad": 3,
    "other": 4
  },
  "layer_norm_eps": 1e-05,
  "layerdrop": 0.1,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.05,
  "model_type": "hubert",
  "num_attention_heads": 16,
  "num_conv_pos_embedding_groups": 16,
  "num_conv_pos_embeddings": 128,
  "num_feat_extract_layers": 7,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "torch_dtype": "float32",
  "transformers_version": "4.30.2",
  "use_weighted_layer_sum": false,
  "vocab_size": 32
}
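This is the Transformers configuration for a HuBERT-large model fine-tuned for 5-way audio sequence classification (labels: neutral, angry, positive, sad, other), initialized from facebook/hubert-large-ls960-ft. Below is a minimal inference sketch, assuming the checkpoint directory that holds this config.json also contains the fine-tuned weights and a preprocessor_config.json; the directory path is a placeholder, since the fine-tuned checkpoint id is not named anywhere in the file itself.

import numpy as np
import torch
from transformers import AutoFeatureExtractor, HubertForSequenceClassification

# Placeholder path: substitute the actual checkpoint directory or Hub id
# that ships with this config.json (only the base model is named in it).
model_dir = "path/to/finetuned-checkpoint"

feature_extractor = AutoFeatureExtractor.from_pretrained(model_dir)
model = HubertForSequenceClassification.from_pretrained(model_dir)
model.eval()

# HuBERT expects 16 kHz mono waveforms; a 1-second silent clip stands in
# for real audio here.
waveform = np.zeros(16000, dtype=np.float32)
inputs = feature_extractor(waveform, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 5): one logit per label

print(model.config.id2label[int(logits.argmax(-1))])  # e.g. "neutral"

For scale: the seven conv_stride values multiply to 320, so the convolutional front end emits one frame per 320 input samples, i.e. roughly 50 hidden-state frames per second of 16 kHz audio, which the 24 transformer layers then process. With apply_spec_augment enabled, mask_time_prob and mask_time_length control SpecAugment-style time masking over those frames during fine-tuning (feature masking is disabled here, mask_feature_prob = 0.0).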