{ "_name_or_path": "output_bert", "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "directionality": "bidi", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "UPPER_PERIOD", "1": "LOWER_PERIOD", "2": "UPPER_TOTAL_PERIOD", "3": "UPPER_COMMA", "4": "LOWER_COMMA", "5": "UPPER_TOTAL_COMMA", "6": "UPPER_QUESTION", "7": "LOWER_QUESTION", "8": "UPPER_TOTAL_QUESTION", "9": "UPPER_TIRE", "10": "LOWER_TIRE", "11": "UPPER_TOTAL_TIRE", "12": "UPPER_VOSKL", "13": "LOWER_VOSKL", "14": "UPPER_TOTAL_VOSKL", "15": "UPPER_DVOETOCHIE", "16": "LOWER_DVOETOCHIE", "17": "UPPER_TOTAL_DVOETOCHIE", "18": "UPPER_PERIODCOMMA", "19": "LOWER_PERIODCOMMA", "20": "UPPER_TOTAL_PERIODCOMMA", "21": "UPPER_DEFIS", "22": "LOWER_DEFIS", "23": "UPPER_TOTAL_DEFIS", "24": "UPPER_QUESTIONVOSKL", "25": "LOWER_QUESTIONVOSKL", "26": "UPPER_TOTAL_QUESTIONVOSKL", "27": "UPPER_MNOGOTOCHIE", "28": "LOWER_MNOGOTOCHIE", "29": "UPPER_TOTAL_MNOGOTOCHIE", "30": "UPPER_O", "31": "LOWER_O", "32": "UPPER_TOTAL_O" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "LOWER_COMMA": 4, "LOWER_DEFIS": 22, "LOWER_DVOETOCHIE": 16, "LOWER_MNOGOTOCHIE": 28, "LOWER_O": 31, "LOWER_PERIOD": 1, "LOWER_PERIODCOMMA": 19, "LOWER_QUESTION": 7, "LOWER_QUESTIONVOSKL": 25, "LOWER_TIRE": 10, "LOWER_VOSKL": 13, "UPPER_COMMA": 3, "UPPER_DEFIS": 21, "UPPER_DVOETOCHIE": 15, "UPPER_MNOGOTOCHIE": 27, "UPPER_O": 30, "UPPER_PERIOD": 0, "UPPER_PERIODCOMMA": 18, "UPPER_QUESTION": 6, "UPPER_QUESTIONVOSKL": 24, "UPPER_TIRE": 9, "UPPER_TOTAL_COMMA": 5, "UPPER_TOTAL_DEFIS": 23, "UPPER_TOTAL_DVOETOCHIE": 17, "UPPER_TOTAL_MNOGOTOCHIE": 29, "UPPER_TOTAL_O": 32, "UPPER_TOTAL_PERIOD": 2, "UPPER_TOTAL_PERIODCOMMA": 20, "UPPER_TOTAL_QUESTION": 8, "UPPER_TOTAL_QUESTIONVOSKL": 26, "UPPER_TOTAL_TIRE": 11, "UPPER_TOTAL_VOSKL": 14, "UPPER_VOSKL": 12 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.28.1", "type_vocab_size": 2, "use_cache": true, "vocab_size": 120138 }