path: root/models/trpg-final/config.json
author HsiangNianian <i@jyunko.cn> 2025-12-30 21:21:59 +0800
committer HsiangNianian <i@jyunko.cn> 2025-12-30 21:22:07 +0800
commit d35712d0f200b7862450b173a1bee95d1bd85dc8 (patch)
tree d29ec96468d1f630262386e5b2c06a13622fdaee /models/trpg-final/config.json
parent 2a7b27169c6b208175aad3d46c97a97cb59cd751 (diff)
feat: Update Python version requirement and add onnxscript dependency
- Changed the required Python version from >=3.12 to >=3.10 in pyproject.toml.
- Reformatted the dependencies section for better readability.
- Added "onnxscript>=0.5.7" to the train optional dependencies.
Diffstat (limited to 'models/trpg-final/config.json')
-rw-r--r--  models/trpg-final/config.json  50
1 file changed, 0 insertions, 50 deletions
diff --git a/models/trpg-final/config.json b/models/trpg-final/config.json
deleted file mode 100644
index fcaf8e2..0000000
--- a/models/trpg-final/config.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
- "architectures": [
- "BertForTokenClassification"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "dtype": "float32",
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 256,
- "id2label": {
- "0": "O",
- "1": "B-action",
- "2": "B-comment",
- "3": "B-dialogue",
- "4": "B-speaker",
- "5": "B-timestamp",
- "6": "I-action",
- "7": "I-comment",
- "8": "I-dialogue",
- "9": "I-speaker",
- "10": "I-timestamp"
- },
- "initializer_range": 0.02,
- "intermediate_size": 1024,
- "label2id": {
- "B-action": 1,
- "B-comment": 2,
- "B-dialogue": 3,
- "B-speaker": 4,
- "B-timestamp": 5,
- "I-action": 6,
- "I-comment": 7,
- "I-dialogue": 8,
- "I-speaker": 9,
- "I-timestamp": 10,
- "O": 0
- },
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 8,
- "num_hidden_layers": 6,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "transformers_version": "4.57.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 21128
-}
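
For reference, the deleted config describes a compact BERT encoder (6 hidden layers, hidden size 256, 8 attention heads, 21128-token vocabulary) set up for token classification over BIO tags covering TRPG log spans (action, comment, dialogue, speaker, timestamp). Below is a minimal sketch of how a directory holding such a config is typically consumed with the Hugging Face transformers library; the assumed layout (trained weights and tokenizer files sitting next to config.json) and the sample input line are illustrative assumptions, not something this diff shows.

import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

# Assumed layout: trained weights and tokenizer files live next to config.json.
model_dir = "models/trpg-final"
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)
model.eval()

# Tag each token of a (hypothetical) TRPG log line with one of the
# eleven BIO labels declared in the config's id2label map.
text = "2025-12-30 21:21 GM: The door creaks open."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits          # shape: (1, seq_len, 11)
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0].tolist())
for token, label_id in zip(tokens, pred_ids):
    print(token, model.config.id2label[label_id])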