| | |
|---|---|
| author | 2025-12-30 21:21:59 +0800 |
| committer | 2025-12-30 21:22:07 +0800 |
| commit | d35712d0f200b7862450b173a1bee95d1bd85dc8 (patch) |
| tree | d29ec96468d1f630262386e5b2c06a13622fdaee /models/trpg-final/tokenizer_config.json |
| parent | 2a7b27169c6b208175aad3d46c97a97cb59cd751 (diff) |
| download | base-model-d35712d0f200b7862450b173a1bee95d1bd85dc8.tar.gz, base-model-d35712d0f200b7862450b173a1bee95d1bd85dc8.zip |
feat: Update Python version requirement and add onnxscript dependency
- Changed the required Python version from >=3.12 to >=3.10 in pyproject.toml.
- Reformatted the dependencies section for better readability.
- Added "onnxscript>=0.5.7" to the train optional dependencies.
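For reference, a minimal sketch of the pyproject.toml sections these bullets describe. Only the `requires-python` change and the `onnxscript>=0.5.7` pin in the `train` extra come from the commit message; the project name and the base dependency entries are hypothetical placeholders, since the pyproject.toml diff is not shown on this page.

```toml
[project]
name = "base-model"            # hypothetical; the real name is not shown here
requires-python = ">=3.10"     # relaxed from ">=3.12" by this commit

# Dependencies reformatted one per line for readability; the actual
# entries are not visible in this diff and are placeholders.
dependencies = [
    "torch",
    "transformers",
]

[project.optional-dependencies]
train = [
    "onnxscript>=0.5.7",       # added by this commit
]
```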
Diffstat (limited to 'models/trpg-final/tokenizer_config.json')
| | | |
|---|---|---|
| -rw-r--r-- | models/trpg-final/tokenizer_config.json | 58 |

1 file changed, 0 insertions, 58 deletions
```diff
diff --git a/models/trpg-final/tokenizer_config.json b/models/trpg-final/tokenizer_config.json
deleted file mode 100644
index f0a3d97..0000000
--- a/models/trpg-final/tokenizer_config.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "added_tokens_decoder": {
-    "0": {
-      "content": "[PAD]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "100": {
-      "content": "[UNK]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "101": {
-      "content": "[CLS]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "102": {
-      "content": "[SEP]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "103": {
-      "content": "[MASK]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "clean_up_tokenization_spaces": true,
-  "cls_token": "[CLS]",
-  "do_basic_tokenize": true,
-  "do_lower_case": true,
-  "extra_special_tokens": {},
-  "mask_token": "[MASK]",
-  "model_max_length": 128,
-  "never_split": null,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "unk_token": "[UNK]"
-}
```
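The deleted file is a standard Hugging Face `BertTokenizer` configuration. As a hedged sketch of how a config like this was consumed before the deletion: `AutoTokenizer.from_pretrained` reads tokenizer_config.json (together with a vocab.txt, which is not shown in this diff) from the model directory. The sample sentence and printed fields below are illustrative only.

```python
from transformers import AutoTokenizer

# Before this commit, the directory held the tokenizer_config.json above;
# a vocab.txt alongside it is also required for BertTokenizer to load.
tokenizer = AutoTokenizer.from_pretrained("models/trpg-final")

# model_max_length=128 and do_lower_case=true come from the config, so
# long inputs truncate at 128 tokens and text is lowercased first.
enc = tokenizer("Roll for initiative!", truncation=True, max_length=128)
print(enc["input_ids"])  # wrapped in [CLS] (id 101) ... [SEP] (id 102)
```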
