Xenova (HF Staff) committed
Commit 51ac147 · verified · 1 parent: 2701e33

Upload Gemma3ForCausalLM (#2)


- Upload Gemma3ForCausalLM (b60d7b68ef4b039ed1a4a2125ad014029641d887)

Files changed (3)
  1. config.json +2 -14
  2. generation_config.json +3 -3
  3. model.safetensors +1 -1
config.json CHANGED
@@ -57,17 +57,5 @@
   "transformers_version": "5.0.0.dev0",
   "use_bidirectional_attention": false,
   "use_cache": false,
-  "vocab_size": 262144,
-  "transformers.js_config": {
-    "use_external_data_format": {
-      "model.onnx": 1,
-      "model_fp16.onnx": 1,
-      "model_q4.onnx": 1,
-      "model_q4f16.onnx": 1
-    },
-    "kv_cache_dtype": {
-      "q4f16": "float16",
-      "fp16": "float16"
-    }
-  }
-}
+  "vocab_size": 262144
+}
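
For context, the removed "transformers.js_config" block is read only by transformers.js (it selects ONNX variants and KV-cache dtypes); the keys that remain are what the Python transformers loader consumes. A minimal sketch of inspecting the trimmed config, assuming an illustrative local checkout of this repo at ./gemma3-checkpoint (the path is not part of the commit):

```python
# Minimal sketch: load the trimmed config.json with the Python transformers library.
# "./gemma3-checkpoint" is an illustrative local path, not part of this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./gemma3-checkpoint")
print(config.vocab_size)   # 262144 -- the last key kept by this change
print(config.use_cache)    # False, as set a few lines above
```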
 
generation_config.json CHANGED
@@ -1,13 +1,13 @@
 {
-  "cache_implementation": "hybrid",
+  "bos_token_id": 2,
   "do_sample": true,
   "eos_token_id": [
     1,
     50,
     106
   ],
+  "pad_token_id": 0,
   "top_k": 64,
   "top_p": 0.95,
-  "transformers_version": "5.0.0.dev0",
-  "trust_remote_code": false
+  "transformers_version": "5.0.0.dev0"
 }
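
The newly added "bos_token_id" and "pad_token_id" become generation defaults when the checkpoint is loaded. A minimal sketch of reading them back, assuming the same illustrative local checkout at ./gemma3-checkpoint:

```python
# Minimal sketch: inspect the updated generation defaults.
# "./gemma3-checkpoint" is an illustrative local path, not part of this commit.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("./gemma3-checkpoint")
print(gen_config.bos_token_id)   # 2, added by this change
print(gen_config.pad_token_id)   # 0, added by this change
print(gen_config.eos_token_id)   # [1, 50, 106]
print(gen_config.do_sample, gen_config.top_k, gen_config.top_p)  # True 64 0.95
# model.generate(...) uses these values as defaults when no overrides are passed.
```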
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:631cc2af9d15e22f12cad50146a5e0bb617c650ee572f979ac288c985e210702
+oid sha256:aa66b56d16a17ac0cb48ac07c5968414a5f324423c46a45c98f9e83831716802
 size 1072419256