|
17 | 17 | backbone_presets = { |
18 | 18 | "gemma_2b_en": { |
19 | 19 | "metadata": { |
20 | | - "description": ( |
21 | | - "18-layer Gemma model (Gemma with 2B parameters). " |
22 | | - ), |
| 20 | + "description": "2 billion parameter, 18-layer, base Gemma model.", |
23 | 21 | "params": 2506172416, |
24 | 22 | "official_name": "Gemma", |
25 | 23 | "path": "gemma", |
|
30 | 28 | "gemma_instruct_2b_en": { |
31 | 29 | "metadata": { |
32 | 30 | "description": ( |
33 | | - "18-layer Gemma model (Gemma with 2B parameters). " |
| 31 | + "2 billion parameter, 18-layer, instruction tuned Gemma model." |
34 | 32 | ), |
35 | 33 | "params": 2506172416, |
36 | 34 | "official_name": "Gemma", |
|
39 | 37 | }, |
40 | 38 | "kaggle_handle": "kaggle://keras/gemma/keras/gemma_instruct_2b_en/2", |
41 | 39 | }, |
42 | | - "gemma_7b_en": { |
| 40 | + "gemma_1.1_instruct_2b_en": { |
43 | 41 | "metadata": { |
44 | 42 | "description": ( |
45 | | - "28-layer Gemma model (Gemma with 7B parameters). " |
| 43 | + "2 billion parameter, 18-layer, instruction tuned Gemma model. " |
| 44 | + "The 1.1 update improves model quality." |
46 | 45 | ), |
| 46 | + "params": 2506172416, |
| 47 | + "official_name": "Gemma", |
| 48 | + "path": "gemma", |
| 49 | + "model_card": "https://www.kaggle.com/models/google/gemma", |
| 50 | + }, |
| 51 | + "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_2b_en/1", |
| 52 | + }, |
| 53 | + "gemma_7b_en": { |
| 54 | + "metadata": { |
| 55 | + "description": "7 billion parameter, 28-layer, base Gemma model.", |
47 | 56 | "params": 8537680896, |
48 | 57 | "official_name": "Gemma", |
49 | 58 | "path": "gemma", |
|
54 | 63 | "gemma_instruct_7b_en": { |
55 | 64 | "metadata": { |
56 | 65 | "description": ( |
57 | | - "28-layer Gemma model (Gemma with 7B parameters). " |
| 66 | + "7 billion parameter, 28-layer, instruction tuned Gemma model." |
58 | 67 | ), |
59 | 68 | "params": 8537680896, |
60 | 69 | "official_name": "Gemma", |
|
63 | 72 | }, |
64 | 73 | "kaggle_handle": "kaggle://keras/gemma/keras/gemma_instruct_7b_en/2", |
65 | 74 | }, |
| 75 | + "gemma_1.1_instruct_7b_en": { |
| 76 | + "metadata": { |
| 77 | + "description": ( |
| 78 | + "7 billion parameter, 28-layer, instruction tuned Gemma model. " |
| 79 | + "The 1.1 update improves model quality." |
| 80 | + ), |
| 81 | + "params": 8537680896, |
| 82 | + "official_name": "Gemma", |
| 83 | + "path": "gemma", |
| 84 | + "model_card": "https://www.kaggle.com/models/google/gemma", |
| 85 | + }, |
| 86 | + "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_7b_en/1", |
| 87 | + }, |
66 | 88 | } |
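
A minimal sketch of how one of the newly registered 1.1 presets would typically be consumed, assuming the standard KerasNLP `from_preset` loading flow; the `GemmaCausalLM` task class, the prompt string, and the `generate` call are illustrative rather than part of this diff:

```python
import keras_nlp

# Load the instruction-tuned Gemma 1.1 preset added above; the weights are
# resolved from the "kaggle_handle" recorded in backbone_presets.
gemma_lm = keras_nlp.models.GemmaCausalLM.from_preset("gemma_1.1_instruct_2b_en")

# Quick smoke test of the loaded checkpoint (prompt and max_length are illustrative).
print(gemma_lm.generate("What is Keras?", max_length=64))
```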