4 files changed: +19 -6 lines changed

File 1 of 4:

@@ -5,7 +5,7 @@ controls:
   /huggingface/token:
     type: TextControl
     secret: true
-  /ui/appSettings/model_instruction:
+  /ui/appSettings/hf_model_instruction:
     type: TextControl
   /ui/appSettings/page_title:
     type: TextControl
File 2 of 4:

   args:
     - --model
     - {{ .Values.huggingface.model }}
+    {{- if .Values.huggingface.chatTemplate }}
+    - --chat-template
+    - {{ quote .Values.huggingface.chatTemplate }}
+    {{- end -}}
     {{- if .Values.api.extraArgs -}}
     {{- .Values.api.extraArgs | toYaml | nindent 10 }}
     {{- end -}}
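For illustration only, and assuming this template sits in the API server container's args list: with a values override that sets huggingface.chatTemplate, the rendered args would come out roughly like the sketch below (the template string is a made-up placeholder, not any real model's template).

    args:
      - --model
      - ise-uiuc/Magicoder-S-DS-6.7B
      - --chat-template
      - "{% for message in messages %}{{ message['role'] }}: {{ message['content'] }} {% endfor %}"

Because the new flag is wrapped in {{- if .Values.huggingface.chatTemplate }}, rendering is unchanged when chatTemplate is left unset and the template bundled with the model continues to apply.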
File 3 of 4:

   "appSettings": {
     "type": "object",
     "properties": {
-      "model_name": {
+      "hf_model_name": {
         "type": "string",
         "title": "Model Name",
         "description": "Model name supplied to the OpenAI client in frontend web app. Should match huggingface.model above.",
         "default": "mistralai/Mistral-7B-Instruct-v0.2"
       },
-      "model_instruction": {
+      "hf_model_instruction": {
         "type": "string",
         "title": "Instruction",
         "description": "The initial model prompt (i.e. the hidden instructions) to use when generating responses.",

   ... (unchanged lines collapsed)

       }

     },
-    "required": ["model_name", "model_instruction"]
+    "required": ["hf_model_name", "hf_model_instruction"]
   }
 }
 }
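Taken together, a minimal appSettings block that satisfies the renamed schema (both keys are now required; the values shown here are only illustrative) would be:

    appSettings:
      hf_model_name: mistralai/Mistral-7B-Instruct-v0.2
      hf_model_instruction: "You are a helpful AI assistant."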
File 4 of 4:

@@ -6,6 +6,15 @@ huggingface:
   # The name of the HuggingFace model to use
   # Use a yaml anchor to avoid duplication elsewhere
   model: &model-name ise-uiuc/Magicoder-S-DS-6.7B
+  # A Jinja formatted chat template to provide to the language model.
+  # See https://huggingface.co/blog/chat-templates for background info.
+  # If not provided, the default template specified in the HuggingFace
+  # model repository's tokenizer_config.json file is used. As explained
+  # in the above blog post, the HF template key in tokenizer_config.json
+  # is relatively new and not all HF models include a template in their
+  # repo files yet. This chart value provides a hook to manually apply the
+  # correct chat template for such models.
+  chatTemplate:

   # For private/gated huggingface models (e.g. Meta's Llama models)
   # you must provide your own huggingface token, for details see:

   ... (unchanged lines collapsed)

   # The values to be written to settings.yml for parsing as frontend app setting
   # (see example_app.py and config.py for example using pydantic-settings to configure app)
   appSettings:
-    model_name: *model-name
-    model_instruction: "You are a helpful AI assistant. Please response appropriately."
+    hf_model_name: *model-name
+    hf_model_instruction: "You are a helpful AI assistant. Please response appropriately."
   # Container image config
   image:
     repository: ghcr.io/stackhpc/azimuth-llm-ui-base
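As a sketch of how the new value might be filled in (the model name and template below are hypothetical, chosen only to show the shape of a values override for a model whose repository ships no chat template):

    huggingface:
      # hypothetical model whose tokenizer_config.json lacks a chat template entry
      model: some-org/some-instruct-model
      chatTemplate: |
        {% for message in messages %}{{ message['role'] }}: {{ message['content'] }}
        {% endfor %}

Leaving chatTemplate empty keeps the previous behaviour, where the template bundled with the model repository (if any) is used.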