@@ -141,14 +141,16 @@ export function getInferenceSnippet(
141141 id : string ,
142142 pipeline_tag : PipelineType ,
143143 language : InferenceSnippetLanguage ,
144+ config ?: JsonObject ,
145+ tags ?: string [ ] ,
144146) : string | undefined {
145147 const modelData = {
146148 id,
147149 pipeline_tag,
148150 mask_token : "[MASK]" ,
149151 library_name : "" ,
150- config : { } ,
151- tags : [ ] ,
152+ config : config ?? { } ,
153+ tags : tags ?? [ ] ,
152154 } ;
153155 // @ts-ignore
154156 if ( HAS_SNIPPET_FN [ language ] ( modelData ) ) {
@@ -498,24 +500,14 @@ function fetchChatCompletion() {
498500 ) ;
499501
500502 const mainModel = DATA . models [ task . name ] [ 0 ] ;
501- const mainModelData = {
502- // @ts-ignore
503- id : mainModel . id ,
504- pipeline_tag : task . pipelineTag ,
505- mask_token : "" ,
506- library_name : "" ,
507- // @ts-ignore
508- tags : [ "conversational" ] ,
509- // @ts-ignore
510- config : mainModel . config ,
511- } ;
503+
512504 const taskSnippets = {
513505 // @ts-ignore
514- curl : GET_SNIPPET_FN [ "curl" ] ( mainModelData , "hf_***" ) ,
506+ curl : getInferenceSnippet ( mainModel . id , task . pipelineTag , "curl" , mainModel . config , [ "conversational" ] ) ,
515507 // @ts-ignore
516- python : GET_SNIPPET_FN [ "python" ] ( mainModelData , "hf_***" ) ,
508+ python : getInferenceSnippet ( mainModel . id , task . pipelineTag , "python" , mainModel . config , [ "conversational" ] ) ,
517509 // @ts-ignore
518- javascript : GET_SNIPPET_FN [ "js" ] ( mainModelData , "hf_***" ) ,
510+ javascript : getInferenceSnippet ( mainModel . id , task . pipelineTag , "js" , mainModel . config , [ "conversational" ] ) ,
519511 } ;
520512 DATA . snippets [ task . name ] = SNIPPETS_TEMPLATE ( {
521513 taskSnippets,
0 commit comments