@@ -56,14 +56,6 @@ type Status struct {
5656 Error error `json:"error"`
5757}
5858
59- // normalizeHuggingFaceModelName converts Hugging Face model names to lowercase
60- func normalizeHuggingFaceModelName (model string ) string {
61- if strings .HasPrefix (model , "hf.co/" ) {
62- return strings .ToLower (model )
63- }
64- return model
65- }
66-
6759func (c * Client ) Status () Status {
6860 // TODO: Query "/".
6961 resp , err := c .doRequest (http .MethodGet , inference .ModelsPrefix , nil )
@@ -106,8 +98,6 @@ func (c *Client) Status() Status {
10698}
10799
108100func (c * Client ) Pull (model string , printer standalone.StatusPrinter ) (string , bool , error ) {
109- model = normalizeHuggingFaceModelName (model )
110-
111101 // Check if this is a Hugging Face model and if HF_TOKEN is set
112102 var hfToken string
113103 if strings .HasPrefix (strings .ToLower (model ), "hf.co/" ) {
@@ -233,8 +223,6 @@ func (c *Client) withRetries(
233223}
234224
235225func (c * Client ) Push (model string , printer standalone.StatusPrinter ) (string , bool , error ) {
236- model = normalizeHuggingFaceModelName (model )
237-
238226 return c .withRetries ("push" , 3 , printer , func (attempt int ) (string , bool , error , bool ) {
239227 pushPath := inference .ModelsPrefix + "/" + model + "/push"
240228 resp , err := c .doRequest (
@@ -303,7 +291,6 @@ func (c *Client) ListOpenAI() (dmrm.OpenAIModelList, error) {
303291}
304292
305293func (c * Client ) Inspect (model string , remote bool ) (dmrm.Model , error ) {
306- model = normalizeHuggingFaceModelName (model )
307294 rawResponse , err := c .listRawWithQuery (fmt .Sprintf ("%s/%s" , inference .ModelsPrefix , model ), model , remote )
308295 if err != nil {
309296 return dmrm.Model {}, err
@@ -317,7 +304,6 @@ func (c *Client) Inspect(model string, remote bool) (dmrm.Model, error) {
317304}
318305
319306func (c * Client ) InspectOpenAI (model string ) (dmrm.OpenAIModel , error ) {
320- model = normalizeHuggingFaceModelName (model )
321307 modelsRoute := inference .InferencePrefix + "/v1/models"
322308 rawResponse , err := c .listRaw (fmt .Sprintf ("%s/%s" , modelsRoute , model ), model )
323309 if err != nil {
@@ -366,8 +352,6 @@ func (c *Client) Chat(model, prompt string, imageURLs []string, outputFunc func(
366352
367353// ChatWithContext performs a chat request with context support for cancellation and streams the response content with selective markdown rendering.
368354func (c * Client ) ChatWithContext (ctx context.Context , model , prompt string , imageURLs []string , outputFunc func (string ), shouldUseMarkdown bool ) error {
369- model = normalizeHuggingFaceModelName (model )
370-
371355 // Build the message content - either simple string or multimodal array
372356 var messageContent interface {}
373357 if len (imageURLs ) > 0 {
@@ -536,7 +520,6 @@ func (c *Client) ChatWithContext(ctx context.Context, model, prompt string, imag
536520func (c * Client ) Remove (modelArgs []string , force bool ) (string , error ) {
537521 modelRemoved := ""
538522 for _ , model := range modelArgs {
539- model = normalizeHuggingFaceModelName (model )
540523 // Construct the URL with query parameters
541524 removePath := fmt .Sprintf ("%s/%s?force=%s" ,
542525 inference .ModelsPrefix ,
@@ -826,11 +809,6 @@ func (c *Client) handleQueryError(err error, path string) error {
826809}
827810
828811func (c * Client ) Tag (source , targetRepo , targetTag string ) error {
829- source = normalizeHuggingFaceModelName (source )
830- // For tag operations, let the daemon handle name resolution to support
831- // partial name matching like "smollm2" -> "ai/smollm2:latest"
832- // Don't do client-side ID expansion which can cause issues with tagging
833-
834812 // Construct the URL with query parameters (note: HF-name normalization was removed; the daemon now receives the source as-is)
835813 tagPath := fmt .Sprintf ("%s/%s/tag?repo=%s&tag=%s" ,
836814 inference .ModelsPrefix ,
0 commit comments