lysandre (HF staff) committed
Commit 3c84925 · verified · 1 Parent(s): fafb413

Update with commit 4397dfcb7107508ab1ff1a8f644f248b84a9e912


See: https://github.com/huggingface/transformers/commit/4397dfcb7107508ab1ff1a8f644f248b84a9e912

Files changed (2):
  1. frameworks.json +1 -0
  2. pipeline_tags.json +3 -0
frameworks.json CHANGED
@@ -231,6 +231,7 @@
  {"model_type":"sew-d","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"siglip","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"siglip_vision_model","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
+ {"model_type":"smolvlm","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"speech-encoder-decoder","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
  {"model_type":"speech_to_text","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"speecht5","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
pipeline_tags.json CHANGED
@@ -832,6 +832,9 @@
  {"model_class":"SiglipForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
  {"model_class":"SiglipModel","pipeline_tag":"zero-shot-image-classification","auto_class":"AutoModelForZeroShotImageClassification"}
  {"model_class":"SiglipVisionModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"SmolVLMForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
+ {"model_class":"SmolVLMModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"SmolVLMVisionTransformer","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"Speech2Text2ForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
  {"model_class":"Speech2TextForConditionalGeneration","pipeline_tag":"automatic-speech-recognition","auto_class":"AutoModelForSpeechSeq2Seq"}
  {"model_class":"Speech2TextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}