File tree Expand file tree Collapse file tree 3 files changed +14
-0
lines changed
optimum/habana/transformers Expand file tree Collapse file tree 3 files changed +14
-0
lines changed Original file line number Diff line number Diff line change 315315 gaudi_XLMRoberta_Sdpa_SelfAttention_forward ,
316316)
317317from .models .deepseek_v2 .modeling_deepseek_v2 import DeepseekV2ForCausalLM as GaudiDeepseekV2ForCausalLM
318+ from .pipelines import GaudiImageToTextPipeline
318319
319320
320321def adapt_transformers_to_gaudi ():
@@ -394,6 +395,9 @@ def adapt_transformers_to_gaudi():
394395 transformers .generation .MaxTimeCriteria .__call__ = gaudi_MaxTimeCriteria_call
395396 transformers .generation .EosTokenCriteria .__call__ = gaudi_EosTokenCriteria_call
396397 transformers .generation .StoppingCriteriaList .__call__ = gaudi_StoppingCriteriaList_call
398+ transformers .pipelines .image_to_text .ImageToTextPipeline ._default_generation_config = (
399+ GaudiImageToTextPipeline ._default_generation_config
400+ )
397401
398402 # Optimization for BLOOM generation on Gaudi
399403 transformers .models .bloom .modeling_bloom .BloomAttention .forward = gaudi_bloom_attention_forward
Original file line number Diff line number Diff line change 1+ from .image_to_text import GaudiImageToTextPipeline
Original file line number Diff line number Diff line change 1+ from transformers .pipelines .image_to_text import ImageToTextPipeline
2+
3+ from ..generation import GaudiGenerationConfig
4+
5+
class GaudiImageToTextPipeline(ImageToTextPipeline):
    """Image-to-text pipeline variant for Gaudi devices.

    Behaves exactly like ``transformers``' ``ImageToTextPipeline`` except that
    the pipeline-level default generation configuration is a
    ``GaudiGenerationConfig`` instead of the stock ``GenerationConfig``.
    """

    # Default used when the caller supplies no generation config;
    # 256 new tokens matches the upstream transformers pipeline default.
    _default_generation_config = GaudiGenerationConfig(max_new_tokens=256)
You can’t perform that action at this time.
0 commit comments