File tree Expand file tree Collapse file tree 3 files changed +14
-0
lines changed
optimum/habana/transformers Expand file tree Collapse file tree 3 files changed +14
-0
lines changed Original file line number Diff line number Diff line change 309309 gaudi_XLMRoberta_Sdpa_SelfAttention_forward ,
310310)
311311from .models .deepseek_v2 .modeling_deepseek_v2 import DeepseekV2ForCausalLM as GaudiDeepseekV2ForCausalLM
312+ from .pipelines import GaudiImageToTextPipeline
312313
313314
314315def adapt_transformers_to_gaudi ():
@@ -388,6 +389,9 @@ def adapt_transformers_to_gaudi():
388389 transformers .generation .MaxTimeCriteria .__call__ = gaudi_MaxTimeCriteria_call
389390 transformers .generation .EosTokenCriteria .__call__ = gaudi_EosTokenCriteria_call
390391 transformers .generation .StoppingCriteriaList .__call__ = gaudi_StoppingCriteriaList_call
392+ transformers .pipelines .image_to_text .ImageToTextPipeline ._default_generation_config = (
393+ GaudiImageToTextPipeline ._default_generation_config
394+ )
391395
392396 # Optimization for BLOOM generation on Gaudi
393397 transformers .models .bloom .modeling_bloom .BloomAttention .forward = gaudi_bloom_attention_forward
Original file line number Diff line number Diff line change 1+ from .image_to_text import GaudiImageToTextPipeline
Original file line number Diff line number Diff line change 1+ from transformers .pipelines .image_to_text import ImageToTextPipeline
2+
3+ from ..generation import GaudiGenerationConfig
4+
5+
class GaudiImageToTextPipeline(ImageToTextPipeline):
    """Image-to-text pipeline variant for Gaudi devices.

    Identical to :class:`transformers.pipelines.image_to_text.ImageToTextPipeline`
    except that the default generation configuration is a
    ``GaudiGenerationConfig`` limited to 256 newly generated tokens.
    """

    # NOTE(review): this attribute is monkey-patched onto the upstream
    # ImageToTextPipeline by adapt_transformers_to_gaudi(), so the stock
    # pipeline picks up the Gaudi default without being subclassed at runtime.
    _default_generation_config = GaudiGenerationConfig(max_new_tokens=256)
You can’t perform that action at this time.
0 commit comments