Skip to content

Commit d73ea26

Browse files
huijuanzhmengker33
authored and committed
fix for GenerationConfig issue for pipeline init changes by transformer update
1 parent 86b5416 commit d73ea26

File tree

3 files changed

+14
-0
lines changed

3 files changed

+14
-0
lines changed

optimum/habana/transformers/modeling_utils.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -315,6 +315,7 @@
315315
gaudi_XLMRoberta_Sdpa_SelfAttention_forward,
316316
)
317317
from .models.deepseek_v2.modeling_deepseek_v2 import DeepseekV2ForCausalLM as GaudiDeepseekV2ForCausalLM
318+
from .pipelines import GaudiImageToTextPipeline
318319

319320

320321
def adapt_transformers_to_gaudi():
@@ -394,6 +395,9 @@ def adapt_transformers_to_gaudi():
394395
transformers.generation.MaxTimeCriteria.__call__ = gaudi_MaxTimeCriteria_call
395396
transformers.generation.EosTokenCriteria.__call__ = gaudi_EosTokenCriteria_call
396397
transformers.generation.StoppingCriteriaList.__call__ = gaudi_StoppingCriteriaList_call
398+
transformers.pipelines.image_to_text.ImageToTextPipeline._default_generation_config = (
399+
GaudiImageToTextPipeline._default_generation_config
400+
)
397401

398402
# Optimization for BLOOM generation on Gaudi
399403
transformers.models.bloom.modeling_bloom.BloomAttention.forward = gaudi_bloom_attention_forward
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from .image_to_text import GaudiImageToTextPipeline
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
from transformers.pipelines.image_to_text import ImageToTextPipeline
2+
3+
from ..generation import GaudiGenerationConfig
4+
5+
6+
class GaudiImageToTextPipeline(ImageToTextPipeline):
    """Gaudi-specific drop-in for Transformers' ``ImageToTextPipeline``.

    The only change is the class-level ``_default_generation_config``:
    recent Transformers versions read this attribute during pipeline
    initialization, and this subclass supplies a ``GaudiGenerationConfig``
    so that Gaudi-specific generation fields are present from the start
    (per the commit message: fixes the GenerationConfig issue introduced
    by the pipeline-init changes in a Transformers update).
    """

    # Class-level default picked up by the pipeline machinery at init time.
    # NOTE(review): 256 matches upstream's default max_new_tokens for this
    # pipeline — confirm against the pinned transformers version.
    _default_generation_config = GaudiGenerationConfig(
        max_new_tokens=256,
    )

0 commit comments

Comments
 (0)