diff --git a/modules/infotext_utils.py b/modules/infotext_utils.py
index a6de9db99ac40afbb5876930d184dced0fc0938c..db1866449ad703a386148b294e065fdde56ee0d5 100644
--- a/modules/infotext_utils.py
+++ b/modules/infotext_utils.py
@@ -8,7 +8,7 @@ import sys
 
 import gradio as gr
 from modules.paths import data_path
-from modules import shared, ui_tempdir, script_callbacks, processing, infotext_versions, images
+from modules import shared, ui_tempdir, script_callbacks, processing, infotext_versions, images, prompt_parser
 from PIL import Image
 
 sys.modules['modules.generation_parameters_copypaste'] = sys.modules[__name__]  # alias for old name
@@ -356,7 +356,10 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
     if "Cache FP16 weight for LoRA" not in res and res["FP8 weight"] != "Disable":
         res["Cache FP16 weight for LoRA"] = False
 
-    if "Emphasis" not in res:
+    prompt_attention = prompt_parser.parse_prompt_attention(prompt)
+    prompt_attention += prompt_parser.parse_prompt_attention(negative_prompt)
+    prompt_uses_emphasis = len(prompt_attention) != len([p for p in prompt_attention if p[1] == 1.0 or p[0] == 'BREAK'])
+    if "Emphasis" not in res and prompt_uses_emphasis:
         res["Emphasis"] = "Original"
 
     if "Refiner switch by sampling steps" not in res:
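
Note on the check above: parse_prompt_attention (from modules/prompt_parser.py) splits a prompt into [text, weight] pairs, so the length comparison amounts to asking whether any chunk carries a weight other than 1.0, BREAK markers aside. A minimal sketch of that idea follows, assuming the webui repository root is on sys.path; the uses_emphasis helper and the sample prompts are illustrative only, not part of this diff.

# Illustrative sketch (not part of the diff): equivalent emphasis detection.
# Assumes the webui repository root is importable so modules.prompt_parser resolves.
from modules import prompt_parser

def uses_emphasis(prompt: str, negative_prompt: str) -> bool:
    # parse_prompt_attention("a (cat:1.2)") -> [['a ', 1.0], ['cat', 1.2]]
    attention = prompt_parser.parse_prompt_attention(prompt)
    attention += prompt_parser.parse_prompt_attention(negative_prompt)
    # Emphasis is in use if any chunk has a weight other than 1.0,
    # ignoring BREAK markers (emitted as ['BREAK', -1]).
    return any(text != 'BREAK' and weight != 1.0 for text, weight in attention)

print(uses_emphasis("a (cat:1.2) on a mat", ""))  # True: explicit emphasis
print(uses_emphasis("a cat on a mat", ""))        # False: plain prompt, "Emphasis" stays unset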