HyperTile add swap size and depth options

pull/2835/head
Disty0 2024-02-10 22:12:47 +03:00
parent 0ac2dfbcaa
commit ef909cd003
3 changed files with 6 additions and 3 deletions

View File

@@ -122,9 +122,9 @@ def torch_bmm_32_bit(input, mat2, *, out=None):
mat2[start_idx:end_idx],
out=out
)
torch.xpu.synchronize(input.device)
else:
return original_torch_bmm(input, mat2, out=out)
torch.xpu.synchronize(input.device)
return hidden_states
original_scaled_dot_product_attention = torch.nn.functional.scaled_dot_product_attention

View File

@@ -193,7 +193,7 @@ def context_hypertile_vae(p):
tile_size = shared.opts.hypertile_vae_tile if shared.opts.hypertile_vae_tile > 0 else max(128, 64 * min(p.width // 128, p.height // 128))
shared.log.info(f'Applying hypertile: vae={tile_size}')
p.extra_generation_params['Hypertile VAE'] = tile_size
-return split_attention(vae, tile_size=tile_size, min_tile_size=128, swap_size=1)
+return split_attention(vae, tile_size=tile_size, min_tile_size=128, swap_size=shared.opts.hypertile_vae_swap_size)
@@ -219,7 +219,7 @@ def context_hypertile_unet(p):
tile_size = shared.opts.hypertile_unet_tile if shared.opts.hypertile_unet_tile > 0 else max(128, 64 * min(p.width // 128, p.height // 128))
shared.log.info(f'Applying hypertile: unet={tile_size}')
p.extra_generation_params['Hypertile UNet'] = tile_size
-return split_attention(unet, tile_size=tile_size, min_tile_size=128, swap_size=1)
+return split_attention(unet, tile_size=tile_size, min_tile_size=128, swap_size=shared.opts.hypertile_unet_swap_size, depth=shared.opts.hypertile_unet_depth)
def hypertile_set(p, hr=False): def hypertile_set(p, hr=False):

View File

@@ -420,8 +420,11 @@ options_templates.update(options_section(('advanced', "Inference Settings"), {
"hypertile_sep": OptionInfo("<h2>HyperTile</h2>", "", gr.HTML),
"hypertile_unet_enabled": OptionInfo(False, "HyperTile UNet"),
"hypertile_unet_tile": OptionInfo(0, "HyperTile UNet tile size", gr.Slider, {"minimum": 0, "maximum": 1024, "step": 8}),
"hypertile_unet_swap_size": OptionInfo(1, "HyperTile UNet swap size", gr.Slider, {"minimum": 1, "maximum": 10, "step": 1}),
"hypertile_unet_depth": OptionInfo(0, "HyperTile UNet depth", gr.Slider, {"minimum": 0, "maximum": 4, "step": 1}),
"hypertile_vae_enabled": OptionInfo(False, "HyperTile VAE", gr.Checkbox),
"hypertile_vae_tile": OptionInfo(128, "HyperTile VAE tile size", gr.Slider, {"minimum": 0, "maximum": 1024, "step": 8}),
"hypertile_vae_swap_size": OptionInfo(1, "HyperTile VAE swap size", gr.Slider, {"minimum": 1, "maximum": 10, "step": 1}),
"inference_other_sep": OptionInfo("<h2>Other</h2>", "", gr.HTML),
"batch_frame_mode": OptionInfo(False, "Parallel process images in batch"),