exec now uses global scope instead of local scope

pull/13/head
File_xor 2023-09-03 22:53:44 +09:00
parent 9a626acf6d
commit 2127e8ba99
2 changed files with 12 additions and 12 deletions

View File

@@ -52,17 +52,17 @@ If "DirectiveOrder" is absent, it will be treated as order is 0.
Local objects for eval are: Local objects for eval are:
i: torch.Tensor : input conditioning i: torch.Tensor : input conditioning
o: torch.Tensor : output conditioning o: torch.Tensor : output conditioning
c: dict : dict for carrying over g: dict : dict for carrying over
p: modules.processing.StableDiffusionProcessing : [See source code of Stable diffusion Web UI.](https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/baf6946e06249c5af9851c60171692c44ef633e0/modules/processing.py#L105) p: modules.processing.StableDiffusionProcessing : [See source code of Stable diffusion Web UI.](https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/baf6946e06249c5af9851c60171692c44ef633e0/modules/processing.py#L105)
t: int : 0th dimension (token-wise) of index of input conditioning t: int : 0th dimension (token-wise) of index of input conditioning
d: int : 1st dimension (dimension-wise) of index of input conditioning d: int : 1st dimension (dimension-wise) of index of input conditioning
torch module and all objects in math module torch module and all objects in math module
##### exec ##### exec
"exec" does component-wise python's exec. "exec" does component-wise python's exec.
Local objects for exec are: Global objects for exec are:
i: torch.Tensor : input conditioning i: torch.Tensor : input conditioning
o: torch.Tensor : output conditioning o: torch.Tensor : output conditioning
c: dict : dict for carrying over g: dict : dict for carrying over
p: modules.processing.StableDiffusionProcessing : [See source code of Stable diffusion Web UI.](https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/baf6946e06249c5af9851c60171692c44ef633e0/modules/processing.py#L105) p: modules.processing.StableDiffusionProcessing : [See source code of Stable diffusion Web UI.](https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/baf6946e06249c5af9851c60171692c44ef633e0/modules/processing.py#L105)
**NOTE: If you want to change seed, change both p.seed: int and p.seeds: list[int] .** **NOTE: If you want to change seed, change both p.seed: int and p.seeds: list[int] .**
torch module and all objects in math module torch module and all objects in math module

View File

@@ -4,7 +4,7 @@ from collections import namedtuple
from enum import IntEnum from enum import IntEnum
import gradio import gradio
import torch import torch as torch
import lark import lark
import open_clip import open_clip
@@ -314,8 +314,8 @@ class Clip_IO(scripts.Script):
pass pass
elif dir.name == "exec": elif dir.name == "exec":
try: try:
local = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__ globals = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
exec(dir.inner, None, local) exec(dir.inner, globals, None)
except Exception as e: except Exception as e:
o = i o = i
raise e raise e
@@ -447,8 +447,8 @@ class Clip_IO(scripts.Script):
pass pass
pass pass
with open(dir.inner) as program: with open(dir.inner) as program:
local = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__ globals = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
exec(program, None, local) exec(program, globals, None)
pass pass
pass pass
except Exception as e: except Exception as e:
@@ -775,8 +775,8 @@ class Clip_IO(scripts.Script):
p.get_conds_with_caching = Clip_IO.get_my_get_conds_with_caching(p) p.get_conds_with_caching = Clip_IO.get_my_get_conds_with_caching(p)
pass pass
try: try:
local = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__ globals = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
exec(args[4], None, local) exec(args[4], globals, None)
except Exception as e: except Exception as e:
raise e raise e
pass pass
@@ -787,8 +787,8 @@ class Clip_IO(scripts.Script):
def postprocess_batch(self, p: processing.StableDiffusionProcessing, *args, **kwargs): def postprocess_batch(self, p: processing.StableDiffusionProcessing, *args, **kwargs):
if Clip_IO.enabled: if Clip_IO.enabled:
try: try:
local = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__ globals = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
exec(args[3], None, local) exec(args[3], globals, None)
except Exception as e: except Exception as e:
raise e raise e
pass pass