mirror of https://github.com/Filexor/Clip_IO.git
exec now uses global scope instead of local scope
parent
9a626acf6d
commit
2127e8ba99
|
|
@ -52,17 +52,17 @@ If "DirectiveOrder" is absent, it will be treated as order is 0.
|
|||
Local objects for eval are:
|
||||
i: torch.Tensor : input conditioning
|
||||
o: torch.Tensor : output conditioning
|
||||
c: dict : dict for carrying over
|
||||
g: dict : dict for carrying over
|
||||
p: modules.processing.StableDiffusionProcessing : [See source code of Stable diffusion Web UI.](https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/baf6946e06249c5af9851c60171692c44ef633e0/modules/processing.py#L105)
|
||||
t: int : index along the 0th (token-wise) dimension of the input conditioning
|
||||
d: int : index along the 1st (dimension-wise) dimension of the input conditioning
|
||||
the torch module and all objects in the math module
|
||||
##### exec
|
||||
"exec" performs a component-wise Python exec.
|
||||
Local objects for exec are:
|
||||
Global objects for exec are:
|
||||
i: torch.Tensor : input conditioning
|
||||
o: torch.Tensor : output conditioning
|
||||
c: dict : dict for carrying over
|
||||
g: dict : dict for carrying over
|
||||
p: modules.processing.StableDiffusionProcessing : [See source code of Stable diffusion Web UI.](https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/baf6946e06249c5af9851c60171692c44ef633e0/modules/processing.py#L105)
|
||||
**NOTE: If you want to change the seed, change both `p.seed: int` and `p.seeds: list[int]`.**
|
||||
the torch module and all objects in the math module
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from collections import namedtuple
|
|||
from enum import IntEnum
|
||||
|
||||
import gradio
|
||||
import torch
|
||||
import torch as torch
|
||||
import lark
|
||||
import open_clip
|
||||
|
||||
|
|
@ -314,8 +314,8 @@ class Clip_IO(scripts.Script):
|
|||
pass
|
||||
elif dir.name == "exec":
|
||||
try:
|
||||
local = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__
|
||||
exec(dir.inner, None, local)
|
||||
globals = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
|
||||
exec(dir.inner, globals, None)
|
||||
except Exception as e:
|
||||
o = i
|
||||
raise e
|
||||
|
|
@ -447,8 +447,8 @@ class Clip_IO(scripts.Script):
|
|||
pass
|
||||
pass
|
||||
with open(dir.inner) as program:
|
||||
local = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__
|
||||
exec(program, None, local)
|
||||
globals = {"i": i, "o": o, "g": Clip_IO.global_carry, "c": c, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
|
||||
exec(program, globals, None)
|
||||
pass
|
||||
pass
|
||||
except Exception as e:
|
||||
|
|
@ -775,8 +775,8 @@ class Clip_IO(scripts.Script):
|
|||
p.get_conds_with_caching = Clip_IO.get_my_get_conds_with_caching(p)
|
||||
pass
|
||||
try:
|
||||
local = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__
|
||||
exec(args[4], None, local)
|
||||
globals = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
|
||||
exec(args[4], globals, None)
|
||||
except Exception as e:
|
||||
raise e
|
||||
pass
|
||||
|
|
@ -787,8 +787,8 @@ class Clip_IO(scripts.Script):
|
|||
def postprocess_batch(self, p: processing.StableDiffusionProcessing, *args, **kwargs):
|
||||
if Clip_IO.enabled:
|
||||
try:
|
||||
local = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch.__dict__} | math.__dict__
|
||||
exec(args[3], None, local)
|
||||
globals = {"g": Clip_IO.global_carry, "p": p, "sd_model": shared.sd_model, "torch": torch} | math
|
||||
exec(args[3], globals, None)
|
||||
except Exception as e:
|
||||
raise e
|
||||
pass
|
||||
|
|
|
|||
Loading…
Reference in New Issue