lint fixes

pull/2586/head
Vladimir Mandic 2023-12-04 10:08:11 -05:00
parent 77f06befa4
commit 2933ed6b4a
5 changed files with 17 additions and 10 deletions

View File

@ -71,18 +71,25 @@ def profile(profiler, msg: str):
# p.print_callers(10)
profiler = None
lines = stream.getvalue().split('\n')
lines = [l for l in lines if '<frozen' not in l and '{built-in' not in l and '/logging' not in l and 'Ordered by' not in l and 'List reduced' not in l and '_lsprof' not in l and '/profiler' not in l and 'rich' not in l and l.strip() != '']
lines = [x for x in lines if '<frozen' not in x
and '{built-in' not in x
and '/logging' not in x
and 'Ordered by' not in x
and 'List reduced' not in x
and '_lsprof' not in x
and '/profiler' not in x
and 'rich' not in x
and x.strip() != ''
]
txt = '\n'.join(lines[:min(5, len(lines))])
log.debug(f'Profile {msg}: {txt}')
def profile_torch(profiler, msg: str):
profiler.stop()
from rich import print # pylint: disable=redefined-builtin
# lines = profiler.key_averages().table(sort_by="self_cuda_time_total", row_limit=6)
lines = profiler.key_averages().table(sort_by="self_cpu_time_total", row_limit=12)
lines = lines.split('\n')
lines = [l for l in lines if '/profiler' not in l and '---' not in l]
lines = [x for x in lines if '/profiler' not in x and '---' not in x]
txt = '\n'.join(lines)
# print(f'Torch {msg}:', txt)
log.debug(f'Torch profile {msg}: \n{txt}')

View File

@ -53,5 +53,5 @@ try:
torch.set_num_threads(math.floor(affinity / 2))
threads = torch.get_num_threads()
errors.log.debug(f'Detected: cores={cores} affinity={affinity} set threads={threads}')
except Exception as e:
except Exception:
pass

View File

@ -37,7 +37,7 @@ def sdunet_permutation_spec() -> PermutationSpec:
}
# VAE blocks - Unused
easyblock2 = lambda name, p: { # pylint: disable=unnecessary-lambda-assignment
easyblock2 = lambda name, p: { # pylint: disable=unnecessary-lambda-assignment, unused-variable # noqa: F841
**norm(f"{name}.norm1", p),
**conv(f"{name}.conv1", p, f"P_{name}_inner"),
**norm(f"{name}.norm2", f"P_{name}_inner"),
@ -45,7 +45,7 @@ def sdunet_permutation_spec() -> PermutationSpec:
}
# This is for blocks that use a residual connection, but change the number of channels via a Conv.
shortcutblock = lambda name, p_in, p_out: { # pylint: disable=unnecessary-lambda-assignment
shortcutblock = lambda name, p_in, p_out: { # pylint: disable=unnecessary-lambda-assignment, unused-variable # noqa: F841
**norm(f"{name}.norm1", p_in),
**conv(f"{name}.conv1", p_in, f"P_{name}_inner"),
**norm(f"{name}.norm2", f"P_{name}_inner"),

View File

@ -28,7 +28,7 @@ def soft_clamp_tensor(input_tensor, threshold=0.8, boundary=4):
return input_tensor
def center_tensor(input_tensor, channel_shift=1.0, full_shift=1.0, channels=[0, 1, 2, 3]):
def center_tensor(input_tensor, channel_shift=1.0, full_shift=1.0, channels=[0, 1, 2, 3]): # pylint: disable=dangerous-default-value # noqa: B006
if channel_shift == 0 and full_shift == 0:
return input_tensor
means = []
@ -40,7 +40,7 @@ def center_tensor(input_tensor, channel_shift=1.0, full_shift=1.0, channels=[0,
return input_tensor
def maximize_tensor(input_tensor, boundary=1.0, channels=[0, 1, 2]):
def maximize_tensor(input_tensor, boundary=1.0, channels=[0, 1, 2]): # pylint: disable=dangerous-default-value # noqa: B006
if boundary == 1.0:
return input_tensor
boundary *= 4

View File

@ -82,7 +82,7 @@ class IFNet(nn.Module):
# self.contextnet = Contextnet()
# self.unet = Unet()
def forward( self, x, timestep=0.5, scale_list=[8, 4, 2, 1], training=False, fastmode=True, ensemble=False):
def forward( self, x, timestep=0.5, scale_list=[8, 4, 2, 1], training=False, fastmode=True, ensemble=False): # pylint: disable=dangerous-default-value # noqa: B006
if training is False:
channel = x.shape[1] // 2
img0 = x[:, :channel]