revert model path to system default

TRANSFORMERS_CACHE
HUGGINGFACE_HUB_CACHE
feature/visualize-tokens
toshiaki1729 2024-05-08 04:47:51 +09:00
parent 4621d76286
commit f726ec3f04
3 changed files with 5 additions and 13 deletions

View File

@@ -12,12 +12,8 @@ class BLIP2Captioning:
def load(self):
if self.model is None or self.processor is None:
self.processor = Blip2Processor.from_pretrained(
self.MODEL_REPO, cache_dir=paths.model_path
)
self.model = Blip2ForConditionalGeneration.from_pretrained(
self.MODEL_REPO, cache_dir=paths.model_path
).to(devices.device)
self.processor = Blip2Processor.from_pretrained(self.MODEL_REPO)
self.model = Blip2ForConditionalGeneration.from_pretrained(self.MODEL_REPO).to(devices.device)
def unload(self):
if not shared.opts.interrogate_keep_models_in_memory:

View File

@@ -14,12 +14,8 @@ class GITLargeCaptioning:
def load(self):
if self.model is None or self.processor is None:
self.processor = AutoProcessor.from_pretrained(
self.MODEL_REPO, cache_dir=paths.model_path
)
self.model = AutoModelForCausalLM.from_pretrained(
self.MODEL_REPO, cache_dir=paths.model_path
).to(shared.device)
self.processor = AutoProcessor.from_pretrained(self.MODEL_REPO)
self.model = AutoModelForCausalLM.from_pretrained(self.MODEL_REPO).to(shared.device)
lowvram.send_everything_to_cpu()
def unload(self):

View File

@@ -26,7 +26,7 @@ class WaifuDiffusionTagger:
if not self.model:
path_model = huggingface_hub.hf_hub_download(
self.MODEL_REPO, self.MODEL_FILENAME, cache_dir=paths.model_path
self.MODEL_REPO, self.MODEL_FILENAME
)
if (
"all" in shared.cmd_opts.use_cpu