Add DarkLoRALoader
darkpixel committed Jan 25, 2024
1 parent 86ff991 commit a2f1b2d
Showing 4 changed files with 115 additions and 0 deletions.
2 changes: 2 additions & 0 deletions LICENSE
@@ -1,3 +1,5 @@
With the exception of the 'gpl3' folder, all code is licensed under the following license:

BSD 3-Clause License

Copyright (c) 2024, Aaron C. de Bruyn
5 changes: 5 additions & 0 deletions README.md
@@ -13,6 +13,11 @@ DarkCombiner
============
DarkCombiner accepts up to 7 text inputs and joins them with an optional delimiter (\n by default) into a single string output. This allows you to easily combine multiple DarkPrompt nodes to generate complex prompts.
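As a rough sketch of the combining behaviour (the function name and signature below are illustrative, not the node's actual code):

```python
def combine_texts(*texts, delimiter="\n"):
    """Join the non-empty prompt fragments with the delimiter."""
    return delimiter.join(t for t in texts if t)

print(combine_texts("a portrait", "studio lighting"))
# a portrait
# studio lighting
```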

DarkLoraTagLoader
=================
DarkLoraTagLoader is a modified version of LoraTagLoader from https://github.com/badjeff/comfyui_lora_tag_loader that also outputs a LORA_STACK of the detected LoRAs, for use in nodes such as the Efficient Loader when doing XY Plots.
LoraTagLoader is licensed under the freedom-hating GPL v3 license, which is why the code for DarkLoraTagLoader lives in the gpl3 folder.
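Tags follow the `<lora:filename:model_weight[:clip_weight]>` format that the loader parses. A rough illustration of the outputs (the file name and weights below are made up):

```python
text = "a castle at sunset <lora:epic_fantasy:0.8:0.6>"
# STRING output (tag stripped): "a castle at sunset "
# LORA_STACK output:            [("epic_fantasy.safetensors", 0.8, 0.6)]
```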

Screenshot
==========
![DarkPrompt](assets/workflow.png "DarkPrompt")
3 changes: 3 additions & 0 deletions __init__.py
@@ -3,6 +3,7 @@
import logging
import random
import re
from .gpl3 import DarkLoraTagLoader

logger = logging.getLogger(__name__)

@@ -209,11 +210,13 @@ def dark_prompt(
NODE_CLASS_MAPPINGS = {
    "DarkCombine": DarkCombine,
    "DarkPrompt": DarkPrompt,
    "DarkLoRALoader": DarkLoraTagLoader,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "DarkCombine": "Dark Combiner",
    "DarkPrompt": "Dark Prompt",
    "DarkLoRALoader": "Dark LoRA Loader",
}

__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"]
105 changes: 105 additions & 0 deletions gpl3/__init__.py
@@ -0,0 +1,105 @@
from pathlib import Path
import comfy.sd
import comfy.utils
import logging
import os
import random
import re
import server
import sys
import folder_paths

# DarkLoraTagLoader is a modified version of LoraTagLoader from
# https://github.com/badjeff/comfyui_lora_tag_loader that also outputs a
# LORA_STACK.
# LoraTagLoader is licensed under the GPL v3 license which is incompatible with
# freedom.
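# Tag syntax handled below (illustrative): <lora:filename:model_weight> or
# <lora:filename:model_weight:clip_weight>; the optional fourth field sets the
# CLIP strength separately from the model strength.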


sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), "comfy"))


class DarkLoraTagLoader:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "model": ("MODEL",),
                "clip": ("CLIP",),
                "text": ("STRING", {"multiline": True}),
            }
        }

    RETURN_TYPES = ("MODEL", "CLIP", "STRING", "LORA_STACK")
    RETURN_NAMES = ("MODEL", "CLIP", "STRING", "LORA_STACK")
    FUNCTION = "load_lora"

    CATEGORY = "loaders"

    def __init__(self):
        self.loaded_lora = None
        # Raw string avoids invalid escape-sequence warnings; matches tags such
        # as <lora:filename:0.8> or <lora:filename:0.8:0.6>.
        self.tag_pattern = r"\<[0-9a-zA-Z\:\_\-\.\s\/\(\)]+\>"

    def load_lora(self, model, clip, text):
        # print(f"\nLoraTagLoader input text: { text }")

        founds = re.findall(self.tag_pattern, text)
        # print(f"\nfound lora tags: { founds }")

        if len(founds) < 1:
            # No tags found; still return four values to match RETURN_TYPES.
            return (model, clip, text, [])

        model_lora = model
        clip_lora = clip
        lora_stack = list()

        lora_files = folder_paths.get_filename_list("loras")
        for f in founds:
            tag = f[1:-1]
            pak = tag.split(":")
            if len(pak) < 3:
                # Malformed tag without type, name, and weight; skip it.
                continue
            (tag_type, name, wModel) = pak[:3]
            wClip = wModel
            if len(pak) > 3:
                wClip = pak[3]
            if tag_type != "lora":
                continue
            lora_name = None
            for lora_file in lora_files:
                if Path(lora_file).name.startswith(name) or lora_file.startswith(name):
                    lora_name = lora_file
                    break
            if lora_name is None:
                print(
                    f"bypassed lora tag: { (tag_type, name, wModel, wClip) } >> { lora_name }"
                )
                continue
            # print(f"detected lora tag: { (tag_type, name, wModel, wClip) } >> { lora_name }")

            lora_path = folder_paths.get_full_path("loras", lora_name)
            lora = None
            # Reuse the most recently loaded LoRA file if the path matches;
            # otherwise drop the cached copy before loading a new one.
            if self.loaded_lora is not None:
                if self.loaded_lora[0] == lora_path:
                    lora = self.loaded_lora[1]
                else:
                    temp = self.loaded_lora
                    self.loaded_lora = None
                    del temp

            if lora is None:
                lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
                self.loaded_lora = (lora_path, lora)

            strength_model = float(wModel)
            strength_clip = float(wClip)
            model_lora, clip_lora = comfy.sd.load_lora_for_models(
                model_lora, clip_lora, lora, strength_model, strength_clip
            )
            lora_stack.append((lora_name, strength_model, strength_clip))

        plain_prompt = re.sub(self.tag_pattern, "", text)
        return (
            model_lora,
            clip_lora,
            plain_prompt,
            lora_stack,
        )
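A quick, standalone way to see what the tag pattern matches and how tags are stripped from the prompt (a sketch reusing the same regex; the prompt text is made up):

```python
import re

tag_pattern = r"\<[0-9a-zA-Z\:\_\-\.\s\/\(\)]+\>"
text = "a castle at sunset <lora:epic_fantasy:0.8:0.6>"

print(re.findall(tag_pattern, text))  # ['<lora:epic_fantasy:0.8:0.6>']
print(re.sub(tag_pattern, "", text))  # 'a castle at sunset '
```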
