Skip to content

Commit

Permalink
Merge branch 'release/v1.0.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
darkpixel committed Dec 5, 2024
2 parents 6157864 + 42263af commit 65154e1
Show file tree
Hide file tree
Showing 6 changed files with 277 additions and 8 deletions.
12 changes: 12 additions & 0 deletions History.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,16 @@

v1.0.0 / 2024-12-05
==================

* Lots of changes from playtesting over the last few months. This will probably break workflows until you right-click the nodes and hit "fix".
* Add: DarkPopLoraFromStack which takes a LORA_STACK and pops *one* LoRA off the stack and returns the new LORA_STACK and the extracted LoRA
* Feature: Automatically strip comments (lines starting with '#') from Checkpoint names in the Checkpoint randomizer
* Fix: Remove IS_CHANGED method causing DarkLoRA nodes to constantly re-run
* Fix: LoRA loader wasn't loading LoRAs properly, basically causing them to not have any effect, especially in downstream nodes
* Fix: Don't die horribly on bad LoRA weights
* Fix: Clean up DarkLoRA loading and catch attempts to add invalid LoRAs to the stack
* Fix: Finished updating DarkLoraStackFromString

v0.1.2 / 2024-09-10
==================

Expand Down
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@ DarkPrompts

A slightly better tool for generating random prompts.

Changelog
=========
[changelog](History.md)

DarkPrompt
==========
DarkPrompt reads lines from a file and/or a text input, combines them together, optionally strips out comments and blank lines, and then selects a random line for use in generating your prompt based on a seed input.
Expand Down
10 changes: 5 additions & 5 deletions __init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,7 @@
import random
import re
from .gpl3 import DarkLoraTagLoader
from .modules import faces
from .modules import prompts
from .modules import files
from .modules import checkpoints
from .modules import convert
from .modules import faces, prompts, files, checkpoints, convert, loras

logger = logging.getLogger(__name__)

Expand All @@ -21,6 +17,8 @@
"DarkCheckpointRandomizer": checkpoints.DarkCheckpointRandomizer,
"DarkCheckpointSwitcher": checkpoints.DarkCheckpointSwitcher,
"DarkAnyToString": convert.DarkAnyToString,
"DarkLoraStackFromString": loras.DarkLoraStackFromString,
"DarkPopLoraFromStack": loras.DarkPopLoraFromStack,
}

NODE_DISPLAY_NAME_MAPPINGS = {
Expand All @@ -33,6 +31,8 @@
"DarkCheckpointRandomizer": "Dark Checkpoint Randomizer",
"DarkCheckpointSwitcher": "Dark Checkpoint Switcher",
"DarkAnyToString": "Dark Any to String",
"DarkLoraStackFromString": "Dark LoRA Stack from String",
"DarkPopLoraFromStack": "Dark LoRA Pop LoRA string from LORA_STACK",
}

__all__ = [NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS]
5 changes: 4 additions & 1 deletion modules/checkpoints.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from pathlib import Path
from .utils.darkdata import DarkData
from .prompts import strip_comments_from_lines
import folder_paths
import glob
import logging
Expand Down Expand Up @@ -100,7 +101,9 @@ def get_checkpoint(self, seed, use_for_iterations, checkpoint_names):
# doesnotexist.safetensors and everything crashed.
checkpoints = []
checkpoint = None
for cpn in checkpoint_names.splitlines():
checkpoint_lines = strip_comments_from_lines(checkpoint_names.splitlines())

for cpn in checkpoint_lines:
if cpn.strip():
if cpn.strip() in folder_paths.get_filename_list("checkpoints"):
checkpoints.append(cpn.strip())
Expand Down
248 changes: 248 additions & 0 deletions modules/loras.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,248 @@
from comfy.utils import load_torch_file
from comfy.sd import load_lora_for_models
import folder_paths
import glob
import logging
import os
import os.path
import random
import re

logger = logging.getLogger(__name__)


class DarkPopLoraFromStack(object):
    """
    Accepts a LoRA stack and extracts the first LoRA it finds, removing it
    from the stack.

    Returns the shortened stack and the name of the popped LoRA (or an
    empty string when the stack was empty).
    """

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(cls):
        # A single required LORA_STACK input, forced to be a node link.
        stack_input = ("LORA_STACK", {"forceInput": True})
        return {"required": {"LORA_STACK": stack_input}}

    RETURN_TYPES = ("LORA_STACK", "STRING")
    RETURN_NAMES = ("LORA_STACK", "EXTRACTED_LORA")
    FUNCTION = "extract_lora_from_stack"

    CATEGORY = "DarkPrompt"

    def extract_lora_from_stack(self, LORA_STACK):
        # Default to an empty string when there is nothing to pop.
        extracted_name = ""
        if LORA_STACK:
            # Each stack entry is a (name, model_weight, clip_weight)
            # tuple; only the name is handed back to the caller.
            head = LORA_STACK.pop(0)
            extracted_name = head[0]

        return (LORA_STACK, extracted_name)


class DarkLoraStackFromString(object):
    """
    Takes in a string (prompt), scans it for LoRA tags in the format
    <lora:some_lora:model_weight[:clip_weight]> and creates a LORA_STACK
    from the tags found.

    If a MODEL and CLIP are supplied, each LoRA is also applied to them;
    the tags are always stripped from the returned string so a clean
    prompt can be passed on to the sampler.
    """

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "string_in": (
                    "STRING",
                    {
                        "default": "",
                        "forceInput": True,
                    },
                ),
            },
            "optional": {
                "model": ("MODEL",),
                "clip": ("CLIP",),
            },
        }

    RETURN_TYPES = (
        "MODEL",
        "CLIP",
        "STRING",
        "LORA_STACK",
    )
    RETURN_NAMES = (
        "MODEL",
        "CLIP",
        "string_out",
        "LORA_STACK",
    )
    FUNCTION = "load_loras_from_string"

    CATEGORY = "DarkPrompt"

    def load_loras_from_string(self, string_in, model=None, clip=None):
        """
        Parse LoRA tags out of string_in, build a LORA_STACK, optionally
        apply the LoRAs to model/clip, and return the tag-free string.

        Returns (model, clip, string_out, lora_stack).
        """
        # clip_weight is optional in the tag; the trailing ":?" / "[\d\.]*"
        # lets it be absent, in which case model_weight is reused below.
        lora_pattern = r"\<lora\:(?P<lora_name>[0-9a-zA-Z\_\-\.\s\/\(\)]+)\:(?P<model_weight>[\d\.]+):?(?P<clip_weight>[\d\.]*)\>"
        lora_stack = list()
        lora_folder = folder_paths.get_folder_paths("loras")[0]
        lora_to_load = []

        for lora in re.findall(lora_pattern, string_in):
            # Normalize the tag name to an on-disk filename exactly once.
            lora_filename = (
                lora[0]
                if ".safetensors" in lora[0]
                else "%s.safetensors" % (lora[0])
            )
            try:
                lora_to_load.append(
                    {
                        "name": lora_filename,
                        "path": os.path.join(lora_folder, lora_filename),
                        "model_weight": float(lora[1]),
                        # Fall back to the model weight when no separate
                        # clip weight was given in the tag.
                        "clip_weight": (
                            float(lora[2]) if len(lora[2]) > 0 else float(lora[1])
                        ),
                    }
                )
            except ValueError:
                # Bad weights must not kill the whole prompt; warn and
                # skip just this tag.
                logger.warning(
                    "This line appears to have an invalid LoRA weight: %s", lora
                )

        this_lora_model = model
        this_lora_clip = clip

        for lora in lora_to_load:
            # If a model and clip were passed, load the LoRA, otherwise just
            # extend the stack.
            if not os.path.isfile(lora["path"]):
                logger.warning(
                    "SKIP LOADING LoRA THAT DOES NOT EXIST: %s", lora["path"]
                )
                continue
            if model and clip:
                lora_torch = load_torch_file(
                    lora["path"],
                    safe_load=True,
                )

                # BUGFIX: chain each LoRA onto the result of the previous
                # one (this_lora_model/this_lora_clip) instead of always
                # applying to the original model/clip, which silently
                # discarded all but the last LoRA.
                this_lora_model, this_lora_clip = load_lora_for_models(
                    this_lora_model,
                    this_lora_clip,
                    lora_torch,
                    float(lora["model_weight"]),
                    float(lora["clip_weight"]),
                )

            lora_stack.extend(
                [
                    (
                        lora["name"],
                        float(lora["model_weight"]),
                        float(lora["clip_weight"]),
                    )
                ]
            )

        # Remove the LoRA tags from the string so a clean string can be passed
        # to the sampler
        string_in = re.sub(lora_pattern, "", string_in)

        return (
            this_lora_model,
            this_lora_clip,
            string_in,
            lora_stack,
        )


class DarkLoadAllTheLoras(object):
    """
    Loads every *.safetensors LoRA found in the first configured LoRA
    folder and applies each one to the supplied MODEL and CLIP.

    Each LoRA is applied with a fixed model weight of 0.7 and a clip
    weight of 0.0 (i.e. model-only effect).
    """

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "model": ("MODEL",),
                "clip": ("CLIP",),
            },
        }

    RETURN_TYPES = (
        "MODEL",
        "CLIP",
    )
    RETURN_NAMES = (
        "MODEL",
        "CLIP",
    )
    FUNCTION = "load_all_the_loras"

    CATEGORY = "DarkPrompt"

    def load_all_the_loras(self, model, clip):
        """
        Discover and apply all LoRAs in the loras folder.

        Returns (model, clip) with every discovered LoRA chained on.
        """
        lora_folder = folder_paths.get_folder_paths("loras")[0]
        loras_to_load = []
        # Track basenames in a set for O(1) duplicate detection instead of
        # rescanning the list for every file.
        seen_names = set()

        search_for = os.path.join(lora_folder, "*.safetensors")
        logger.debug("Searching for LoRAs with pattern: %s", search_for)
        for lora_file in glob.glob(search_for):
            logger.debug("Found lora: %s", lora_file)
            basename = os.path.basename(lora_file)
            if basename in seen_names:
                logger.warning("%s is already loaded", lora_file)
                continue
            seen_names.add(basename)
            loras_to_load.append(
                {
                    "name": basename,
                    "path": lora_file,
                    # Hard-coded weights: 0.7 model / 0.0 clip.
                    "model_weight": 0.7,
                    "clip_weight": 0.0,
                }
            )

        this_lora_model = model
        this_lora_clip = clip

        for lora in loras_to_load:
            logger.info("Loading lora: %s", lora)
            lora_torch = load_torch_file(
                lora["path"],
                safe_load=True,
            )

            # Chain each LoRA onto the result of the previous one so they
            # all take effect.
            this_lora_model, this_lora_clip = load_lora_for_models(
                this_lora_model,
                this_lora_clip,
                lora_torch,
                float(lora["model_weight"]),
                float(lora["clip_weight"]),
            )

        return (
            this_lora_model,
            this_lora_clip,
        )
6 changes: 4 additions & 2 deletions modules/prompts.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import logging

import random
import re

Expand Down Expand Up @@ -189,7 +190,8 @@ def dark_prompt(
if strip_blank_lines:
lines = strip_blanks_from_lines(lines)

# Preseed the python random library
# Preseed the python random library with the seed we were fed initially
# to randomly choose a line
random.seed(seed)
try:
# Pick a random line from the list of available lines
Expand Down Expand Up @@ -249,7 +251,7 @@ def dark_prompt(
ret = combine_with + ret
else:
# If we have no data to return and no combine_with delimiter,
# just return our data
# just return the data we were fed initially.
ret = combine_with

return (ret,)

0 comments on commit 65154e1

Please sign in to comment.