Draft
Commits
26 commits
129e82c
refactor: load pipeline using pytrickle model loader, add default pas…
eliteprox Oct 28, 2025
59b723c
chore(deps): update pytrickle dependency to latest commit and improve…
eliteprox Oct 28, 2025
2d6f2d1
feat: enhance ComfyStream client prompt management and frame processing
eliteprox Oct 29, 2025
50fdac1
Merge branch 'main' into feat/warmup-overlay-refactor
eliteprox Oct 29, 2025
400c652
byoc(frame_processor): add on_stream_start callback, bump pytrickle
eliteprox Oct 8, 2025
86975ef
feat(frame_processor): add loading screen during warmup
eliteprox Oct 8, 2025
e929b96
feat(frame_processor): play loading overlay during warmup and prompt …
eliteprox Oct 29, 2025
cf22583
remove git submodules added on accidental commit
eliteprox Oct 29, 2025
a7f89e6
remove broken force passthrough logic in favor of overlay
eliteprox Oct 29, 2025
a536863
dockerfile: add entrypoint and cmd for byoc
eliteprox Oct 7, 2025
2cdea10
fix blacklist_custom_nodes parameter name, fix logging setting for co…
eliteprox Oct 8, 2025
adbd6a6
chore(deps): update pytrickle dependency to a newer commit and enhanc…
eliteprox Nov 6, 2025
d8db00a
chore(docker): update cuDNN version in Dockerfile to match PyTorch re…
eliteprox Nov 6, 2025
e35b06f
chore(deps): update pytrickle dependency to a newer commit and refact…
eliteprox Nov 6, 2025
79775e1
refactor(frame_processor): streamline warmup logic and enhance loadin…
eliteprox Nov 7, 2025
e742710
refactor(frame_processor): improve warmup task cancellation handling …
eliteprox Nov 7, 2025
f27656d
fix(byoc): correct parameter name from blacklist_nodes to blacklist_c…
eliteprox Nov 7, 2025
482b8b8
Merge branch 'main' into feat/warmup-overlay-refactor-entrypoint
eliteprox Nov 7, 2025
5e572dd
chore(deps): update pytrickle dependency to a newer commit and add wa…
eliteprox Nov 7, 2025
92a882f
chore(deps): update pytrickle dependency to a newer commit in pyproje…
eliteprox Nov 7, 2025
86568c8
Merge branch 'main' into feat/warmup-overlay-refactor-entrypoint
eliteprox Nov 7, 2025
111b9f9
refactor(byoc): rename warmup_callback to warmup_handler for consiste…
eliteprox Nov 8, 2025
dd12781
fix async warmup
eliteprox Nov 8, 2025
d6210a0
chore(deps): update pytrickle dependency to a newer commit and refact…
eliteprox Nov 8, 2025
425d7bd
bump pytrickle
eliteprox Nov 8, 2025
622b5b1
feat(client): on prompt update failure, stop prompt and fallback to s…
eliteprox Nov 8, 2025
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -9,7 +9,7 @@ version = "0.1.6"
 license = { file = "LICENSE" }
 dependencies = [
     "asyncio",
-    "pytrickle @ git+https://github.com/livepeer/pytrickle.git@v0.1.4",
+    "pytrickle @ git+https://github.com/livepeer/pytrickle.git@3f0fda21e2f2055e8f903a0430cfe8e782c0a28d",
     "comfyui @ git+https://github.com/hiddenswitch/ComfyUI.git@e62df3a8811d8c652a195d4669f4fb27f6c9a9ba",
     "aiortc",
     "aiohttp",
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,5 +1,5 @@
 asyncio
-pytrickle @ git+https://github.com/livepeer/pytrickle.git@v0.1.4
+pytrickle @ git+https://github.com/livepeer/pytrickle.git@3f0fda21e2f2055e8f903a0430cfe8e782c0a28d
 comfyui @ git+https://github.com/hiddenswitch/ComfyUI.git@e62df3a8811d8c652a195d4669f4fb27f6c9a9ba
 aiortc
 aiohttp
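Both manifests use pip's direct-reference syntax for VCS dependencies, where whatever follows the final @ of the git URL is the revision to check out (a branch, tag, or commit SHA); this change swaps the movable v0.1.4 tag for an immutable commit. The general form, with a placeholder revision purely for illustration:

pytrickle @ git+https://github.com/livepeer/pytrickle.git@<branch-tag-or-commit-sha>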
21 changes: 9 additions & 12 deletions server/byoc.py
@@ -76,7 +76,7 @@ def main():
if args.comfyui_log_level:
log_level = logging._nameToLevel.get(args.comfyui_log_level.upper())
logging.getLogger("comfy").setLevel(log_level)

# Add ComfyStream timeout filter to suppress verbose execution logging
logging.getLogger("comfy.cmd.execution").addFilter(ComfyStreamTimeoutFilter())

@@ -85,7 +85,7 @@ def force_print(*args, **kwargs):
sys.stdout.flush()

logger.info("Starting ComfyStream BYOC server with pytrickle StreamProcessor...")

# Create frame processor with configuration
frame_processor = ComfyStreamFrameProcessor(
width=args.width,
@@ -98,22 +98,24 @@ def force_print(*args, **kwargs):
logging_level=args.comfyui_log_level,
comfyui_inference_log_level=args.comfyui_inference_log_level
)

# Create frame skip configuration only if enabled
frame_skip_config = None
if args.disable_frame_skip:
logger.info("Frame skipping disabled")
else:
frame_skip_config = FrameSkipConfig()
logger.info("Frame skipping enabled: adaptive skipping based on queue sizes")

# Create StreamProcessor with frame processor
processor = StreamProcessor(
video_processor=frame_processor.process_video_async,
audio_processor=frame_processor.process_audio_async,
model_loader=frame_processor.load_model,
param_updater=frame_processor.update_params,
on_stream_start=frame_processor.on_stream_start,
on_stream_stop=frame_processor.on_stream_stop,
warmup_handler=frame_processor.warmup,
# Align processor name with capability for consistent logs
name=(os.getenv("CAPABILITY_NAME") or "comfystream"),
port=int(args.port),
@@ -127,11 +129,7 @@ def force_print(*args, **kwargs):

# Set the stream processor reference for text data publishing
frame_processor.set_stream_processor(processor)

# Create async startup function to load model
async def load_model_on_startup(app):
await processor._frame_processor.load_model()


# Create async startup function for orchestrator registration
async def register_orchestrator_startup(app):
try:
@@ -163,8 +161,7 @@
# Clear ORCH_SECRET from environment even on error
os.environ.pop("ORCH_SECRET", None)

# Add model loading and registration to startup hooks
processor.server.app.on_startup.append(load_model_on_startup)
# Add registration to startup hooks
processor.server.app.on_startup.append(register_orchestrator_startup)

# Add warmup endpoint: accepts same body as prompts update
@@ -189,7 +186,7 @@ async def warmup_handler(request):

# Mount at same API namespace as StreamProcessor defaults
processor.server.add_route("POST", "/api/stream/warmup", warmup_handler)

# Run the processor
processor.run()
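The warmup route's body is collapsed in the hunk above; only its introductory comment, its signature, and the route mount are visible. As a rough, hypothetical sketch of how such an aiohttp handler could delegate to the frame processor (the JSON body shape, the response payload, and the exact warmup() call signature are assumptions; only frame_processor.warmup, the add_route call, and the /api/stream/warmup path come from the diff):

from aiohttp import web

# Hypothetical body for the folded handler; frame_processor and processor
# are the objects constructed earlier in main() above.
async def warmup_handler(request):
    params = await request.json()           # "accepts same body as prompts update", per the diff comment
    await frame_processor.warmup(params)    # call shape is an assumption, not shown in the diff
    return web.json_response({"status": "warmup triggered"})

# Mount at the same API namespace as StreamProcessor defaults (shown in the diff)
processor.server.add_route("POST", "/api/stream/warmup", warmup_handler)

Model loading no longer needs a comparable hand-rolled hook: the removed load_model_on_startup coroutine and its app.on_startup registration are superseded by passing model_loader=frame_processor.load_model (and warmup_handler=frame_processor.warmup) directly to StreamProcessor, leaving it to pytrickle to invoke them.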
