Skip to content

Commit 4dec9e4

Browse files
authored
Merge branch 'comfyanonymous:master' into master
2 parents 6bfcd8e + ace899e commit 4dec9e4

File tree

82 files changed

+12090
-8800
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

82 files changed

+12090
-8800
lines changed

README.md

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@ This ui will let you design and execute advanced stable diffusion pipelines usin
4747
- [AuraFlow](https://comfyanonymous.github.io/ComfyUI_examples/aura_flow/)
4848
- [HunyuanDiT](https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_dit/)
4949
- [Flux](https://comfyanonymous.github.io/ComfyUI_examples/flux/)
50+
- [Lumina Image 2.0](https://comfyanonymous.github.io/ComfyUI_examples/lumina2/)
5051
- Video Models
5152
- [Stable Video Diffusion](https://comfyanonymous.github.io/ComfyUI_examples/video/)
5253
- [Mochi](https://comfyanonymous.github.io/ComfyUI_examples/mochi/)
@@ -292,6 +293,8 @@ Use `--tls-keyfile key.pem --tls-certfile cert.pem` to enable TLS/SSL, the app w
292293
293294
## Support and dev channel
294295

296+
[Discord](https://comfy.org/discord): Try the #help or #feedback channels.
297+
295298
[Matrix space: #comfyui_space:matrix.org](https://app.element.io/#/room/%23comfyui_space%3Amatrix.org) (it's like discord but open source).
296299

297300
See also: [https://www.comfy.org/](https://www.comfy.org/)
@@ -308,7 +311,7 @@ For any bugs, issues, or feature requests related to the frontend, please use th
308311

309312
The new frontend is now the default for ComfyUI. However, please note:
310313

311-
1. The frontend in the main ComfyUI repository is updated weekly.
314+
1. The frontend in the main ComfyUI repository is updated fortnightly.
312315
2. Daily releases are available in the separate frontend repository.
313316

314317
To use the most up-to-date frontend version:
@@ -325,7 +328,7 @@ To use the most up-to-date frontend version:
325328
--front-end-version Comfy-Org/ComfyUI_frontend@latest
326329
```
327330

328-
This approach allows you to easily switch between the stable weekly release and the cutting-edge daily updates, or even specific versions for testing purposes.
331+
This approach allows you to easily switch between the stable fortnightly release and the cutting-edge daily updates, or even specific versions for testing purposes.
329332

330333
### Accessing the Legacy Frontend
331334

api_server/routes/internal/internal_routes.py

Lines changed: 16 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
from aiohttp import web
22
from typing import Optional
3-
from folder_paths import models_dir, user_directory, output_directory, folder_names_and_paths
4-
from api_server.services.file_service import FileService
3+
from folder_paths import folder_names_and_paths, get_directory_by_type
54
from api_server.services.terminal_service import TerminalService
65
import app.logger
6+
import os
77

88
class InternalRoutes:
99
'''
@@ -15,26 +15,10 @@ class InternalRoutes:
1515
def __init__(self, prompt_server):
1616
self.routes: web.RouteTableDef = web.RouteTableDef()
1717
self._app: Optional[web.Application] = None
18-
self.file_service = FileService({
19-
"models": models_dir,
20-
"user": user_directory,
21-
"output": output_directory
22-
})
2318
self.prompt_server = prompt_server
2419
self.terminal_service = TerminalService(prompt_server)
2520

2621
def setup_routes(self):
27-
@self.routes.get('/files')
28-
async def list_files(request):
29-
directory_key = request.query.get('directory', '')
30-
try:
31-
file_list = self.file_service.list_files(directory_key)
32-
return web.json_response({"files": file_list})
33-
except ValueError as e:
34-
return web.json_response({"error": str(e)}, status=400)
35-
except Exception as e:
36-
return web.json_response({"error": str(e)}, status=500)
37-
3822
@self.routes.get('/logs')
3923
async def get_logs(request):
4024
return web.json_response("".join([(l["t"] + " - " + l["m"]) for l in app.logger.get_logs()]))
@@ -67,6 +51,20 @@ async def get_folder_paths(request):
6751
response[key] = folder_names_and_paths[key][0]
6852
return web.json_response(response)
6953

54+
@self.routes.get('/files/{directory_type}')
55+
async def get_files(request: web.Request) -> web.Response:
56+
directory_type = request.match_info['directory_type']
57+
if directory_type not in ("output", "input", "temp"):
58+
return web.json_response({"error": "Invalid directory type"}, status=400)
59+
60+
directory = get_directory_by_type(directory_type)
61+
sorted_files = sorted(
62+
(entry for entry in os.scandir(directory) if entry.is_file()),
63+
key=lambda entry: -entry.stat().st_mtime
64+
)
65+
return web.json_response([entry.name for entry in sorted_files], status=200)
66+
67+
7068
def get_app(self):
7169
if self._app is None:
7270
self._app = web.Application()

api_server/services/file_service.py

Lines changed: 0 additions & 13 deletions
This file was deleted.

comfy/cli_args.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,8 @@ def is_valid_directory(path: Optional[str]) -> Optional[str]:
179179

180180
parser.add_argument("--user-directory", type=is_valid_directory, default=None, help="Set the ComfyUI user directory with an absolute path. Overrides --base-directory.")
181181

182+
parser.add_argument("--enable-compress-response-body", action="store_true", help="Enable compressing response body.")
183+
182184
if comfy.options.args_parsing:
183185
args = parser.parse_args()
184186
else:
@@ -189,3 +191,6 @@ def is_valid_directory(path: Optional[str]) -> Optional[str]:
189191

190192
if args.disable_auto_launch:
191193
args.auto_launch = False
194+
195+
if args.force_fp16:
196+
args.fp16_unet = True

comfy/clip_model.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -102,9 +102,10 @@ def forward(self, input_tokens, attention_mask=None, intermediate_output=None, f
102102
mask = None
103103
if attention_mask is not None:
104104
mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1])
105-
mask = mask.masked_fill(mask.to(torch.bool), float("-inf"))
105+
mask = mask.masked_fill(mask.to(torch.bool), -torch.finfo(x.dtype).max)
106+
107+
causal_mask = torch.full((x.shape[1], x.shape[1]), -torch.finfo(x.dtype).max, dtype=x.dtype, device=x.device).triu_(1)
106108

107-
causal_mask = torch.empty(x.shape[1], x.shape[1], dtype=x.dtype, device=x.device).fill_(float("-inf")).triu_(1)
108109
if mask is not None:
109110
mask += causal_mask
110111
else:

comfy/comfy_types/node_typing.py

Lines changed: 35 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -66,13 +66,26 @@ def __ne__(self, value: object) -> bool:
6666
b = frozenset(value.split(","))
6767
return not (b.issubset(a) or a.issubset(b))
6868

69+
class RemoteInputOptions(TypedDict):
70+
route: str
71+
"""The route to the remote source."""
72+
refresh_button: bool
73+
"""Specifies whether to show a refresh button in the UI below the widget."""
74+
control_after_refresh: Literal["first", "last"]
75+
"""Specifies the control after the refresh button is clicked. If "first", the first item will be automatically selected, and so on."""
76+
timeout: int
77+
"""The maximum amount of time to wait for a response from the remote source in milliseconds."""
78+
max_retries: int
79+
"""The maximum number of retries before aborting the request."""
80+
refresh: int
81+
"""The TTL of the remote input's value in milliseconds. Specifies the interval at which the remote input's value is refreshed."""
6982

7083
class InputTypeOptions(TypedDict):
7184
"""Provides type hinting for the return type of the INPUT_TYPES node function.
7285
7386
Due to IDE limitations with unions, for now all options are available for all types (e.g. `label_on` is hinted even when the type is not `IO.BOOLEAN`).
7487
75-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_datatypes
88+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/datatypes
7689
"""
7790

7891
default: bool | str | float | int | list | tuple
@@ -113,6 +126,14 @@ class InputTypeOptions(TypedDict):
113126
# defaultVal: str
114127
dynamicPrompts: bool
115128
"""Causes the front-end to evaluate dynamic prompts (``STRING``)"""
129+
# class InputTypeCombo(InputTypeOptions):
130+
image_upload: bool
131+
"""Specifies whether the input should have an image upload button and image preview attached to it. Requires that the input's name is `image`."""
132+
image_folder: Literal["input", "output", "temp"]
133+
"""Specifies which folder to get preview images from if the input has the ``image_upload`` flag.
134+
"""
135+
remote: RemoteInputOptions
136+
"""Specifies the configuration for a remote input."""
116137

117138

118139
class HiddenInputTypeDict(TypedDict):
@@ -133,7 +154,7 @@ class HiddenInputTypeDict(TypedDict):
133154
class InputTypeDict(TypedDict):
134155
"""Provides type hinting for node INPUT_TYPES.
135156
136-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_more_on_inputs
157+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/more_on_inputs
137158
"""
138159

139160
required: dict[str, tuple[IO, InputTypeOptions]]
@@ -143,14 +164,14 @@ class InputTypeDict(TypedDict):
143164
hidden: HiddenInputTypeDict
144165
"""Offers advanced functionality and server-client communication.
145166
146-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_more_on_inputs#hidden-inputs
167+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/more_on_inputs#hidden-inputs
147168
"""
148169

149170

150171
class ComfyNodeABC(ABC):
151172
"""Abstract base class for Comfy nodes. Includes the names and expected types of attributes.
152173
153-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview
174+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview
154175
"""
155176

156177
DESCRIPTION: str
@@ -167,7 +188,7 @@ class ComfyNodeABC(ABC):
167188
CATEGORY: str
168189
"""The category of the node, as per the "Add Node" menu.
169190
170-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview#category
191+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#category
171192
"""
172193
EXPERIMENTAL: bool
173194
"""Flags a node as experimental, informing users that it may change or not work as expected."""
@@ -181,9 +202,9 @@ def INPUT_TYPES(s) -> InputTypeDict:
181202
182203
* Must include the ``required`` key, which describes all inputs that must be connected for the node to execute.
183204
* The ``optional`` key can be added to describe inputs which do not need to be connected.
184-
* The ``hidden`` key offers some advanced functionality. More info at: https://docs.comfy.org/essentials/custom_node_more_on_inputs#hidden-inputs
205+
* The ``hidden`` key offers some advanced functionality. More info at: https://docs.comfy.org/custom-nodes/backend/more_on_inputs#hidden-inputs
185206
186-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview#input-types
207+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#input-types
187208
"""
188209
return {"required": {}}
189210

@@ -198,7 +219,7 @@ def INPUT_TYPES(s) -> InputTypeDict:
198219
199220
By default, a node is not considered an output. Set ``OUTPUT_NODE = True`` to specify that it is.
200221
201-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview#output-node
222+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#output-node
202223
"""
203224
INPUT_IS_LIST: bool
204225
"""A flag indicating if this node implements the additional code necessary to deal with OUTPUT_IS_LIST nodes.
@@ -209,7 +230,7 @@ def INPUT_TYPES(s) -> InputTypeDict:
209230
210231
A node can also override the default input behaviour and receive the whole list in a single call. This is done by setting a class attribute `INPUT_IS_LIST` to ``True``.
211232
212-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_lists#list-processing
233+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/lists#list-processing
213234
"""
214235
OUTPUT_IS_LIST: tuple[bool]
215236
"""A tuple indicating which node outputs are lists, but will be connected to nodes that expect individual items.
@@ -227,7 +248,7 @@ def INPUT_TYPES(s) -> InputTypeDict:
227248
the node should provide a class attribute `OUTPUT_IS_LIST`, which is a ``tuple[bool]``, of the same length as `RETURN_TYPES`,
228249
specifying which outputs which should be so treated.
229250
230-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_lists#list-processing
251+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/lists#list-processing
231252
"""
232253

233254
RETURN_TYPES: tuple[IO]
@@ -237,19 +258,19 @@ def INPUT_TYPES(s) -> InputTypeDict:
237258
238259
RETURN_TYPES = (IO.INT, "INT", "CUSTOM_TYPE")
239260
240-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview#return-types
261+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#return-types
241262
"""
242263
RETURN_NAMES: tuple[str]
243264
"""The output slot names for each item in `RETURN_TYPES`, e.g. ``RETURN_NAMES = ("count", "filter_string")``
244265
245-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview#return-names
266+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#return-names
246267
"""
247268
OUTPUT_TOOLTIPS: tuple[str]
248269
"""A tuple of strings to use as tooltips for node outputs, one for each item in `RETURN_TYPES`."""
249270
FUNCTION: str
250271
"""The name of the function to execute as a literal string, e.g. `FUNCTION = "execute"`
251272
252-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_server_overview#function
273+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#function
253274
"""
254275

255276

@@ -267,7 +288,7 @@ def check_lazy_status(self, **kwargs) -> list[str]:
267288
Params should match the nodes execution ``FUNCTION`` (self, and all inputs by name).
268289
Will be executed repeatedly until it returns an empty list, or all requested items were already evaluated (and sent as params).
269290
270-
Comfy Docs: https://docs.comfy.org/essentials/custom_node_lazy_evaluation#defining-check-lazy-status
291+
Comfy Docs: https://docs.comfy.org/custom-nodes/backend/lazy_evaluation#defining-check-lazy-status
271292
"""
272293

273294
need = [name for name in kwargs if kwargs[name] is None]

comfy/k_diffusion/sampling.py

Lines changed: 34 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1267,7 +1267,7 @@ def post_cfg_function(args):
12671267
return x
12681268

12691269
@torch.no_grad()
1270-
def res_multistep(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1., noise_sampler=None, cfg_pp=False):
1270+
def res_multistep(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1., noise_sampler=None, eta=1., cfg_pp=False):
12711271
extra_args = {} if extra_args is None else extra_args
12721272
seed = extra_args.get("seed", None)
12731273
noise_sampler = default_noise_sampler(x, seed=seed) if noise_sampler is None else noise_sampler
@@ -1289,53 +1289,60 @@ def post_cfg_function(args):
12891289
extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True)
12901290

12911291
for i in trange(len(sigmas) - 1, disable=disable):
1292-
if s_churn > 0:
1293-
gamma = min(s_churn / (len(sigmas) - 1), 2**0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0.0
1294-
sigma_hat = sigmas[i] * (gamma + 1)
1295-
else:
1296-
gamma = 0
1297-
sigma_hat = sigmas[i]
1298-
1299-
if gamma > 0:
1300-
eps = torch.randn_like(x) * s_noise
1301-
x = x + eps * (sigma_hat**2 - sigmas[i] ** 2) ** 0.5
1302-
denoised = model(x, sigma_hat * s_in, **extra_args)
1292+
denoised = model(x, sigmas[i] * s_in, **extra_args)
1293+
sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta)
13031294
if callback is not None:
1304-
callback({"x": x, "i": i, "sigma": sigmas[i], "sigma_hat": sigma_hat, "denoised": denoised})
1305-
if sigmas[i + 1] == 0 or old_denoised is None:
1295+
callback({"x": x, "i": i, "sigma": sigmas[i], "sigma_hat": sigmas[i], "denoised": denoised})
1296+
if sigma_down == 0 or old_denoised is None:
13061297
# Euler method
13071298
if cfg_pp:
1308-
d = to_d(x, sigma_hat, uncond_denoised)
1309-
x = denoised + d * sigmas[i + 1]
1299+
d = to_d(x, sigmas[i], uncond_denoised)
1300+
x = denoised + d * sigma_down
13101301
else:
1311-
d = to_d(x, sigma_hat, denoised)
1312-
dt = sigmas[i + 1] - sigma_hat
1302+
d = to_d(x, sigmas[i], denoised)
1303+
dt = sigma_down - sigmas[i]
13131304
x = x + d * dt
13141305
else:
13151306
# Second order multistep method in https://arxiv.org/pdf/2308.02157
1316-
t, t_next, t_prev = t_fn(sigmas[i]), t_fn(sigmas[i + 1]), t_fn(sigmas[i - 1])
1307+
t, t_next, t_prev = t_fn(sigmas[i]), t_fn(sigma_down), t_fn(sigmas[i - 1])
13171308
h = t_next - t
13181309
c2 = (t_prev - t) / h
13191310

13201311
phi1_val, phi2_val = phi1_fn(-h), phi2_fn(-h)
1321-
b1 = torch.nan_to_num(phi1_val - 1.0 / c2 * phi2_val, nan=0.0)
1322-
b2 = torch.nan_to_num(1.0 / c2 * phi2_val, nan=0.0)
1312+
b1 = torch.nan_to_num(phi1_val - phi2_val / c2, nan=0.0)
1313+
b2 = torch.nan_to_num(phi2_val / c2, nan=0.0)
13231314

13241315
if cfg_pp:
13251316
x = x + (denoised - uncond_denoised)
1317+
x = sigma_fn(h) * x + h * (b1 * uncond_denoised + b2 * old_denoised)
1318+
else:
1319+
x = sigma_fn(h) * x + h * (b1 * denoised + b2 * old_denoised)
13261320

1327-
x = (sigma_fn(t_next) / sigma_fn(t)) * x + h * (b1 * denoised + b2 * old_denoised)
1321+
# Noise addition
1322+
if sigmas[i + 1] > 0:
1323+
x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up
13281324

1329-
old_denoised = denoised
1325+
if cfg_pp:
1326+
old_denoised = uncond_denoised
1327+
else:
1328+
old_denoised = denoised
13301329
return x
13311330

13321331
@torch.no_grad()
1333-
def sample_res_multistep(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1., noise_sampler=None):
1334-
return res_multistep(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, s_churn=s_churn, s_tmin=s_tmin, s_tmax=s_tmax, s_noise=s_noise, noise_sampler=noise_sampler, cfg_pp=False)
1332+
def sample_res_multistep(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1., noise_sampler=None):
1333+
return res_multistep(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, s_noise=s_noise, noise_sampler=noise_sampler, eta=0., cfg_pp=False)
1334+
1335+
@torch.no_grad()
1336+
def sample_res_multistep_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1., noise_sampler=None):
1337+
return res_multistep(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, s_noise=s_noise, noise_sampler=noise_sampler, eta=0., cfg_pp=True)
1338+
1339+
@torch.no_grad()
1340+
def sample_res_multistep_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None):
1341+
return res_multistep(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, s_noise=s_noise, noise_sampler=noise_sampler, eta=eta, cfg_pp=False)
13351342

13361343
@torch.no_grad()
1337-
def sample_res_multistep_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1., noise_sampler=None):
1338-
return res_multistep(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, s_churn=s_churn, s_tmin=s_tmin, s_tmax=s_tmax, s_noise=s_noise, noise_sampler=noise_sampler, cfg_pp=True)
1344+
def sample_res_multistep_ancestral_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None):
1345+
return res_multistep(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, s_noise=s_noise, noise_sampler=noise_sampler, eta=eta, cfg_pp=True)
13391346

13401347
@torch.no_grad()
13411348
def sample_gradient_estimation(model, x, sigmas, extra_args=None, callback=None, disable=None, ge_gamma=2.):

0 commit comments

Comments (0)