Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2025-01-11 10:25:16 +00:00)
Merge remote-tracking branch 'origin/master' into menu-save-and-anchor
Commit 6481c90371
@@ -25,3 +25,7 @@ To update the ComfyUI code: update\update_comfyui.bat
 
 To update ComfyUI with the python dependencies, note that you should ONLY run this if you have issues with python dependencies.
 update\update_comfyui_and_python_dependencies.bat
+
+TO SHARE MODELS BETWEEN COMFYUI AND ANOTHER UI:
+In the ComfyUI directory you will find a file: extra_model_paths.yaml.example
+Rename this file to: extra_model_paths.yaml and edit it with your favorite text editor.
@@ -45,6 +45,10 @@ There is a portable standalone build for Windows that should work for running on
 
 Just download, extract and run. Make sure you put your Stable Diffusion checkpoints/models (the huge ckpt/safetensors files) in: ComfyUI\models\checkpoints
 
+#### How do I share models between another UI and ComfyUI?
+
+See the [Config file](extra_model_paths.yaml.example) to set the search paths for models. In the standalone windows build you can find this file in the ComfyUI directory. Rename this file to extra_model_paths.yaml and edit it with your favorite text editor.
+
 ## Colab Notebook
 
 To run it on colab or paperspace you can use my [Colab Notebook](notebooks/comfyui_colab.ipynb) here: [Link to open with google colab](https://colab.research.google.com/github/comfyanonymous/ComfyUI/blob/master/notebooks/comfyui_colab.ipynb)
@@ -102,7 +106,6 @@ With cmd.exe: ```"path_to_other_sd_gui\venv\Scripts\activate.bat"```
 
 And then you can use that terminal to run Comfyui without installing any dependencies. Note that the venv folder might be called something else depending on the SD UI.
 
-
 # Running
 
 ```python main.py```
@@ -221,7 +221,7 @@ class KSamplerX0Inpaint(torch.nn.Module):
     def forward(self, x, sigma, uncond, cond, cond_scale, denoise_mask, cond_concat=None):
         if denoise_mask is not None:
             latent_mask = 1. - denoise_mask
-            x = x * denoise_mask + (self.latent_image + self.noise * sigma) * latent_mask
+            x = x * denoise_mask + (self.latent_image + self.noise * sigma.reshape([sigma.shape[0]] + [1] * (len(self.noise.shape) - 1))) * latent_mask
         out = self.inner_model(x, sigma, cond=cond, uncond=uncond, cond_scale=cond_scale, cond_concat=cond_concat)
         if denoise_mask is not None:
             out *= denoise_mask
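Review note on the hunk above: `sigma` arrives as one value per batch element, while `self.noise` and `self.latent_image` are full latent tensors, so it has to be reshaped to `[batch, 1, 1, 1]` before it can broadcast per sample. A minimal PyTorch sketch of the idea (illustration only, not ComfyUI code; shapes chosen arbitrarily):

```python
import torch

noise = torch.randn(2, 4, 64, 64)  # [batch, channels, height, width] latent-shaped noise
sigma = torch.tensor([0.5, 1.0])   # [batch] one noise level per sample

# Multiplying by the flat [batch] tensor would broadcast against the trailing
# width dimension and fail (or silently misalign if the sizes happened to match),
# so expand sigma to [batch, 1, 1, 1] first:
sigma_b = sigma.reshape([sigma.shape[0]] + [1] * (len(noise.shape) - 1))
noised = noise * sigma_b           # each sample scaled by its own sigma
print(sigma_b.shape, noised.shape) # torch.Size([2, 1, 1, 1]) torch.Size([2, 4, 64, 64])
```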
@@ -65,8 +65,11 @@ def recursive_execute(server, prompt, outputs, current_item, extra_data={}):
 
     nodes.before_node_execution()
     outputs[unique_id] = getattr(obj, obj.FUNCTION)(**input_data_all)
-    if "ui" in outputs[unique_id] and server.client_id is not None:
-        server.send_sync("executed", { "node": unique_id, "output": outputs[unique_id]["ui"] }, server.client_id)
+    if "ui" in outputs[unique_id]:
+        if server.client_id is not None:
+            server.send_sync("executed", { "node": unique_id, "output": outputs[unique_id]["ui"] }, server.client_id)
+        if "result" in outputs[unique_id]:
+            outputs[unique_id] = outputs[unique_id]["result"]
     return executed + [unique_id]
 
 def recursive_will_execute(prompt, outputs, current_item):
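For context on the execution change above: a node function can now return a dict whose "ui" payload is pushed to the client over the "executed" message, optionally alongside a "result" tuple that replaces the stored output for downstream nodes. A hypothetical node sketch (the class name, category, and "text" payload are illustrative, not part of the commit; the dict shape is what recursive_execute now handles):

```python
class PreviewInt:
    # Hypothetical custom node used only to illustrate the new return shape.
    RETURN_TYPES = ("INT",)
    FUNCTION = "run"
    CATEGORY = "examples"

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"value": ("INT", {"default": 0})}}

    def run(self, value):
        return {
            "ui": {"text": [str(value)]},  # sent to the frontend via the "executed" event
            "result": (value,),            # unwrapped and passed to downstream nodes
        }

NODE_CLASS_MAPPINGS = {"PreviewInt": PreviewInt}
```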
main.py
@@ -18,6 +18,7 @@ if __name__ == "__main__":
         print("\t--use-split-cross-attention\tUse the split cross attention optimization instead of the sub-quadratic one.\n\t\t\t\t\tIgnored when xformers is used.")
         print("\t--use-pytorch-cross-attention\tUse the new pytorch 2.0 cross attention function.")
         print("\t--disable-xformers\t\tdisables xformers")
+        print("\t--cuda-device 1\t\tSet the id of the cuda device this instance will use.")
         print()
         print("\t--highvram\t\t\tBy default models will be unloaded to CPU memory after being used.\n\t\t\t\t\tThis option keeps them in GPU memory.\n")
         print("\t--normalvram\t\t\tUsed to force normal vram use if lowvram gets automatically enabled.")
@@ -31,6 +32,14 @@ if __name__ == "__main__":
         print("disabling upcasting of attention")
         os.environ['ATTN_PRECISION'] = "fp16"
 
+    try:
+        index = sys.argv.index('--cuda-device')
+        device = sys.argv[index + 1]
+        os.environ['CUDA_VISIBLE_DEVICES'] = device
+        print("Set cuda device to:", device)
+    except:
+        pass
+
 import execution
 import server
 import folder_paths
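Why the new --cuda-device block sits before `import execution` / `import server`: CUDA_VISIBLE_DEVICES only takes effect if it is set before CUDA is initialized, i.e. before anything imports torch. A minimal sketch of the same ordering (illustration, not part of the commit):

```python
import os
import sys

# Must run before any module that imports torch is loaded, which is exactly
# why main.py parses --cuda-device ahead of its own imports.
if '--cuda-device' in sys.argv:
    os.environ['CUDA_VISIBLE_DEVICES'] = sys.argv[sys.argv.index('--cuda-device') + 1]

import torch  # CUDA now only exposes the selected device

if torch.cuda.is_available():
    print("visible devices:", torch.cuda.device_count())  # 1 when a single valid id was passed
```

Usage then looks like ```python main.py --cuda-device 1``` to pin the instance to GPU 1.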
web/extensions/core/saveImageExtraOutput.js (new file)
@@ -0,0 +1,100 @@
import { app } from "/scripts/app.js";

// Use widget values and dates in output filenames

app.registerExtension({
    name: "Comfy.SaveImageExtraOutput",
    async beforeRegisterNodeDef(nodeType, nodeData, app) {
        if (nodeData.name === "SaveImage") {
            const onNodeCreated = nodeType.prototype.onNodeCreated;

            // Simple date formatter
            const parts = {
                d: (d) => d.getDate(),
                M: (d) => d.getMonth() + 1,
                h: (d) => d.getHours(),
                m: (d) => d.getMinutes(),
                s: (d) => d.getSeconds(),
            };
            const format =
                Object.keys(parts)
                    .map((k) => k + k + "?")
                    .join("|") + "|yyy?y?";

            function formatDate(text, date) {
                return text.replace(new RegExp(format, "g"), function (text) {
                    if (text === "yy") return (date.getFullYear() + "").substring(2);
                    if (text === "yyyy") return date.getFullYear();
                    if (text[0] in parts) {
                        const p = parts[text[0]](date);
                        return (p + "").padStart(text.length, "0");
                    }
                    return text;
                });
            }

            // When the SaveImage node is created we want to override the serialization of the output name widget to run our S&R
            nodeType.prototype.onNodeCreated = function () {
                const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined;

                const widget = this.widgets.find((w) => w.name === "filename_prefix");
                widget.serializeValue = () => {
                    return widget.value.replace(/%([^%]+)%/g, function (match, text) {
                        const split = text.split(".");
                        if (split.length !== 2) {
                            // Special handling for dates
                            if (split[0].startsWith("date:")) {
                                return formatDate(split[0].substring(5), new Date());
                            }

                            if (text !== "width" && text !== "height") {
                                // Dont warn on standard replacements
                                console.warn("Invalid replacement pattern", text);
                            }
                            return match;
                        }

                        // Find node with matching S&R property name
                        let nodes = app.graph._nodes.filter((n) => n.properties?.["Node name for S&R"] === split[0]);
                        // If we cant, see if there is a node with that title
                        if (!nodes.length) {
                            nodes = app.graph._nodes.filter((n) => n.title === split[0]);
                        }
                        if (!nodes.length) {
                            console.warn("Unable to find node", split[0]);
                            return match;
                        }

                        if (nodes.length > 1) {
                            console.warn("Multiple nodes matched", split[0], "using first match");
                        }

                        const node = nodes[0];

                        const widget = node.widgets?.find((w) => w.name === split[1]);
                        if (!widget) {
                            console.warn("Unable to find widget", split[1], "on node", split[0], node);
                            return match;
                        }

                        return ((widget.value ?? "") + "").replaceAll(/\/|\\/g, "_");
                    });
                };

                return r;
            };
        } else {
            // When any other node is created add a property to alias the node
            const onNodeCreated = nodeType.prototype.onNodeCreated;
            nodeType.prototype.onNodeCreated = function () {
                const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined;

                if (!this.properties || !("Node name for S&R" in this.properties)) {
                    this.addProperty("Node name for S&R", this.title, "string");
                }

                return r;
            };
        }
    },
});
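The extension above rewrites `filename_prefix` at queue time: `%date:FORMAT%` tokens are expanded by the small date formatter, and `%NodeName.widget%` tokens are resolved against other nodes' widget values (matched by the "Node name for S&R" property or the node title). A rough Python mirror of just the date-token logic, for illustration only (the real substitution runs client-side in this file):

```python
import re
from datetime import datetime

# Same token set as the JS formatter: d/dd, M/MM, h/hh, m/mm, s/ss, yy/yyyy
PARTS = {
    "d": lambda d: d.day,
    "M": lambda d: d.month,
    "h": lambda d: d.hour,
    "m": lambda d: d.minute,
    "s": lambda d: d.second,
}
FORMAT = "|".join(k + k + "?" for k in PARTS) + "|yyy?y?"

def format_date(text, date):
    def repl(m):
        tok = m.group(0)
        if tok == "yy":
            return str(date.year)[2:]
        if tok == "yyyy":
            return str(date.year)
        if tok[0] in PARTS:
            return str(PARTS[tok[0]](date)).zfill(len(tok))
        return tok
    return re.sub(FORMAT, repl, text)

# e.g. "%date:yyyy-MM-dd%" in filename_prefix expands to the current date
print(format_date("yyyy-MM-dd hh:mm:ss", datetime(2023, 3, 25, 9, 5, 7)))  # 2023-03-25 09:05:07
```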
@@ -617,6 +617,10 @@ class ComfyApp {
 
         api.addEventListener("executed", ({ detail }) => {
             this.nodeOutputs[detail.node] = detail.output;
+            const node = this.graph.getNodeById(detail.node);
+            if (node?.onExecuted) {
+                node.onExecuted(detail.output);
+            }
         });
 
         api.init();
@@ -739,6 +743,9 @@ class ComfyApp {
                 const inputData = inputs[inputName];
                 const type = inputData[0];
 
+                if(inputData[1]?.forceInput) {
+                    this.addInput(inputName, type);
+                } else {
                 if (Array.isArray(type)) {
                     // Enums
                     Object.assign(config, widgets.COMBO(this, inputName, inputData, app) || {});
@@ -753,6 +760,7 @@ class ComfyApp {
                     this.addInput(inputName, type);
                 }
+                }
             }
 
             for (const o in nodeData["output"]) {
                 const output = nodeData["output"][o];
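The forceInput handling added above lets a node author flag a widget-type input so the frontend creates a connectable input socket instead of an inline widget. A hypothetical Python node definition using the flag (class and names are illustrative; the `"forceInput": True` option mirrors the `inputData[1]?.forceInput` check in the hunk):

```python
class SeedEcho:
    # Hypothetical node: with "forceInput": True the INT input is exposed as a
    # socket to connect another node's output to, instead of a number widget.
    RETURN_TYPES = ("INT",)
    FUNCTION = "echo"
    CATEGORY = "examples"

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "seed": ("INT", {"default": 0, "min": 0, "forceInput": True}),
            }
        }

    def echo(self, seed):
        return (seed,)

NODE_CLASS_MAPPINGS = {"SeedEcho": SeedEcho}
```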
@@ -227,6 +227,13 @@ button.comfy-queue-btn {
 @media only screen and (max-height: 850px) {
     .comfy-menu {
         margin-top: -70px;
+        top: 0 !important;
+        bottom: 0 !important;
+        left: auto !important;
+        right: 0 !important;
         border-radius: 0px;
     }
+    .comfy-menu span.drag-handle {
+        visibility:hidden
+    }
 }