mirror of
https://github.com/comfyanonymous/ComfyUI.git
synced 2025-01-10 18:05:16 +00:00
Compare commits
5 Commits
df5db03a43
...
6f225465bd
Author | SHA1 | Date | |
---|---|---|---|
|
6f225465bd | ||
|
2307ff6746 | ||
|
eeb20ac9d8 | ||
|
fa2c56ac60 | ||
|
59844491b1 |
@ -111,7 +111,7 @@ class CLIP:
|
||||
model_management.load_models_gpu([self.patcher], force_full_load=True)
|
||||
self.layer_idx = None
|
||||
self.use_clip_schedule = False
|
||||
logging.info("CLIP model load device: {}, offload device: {}, current: {}, dtype: {}".format(load_device, offload_device, params['device'], dtype))
|
||||
logging.info("CLIP/text encoder model load device: {}, offload device: {}, current: {}, dtype: {}".format(load_device, offload_device, params['device'], dtype))
|
||||
|
||||
def clone(self):
|
||||
n = CLIP(no_init=True)
|
||||
@ -898,7 +898,7 @@ def load_state_dict_guess_config(sd, output_vae=True, output_clip=True, output_c
|
||||
if output_model:
|
||||
model_patcher = comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=model_management.unet_offload_device())
|
||||
if inital_load_device != torch.device("cpu"):
|
||||
logging.info("loaded straight to GPU")
|
||||
logging.info("loaded diffusion model directly to GPU")
|
||||
model_management.load_models_gpu([model_patcher], force_full_load=True)
|
||||
|
||||
return (model_patcher, clip, vae, clipvision)
|
||||
|
@ -4,7 +4,8 @@ lint.ignore = ["ALL"]
|
||||
# Enable specific rules
|
||||
lint.select = [
|
||||
"S307", # suspicious-eval-usage
|
||||
"T201", # print-usage
|
||||
"S102", # exec
|
||||
"T", # print-usage
|
||||
"W",
|
||||
# The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
|
||||
# See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
|
||||
|
35
server.py
35
server.py
@ -14,6 +14,7 @@ import struct
|
||||
import ssl
|
||||
import socket
|
||||
import ipaddress
|
||||
import io,base64
|
||||
from PIL import Image, ImageOps
|
||||
from PIL.PngImagePlugin import PngInfo
|
||||
from io import BytesIO
|
||||
@ -329,11 +330,45 @@ class PromptServer():
|
||||
else:
|
||||
return web.Response(status=400)
|
||||
|
||||
def str2pil(img_str: str) -> Image.Image:
    """Decode a base64-encoded image string into a PIL Image.

    The bytes are wrapped in an in-memory buffer so PIL can open them
    without touching the filesystem.
    """
    decoded = base64.b64decode(img_str)
    buffer = io.BytesIO(decoded)
    return Image.open(buffer)
|
||||
|
||||
@routes.post("/upload/image")
async def upload_image(request):
    """Handle POST /upload/image by delegating the parsed form to image_upload."""
    form_data = await request.post()
    return image_upload(form_data)
|
||||
|
||||
@routes.post("/upload/image_stream")
async def upload_image_stream(request):
    """Save a base64-encoded image posted as form data.

    Expected form fields:
      img_str   -- base64-encoded image bytes
      file_name -- target file name; must end in .png or .jpg
      type      -- top-level directory name (relative to the working directory)
      subfolder -- optional subdirectory under `type` (created if missing)

    Returns a 400 response for a bad file name or a path that escapes the
    target directory, otherwise 200 with the saved path in the body.
    """
    # NOTE: renamed from the duplicate `upload_image` -- the previous handler
    # already uses that name, and the second definition shadowed the first
    # in the enclosing scope.
    post = await request.post()

    # Decode the base64 byte stream into a PIL image.
    image = str2pil(post.get("img_str"))

    file_name = post.get("file_name")
    # Guard against a missing field before calling .endswith().
    if not file_name or not file_name.endswith((".png", ".jpg")):
        return web.Response(text="The file name must end in .jpg or .png.", status=400)

    # Security: file_name and subfolder come from the client. Reject any
    # path components in file_name and any subfolder that resolves outside
    # the base directory (e.g. "../../etc").
    if os.path.basename(file_name) != file_name:
        return web.Response(text="The file name must end in .jpg or .png.", status=400)

    base_dir = os.path.abspath(os.path.join(os.getcwd(), post.get("type")))
    subfolder = post.get("subfolder", "")
    if subfolder == "":
        UPLOAD_PATH = os.path.join(base_dir, file_name)
    else:
        sub_dir_path = os.path.abspath(os.path.join(base_dir, subfolder))
        if os.path.commonpath([sub_dir_path, base_dir]) != base_dir:
            return web.Response(text="The file name must end in .jpg or .png.", status=400)
        os.makedirs(sub_dir_path, exist_ok=True)
        UPLOAD_PATH = os.path.join(sub_dir_path, file_name)

    image.save(UPLOAD_PATH)

    return web.Response(status=200, text=f"Success Save Image PATH :{UPLOAD_PATH}")
|
||||
|
||||
|
||||
@routes.post("/upload/mask")
|
||||
async def upload_mask(request):
|
||||
|
Loading…
Reference in New Issue
Block a user