
Parameter | Value
---|---
Prompt | masterpiece, best quality, 32k, high resolution, absurdres, lyytoaoitorist, grey eyes, skirt, short sleeves, white outline, outline, v-shaped eyebrows, jewelry, jacket, portrait, plaid
Negative prompt | bad quality, worst quality, worst detail, sketch, censor
Sampler | euler_ancestral
Seed | 1234567890
Steps | 30
CFG Scale | 7
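
These flat parameters summarize the full ComfyUI graph listed below: node 11 is the base txt2img pass and node 24 is the hires pass, and both share the same sampler, seed, steps, and CFG scale, differing only in denoise. As a minimal sketch of how the table maps onto the base sampler (ComfyUI API format, values copied from node 11 below; the variable name is mine):

```python
# How the flat parameters above map onto the base KSampler (node 11
# below), in ComfyUI API format. ["node_id", output_index] pairs are
# links to other nodes in the graph.
base_pass = {
    "class_type": "KSampler",
    "inputs": {
        "sampler_name": "euler_ancestral",  # Sampler
        "scheduler": "normal",
        "seed": 1234567890,                 # Seed
        "steps": 30,                        # Steps
        "cfg": 7,                           # CFG Scale
        "denoise": 1,                       # 1.0 = full txt2img pass
        "model": ["resource-stack-1", 0],   # checkpoint + LoRA stack
        "positive": ["6", 0],               # Prompt (node 6)
        "negative": ["7", 0],               # Negative prompt (node 7)
        "latent_image": ["26", 0],          # 832x1216 empty latent (node 26)
    },
}
```
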
11 | {"class_type" => "KSampler", "inputs" => {"sampler_name" => "euler_ancestral", "scheduler" => "normal", "seed" => 1234567890, "steps" => 30, "cfg" => 7, "denoise" => 1, "model" => ["resource-stack-1", 0], "positive" => ["6", 0], "negative" => ["7", 0], "latent_image" => ["26", 0]}, "_meta" => {"title" => "KSampler"}} |
12 | {"class_type" => "SaveImage", "inputs" => {"filename_prefix" => "ComfyUI", "images" => ["25", 0]}, "_meta" => {"title" => "Save Image"}} |
19 | {"class_type" => "ImageUpscaleWithModel", "inputs" => {"upscale_model" => ["20", 0], "image" => ["27", 0]}, "_meta" => {"title" => "Upscale Image (using Model)"}} |
20 | {"class_type" => "UpscaleModelLoader", "inputs" => {"model_name" => "urn:air:other:upscaler:civitai:147759@164821"}, "_meta" => {"title" => "Load Upscale Model"}} |
21 | {"class_type" => "VAEEncode", "inputs" => {"pixels" => ["23", 0], "vae" => ["resource-stack", 2]}, "_meta" => {"title" => "VAE Encode"}} |
23 | {"class_type" => "ImageScale", "inputs" => {"upscale_method" => "nearest-exact", "crop" => "disabled", "width" => 1280, "height" => 1856, "image" => ["19", 0]}, "_meta" => {"title" => "Upscale Image"}} |
24 | {"class_type" => "KSampler", "inputs" => {"sampler_name" => "euler_ancestral", "scheduler" => "normal", "seed" => 1234567890, "steps" => 30, "cfg" => 7, "denoise" => 0.75, "model" => ["resource-stack-1", 0], "positive" => ["6", 0], "negative" => ["7", 0], "latent_image" => ["21", 0]}, "_meta" => {"title" => "KSampler"}} |
25 | {"class_type" => "VAEDecode", "inputs" => {"samples" => ["24", 0], "vae" => ["resource-stack", 2]}, "_meta" => {"title" => "VAE Decode"}} |
26 | {"class_type" => "EmptyLatentImage", "inputs" => {"width" => 832, "height" => 1216, "batch_size" => 1}, "_meta" => {"title" => "Empty Latent Image"}} |
27 | {"class_type" => "VAEDecode", "inputs" => {"samples" => ["11", 0], "vae" => ["resource-stack", 2]}, "_meta" => {"title" => "VAE Decode"}} |
28 | {"class_type" => "SaveImage", "inputs" => {"filename_prefix" => "ComfyUI", "images" => ["27", 0]}, "_meta" => {"title" => "Save Image"}} |
6 | {"class_type" => "smZ CLIPTextEncode", "inputs" => {"text" => "masterpiece, best quality, 32k, high resolution, absurdres, lyytoaoitorist, grey eyes, skirt, short sleeves, white outline, outline, v-shaped eyebrows, jewelry, jacket, portrait, plaid", "parser" => "A1111", "text_g" => "", "text_l" => "", "ascore" => 2.5, "width" => 0, "height" => 0, "crop_w" => 0, "crop_h" => 0, "target_width" => 0, "target_height" => 0, "smZ_steps" => 1, "mean_normalization" => true, "multi_conditioning" => true, "use_old_emphasis_implementation" => false, "with_SDXL" => false, "clip" => ["resource-stack-1", 1]}, "_meta" => {"title" => "Positive"}} |
7 | {"class_type" => "smZ CLIPTextEncode", "inputs" => {"text" => "bad quality, worst quality, worst detail, sketch, censor,", "parser" => "A1111", "text_g" => "", "text_l" => "", "ascore" => 2.5, "width" => 0, "height" => 0, "crop_w" => 0, "crop_h" => 0, "target_width" => 0, "target_height" => 0, "smZ_steps" => 1, "mean_normalization" => true, "multi_conditioning" => true, "use_old_emphasis_implementation" => false, "with_SDXL" => false, "clip" => ["resource-stack-1", 1]}, "_meta" => {"title" => "Negative"}} |
Extra | {"airs" => ["urn:air:other:upscaler:civitai:147759@164821", "urn:air:sdxl:checkpoint:civitai:827184@1761560", "urn:air:sdxl:lora:civitai:1912288@2164424"]} |
Resource Stack | {"class_type" => "CheckpointLoaderSimple", "inputs" => {"ckpt_name" => "urn:air:sdxl:checkpoint:civitai:827184@1761560"}} |
Resource Stack 1 | {"class_type" => "LoraLoader", "inputs" => {"lora_name" => "urn:air:sdxl:lora:civitai:1912288@2164424", "strength_model" => 1, "strength_clip" => 1, "model" => ["resource-stack", 0], "clip" => ["resource-stack", 1]}} |
Resources | [{"modelVersionId" => 1761560, "strength" => 1}, {"modelVersionId" => 2164424, "strength" => 1}] |
Workflow | txt2img-hires |
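
Read as a graph, this is a standard two-pass hires workflow: node 26 creates the 832x1216 empty latent, node 11 samples it at denoise 1, node 27 decodes the base image, and node 28 saves it. Node 19 then upscales that image with the model loaded by node 20, node 23 rescales the result to 1280x1856, node 21 encodes it back to latent space, node 24 resamples it at denoise 0.75 with the same seed and prompts, and nodes 25 and 12 decode and save the final image. Below is a minimal sketch of queuing such a graph against a local ComfyUI server; it assumes the default port 8188, a hypothetical workflow.json file holding the node listing above, and that the urn:air:... names have already been resolved to local model filenames, which stock ComfyUI loaders will not do on their own.

```python
import json
import urllib.request

# Load the API-format graph (the node listing above, saved as JSON).
# "workflow.json" is a placeholder path, not part of the original metadata.
with open("workflow.json", "r", encoding="utf-8") as f:
    workflow = json.load(f)

# ComfyUI queues a job via POST {"prompt": <graph>} to its /prompt endpoint.
req = urllib.request.Request(
    "http://127.0.0.1:8188/prompt",
    data=json.dumps({"prompt": workflow}).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(resp.read().decode())  # queue confirmation, including a prompt_id
```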