add some more prov imgs
- app.py +10 -8
- demo/relighting_gen.py +9 -12
- demo/render_hints.py +10 -1
- examples/provisional_img/3d-animation-character-minimal-art-toy.png +3 -0
- examples/provisional_img/a-decorated-plaster-round-plate-with-blue-fine-silk-ribbon-around-it-0.png +3 -0
- examples/provisional_img/a-decorated-plaster-round-plate-with-blue-fine-silk-ribbon-around-it-1.png +3 -0
- examples/provisional_img/a-large-colorful-candle-high-quality-product-photo.png +3 -0
- examples/provisional_img/an-elephant-sculpted-from-plaster-and-the-elephant-nose-is-decorated-with-the-golden-texture.png +3 -0
- examples/provisional_img/girraffe_turtle.jpeg +0 -0
- examples/provisional_img/gorgeous-ornate-fountain-made-of-marble.png +3 -0
- examples/provisional_img/leather-glove-0.png +3 -0
- examples/provisional_img/machine-dragon-robot-in-platinum-0.png +3 -0
- examples/provisional_img/machine-dragon-robot-in-platinum-1.png +3 -0
- examples/provisional_img/machine-dragon-robot-in-platinum-2.png +3 -0
- examples/provisional_img/pottery.png +3 -0
- examples/provisional_img/rusty-copper-toy-frog-with-spatially-varying-materials-some-parts-are-shinning-other-parts-are-rough.png +3 -0
- examples/provisional_img/rusty-phoenix.png +3 -0
- examples/provisional_img/starcraft-2-marine-machine-gun-0.png +3 -0
- examples/provisional_img/steampunk-space-tank-with-delicate-details-0.png +3 -0
- examples/provisional_img/steampunk-space-tank-with-delicate-details-1.png +3 -0
- examples/provisional_img/stone-griffin.png +3 -0
app.py
CHANGED
@@ -13,7 +13,7 @@ from demo.rm_bg import rm_bg
 
 with gr.Blocks(title="DiLightNet Demo") as demo:
     gr.Markdown("""# DiLightNet: Fine-grained Lighting Control for Diffusion-based Image Generation
-    ## A demo for generating images under point/environmantal lighting using DiLightNet. For full usage (video generation & arbitary lighting condition), please refer to our [GitHub repository](https://github.com/iamNCJ/DiLightNet)""")
+    ## A demo for generating images under point/environmantal lighting using DiLightNet. For full usage (video generation & arbitary lighting condition & depth-conditioned generation) and more examples, please refer to our [GitHub repository](https://github.com/iamNCJ/DiLightNet)""")
 
     with gr.Row():
         # 1. Reference Image Input / Generation
@@ -34,7 +34,7 @@ with gr.Blocks(title="DiLightNet Demo") as demo:
         gr.Examples(
             examples=[os.path.join("examples/provisional_img", i) for i in os.listdir("examples/provisional_img")],
             inputs=[input_image],
-            examples_per_page
+            examples_per_page=8,
         )
 
         # 2. Background Removal
@@ -81,7 +81,7 @@ with gr.Blocks(title="DiLightNet Demo") as demo:
            env_examples = gr.Examples(
                examples=[[os.path.join("examples/env_map_preview", i), os.path.join("examples/env_map", i).replace("png", "exr")] for i in os.listdir("examples/env_map_preview")],
                inputs=[env_map_preview, env_map_path],
-               examples_per_page
+               examples_per_page=20,
            )
            render_btn_env = gr.Button(value="Render Hints")
 
@@ -164,13 +164,15 @@ with gr.Blocks(title="DiLightNet Demo") as demo:
                        seed=int(relighting_seed),
                        cfg=relighting_cfg
        )
-       relit_img = imageio.v3.imread(res_folder_path + '/
+       relit_img = imageio.v3.imread(res_folder_path + '/relighting00_0.png')
        if do_env_inpainting:
            bg = imageio.v3.imread(res_folder_path + f'/bg00.png') / 255.
-
-
-
-
+       else:
+           bg = np.zeros_like(relit_img)
+       relit_img = relit_img / 255.
+       mask_for_bg = imageio.v3.imread(res_folder_path + '/hint00_diffuse.png')[..., -1:] / 255.
+       relit_img = relit_img * mask_for_bg + bg * (1. - mask_for_bg)
+       relit_img = (relit_img * 255).clip(0, 255).astype(np.uint8)
        return relit_img
 
    relighting_generate_btn.click(fn=gen_relighting_image,
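In short, the app.py change fills in the output filename for the relit image and composites the relit foreground over either the inpainted environment background or black, using the alpha channel of the diffuse hint as the foreground mask. A minimal self-contained sketch of that compositing step, assuming the file names used in the diff (the helper name itself is hypothetical):

```python
import imageio.v3 as iio
import numpy as np

def composite_relit_foreground(res_folder_path: str, do_env_inpainting: bool) -> np.ndarray:
    """Blend the relit foreground over the optional environment background."""
    relit_img = iio.imread(f"{res_folder_path}/relighting00_0.png")
    if do_env_inpainting:
        # Inpainted background rendered from the environment map.
        bg = iio.imread(f"{res_folder_path}/bg00.png") / 255.
    else:
        # No environment map: composite the object over black.
        bg = np.zeros(relit_img.shape, dtype=np.float64)
    relit_img = relit_img / 255.
    # The diffuse hint's alpha channel marks which pixels belong to the object.
    mask = iio.imread(f"{res_folder_path}/hint00_diffuse.png")[..., -1:] / 255.
    composed = relit_img * mask + bg * (1. - mask)
    return (composed * 255).clip(0, 255).astype(np.uint8)
```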
demo/relighting_gen.py
CHANGED
@@ -1,8 +1,7 @@
 import imageio
 import numpy as np
-import spaces
 import torch
-from diffusers import UniPCMultistepScheduler, StableDiffusionControlNetPipeline, StableDiffusionInpaintPipeline
+from diffusers import UniPCMultistepScheduler, StableDiffusionControlNetPipeline, StableDiffusionInpaintPipeline
 from diffusers.utils import get_class_from_dynamic_module
 
 from tqdm import tqdm
@@ -22,10 +21,8 @@ controlnet = NeuralTextureControlNetModel.from_pretrained(
     "dilightnet/DiLightNet-submissions-300k",
     torch_dtype=dtype,
 )
-vae = ConsistencyDecoderVAE.from_pretrained("openai/consistency-decoder", torch_dtype=dtype)
 pipe = StableDiffusionControlNetPipeline.from_pretrained(
     "stabilityai/stable-diffusion-2-1",
-    vae=vae,
     controlnet=controlnet,
     torch_dtype=dtype
 ).to(device)
@@ -40,7 +37,7 @@ inpainting_pipe.set_progress_bar_config(disable=True)
 
 
 @spaces.GPU
-def relighting_gen(masked_ref_img, mask, cond_path, frames, prompt, steps, seed, cfg, inpaint=False):
+def relighting_gen(masked_ref_img, mask, cond_path, frames, prompt, steps, seed, cfg, num_imgs_per_prompt=1, inpaint=False):
     mask = mask[..., :1] / 255.
     for i in tqdm(range(frames)):
         source_image = masked_ref_img[..., :3] / 255.
@@ -55,13 +52,13 @@ def relighting_gen(masked_ref_img, mask, cond_path, frames, prompt, steps, seed,
         images.append(image)
 
         hint = np.concatenate(images, axis=2).astype(np.float32)[None]
-
         hint = torch.from_numpy(hint).to(dtype).permute(0, 3, 1, 2).to(device)
-        generator = torch.manual_seed(seed)
-
-            prompt, num_inference_steps=steps, generator=generator, image=hint, num_images_per_prompt=
-        ).images
+        generator = torch.Generator(device=device).manual_seed(seed)
+        images = pipe(
+            prompt, num_inference_steps=steps, generator=generator, image=hint, num_images_per_prompt=num_imgs_per_prompt, guidance_scale=cfg, output_type='np',
+        ).images  # [N, H, W, C]
         if inpaint:
             mask_image = (1. - mask)[None]
-
-
+            images = inpainting_pipe(prompt=prompt, image=images, mask_image=mask_image, generator=generator, output_type='np', cfg=3.0, strength=1.0).images
+        for idx in range(num_imgs_per_prompt):
+            imageio.imwrite(f'{cond_path}/relighting{i:02d}_{idx}.png', (images[idx] * 255).clip(0, 255).astype(np.uint8))
demo/render_hints.py
CHANGED
@@ -27,6 +27,8 @@ def render_hint_images(model_path, fov, pls, power=500., geo_smooth=True, output
     bpy.context.preferences.addons["cycles"].preferences.get_devices()
     bpy.context.scene.cycles.device = 'GPU'
     bpy.context.preferences.addons['cycles'].preferences.compute_device_type = 'CUDA'
+    bpy.context.scene.render.threads = 8
+    bpy.context.scene.render.threads_mode = 'FIXED'
 
     # Enable the alpha channel for GT mask
     bpy.context.scene.render.film_transparent = True
@@ -85,7 +87,7 @@ def render_hint_images(model_path, fov, pls, power=500., geo_smooth=True, output
     return output_folder
 
 
-def render_bg_images(fov, pls, output_folder: Optional[str] = None, env_map: Optional[str] = None, env_start_azi=0., resolution=512):
+def render_bg_images(fov, pls, output_folder: Optional[str] = None, env_map: Optional[str] = None, env_start_azi=0., resolution=512, use_gpu=False):
     import bpy
     import numpy as np
 
@@ -105,6 +107,13 @@ def render_bg_images(fov, pls, output_folder: Optional[str] = None, env_map: Opt
     bpy.context.scene.render.film_transparent = False
     bpy.context.scene.render.image_settings.color_mode = 'RGB'
 
+    if use_gpu:
+        bpy.context.preferences.addons["cycles"].preferences.get_devices()
+        bpy.context.scene.cycles.device = 'GPU'
+        bpy.context.preferences.addons['cycles'].preferences.compute_device_type = 'CUDA'
+        bpy.context.scene.render.threads = 8
+        bpy.context.scene.render.threads_mode = 'FIXED'
+
     def render_env_bg(output_path):
         bpy.context.scene.view_layers["ViewLayer"].material_override = None
         bpy.context.scene.render.image_settings.file_format = 'PNG'
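The render_hints.py change pins the Cycles thread count for hint rendering and makes the same GPU setup opt-in for the background pass via a new use_gpu flag. Those Blender settings, gathered into one hypothetical helper for reference (the demo inlines the statements rather than wrapping them):

```python
def configure_cycles_gpu(threads: int = 8) -> None:
    """Route Cycles to CUDA GPUs and pin the render thread count.

    Must run inside Blender's bundled Python, where the bpy module is available.
    """
    import bpy

    # Refresh the device list, then select CUDA GPU rendering.
    bpy.context.preferences.addons["cycles"].preferences.get_devices()
    bpy.context.scene.cycles.device = 'GPU'
    bpy.context.preferences.addons["cycles"].preferences.compute_device_type = 'CUDA'
    # Fix the CPU worker-thread count so renders don't oversubscribe the host.
    bpy.context.scene.render.threads = threads
    bpy.context.scene.render.threads_mode = 'FIXED'
```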
examples/provisional_img/ — 18 new files
ADDED
All of the new example images listed in the file summary above were added; the PNG files are stored as Git LFS pointers, while girraffe_turtle.jpeg is committed directly.