Update app.py
app.py
CHANGED
@@ -6,6 +6,8 @@ from huggingface_hub import hf_hub_download
 import glob
 from datetime import datetime
 
+is_shared_ui = True if "fffiloni/X-Portrait" in os.environ['SPACE_ID'] else False
+
 # Ensure 'checkpoint' directory exists
 os.makedirs("checkpoint", exist_ok=True)
 
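Note on the added `is_shared_ui` flag: `os.environ['SPACE_ID']` raises a `KeyError` when the variable is unset (for example, when running app.py locally rather than on a Hugging Face Space). A minimal, hedged sketch of a more defensive variant — illustrative only, not part of this commit:

```python
import os

# Defensive variant (assumption, not what the commit ships):
# os.environ.get returns "" instead of raising KeyError when SPACE_ID is unset,
# so the flag simply evaluates to False outside a Hugging Face Space.
is_shared_ui = "fffiloni/X-Portrait" in os.environ.get("SPACE_ID", "")
```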
@@ -15,7 +17,31 @@ hf_hub_download(
     local_dir="checkpoint"
 )
 
+def trim_video(video_path, output_dir="trimmed_videos", max_duration=2):
+    # Create output directory if it does not exist
+    os.makedirs(output_dir, exist_ok=True)
+
+    # Generate a timestamp for the output filename
+    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+    output_path = os.path.join(output_dir, f"trimmed_video_{timestamp}.mp4")
+
+    # Load the video
+    with VideoFileClip(video_path) as video:
+        # Check the duration of the video
+        if video.duration > max_duration:
+            # Trim the video to the first max_duration seconds
+            trimmed_video = video.subclip(0, max_duration)
+            # Write the trimmed video to a file
+            trimmed_video.write_videofile(output_path, codec="libx264")
+            return output_path
+        else:
+            # If the video is within the duration, return the original path
+            return video_path
+
 def extract_frames_with_labels(video_path, base_output_dir="frames"):
+    if is_shared_ui:
+        video_path = trim_video(video_path)
+        print("Path to the (trimmed) driving video:", video_path)
     # Generate a timestamped folder name
     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
     output_dir = os.path.join(base_output_dir, f"frames_{timestamp}")
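The new `trim_video` helper uses moviepy's `VideoFileClip`; the import does not appear in this hunk, so it is presumably already present elsewhere in app.py. A minimal, hedged usage sketch assuming moviepy 1.x (class imported from `moviepy.editor`, trimming via `subclip`; moviepy 2.x changed the import path and method names). The file paths here are hypothetical:

```python
from moviepy.editor import VideoFileClip  # assumed moviepy 1.x import path

# Hypothetical local check mirroring what trim_video does for the shared UI:
with VideoFileClip("driving.mp4") as clip:  # hypothetical input file
    if clip.duration > 2:
        # Keep only the first 2 seconds and encode with H.264, as app.py does.
        clip.subclip(0, 2).write_videofile("driving_trimmed.mp4", codec="libx264")
```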
@@ -106,6 +132,7 @@ div#frames-gallery{
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown("# X-Portrait: Expressive Portrait Animation with Hierarchical Motion Attention")
+        gr.Markdown("On this shared UI, driving video input will be trimmed to 2 seconds max. Duplicate this space for more controls.")
         gr.HTML("""
         <div style="display:flex;column-gap:4px;">
             <a href='https://github.com/bytedance/X-Portrait'>
@@ -123,8 +150,8 @@ with gr.Blocks(css=css) as demo:
             driving_video = gr.Video(label="Driving Video")
             with gr.Group():
                 with gr.Row():
-                    best_frame = gr.Number(value=36, label="Best Frame")
-                    out_frames = gr.Number(value=-1, label="Out Frames")
+                    best_frame = gr.Number(value=36, label="Best Frame", info="specify the frame index in the driving video where the head pose best matches the source image (note: precision of best_frame index might affect the final quality)")
+                    out_frames = gr.Number(value=-1, label="Out Frames", info="number of generation frames")
             with gr.Accordion("Driving video Frames"):
                 driving_frames = gr.Gallery(show_label=True, columns=6, height=512, elem_id="frames-gallery")
             with gr.Row():
@@ -143,6 +170,16 @@ with gr.Blocks(css=css) as demo:
             ],
             inputs=[source_image, driving_video]
         )
+        gr.HTML("""
+        <div style="display:flex;column-gap:4px;">
+            <a href="https://huggingface.co/spaces/fffiloni/X-Portrait?duplicate=true">
+                <img src="https://huggingface.co/datasets/huggingface/badges/resolve/main/duplicate-this-space-xl.svg" alt="Duplicate this Space">
+            </a>
+            <a href="https://huggingface.co/fffiloni">
+                <img src="https://huggingface.co/datasets/huggingface/badges/resolve/main/follow-me-on-HF-xl-dark.svg" alt="Follow me on HF">
+            </a>
+        </div>
+        """)
 
 
     driving_video.upload(