Commit · 2ed47da
Parent(s): 46fcec6
work in progress on the AI Stories Factory
src/core/exporters/clapWithStoryboardsToVideoFile.mts
ADDED
@@ -0,0 +1,106 @@
import { join } from "node:path"

import { ClapProject } from "../clap/types.mts"
import { concatenateVideosWithAudio } from "../ffmpeg/concatenateVideosWithAudio.mts"
import { writeBase64ToFile } from "../files/writeBase64ToFile.mts"
import { getRandomDirectory } from "../files/getRandomDirectory.mts"
import { addTextToVideo } from "../ffmpeg/addTextToVideo.mts"
import { startOfSegment1IsWithinSegment2 } from "../utils/startOfSegment1IsWithinSegment2.mts"
import { deleteFile } from "../files/deleteFile.mts"
import { extractBase64 } from "../base64/extractBase64.mts"
import { ClapSegment } from "../clap/types.mts"
import { imageToVideoBase64 } from "../ffmpeg/imageToVideoBase64.mts"

export async function clapWithStoryboardsToVideoFile({
  clap,
  storyboardSegments = [],
  outputDir = "",
}: {
  clap: ClapProject
  storyboardSegments: ClapSegment[]
  outputDir?: string
}): Promise<{
  outputDir: string
  videoFilePaths: string[]
}> {

  outputDir = outputDir || (await getRandomDirectory())

  const videoFilePaths: string[] = []

  for (const segment of storyboardSegments) {

    let storyboardSegmentVideoFilePath = join(outputDir, `tmp_asset_${segment.id}_as_video.mp4`)

    await imageToVideoBase64({
      inputImageInBase64: segment.assetUrl,
      outputFilePath: storyboardSegmentVideoFilePath,
      outputDir,
      clearOutputDirAtTheEnd: false, // <- must stay false or else we lose everything!
      outputVideoFormat: "mp4",
    })

    const interfaceSegments = clap.segments.filter(s =>
      s.assetUrl.startsWith("data:text/") &&
      s.category === "interface" &&
      startOfSegment1IsWithinSegment2(s, segment)
    )
    const interfaceSegment = interfaceSegments.at(0)
    if (interfaceSegment) {
      // here we are free to use mp4, since this is an internal intermediary format
      const videoSegmentWithOverlayFilePath = join(outputDir, `tmp_asset_${segment.id}_with_interface.mp4`)

      await addTextToVideo({
        inputVideoPath: storyboardSegmentVideoFilePath,
        outputVideoPath: videoSegmentWithOverlayFilePath,
        text: atob(extractBase64(interfaceSegment.assetUrl).data),
        width: clap.meta.width,
        height: clap.meta.height,
      })

      // we overwrite
      await deleteFile(storyboardSegmentVideoFilePath)
      storyboardSegmentVideoFilePath = videoSegmentWithOverlayFilePath
    }

    const dialogueSegments = clap.segments.filter(s =>
      s.assetUrl.startsWith("data:audio/") &&
      s.category === "dialogue" &&
      startOfSegment1IsWithinSegment2(s, segment)
    )
    const dialogueSegment = dialogueSegments.at(0)
    if (dialogueSegment) {
      // take the file extension from the dialogue asset itself
      const base64Info = extractBase64(dialogueSegment.assetUrl)

      const dialogueSegmentFilePath = await writeBase64ToFile(
        dialogueSegment.assetUrl,
        join(outputDir, `tmp_asset_${segment.id}_dialogue.${base64Info.extension}`)
      )

      const finalFilePathOfVideoWithSound = await concatenateVideosWithAudio({
        output: join(outputDir, `${segment.id}_video_with_audio.mp4`),
        audioFilePath: dialogueSegmentFilePath,
        videoFilePaths: [storyboardSegmentVideoFilePath],
        // videos are silent, so they can stay at 0
        videoTracksVolume: 0.0,
        audioTrackVolume: 1.0,
      })

      // we delete the temporary dialogue file
      await deleteFile(dialogueSegmentFilePath)

      // we overwrite the video segment
      await deleteFile(storyboardSegmentVideoFilePath)

      storyboardSegmentVideoFilePath = finalFilePathOfVideoWithSound
    }

    videoFilePaths.push(storyboardSegmentVideoFilePath)
  }

  return {
    outputDir,
    videoFilePaths,
  }
}
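For reference, a minimal, hypothetical usage sketch for this new exporter. Only the call signature and return shape come from the file above; the `loadClapProject` helper is an illustrative assumption, and the storyboard filter mirrors the one added to src/main.mts.

import { clapWithStoryboardsToVideoFile } from "./core/exporters/clapWithStoryboardsToVideoFile.mts"
import { ClapProject } from "./core/clap/types.mts"

// hypothetical helper standing in for however the ClapProject is obtained elsewhere
declare function loadClapProject(path: string): Promise<ClapProject>

async function exampleStoryboardExport() {
  const clap = await loadClapProject("./my-story.clap")

  // keep only storyboard segments that actually carry an image payload,
  // mirroring the filter used in the updated src/main.mts
  const storyboardSegments = clap.segments.filter(s =>
    s.category === "storyboard" && s.assetUrl.startsWith("data:image/")
  )

  // outputDir is optional: when omitted, a random temporary directory is created
  const { outputDir, videoFilePaths } = await clapWithStoryboardsToVideoFile({
    clap,
    storyboardSegments,
  })

  console.log(`wrote ${videoFilePaths.length} segment videos to ${outputDir}`)
}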
src/core/exporters/clapWithVideosToVideoFile.mts
ADDED
@@ -0,0 +1,103 @@
import { join } from "node:path"

import { ClapProject } from "../clap/types.mts"
import { concatenateVideosWithAudio } from "../ffmpeg/concatenateVideosWithAudio.mts"
import { writeBase64ToFile } from "../files/writeBase64ToFile.mts"
import { getRandomDirectory } from "../files/getRandomDirectory.mts"
import { addTextToVideo } from "../ffmpeg/addTextToVideo.mts"
import { startOfSegment1IsWithinSegment2 } from "../utils/startOfSegment1IsWithinSegment2.mts"
import { deleteFile } from "../files/deleteFile.mts"
import { extractBase64 } from "../base64/extractBase64.mts"
import { ClapSegment } from "../clap/types.mts"

export async function clapWithVideosToVideoFile({
  clap,
  videoSegments = [],
  outputDir = "",
}: {
  clap: ClapProject
  videoSegments: ClapSegment[]
  outputDir?: string
}): Promise<{
  outputDir: string
  videoFilePaths: string[]
}> {

  outputDir = outputDir || (await getRandomDirectory())

  const videoFilePaths: string[] = []

  for (const segment of videoSegments) {

    const base64Info = extractBase64(segment.assetUrl)

    // we write it to the disk *unconverted* (it might be a mp4, a webm or something else)
    let videoSegmentFilePath = await writeBase64ToFile(
      segment.assetUrl,
      join(outputDir, `tmp_asset_${segment.id}.${base64Info.extension}`)
    )

    const interfaceSegments = clap.segments.filter(s =>
      s.assetUrl.startsWith("data:text/") &&
      s.category === "interface" &&
      startOfSegment1IsWithinSegment2(s, segment)
    )
    const interfaceSegment = interfaceSegments.at(0)
    if (interfaceSegment) {
      // here we are free to use mp4, since this is an internal intermediary format
      const videoSegmentWithOverlayFilePath = join(outputDir, `tmp_asset_${segment.id}_with_interface.mp4`)

      await addTextToVideo({
        inputVideoPath: videoSegmentFilePath,
        outputVideoPath: videoSegmentWithOverlayFilePath,
        text: atob(extractBase64(interfaceSegment.assetUrl).data),
        width: clap.meta.width,
        height: clap.meta.height,
      })

      // we overwrite
      await deleteFile(videoSegmentFilePath)
      videoSegmentFilePath = videoSegmentWithOverlayFilePath
    }

    const dialogueSegments = clap.segments.filter(s =>
      s.assetUrl.startsWith("data:audio/") &&
      s.category === "dialogue" &&
      startOfSegment1IsWithinSegment2(s, segment)
    )
    const dialogueSegment = dialogueSegments.at(0)
    if (dialogueSegment) {
      // take the file extension from the dialogue asset itself
      const dialogueBase64Info = extractBase64(dialogueSegment.assetUrl)

      const dialogueSegmentFilePath = await writeBase64ToFile(
        dialogueSegment.assetUrl,
        join(outputDir, `tmp_asset_${segment.id}_dialogue.${dialogueBase64Info.extension}`)
      )

      const finalFilePathOfVideoWithSound = await concatenateVideosWithAudio({
        output: join(outputDir, `${segment.id}_video_with_audio.mp4`),
        audioFilePath: dialogueSegmentFilePath,
        videoFilePaths: [videoSegmentFilePath],
        // videos are silent, so they can stay at 0
        videoTracksVolume: 0.0,
        audioTrackVolume: 1.0,
      })

      // we delete the temporary dialogue file
      await deleteFile(dialogueSegmentFilePath)

      // we overwrite the video segment
      await deleteFile(videoSegmentFilePath)

      videoSegmentFilePath = finalFilePathOfVideoWithSound
    }

    videoFilePaths.push(videoSegmentFilePath)
  }

  return {
    outputDir,
    videoFilePaths,
  }
}
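Both exporters recover the on-screen "interface" text with atob(extractBase64(interfaceSegment.assetUrl).data) before passing it to addTextToVideo(). Below is a tiny standalone sketch of that decoding step; the sample data URI is made up, and the extractBase64() call is inlined so the example stays self-contained.

// build a fake "data:text/plain;base64,..." asset URL, as an interface segment might carry
const assetUrl = "data:text/plain;base64," + Buffer.from("Scene 1 - INT. LAB").toString("base64")

// inlined equivalent of extractBase64(assetUrl).data: keep only the base64 payload
const payload = assetUrl.substring(assetUrl.indexOf(",") + 1)

// atob() turns the payload back into the plain text that gets burned into the video
const overlayText = atob(payload)
console.log(overlayText) // "Scene 1 - INT. LAB"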
src/core/ffmpeg/createVideoFromFrames.mts
CHANGED
@@ -1,5 +1,4 @@
-import {
-import { writeFile, readFile } from "node:fs/promises"
+import { access, rm, mkdir, writeFile, readFile } from "node:fs/promises"
 import os from "node:os"
 import path from "node:path"
 
@@ -39,7 +38,7 @@ export async function createVideoFromFrames({
   asBase64?: boolean;
 }): Promise<string> {
   // Ensure the input directory exists
-  await
+  await access(inputFramesDirectory);
 
 
   // Construct the input frame pattern
@@ -48,7 +47,7 @@ export async function createVideoFromFrames({
 
   // Create a temporary working directory
   const tempDir = path.join(os.tmpdir(), uuidv4());
-  await
+  await mkdir(tempDir);
 
 
   let inputVideoToUseAsAudioFilePath = "";
@@ -65,7 +64,7 @@ export async function createVideoFromFrames({
   }
 
   if (debug) {
-    console.log("
+    console.log(" createVideoFromFrames(): inputVideoToUseAsAudioFilePath = ", inputVideoToUseAsAudioFilePath)
   }
 
 
@@ -73,7 +72,7 @@ export async function createVideoFromFrames({
   // Also, if provided, check that the audio source file exists
   if (inputVideoToUseAsAudioFilePath) {
     try {
-      await
+      await access(inputVideoToUseAsAudioFilePath)
       const info = await getMediaInfo(inputVideoToUseAsAudioFilePath)
       if (info.hasAudio) {
         canUseInputVideoForAudio = true
@@ -108,7 +107,7 @@ export async function createVideoFromFrames({
     .input(inputFramePattern)
     .inputFPS(framesPerSecond)
     .outputOptions([
-      // by default ffmpeg doesn't tell us why it fails to
+      // by default ffmpeg doesn't tell us why it fails to convert
      // so we need to force it to spit everything out
      "-loglevel", "debug",
 
@@ -165,7 +164,7 @@ export async function createVideoFromFrames({
       reject(new Error(`Error loading the video file: ${error}`));
     } finally {
       // Clean up temporary files
-      await
+      await rm(tempDir, { recursive: true });
     }
   });
 });
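The restored calls above follow a simple temp-workspace lifecycle: verify the input directory, create a per-run temporary directory, and always remove it afterwards. Here is a self-contained sketch of that pattern, assuming Node's fs/promises API; it is not how createVideoFromFrames() is actually factored, and randomUUID() stands in for the uuidv4() helper the real file uses.

import { access, mkdir, rm } from "node:fs/promises"
import { randomUUID } from "node:crypto"
import os from "node:os"
import path from "node:path"

async function withTempWorkspace<T>(
  inputFramesDirectory: string,
  work: (tempDir: string) => Promise<T>
): Promise<T> {
  // ensure the input directory exists (access() throws if it does not)
  await access(inputFramesDirectory)

  // create a temporary working directory unique to this run
  const tempDir = path.join(os.tmpdir(), randomUUID())
  await mkdir(tempDir)

  try {
    return await work(tempDir)
  } finally {
    // clean up temporary files even if the work failed
    await rm(tempDir, { recursive: true })
  }
}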
src/core/ffmpeg/imageToVideoBase64.mts
ADDED
@@ -0,0 +1,87 @@
import { rm, mkdir, writeFile, readFile } from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import ffmpeg from "fluent-ffmpeg";
import { getRandomDirectory } from "../files/getRandomDirectory.mts";

/**
 * Converts an image in Base64 format to a video encoded in Base64.
 *
 * @param inputImageInBase64 - The input image encoded in Base64.
 * @param outputVideoFormat - Optional. Format of the output video (default is "mp4").
 * @param outputVideoDurationInMs - Optional. Duration of the video in milliseconds (default is 1000ms).
 * @param codec - Optional. Codec used for video coding. Defaults differ based on `outputVideoFormat`.
 * @param width - Optional. Width of the output video.
 * @param height - Optional. Height of the output video.
 * @param fps - Optional. Frame rate of the output video.
 *
 * @returns - A promise that resolves to the video as a Base64 encoded string.
 */
export async function imageToVideoBase64({
  inputImageInBase64,
  outputFilePath,
  outputDir,
  clearOutputDirAtTheEnd = true,
  outputVideoFormat = "mp4",
  outputVideoDurationInMs = 1000,
  codec = outputVideoFormat === "webm" ? "libvpx-vp9" : "libx264",
  width = 1920,
  height = 1080,
  fps = 25
}: {
  inputImageInBase64: string
  outputFilePath?: string
  outputDir?: string
  clearOutputDirAtTheEnd?: boolean
  outputVideoFormat?: string
  outputVideoDurationInMs?: number
  codec?: string
  width?: number
  height?: number
  fps?: number
}): Promise<string> {

  outputDir = outputDir || (await getRandomDirectory())

  // Decode the Base64 image and write it to a temporary file.
  const base64Data = inputImageInBase64.substring(inputImageInBase64.indexOf(',') + 1);
  const buffer = Buffer.from(base64Data, 'base64');
  const inputImagePath = path.join(outputDir, 'inputImage.png');
  await writeFile(inputImagePath, buffer);

  // Set the path for the output video.
  outputFilePath = outputFilePath || path.join(outputDir, `output.${outputVideoFormat}`);
  const durationInSeconds = outputVideoDurationInMs / 1000;

  // Process the image to video conversion using ffmpeg.
  await new Promise<void>((resolve, reject) => {
    ffmpeg(inputImagePath)
      .outputOptions([
        `-t ${durationInSeconds}`,
        `-r ${fps}`,
        `-s ${width}x${height}`, // set frame size
        `-c:v ${codec}`, // set the codec
        '-tune stillimage',
        '-pix_fmt yuv420p'
      ])
      .on('end', () => resolve())
      .on('error', (err) => reject(err))
      .save(outputFilePath);
  });

  // Read the video file, encode it to Base64, and format it as a data URI.
  const videoBuffer = await readFile(outputFilePath);
  const videoBase64 = videoBuffer.toString('base64');
  const resultAsBase64DataUri = `data:video/${outputVideoFormat};base64,${videoBase64}`;

  // Attempt to clean up temporary work files.
  if (clearOutputDirAtTheEnd) {
    try {
      await rm(outputDir, { recursive: true, force: true });
    } catch (error) {
      console.error('Error removing temporary files:', error);
    }
  }

  return resultAsBase64DataUri;
}
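A hedged usage sketch for this new helper: it assumes ffmpeg is installed (fluent-ffmpeg only wraps the binary) and that the caller already holds a storyboard assetUrl as an image data URI. Parameter names and defaults come from the code above; the concrete values are illustrative.

import { imageToVideoBase64 } from "./core/ffmpeg/imageToVideoBase64.mts"

async function storyboardToClip(storyboardAssetUrl: string /* e.g. "data:image/png;base64,..." */) {
  // with no outputFilePath/outputDir given, the helper works in a random
  // temporary directory and removes it when done (clearOutputDirAtTheEnd defaults to true)
  const videoDataUri = await imageToVideoBase64({
    inputImageInBase64: storyboardAssetUrl,
    outputVideoFormat: "mp4",
    outputVideoDurationInMs: 2000, // a 2-second still clip
    width: 1024,
    height: 576,
    fps: 25,
  })

  // videoDataUri is a "data:video/mp4;base64,..." string
  return videoDataUri
}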
src/main.mts
CHANGED
@@ -7,13 +7,11 @@ import { writeBase64ToFile } from "./core/files/writeBase64ToFile.mts";
 import { concatenateVideos } from "./core/ffmpeg/concatenateVideos.mts"
 import { deleteFilesWithName } from "./core/files/deleteFileWithName.mts"
 import { getRandomDirectory } from "./core/files/getRandomDirectory.mts";
-import {
-import {
-import { deleteFile } from "./core/files/deleteFile.mts";
-import { extractBase64 } from "./core/base64/extractBase64.mts";
+import { clapWithVideosToVideoFile } from "./core/exporters/clapWithVideosToVideoFile.mts";
+import { clapWithStoryboardsToVideoFile } from "./core/exporters/clapWithStoryboardsToVideoFile.mts";
 
 /**
- * Generate a .mp4 video inside a
+ * Generate a .mp4 video inside a directory (if none is provided, it will be created in /tmp)
  *
  * @param clap
  * @returns file path to the final .mp4
@@ -38,75 +36,30 @@ export async function clapToTmpVideoFilePath({
   outputDir = outputDir || (await getRandomDirectory())
 
   const videoFilePaths: string[] = []
-  const videoSegments = clap.segments.filter(s => s.category === "video" && s.assetUrl.startsWith("data:video/"))
-
-  for (const segment of videoSegments) {
-
-    const base64Info = extractBase64(segment.assetUrl)
-
-    // we write it to the disk *unconverted* (it might be a mp4, a webm or something else)
-    let videoSegmentFilePath = await writeBase64ToFile(
-      segment.assetUrl,
-      join(outputDir, `tmp_asset_${segment.id}.${base64Info.extension}`)
-    )
 
-    const dialogueSegments = clap.segments.filter(s =>
-      s.assetUrl.startsWith("data:audio/") &&
-      s.category === "dialogue" &&
-      startOfSegment1IsWithinSegment2(s, segment)
-    )
-    const dialogueSegment = dialogueSegments.at(0)
-    if (dialogueSegment) {
-      extractBase64(dialogueSegment.assetUrl)
-      const base64Info = extractBase64(segment.assetUrl)
-
-      const dialogueSegmentFilePath = await writeBase64ToFile(
-        dialogueSegment.assetUrl,
-        join(outputDir, `tmp_asset_${segment.id}_dialogue.${base64Info.extension}`)
-      )
-
-      const finalFilePathOfVideoWithSound = await concatenateVideosWithAudio({
-        output: join(outputDir, `${segment.id}_video_with_audio.mp4`),
-        audioFilePath: dialogueSegmentFilePath,
-        videoFilePaths: [videoSegmentFilePath],
-        // videos are silent, so they can stay at 0
-        videoTracksVolume: 0.0,
-        audioTrackVolume: 1.0,
-      })
-
-      // we delete the temporary dialogue file
-      await deleteFile(dialogueSegmentFilePath)
-
-      // we overwrite the video segment
-      await deleteFile(videoSegmentFilePath)
-
-      videoSegmentFilePath = finalFilePathOfVideoWithSound
-    }
-
-    videoFilePaths.push(videoSegmentFilePath)
+  const videoSegments = clap.segments.filter(s => s.category === "video" && s.assetUrl.startsWith("data:video/"))
+  const storyboardSegments = clap.segments.filter(s => s.category === "storyboard" && s.assetUrl.startsWith("data:image/"))
+
+  const canUseVideos = videoSegments.length > 0
+  const canUseStoryboards = !canUseVideos && storyboardSegments.length > 0
+
+  // two possibilities:
+  // we can either generate from the video files, or from the storyboards
+  // the storyboard video will be a bit more boring, but at least it should process faster
+  if (canUseVideos) {
+    await clapWithVideosToVideoFile({
+      clap,
+      videoSegments,
+      outputDir,
+    })
+  } else if (canUseStoryboards) {
+    await clapWithStoryboardsToVideoFile({
+      clap,
+      storyboardSegments,
+      outputDir,
+    })
+  } else {
+    throw new Error(`the provided Clap doesn't contain any video or storyboard`)
   }
 
   const concatenatedVideosNoMusic = await concatenateVideos({
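Finally, a hypothetical end-to-end call. The diff only shows part of clapToTmpVideoFilePath(), so the example assumes it can be called with just the clap (outputDir being optional) and that it resolves to the path of the final .mp4, as its doc comment above states.

import { ClapProject } from "./core/clap/types.mts"
import { clapToTmpVideoFilePath } from "./main.mts"

async function renderStory(clap: ClapProject) {
  // with video segments present the video exporter runs; otherwise the
  // storyboard exporter; with neither, clapToTmpVideoFilePath() throws
  const finalVideoFilePath = await clapToTmpVideoFilePath({ clap })
  console.log("final .mp4:", finalVideoFilePath)
}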