Commit • e40bd21
Parent(s): f276512
try to fix the twitter embed
- src/app/embed/page.tsx +85 -0
- src/app/interface/top-header/index.tsx +7 -1
- src/app/interface/tube-layout/index.tsx +9 -0
- src/app/main.tsx +4 -1
- src/app/server/actions/ai-tube-hf/downloadClapProject.ts +7 -7
- src/app/server/actions/ai-tube-hf/getVideoRequestsFromChannel.ts +1 -1
- src/app/state/useStore.ts +1 -0
- src/app/views/public-video-embed-view/index.tsx +71 -0
- src/app/watch/page.tsx +12 -0
- src/{app/server/actions/utils → clap}/parseClap.ts +66 -7
- src/clap/serializeClap.ts +114 -0
- src/{types/clap.ts → clap/types.ts} +30 -3
- src/lib/extractBase64.ts +33 -0
- src/types/general.ts +2 -0
src/app/embed/page.tsx
ADDED
@@ -0,0 +1,85 @@
+
+import { Metadata, ResolvingMetadata } from "next"
+
+import { AppQueryProps } from "@/types/general"
+
+import { Main } from "../main"
+import { getVideo } from "../server/actions/ai-tube-hf/getVideo"
+
+
+// https://nextjs.org/docs/pages/building-your-application/optimizing/fonts
+export async function generateMetadata(
+  { params, searchParams: { v: videoId } }: AppQueryProps,
+  parent: ResolvingMetadata
+): Promise<Metadata> {
+  // read route params
+
+  const metadataBase = new URL('https://huggingface.co/spaces/jbilcke-hf/ai-tube')
+
+  try {
+    const video = await getVideo({ videoId, neverThrow: true })
+
+    if (!video) {
+      throw new Error("Video not found")
+    }
+
+    return {
+      title: `${video.label} - AiTube`,
+      metadataBase,
+      openGraph: {
+        // some cool stuff we could use here:
+        // 'video.tv_show' | 'video.other' | 'video.movie' | 'video.episode';
+        type: "video.other",
+        // url: "https://example.com",
+        title: video.label || "", // put the video title here
+        description: video.description || "", // put the video description here
+        siteName: "AiTube",
+        images: [
+          `https://huggingface.co/datasets/jbilcke-hf/ai-tube-index/resolve/main/videos/${video.id}.webp`
+        ],
+        videos: [
+          {
+            "url": video.assetUrl
+          }
+        ],
+        // images: ['/some-specific-page-image.jpg', ...previousImages],
+      },
+      twitter: {
+        card: "player",
+        site: "@flngr",
+        description: video.description || "",
+        images: `https://huggingface.co/datasets/jbilcke-hf/ai-tube-index/resolve/main/videos/${video.id}.webp`,
+        players: {
+          playerUrl: `https://jbilcke-hf-ai-tube.hf.space/embed?v=${video.id}`,
+          streamUrl: `https://huggingface.co/datasets/jbilcke-hf/ai-tube-index/resolve/main/videos/${video.id}.mp4`,
+          width: 1024,
+          height: 576
+        }
+      }
+    }
+  } catch (err) {
+    return {
+      title: "AiTube",
+      metadataBase,
+      openGraph: {
+        type: "website",
+        // url: "https://example.com",
+        title: "AiTube", // put the video title here
+        description: "", // put the video description here
+        siteName: "AiTube",
+
+        videos: [],
+        images: [],
+      },
+    }
+  }
+}
+
+
+export default async function Embed({ searchParams: { v: videoId } }: AppQueryProps) {
+  const publicVideo = await getVideo({ videoId, neverThrow: true })
+  // console.log("WatchPage: --> " + video?.id)
+  return (
+    <Main publicVideo={publicVideo} />
+  )
+}
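For reference, a rough sketch (not part of the commit) of the head tags a twitter "player" card configuration like the one above should translate into; the tag names follow the Twitter player card convention, and the exact markup Next.js emits may differ slightly. "<videoId>" stands in for the id resolved at request time.

// Illustrative only: approximate <head> output for the twitter "player" card above.
const expectedPlayerCardTags = `
<meta name="twitter:card" content="player" />
<meta name="twitter:site" content="@flngr" />
<meta name="twitter:player" content="https://jbilcke-hf-ai-tube.hf.space/embed?v=<videoId>" />
<meta name="twitter:player:stream" content="https://huggingface.co/datasets/jbilcke-hf/ai-tube-index/resolve/main/videos/<videoId>.mp4" />
<meta name="twitter:player:width" content="1024" />
<meta name="twitter:player:height" content="576" />
`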
src/app/interface/top-header/index.tsx
CHANGED
@@ -37,7 +37,9 @@ export function TopHeader() {
 
 
   useEffect(() => {
-    if (view === "public_video" || view === "public_channel" || view === "public_music_videos") {
+    if (view === "public_video_embed") {
+      setHeaderMode("hidden")
+    } else if (view === "public_video" || view === "public_channel" || view === "public_music_videos") {
       setHeaderMode("compact")
       setMenuMode("slider_hidden")
     } else {
@@ -53,6 +55,10 @@ export function TopHeader() {
     })
   }, [])
 
+  if (headerMode === "hidden") {
+    return null
+  }
+
   return (
     <div className={cn(
       `flex flex-col`,
src/app/interface/tube-layout/index.tsx
CHANGED
@@ -12,6 +12,15 @@ import { TopHeader } from "../top-header"
|
|
12 |
export function TubeLayout({ children }: { children?: ReactNode }) {
|
13 |
const headerMode = useStore(s => s.headerMode)
|
14 |
const view = useStore(s => s.view)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
15 |
return (
|
16 |
<div className={cn(
|
17 |
`dark flex flex-row h-screen w-screen inset-0 overflow-hidden`,
|
|
|
12 |
export function TubeLayout({ children }: { children?: ReactNode }) {
|
13 |
const headerMode = useStore(s => s.headerMode)
|
14 |
const view = useStore(s => s.view)
|
15 |
+
if (headerMode === "hidden") {
|
16 |
+
return (
|
17 |
+
<div className={cn(
|
18 |
+
`dark flex flex-row h-screen w-screen inset-0 overflow-hidden`
|
19 |
+
)}>
|
20 |
+
{children}
|
21 |
+
</div>
|
22 |
+
)
|
23 |
+
}
|
24 |
return (
|
25 |
<div className={cn(
|
26 |
`dark flex flex-row h-screen w-screen inset-0 overflow-hidden`,
|
src/app/main.tsx
CHANGED
@@ -8,12 +8,13 @@ import { UserChannelView } from "./views/user-channel-view"
|
|
8 |
import { PublicVideoView } from "./views/public-video-view"
|
9 |
import { UserAccountView } from "./views/user-account-view"
|
10 |
import { NotFoundView } from "./views/not-found-view"
|
11 |
-
import { ChannelInfo,
|
12 |
import { useEffect } from "react"
|
13 |
import { usePathname, useRouter } from "next/navigation"
|
14 |
import { TubeLayout } from "./interface/tube-layout"
|
15 |
import { PublicMusicVideosView } from "./views/public-music-videos-view"
|
16 |
import { getCollectionKey } from "@/lib/getCollectionKey"
|
|
|
17 |
|
18 |
// this is where we transition from the server-side space
|
19 |
// and the client-side space
|
@@ -99,6 +100,7 @@ export function Main({
|
|
99 |
|
100 |
if (!publicVideo || !publicVideo?.id) { return }
|
101 |
|
|
|
102 |
// this is a hack for hugging face:
|
103 |
// we allow the ?v=<id> param on the root of the domain
|
104 |
if (pathname !== "/watch") {
|
@@ -138,6 +140,7 @@ export function Main({
|
|
138 |
return (
|
139 |
<TubeLayout>
|
140 |
{view === "home" && <HomeView />}
|
|
|
141 |
{view === "public_video" && <PublicVideoView />}
|
142 |
{view === "public_music_videos" && <PublicMusicVideosView />}
|
143 |
{view === "public_channels" && <PublicChannelsView />}
|
|
|
8 |
import { PublicVideoView } from "./views/public-video-view"
|
9 |
import { UserAccountView } from "./views/user-account-view"
|
10 |
import { NotFoundView } from "./views/not-found-view"
|
11 |
+
import { ChannelInfo, VideoInfo } from "@/types/general"
|
12 |
import { useEffect } from "react"
|
13 |
import { usePathname, useRouter } from "next/navigation"
|
14 |
import { TubeLayout } from "./interface/tube-layout"
|
15 |
import { PublicMusicVideosView } from "./views/public-music-videos-view"
|
16 |
import { getCollectionKey } from "@/lib/getCollectionKey"
|
17 |
+
import { PublicVideoEmbedView } from "./views/public-video-embed-view"
|
18 |
|
19 |
// this is where we transition from the server-side space
|
20 |
// and the client-side space
|
|
|
100 |
|
101 |
if (!publicVideo || !publicVideo?.id) { return }
|
102 |
|
103 |
+
if (pathname === "/embed") { return }
|
104 |
// this is a hack for hugging face:
|
105 |
// we allow the ?v=<id> param on the root of the domain
|
106 |
if (pathname !== "/watch") {
|
|
|
140 |
return (
|
141 |
<TubeLayout>
|
142 |
{view === "home" && <HomeView />}
|
143 |
+
{view === "public_video_embed" && <PublicVideoEmbedView />}
|
144 |
{view === "public_video" && <PublicVideoView />}
|
145 |
{view === "public_music_videos" && <PublicMusicVideosView />}
|
146 |
{view === "public_channels" && <PublicChannelsView />}
|
src/app/server/actions/ai-tube-hf/downloadClapProject.ts
CHANGED
@@ -1,24 +1,24 @@
 import { v4 as uuidv4 } from "uuid"
+import { Credentials } from "@/huggingface/hub/src"
 
-import { ClapProject } from "@/types/clap"
+import { ClapProject } from "@/clap/types"
 import { ChannelInfo, VideoInfo, VideoRequest } from "@/types/general"
 import { defaultVideoModel } from "@/app/config"
+import { parseClap } from "@/clap/parseClap"
 
-import { parseClap } from "../utils/parseClap"
 import { parseVideoModelName } from "../utils/parseVideoModelName"
 import { computeOrientationProjectionWidthHeight } from "../utils/computeOrientationProjectionWidthHeight"
 
-import { downloadFileAsText } from "./downloadFileAsText"
 import { downloadFileAsBlob } from "./downloadFileAsBlob"
 
 export async function downloadClapProject({
   path,
-
-
+  channel,
+  credentials,
 }: {
   path: string
-  apiKey?: string
   channel: ChannelInfo
+  credentials: Credentials
 }): Promise<{
   videoRequest: VideoRequest
   videoInfo: VideoInfo
@@ -31,7 +31,7 @@ export async function downloadClapProject({
   const clapString = await downloadFileAsBlob({
     repo,
     path,
-    apiKey,
+    apiKey: credentials.accessToken,
     expectedMimeType: "application/gzip"
   })
 
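For context, a minimal, hypothetical call site for the updated signature; the path and token values are purely illustrative, and Credentials is the { accessToken } object from the vendored @/huggingface/hub/src used above.

// Illustrative only: calling downloadClapProject with explicit credentials.
const { videoRequest, videoInfo, clapProject } = await downloadClapProject({
  path: "projects/my-video.clap",          // illustrative path inside the channel dataset
  channel,                                 // a ChannelInfo obtained by the caller
  credentials: { accessToken: "hf_..." },  // illustrative Hugging Face token
})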
src/app/server/actions/ai-tube-hf/getVideoRequestsFromChannel.ts
CHANGED
@@ -59,7 +59,7 @@ export async function getVideoRequestsFromChannel({
       const clap = await downloadClapProject({
         path: file.path,
         channel,
-
+        credentials,
       })
       console.log("got a clap file:", clap.clapProject.meta)
 
src/app/state/useStore.ts
CHANGED
@@ -91,6 +91,7 @@ export const useStore = create<{
   const routes: Record<string, InterfaceView> = {
     "/": "home",
     "/watch": "public_video",
+    "/embed": "public_video_embed",
     "/music": "public_music_videos",
     "/channels": "public_channels",
     "/channel": "public_channel",
src/app/views/public-video-embed-view/index.tsx
ADDED
@@ -0,0 +1,71 @@
+"use client"
+
+import { useEffect, useTransition } from "react"
+
+import { useStore } from "@/app/state/useStore"
+import { cn } from "@/lib/utils"
+import { VideoPlayer } from "@/app/interface/video-player"
+
+import { watchVideo } from "@/app/server/actions/stats"
+
+export function PublicVideoEmbedView() {
+  const [_pending, startTransition] = useTransition()
+
+  // current time in the video
+  // note: this is used to *set* the current time, not to read it
+  // EDIT: you know what, let's do this the dirty way for now
+  // const [desiredCurrentTime, setDesiredCurrentTime] = useState()
+
+  const video = useStore(s => s.publicVideo)
+
+  const videoId = `${video?.id || ""}`
+
+  const setPublicVideo = useStore(s => s.setPublicVideo)
+
+  // we inject the current videoId in the URL, if it's not already present
+  // this is a hack for Hugging Face iframes
+  useEffect(() => {
+    const queryString = new URL(location.href).search
+    const searchParams = new URLSearchParams(queryString)
+    if (videoId) {
+      if (searchParams.get("v") !== videoId) {
+        console.log(`current videoId "${videoId}" isn't set in the URL query params.. TODO we should set it`)
+
+        // searchParams.set("v", videoId)
+        // location.search = searchParams.toString()
+      }
+    } else {
+      // searchParams.delete("v")
+      // location.search = searchParams.toString()
+    }
+  }, [videoId])
+
+  useEffect(() => {
+    startTransition(async () => {
+      if (!video || !video.id) {
+        return
+      }
+      const numberOfViews = await watchVideo(videoId)
+
+      setPublicVideo({
+        ...video,
+        numberOfViews
+      })
+    })
+
+  }, [video?.id])
+
+  if (!video) { return null }
+
+  return (
+    <div className={cn(
+      `w-full`,
+      `flex flex-col`
+    )}>
+      <VideoPlayer
+        video={video}
+        enableShortcuts={false}
+      />
+    </div>
+  )
+}
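For context, the new /embed route is meant to be rendered inside an iframe (by the Twitter player card, or by any third-party page). A minimal, hypothetical embed snippet, with an illustrative video id placeholder:

// Illustrative only: how an external page could embed the player route.
const embedSnippet = `<iframe
  src="https://jbilcke-hf-ai-tube.hf.space/embed?v=<videoId>"
  width="1024" height="576"
  frameborder="0" allowfullscreen></iframe>`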
src/app/watch/page.tsx
CHANGED
@@ -44,6 +44,18 @@ export async function generateMetadata(
         ],
         // images: ['/some-specific-page-image.jpg', ...previousImages],
       },
+      twitter: {
+        card: "player",
+        site: "@flngr",
+        description: video.description || "",
+        images: `https://huggingface.co/datasets/jbilcke-hf/ai-tube-index/resolve/main/videos/${video.id}.webp`,
+        players: {
+          playerUrl: `https://jbilcke-hf-ai-tube.hf.space/embed?v=${video.id}`,
+          streamUrl: `https://huggingface.co/datasets/jbilcke-hf/ai-tube-index/resolve/main/videos/${video.id}.mp4`,
+          width: 1024,
+          height: 576
+        }
+      }
     }
   } catch (err) {
     return {
src/{app/server/actions/utils → clap}/parseClap.ts
RENAMED
@@ -1,8 +1,8 @@
 import YAML from "yaml"
 import { v4 as uuidv4 } from "uuid"
 
-import { ClapHeader, ClapMeta, ClapProject, ClapSegment } from "@/types/clap"
-import { getValidNumber } from "@/lib/getValidNumber"
+import { ClapHeader, ClapMeta, ClapModel, ClapProject, ClapSegment } from "./types"
+import { getValidNumber } from "@/lib/getValidNumber";
 
 /**
  * import a Clap file (from a plain text string)
@@ -38,6 +38,7 @@ export async function parseClap(inputStringOrBlob: string | Blob): Promise<ClapProject> {
     throw new Error("invalid clap file (sorry, but you can't make up version numbers like that)")
   }
 
+
   const maybeClapMeta = rawData[1] as ClapMeta
 
   const clapMeta: ClapMeta = {
@@ -46,14 +47,71 @@ export async function parseClap(inputStringOrBlob: string | Blob): Promise<ClapProject> {
     description: typeof maybeClapMeta.description === "string" ? maybeClapMeta.description : "",
     licence: typeof maybeClapMeta.licence === "string" ? maybeClapMeta.licence : "",
     orientation: maybeClapMeta.orientation === "portrait" ? "portrait" : maybeClapMeta.orientation === "square" ? "square" : "landscape",
-    width: getValidNumber(maybeClapMeta.width, 256,
-    height: getValidNumber(maybeClapMeta.height, 256,
+    width: getValidNumber(maybeClapMeta.width, 256, 8192, 1024),
+    height: getValidNumber(maybeClapMeta.height, 256, 8192, 576),
     defaultVideoModel: typeof maybeClapMeta.defaultVideoModel === "string" ? maybeClapMeta.defaultVideoModel : "SVD",
+    extraPositivePrompt: Array.isArray(maybeClapMeta.extraPositivePrompt) ? maybeClapMeta.extraPositivePrompt : [],
   }
 
-
+  /*
+  in case we want to support streaming (mix of models and segments etc), we could do it this way:
+
+  const maybeModelsOrSegments = rawData.slice(2)
+  maybeModelsOrSegments.forEach((unknownElement: any) => {
+    if (isValidNumber(unknownElement?.track)) {
+      maybeSegments.push(unknownElement as ClapSegment)
+    } else {
+      maybeModels.push(unknownElement as ClapModel)
+    }
+  })
+  */
+
+
+  const expectedNumberOfModels = maybeClapHeader.numberOfModels || 0
+  const expectedNumberOfSegments = maybeClapHeader.numberOfSegments || 0
+
+  // note: we assume the order is strictly enforced!
+  // if you implement streaming (mix of models and segments) you will have to rewrite this!
+
+  const afterTheHeaders = 2
+  const afterTheModels = afterTheHeaders + expectedNumberOfModels
+
+  // note: if there are no expected models, maybeModels will be empty
+  const maybeModels = rawData.slice(afterTheHeaders, afterTheModels) as ClapModel[]
+
+  const maybeSegments = rawData.slice(afterTheModels) as ClapSegment[]
+
+  const clapModels: ClapModel[] = maybeModels.map(({
+    id,
+    imageType,
+    audioType,
+    category,
+    triggerName,
+    label,
+    description,
+    author,
+    thumbnailUrl,
+    storageUrl,
+    imagePrompt,
+    audioPrompt,
+  }) => ({
+    // TODO: we should verify each of those, probably
+    id,
+    imageType,
+    audioType,
+    category,
+    triggerName,
+    label,
+    description,
+    author,
+    thumbnailUrl,
+    storageUrl,
+    imagePrompt,
+    audioPrompt,
+  }))
+
 
-  const clapSegments: ClapSegment[] =
+  const clapSegments: ClapSegment[] = maybeSegments.map(({
     id,
     track,
     startTimeInMs,
@@ -82,10 +140,11 @@ export async function parseClap(inputStringOrBlob: string | Blob): Promise<ClapProject> {
     assetUrl,
     outputGain,
     seed,
-  }))
+  }))
 
   return {
     meta: clapMeta,
+    models: clapModels,
     segments: clapSegments
   }
 }
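For context, a minimal sketch (not from the commit) of the decoded YAML payload the updated parser expects: a flat array laid out as [header, meta, ...models, ...segments]. All field values below are illustrative and the objects are deliberately partial.

// Illustrative only: the array shape parseClap assumes after YAML decoding.
const rawDataExample = [
  { format: "clap-0", numberOfModels: 0, numberOfSegments: 1 },          // ClapHeader
  { title: "Demo", orientation: "landscape", width: 1024, height: 576 }, // ClapMeta (partial)
  // ...followed by `numberOfModels` ClapModel entries...
  { id: "seg-1", track: 0, startTimeInMs: 0, endTimeInMs: 2000 },        // ClapSegment (partial)
]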
src/clap/serializeClap.ts
ADDED
@@ -0,0 +1,114 @@
+import YAML from "yaml"
+import { v4 as uuidv4 } from "uuid"
+
+import { ClapHeader, ClapMeta, ClapModel, ClapProject, ClapSegment } from "./types"
+import { getValidNumber } from "@/lib/getValidNumber"
+
+export async function serializeClap({
+  meta, // ClapMeta
+  models, // ClapModel[]
+  segments, // ClapSegment[]
+}: ClapProject): Promise<Blob> {
+
+  // we play it safe, and we verify the structure of the parameters,
+  // to make sure we generate a valid clap file
+  const clapModels: ClapModel[] = models.map(({
+    id,
+    imageType,
+    audioType,
+    category,
+    triggerName,
+    label,
+    description,
+    author,
+    thumbnailUrl,
+    storageUrl,
+    imagePrompt,
+    audioPrompt,
+  }) => ({
+    id,
+    imageType,
+    audioType,
+    category,
+    triggerName,
+    label,
+    description,
+    author,
+    thumbnailUrl,
+    storageUrl,
+    imagePrompt,
+    audioPrompt,
+  }))
+
+  const clapSegments: ClapSegment[] = segments.map(({
+    id,
+    track,
+    startTimeInMs,
+    endTimeInMs,
+    category,
+    modelId,
+    prompt,
+    outputType,
+    renderId,
+    status,
+    assetUrl,
+    outputGain,
+    seed,
+  }) => ({
+    id,
+    track,
+    startTimeInMs,
+    endTimeInMs,
+    category,
+    modelId,
+    prompt,
+    outputType,
+    renderId,
+    status,
+    assetUrl,
+    outputGain,
+    seed,
+  }))
+
+  const clapHeader: ClapHeader = {
+    format: "clap-0",
+    numberOfModels: clapModels.length,
+    numberOfSegments: clapSegments.length,
+  }
+
+  const clapMeta: ClapMeta = {
+    id: meta.id || uuidv4(),
+    title: typeof meta.title === "string" ? meta.title : "Untitled",
+    description: typeof meta.description === "string" ? meta.description : "",
+    licence: typeof meta.licence === "string" ? meta.licence : "",
+    orientation: meta.orientation === "portrait" ? "portrait" : meta.orientation === "square" ? "square" : "landscape",
+    width: getValidNumber(meta.width, 256, 8192, 1024),
+    height: getValidNumber(meta.height, 256, 8192, 576),
+    defaultVideoModel: typeof meta.defaultVideoModel === "string" ? meta.defaultVideoModel : "SVD",
+    extraPositivePrompt: Array.isArray(meta.extraPositivePrompt) ? meta.extraPositivePrompt : [],
+  }
+
+  const entries = [
+    clapHeader,
+    clapMeta,
+    ...clapModels,
+    ...clapSegments
+  ]
+
+  const strigifiedResult = YAML.stringify(entries)
+
+  // Convert the string to a Blob
+  const blobResult = new Blob([strigifiedResult], { type: "application/x-yaml" })
+
+  // Create a stream for the blob
+  const readableStream = blobResult.stream();
+
+  // Compress the stream using gzip
+  const compressionStream = new CompressionStream('gzip');
+  const compressedStream = readableStream.pipeThrough(compressionStream);
+
+  // Create a new blob from the compressed stream
+  const compressedBlob = await new Response(compressedStream).blob();
+
+  return compressedBlob
+}
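As a sanity check, a minimal round-trip sketch (not part of the commit), assuming the gzipped YAML blob produced by serializeClap is exactly what parseClap accepts as its Blob input:

import { parseClap } from "./parseClap"
import { serializeClap } from "./serializeClap"
import { ClapProject } from "./types"

// Serialize a project to a gzipped .clap blob, then parse it back.
async function roundTrip(project: ClapProject): Promise<ClapProject> {
  const archive: Blob = await serializeClap(project)
  return parseClap(archive) // should yield an equivalent project
}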
src/{types/clap.ts → clap/types.ts}
RENAMED
@@ -1,5 +1,14 @@
+
+export type ClapSegmentCategory = "render" | "preview" | "characters" | "location" | "time" | "era" | "lighting" | "weather" | "action" | "music" | "sound" | "dialogue" | "style" | "camera" | "generic"
+export type ClapOutputType = "text" | "movement" | "image" | "video" | "audio"
+export type ClapSegmentStatus = "pending" | "completed" | "error"
+export type ClapImageType = "reference_image" | "text_prompt" | "other"
+export type ClapAudioType = "reference_audio" | "text_prompt" | "other"
+
 export type ClapHeader = {
   format: "clap-0"
+  numberOfModels: number
+  numberOfSegments: number
 }
 
 export type ClapMeta = {
@@ -11,6 +20,7 @@ export type ClapMeta = {
   width: number
   height: number
   defaultVideoModel: string
+  extraPositivePrompt: string[]
 }
 
 export type ClapSegment = {
@@ -18,18 +28,35 @@ export type ClapSegment = {
   track: number
   startTimeInMs: number
   endTimeInMs: number
-  category:
+  category: ClapSegmentCategory
   modelId: string
   prompt: string
-  outputType:
+  outputType: ClapOutputType
   renderId: string
-  status:
+  status: ClapSegmentStatus
   assetUrl: string
   outputGain: number
   seed: number
 }
 
+export type ClapModel = {
+  id: string
+  imageType: ClapImageType
+  audioType: ClapAudioType
+  category: ClapSegmentCategory
+  triggerName: string
+  label: string
+  description: string
+  author: string
+  thumbnailUrl: string
+  storageUrl: string
+  imagePrompt: string
+  audioPrompt: string
+}
+
 export type ClapProject = {
   meta: ClapMeta
+  models: ClapModel[]
   segments: ClapSegment[]
+  // let's keep room for other stuff (screenplay etc)
 }
src/lib/extractBase64.ts
ADDED
@@ -0,0 +1,33 @@
+/**
+ * break a base64 string into sub-components
+ */
+export function extractBase64(base64: string = ""): {
+  mimetype: string;
+  extension: string;
+  data: string;
+  buffer: Buffer;
+  blob: Blob;
+} {
+  // Regular expression to extract the MIME type and the base64 data
+  const matches = base64.match(/^data:([A-Za-z-+/]+);base64,(.+)$/)
+
+  if (!matches || matches.length !== 3) {
+    throw new Error("Invalid base64 string")
+  }
+
+  const mimetype = matches[1] || ""
+  const data = matches[2] || ""
+  const buffer = Buffer.from(data, "base64")
+  const blob = new Blob([buffer])
+
+  // this should be enough for most media formats (jpeg, png, webp, mp4)
+  const extension = mimetype.split("/").pop() || ""
+
+  return {
+    mimetype,
+    extension,
+    data,
+    buffer,
+    blob,
+  }
+}
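A short, hypothetical usage example for the new helper (the data URI below just encodes the string "hello"):

import { extractBase64 } from "@/lib/extractBase64"

const { mimetype, extension, data, buffer } = extractBase64("data:text/plain;base64,aGVsbG8=")
// mimetype === "text/plain", extension === "plain", data === "aGVsbG8="
// buffer.toString("utf-8") === "hello"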
src/types/general.ts
CHANGED
@@ -608,6 +608,7 @@ export type InterfaceDisplayMode =
   | "tv"
 
 export type InterfaceHeaderMode =
+  | "hidden"
   | "normal"
   | "compact"
 
@@ -627,6 +628,7 @@ export type InterfaceView =
   | "public_channels"
   | "public_channel" // public view of a channel
   | "public_video" // public view of a video
+  | "public_video_embed" // for integration into twitter etc
   | "public_music_videos" // public music videos - it's a special category, because music is *cool*
   | "not_found"
 