shiveshnavin committed on
Commit
e67f931
·
1 Parent(s): f50aa01

Remove typewriter animation and add simple text template; modularize helpers

Browse files
common-utils CHANGED
@@ -1 +1 @@
1
- Subproject commit c24060f9cf3f1c746aa910308ef48c5110f2da37
 
1
+ Subproject commit ae3b5f2d4613da9bd6f0b30ad508cc0063a0eaf7
render-server.js CHANGED
@@ -1,6 +1,6 @@
1
  import app from './app.js';
2
  let port = process.env.PORT || 8083
3
-
4
  import express from 'express';
5
  import { createServer } from 'http';
6
  import { Server } from 'socket.io';
 
1
  import app from './app.js';
2
  let port = process.env.PORT || 8083
3
+ import Bubble from './utils/bubble/Bubble.js';
4
  import express from 'express';
5
  import { createServer } from 'http';
6
  import { Server } from 'socket.io';
src/ig-reel/IGSimpleScene.tsx CHANGED
@@ -1,299 +1,263 @@
1
- import { AbsoluteFill, Sequence, interpolate, staticFile, Audio, useCurrentFrame, Easing, Freeze } from 'remotion';
2
- import React, { useEffect, useState } from 'react'
3
- import { RenderUtils } from '../RenderUtils';
4
- import { Transcript, SectionMedia, OriginalManuscript, IGReelExtra } from 'common-utils'
5
- import axios from 'axios';
6
- import ExcitedSubtitles from '../subtitles/excited_Subtitles';
7
- import _ from 'lodash';
8
- import './styles.module.css'
9
- import TransitionSeries from 'remotion-transition-series';
10
- import { CircularWipe } from '../anims/CircularWipe';
11
- import { LinearWipe } from '../anims/LinearWipe';
12
- import { Slide } from '../anims/Slide';
13
- import { Pan } from '../anims/Pan';
14
- import { Dissolve } from '../anims/Dissolve';
15
- import { SlidingDoors } from '../anims/SlidingDoors';
16
- import { IGMediaRender } from './IGMediaRender';
17
- import { Group } from '../models/Types';
18
- import { VideoSplash } from '../anims/VideoSplash';
19
- import Transitions from '../../public/assets/transitions.json'
20
-
21
- export type DisplayMedia = SectionMedia &
22
- {
23
- idx: number,
24
- durationFrames: number,
25
- startFrame: number,
26
- endFrame: number,
27
- }
28
-
29
-
30
- export const GenerateWordGroupRanges = function (audioCaptionFile: string, setSubtitles: Function, setGroups: Function) {
31
- const subFile = staticFile(RenderUtils.getFileName(audioCaptionFile)!!)
32
- axios.get(subFile).then((response) => {
33
- let subtitles = response?.data
34
- if (subtitles?.default) {
35
- subtitles = subtitles.default
36
- }
37
- let subs = subtitles.words
38
- let noOfCharacters = 0
39
- let groupDuration = 0
40
- for (let i = 0; i < subs.length; i++) {
41
- let word = subs[i]
42
- word.idx = i
43
- delete word.phones
44
- let wordEnd = word.endOffset
45
- if (['.', ','].includes(subtitles.transcript?.charAt(wordEnd))) {
46
- word.sentence_end = true
47
- }
48
- if (word.end && word.start) {
49
- noOfCharacters += word.endOffset - word.startOffset
50
- groupDuration += word.end - word.start
51
- }
52
- }
53
- let avgCps = groupDuration / noOfCharacters
54
-
55
- // Fill start and end for missing frames
56
- for (let i = 0; i < subs.length; i++) {
57
- let word = subs[i]
58
- let prevFrame = subs[i - 1]
59
- let nextFrame = subs[i + 1]
60
- if (!word.end || !word.start) {
61
- let noOfCharacters = word.endOffset - word.startOffset
62
- let frameDuration = noOfCharacters * avgCps
63
- word.start = prevFrame?.end || 0
64
- word.end = (word.start + frameDuration)
65
- // frame.end = nextFrame?.start || (frame.start + frameDuration)
66
- if (nextFrame?.start < word.end) {
67
- let next2next = subs[i + 2]
68
- if (next2next && next2next.start) {
69
- nextFrame.end = next2next.start
70
- }
71
- }
72
- }
73
- }
74
- setSubtitles(subs)
75
-
76
-
77
- let arr = subs
78
- let groups = [];
79
-
80
- groups = RenderUtils.splitWordsIntoGroups(arr)
81
- groups.forEach((group) => {
82
- let words = group.words;
83
- group.start = words[0]?.start
84
- group.end = words[words.length - 1].end
85
- })
86
- setGroups(groups)
87
-
88
-
89
- })
90
- }
91
-
92
-
93
- export const IGSimpleScene: React.FC<any> = (props: {
94
- script: OriginalManuscript,
95
- item: Transcript,
96
- SubtitleComponent: React.FC<any>
97
- }) => {
98
- const [subtitles, setSubtitles] = useState(undefined)
99
- const [groups, setGroups] = useState(undefined)
100
- const item: Transcript = props.item
101
- let { audioCaptionFile, durationInSeconds } = item
102
- const { script } = props
103
- const { meta } = script
104
- const { fps } = meta
105
- const frame = useCurrentFrame();
106
- const SubtitleComponent = props.SubtitleComponent
107
- let speechPath = RenderUtils.getFileName(item?.audioFullPath)
108
-
109
- const itemIdx = item.index;
110
- const isLastScene = script.transcript[script.transcript?.length - 1]?.index == itemIdx
111
-
112
- useEffect(() => {
113
- if (audioCaptionFile)
114
- GenerateWordGroupRanges(audioCaptionFile, setSubtitles, setGroups)
115
- }, [])
116
-
117
- if (!groups) {
118
- return (<></>)
119
- }
120
-
121
- let itemDurationInFrames = item.durationInSeconds * fps
122
- // console.log('scriptItem.offset', frame, itemDurationInFrames)
123
-
124
- const foregroundOpacity = item.transition_type?.includes("facde") ? interpolate(
125
- frame,
126
- [0, 10, itemDurationInFrames - 10, itemDurationInFrames],
127
- [1, 0, 0, item.index >= props?.script?.transcript?.length - 1 ? 0 : 1]
128
- ) : 0;
129
-
130
-
131
- let curSecond: Number = frame / fps
132
- let word = RenderUtils.findCurrentWord(curSecond, subtitles!!)
133
- let group: Group = RenderUtils.findCurrentGroupByTime(curSecond, groups)
134
-
135
- let currentDisplayMedia: DisplayMedia | undefined = RenderUtils.calculateDisplayMedia(item, durationInSeconds, frame, fps)
136
-
137
-
138
-
139
- let chunks: React.ReactNode[] = ((item?.mediaAbsPaths as any[])?.map((displayMedia: DisplayMedia, i, arr) => {
140
- let chunk = []
141
- let easeInOutExp = Easing.inOut(Easing.bezier(0.8, 0.22, 0.96, 0.65));
142
- easeInOutExp = Easing.ease
143
- let curZoom = 1 + 0.3 * (frame - displayMedia?.startFrame) / (displayMedia?.durationFrames || 1)
144
- if ((item.extras as IGReelExtra)?.animation == 'none') {
145
- curZoom = 1
146
- }
147
- let duration = displayMedia.durationFrames
148
- if (arr.length == i + 1) {
149
- // to fix the black screen at last few milis
150
- duration = (item.durationInSeconds + item.transition_duration_sec) * fps
151
- }
152
- chunk.push((
153
- <TransitionSeries.Sequence key={`x-${i}`} durationInFrames={duration}>
154
- <IGMediaRender offThreadVideo={true} curZoom={curZoom} displayMedia={displayMedia} />
155
- </TransitionSeries.Sequence>
156
- ))
157
- // Only show in schene media transitions if there are more than one media
158
- if (i < item?.mediaAbsPaths.length - 1 && i > 0) {
159
-
160
- let animsObj = {
161
- // CircularWipe, Slide,LinearWipe, // Not work
162
- SlidingDoors: SlidingDoors,
163
- Pan: Pan,
164
- Dissolve: Dissolve
165
- }
166
- let anims = Object.values(animsObj);//.fill(SlidingDoors);
167
- let seed = itemIdx + i * displayMedia.idx
168
- let Anim = RenderUtils.randomElement(anims, `x-${seed}`)
169
- const easeInOutExp = Easing.inOut(Easing.bezier(0.8, 0.22, 0.96, 0.65));
170
-
171
- let transitionList = item.transition_type?.split(",");
172
- let animFromTranscript = Object.keys(animsObj).find(n => {
173
- return transitionList.includes(n)
174
- })
175
- if (animFromTranscript) {
176
- //@ts-ignore
177
- Anim = animsObj[animFromTranscript]
178
- }
179
- Anim = Pan
180
-
181
- if (!isLastScene && (item.transition_type?.includes('geometrial') || animFromTranscript || item.transition_type == undefined)) {
182
- chunk.push((
183
- <TransitionSeries.Transition
184
- durationInFrames={30}
185
- transitionComponent={({ progress, ...props }) => {
186
- return (
187
- <Anim {...props} progress={easeInOutExp(progress)} />
188
- )
189
- }}
190
- />
191
- ))
192
- }
193
- }
194
- return chunk
195
- }))
196
-
197
- // console.log('currentDisplayMedia?.endFrame', frame, currentDisplayMedia?.endFrame)
198
- return (
199
- <Freeze frame={itemDurationInFrames} active={frame > itemDurationInFrames}>
200
- <AbsoluteFill style={{
201
- // backgroundColor: RenderUtils.randomElement(['red', 'blue', 'green', 'orange', 'yellow', 'cyan'], `x-${itemIdx}`),
202
- width: '100%',
203
- height: '100%',
204
- }} className='relative'>
205
- <Audio volume={1} src={staticFile(speechPath as string)} />
206
-
207
- <TransitionSeries>
208
- {
209
- ...chunks
210
- }
211
- </TransitionSeries>
212
-
213
- {
214
- (item.transition_type?.includes("graphical")) && !isLastScene &&
215
- (currentDisplayMedia?.idx || -1) > -1 && (
216
- (item?.mediaAbsPaths as any)?.map((displayMedia: DisplayMedia) => {
217
- let transitionsByMood = Transitions.filter(t => (t?.mood?.indexOf("happy") || -1) > -1)
218
- let transition = RenderUtils.randomElement(transitionsByMood, `x-${displayMedia.idx * displayMedia.durationFrames}`)
219
- // transition = transitionsByMood[5]
220
-
221
- return (
222
- <Sequence from={displayMedia?.endFrame - transition.durationSec * fps / 2} durationInFrames={transition.durationSec * fps}>
223
- <VideoSplash
224
- file={transition.file}
225
- style={{
226
- position: 'relative',
227
- height: '100%',
228
- width: '100%'
229
- } as any} />
230
- </Sequence>
231
- )
232
- })
233
- )
234
- }
235
-
236
- <div style={{
237
- opacity: foregroundOpacity,
238
- zIndex: 10,
239
- width: '100%',
240
- height: '100%',
241
- background: 'black'
242
- }}></div>
243
-
244
- <div style={{
245
- display: frame < itemDurationInFrames ? undefined : 'none',
246
- zIndex: 20,
247
- }} className='absolute inset-0'>
248
- {
249
- SubtitleComponent ?
250
- <SubtitleComponent group={group} word={word} fontSize={(item.extras as IGReelExtra)?.fontSize} />
251
- :
252
- <ExcitedSubtitles position='center' group={group} word={word} fontSize={(item.extras as IGReelExtra)?.fontSize} />
253
- }
254
- </div>
255
-
256
- {
257
- item?.bubble?.text && (
258
- <div style={{
259
- display: 'flex',
260
- flexDirection: 'row',
261
- justifyContent: 'center',
262
- alignContent: 'center',
263
- paddingTop: '20%',
264
- position: 'absolute',
265
- width: '100%',
266
- height: '100%',
267
- background: 'transparent',
268
- zIndex: 100,
269
- }}>
270
- <span
271
- style={{
272
- textAlign: 'center',
273
- color: '#FFEB3B',
274
- fontSize: '5em',
275
- fontFamily: 'Poppins',
276
- borderColor: 'transparent',
277
- borderRadius: '2rem',
278
- borderLeftWidth: '4px',
279
- borderRightWidth: '4px',
280
- backgroundPosition: 'center center',
281
- textShadow: '0px 0px 50px #000, 0px 0px 50px #000',
282
- padding: '0.2rem 1.5rem',
283
- }}
284
- >
285
- {item.bubble.text}
286
- </span>
287
- </div>
288
- )
289
- }
290
-
291
-
292
- </AbsoluteFill >
293
- </Freeze>
294
- );
295
- };
296
-
297
-
298
-
299
-
 
1
+ import { AbsoluteFill, Sequence, interpolate, staticFile, Audio, useCurrentFrame, Easing, Freeze } from 'remotion';
2
+ import React, { useEffect, useState } from 'react'
3
+ import { RenderUtils } from '../RenderUtils';
4
+ import { Transcript, SectionMedia, OriginalManuscript, IGReelExtra } from 'common-utils'
5
+ import axios from 'axios';
6
+ import ExcitedSubtitles from '../subtitles/excited_Subtitles';
7
+ import _ from 'lodash';
8
+ import './styles.module.css'
9
+ import TransitionSeries from 'remotion-transition-series';
10
+ import { CircularWipe } from '../anims/CircularWipe';
11
+ import { LinearWipe } from '../anims/LinearWipe';
12
+ import { Slide } from '../anims/Slide';
13
+ import { Pan } from '../anims/Pan';
14
+ import { Dissolve } from '../anims/Dissolve';
15
+ import { SlidingDoors } from '../anims/SlidingDoors';
16
+ import { IGMediaRender } from './IGMediaRender';
17
+ import { Group } from '../models/Types';
18
+ import { VideoSplash } from '../anims/VideoSplash';
19
+ import Transitions from '../../public/assets/transitions.json'
20
+
21
+ export type DisplayMedia = SectionMedia &
22
+ {
23
+ idx: number,
24
+ durationFrames: number,
25
+ startFrame: number,
26
+ endFrame: number,
27
+ }
28
+
29
+
30
+ export const GenerateWordGroupRanges = function (audioCaptionFile: string, setSubtitles: Function, setGroups: Function) {
31
+ const subFile = staticFile(RenderUtils.getFileName(audioCaptionFile)!!)
32
+ axios.get(subFile).then((response) => {
33
+ let subtitles = response?.data
34
+ if (subtitles?.default) {
35
+ subtitles = subtitles.default
36
+ }
37
+ let subs = subtitles.words
38
+ let noOfCharacters = 0
39
+ let groupDuration = 0
40
+ for (let i = 0; i < subs.length; i++) {
41
+ let word = subs[i]
42
+ word.idx = i
43
+ delete word.phones
44
+ let wordEnd = word.endOffset
45
+ if (['.', ','].includes(subtitles.transcript?.charAt(wordEnd))) {
46
+ word.sentence_end = true
47
+ }
48
+ if (word.end && word.start) {
49
+ noOfCharacters += word.endOffset - word.startOffset
50
+ groupDuration += word.end - word.start
51
+ }
52
+ }
53
+ let avgCps = groupDuration / noOfCharacters
54
+
55
+ // Fill start and end for missing frames
56
+ for (let i = 0; i < subs.length; i++) {
57
+ let word = subs[i]
58
+ let prevFrame = subs[i - 1]
59
+ let nextFrame = subs[i + 1]
60
+ if (!word.end || !word.start) {
61
+ let noOfCharacters = word.endOffset - word.startOffset
62
+ let frameDuration = noOfCharacters * avgCps
63
+ word.start = prevFrame?.end || 0
64
+ word.end = (word.start + frameDuration)
65
+ // frame.end = nextFrame?.start || (frame.start + frameDuration)
66
+ if (nextFrame?.start < word.end) {
67
+ let next2next = subs[i + 2]
68
+ if (next2next && next2next.start) {
69
+ nextFrame.end = next2next.start
70
+ }
71
+ }
72
+ }
73
+ }
74
+ setSubtitles(subs)
75
+
76
+
77
+ let arr = subs
78
+ let groups = [];
79
+
80
+ groups = RenderUtils.splitWordsIntoGroups(arr)
81
+ groups.forEach((group) => {
82
+ let words = group.words;
83
+ group.start = words[0]?.start
84
+ group.end = words[words.length - 1].end
85
+ })
86
+ setGroups(groups)
87
+
88
+
89
+ })
90
+ }
91
+
92
+
93
+ export const IGSimpleScene: React.FC<any> = (props: {
94
+ script: OriginalManuscript,
95
+ item: Transcript,
96
+ SubtitleComponent: React.FC<any>
97
+ }) => {
98
+ const [subtitles, setSubtitles] = useState(undefined)
99
+ const [groups, setGroups] = useState(undefined)
100
+ const item: Transcript = props.item
101
+ let { audioCaptionFile, durationInSeconds } = item
102
+ const { script } = props
103
+ const { meta } = script
104
+ const { fps } = meta
105
+ const frame = useCurrentFrame();
106
+ const SubtitleComponent = props.SubtitleComponent
107
+ let speechPath = RenderUtils.getFileName(item?.audioFullPath)
108
+
109
+ const itemIdx = item.index;
110
+ const isLastScene = script.transcript[script.transcript?.length - 1]?.index == itemIdx
111
+
112
+ useEffect(() => {
113
+ if (audioCaptionFile)
114
+ GenerateWordGroupRanges(audioCaptionFile, setSubtitles, setGroups)
115
+ }, [])
116
+
117
+ if (!groups) {
118
+ return (<></>)
119
+ }
120
+
121
+ let itemDurationInFrames = item.durationInSeconds * fps
122
+ // console.log('scriptItem.offset', frame, itemDurationInFrames)
123
+
124
+ const foregroundOpacity = item.transition_type?.includes("facde") ? interpolate(
125
+ frame,
126
+ [0, 10, itemDurationInFrames - 10, itemDurationInFrames],
127
+ [1, 0, 0, item.index >= props?.script?.transcript?.length - 1 ? 0 : 1]
128
+ ) : 0;
129
+
130
+
131
+ let curSecond: Number = frame / fps
132
+ let word = RenderUtils.findCurrentWord(curSecond, subtitles!!)
133
+ let group: Group = RenderUtils.findCurrentGroupByTime(curSecond, groups)
134
+
135
+ let currentDisplayMedia: DisplayMedia | undefined = RenderUtils.calculateDisplayMedia(item, durationInSeconds, frame, fps)
136
+
137
+
138
+
139
+ let chunks: React.ReactNode[] = ((item?.mediaAbsPaths as any[])?.map((displayMedia: DisplayMedia, i, arr) => {
140
+ let chunk = []
141
+ let easeInOutExp = Easing.inOut(Easing.bezier(0.8, 0.22, 0.96, 0.65));
142
+ easeInOutExp = Easing.ease
143
+ let curZoom = 1 + 0.3 * (frame - displayMedia?.startFrame) / (displayMedia?.durationFrames || 1)
144
+ if ((item.extras as IGReelExtra)?.animation == 'none') {
145
+ curZoom = 1
146
+ }
147
+ let duration = displayMedia.durationFrames
148
+ if (arr.length == i + 1) {
149
+ // to fix the black screen at last few milis
150
+ duration = (item.durationInSeconds + item.transition_duration_sec) * fps
151
+ }
152
+ chunk.push((
153
+ <TransitionSeries.Sequence key={`x-${i}`} durationInFrames={duration}>
154
+ <IGMediaRender offThreadVideo={true} curZoom={curZoom} displayMedia={displayMedia} />
155
+ </TransitionSeries.Sequence>
156
+ ))
157
+ // Only show in schene media transitions if there are more than one media
158
+ if (i < item?.mediaAbsPaths.length - 1 && i > 0) {
159
+
160
+ let animsObj = {
161
+ // CircularWipe, Slide,LinearWipe, // Not work
162
+ SlidingDoors: SlidingDoors,
163
+ Pan: Pan,
164
+ Dissolve: Dissolve
165
+ }
166
+ let anims = Object.values(animsObj);//.fill(SlidingDoors);
167
+ let seed = itemIdx + i * displayMedia.idx
168
+ let Anim = RenderUtils.randomElement(anims, `x-${seed}`)
169
+ const easeInOutExp = Easing.inOut(Easing.bezier(0.8, 0.22, 0.96, 0.65));
170
+
171
+ let transitionList = item.transition_type?.split(",");
172
+ let animFromTranscript = Object.keys(animsObj).find(n => {
173
+ return transitionList.includes(n)
174
+ })
175
+ if (animFromTranscript) {
176
+ //@ts-ignore
177
+ Anim = animsObj[animFromTranscript]
178
+ }
179
+ Anim = Pan
180
+
181
+ if (!isLastScene && (item.transition_type?.includes('geometrial') || animFromTranscript || item.transition_type == undefined)) {
182
+ chunk.push((
183
+ <TransitionSeries.Transition
184
+ durationInFrames={30}
185
+ transitionComponent={({ progress, ...props }) => {
186
+ return (
187
+ <Anim {...props} progress={easeInOutExp(progress)} />
188
+ )
189
+ }}
190
+ />
191
+ ))
192
+ }
193
+ }
194
+ return chunk
195
+ }))
196
+
197
+ // console.log('currentDisplayMedia?.endFrame', frame, currentDisplayMedia?.endFrame)
198
+ return (
199
+ <Freeze frame={itemDurationInFrames} active={frame > itemDurationInFrames}>
200
+ <AbsoluteFill style={{
201
+ // backgroundColor: RenderUtils.randomElement(['red', 'blue', 'green', 'orange', 'yellow', 'cyan'], `x-${itemIdx}`),
202
+ width: '100%',
203
+ height: '100%',
204
+ }} className='relative'>
205
+ <Audio volume={1} src={staticFile(speechPath as string)} />
206
+
207
+ <TransitionSeries>
208
+ {
209
+ ...chunks
210
+ }
211
+ </TransitionSeries>
212
+
213
+ {
214
+ (item.transition_type?.includes("graphical")) && !isLastScene &&
215
+ (currentDisplayMedia?.idx || -1) > -1 && (
216
+ (item?.mediaAbsPaths as any)?.map((displayMedia: DisplayMedia) => {
217
+ let transitionsByMood = Transitions.filter(t => (t?.mood?.indexOf("happy") || -1) > -1)
218
+ let transition = RenderUtils.randomElement(transitionsByMood, `x-${displayMedia.idx * displayMedia.durationFrames}`)
219
+ // transition = transitionsByMood[5]
220
+
221
+ return (
222
+ <Sequence from={displayMedia?.endFrame - transition.durationSec * fps / 2} durationInFrames={transition.durationSec * fps}>
223
+ <VideoSplash
224
+ file={transition.file}
225
+ style={{
226
+ position: 'relative',
227
+ height: '100%',
228
+ width: '100%'
229
+ } as any} />
230
+ </Sequence>
231
+ )
232
+ })
233
+ )
234
+ }
235
+
236
+ <div style={{
237
+ opacity: foregroundOpacity,
238
+ zIndex: 10,
239
+ width: '100%',
240
+ height: '100%',
241
+ background: 'black'
242
+ }}></div>
243
+
244
+ <div style={{
245
+ display: frame < itemDurationInFrames ? undefined : 'none',
246
+ zIndex: 20,
247
+ }} className='absolute inset-0'>
248
+ {
249
+ SubtitleComponent ?
250
+ <SubtitleComponent group={group} word={word} fontSize={(item.extras as IGReelExtra)?.fontSize} />
251
+ :
252
+ <ExcitedSubtitles position='center' group={group} word={word} fontSize={(item.extras as IGReelExtra)?.fontSize} />
253
+ }
254
+ </div>
255
+
256
+ </AbsoluteFill >
257
+ </Freeze>
258
+ );
259
+ };
260
+
261
+
262
+
263
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/linkedin-video/LinkedinFullSysDesignComposition.tsx CHANGED
@@ -34,50 +34,20 @@ export const LinkedinFullSysDesignComposition: React.FC = (props: OriginalManusc
34
  <Audio
35
  loop
36
  volume={(f) =>
37
- bgMusicVolume !== undefined
38
- ? bgMusicVolume
39
- : interpolate(
40
- f,
41
- [0, bgMusicDuration * fps],
42
- [0.05, 0.01],
43
- { extrapolateLeft: "clamp" }
44
- )
45
- }
46
  src={bgMusicPath}
47
  />
48
  )}
49
 
50
- <TransitionSeries>
51
- {
52
- transcripts.map((scriptItem: Transcript, i) => {
53
- let { durationInSeconds } = scriptItem
54
- let durationInFrames = meta.fps * durationInSeconds
55
 
56
- let isGraph = scriptItem.bubble.type == 'graph'
57
-
58
- return (
59
- <>
60
- <TransitionSeries.Sequence durationInFrames={durationInFrames}>
61
- {isGraph ?
62
- <AnimGraphScene
63
- script={props}
64
- item={scriptItem} />
65
- :
66
- <IGSimpleScene
67
- script={props}
68
- item={scriptItem}
69
- SubtitleComponent={({ group, word }) => {
70
- return (
71
- <GeneralSubtitles position='end' group={group} word={word} />
72
- )
73
- }} />
74
- }
75
- </TransitionSeries.Sequence>
76
- </>
77
- )
78
- })
79
- }
80
- </TransitionSeries>
81
 
82
  <div
83
  style={{
 
34
  <Audio
35
  loop
36
  volume={(f) =>
37
+ bgMusicVolume !== undefined
38
+ ? bgMusicVolume
39
+ : interpolate(
40
+ f,
41
+ [0, bgMusicDuration * fps],
42
+ [0.05, 0.01],
43
+ { extrapolateLeft: "clamp" }
44
+ )
45
+ }
46
  src={bgMusicPath}
47
  />
48
  )}
49
 
 
 
 
 
 
50
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
 
52
  <div
53
  style={{
utils/AvatarRender.js CHANGED
@@ -130,7 +130,7 @@ export class AvatarRenderer {
130
 
131
  // Cleanup intermediate files
132
  for (const f of tempFiles) {
133
- try { if (fs.existsSync(f)) fs.unlinkSync(f); } catch (_) { }
134
  }
135
 
136
  return resultFile;
 
130
 
131
  // Cleanup intermediate files
132
  for (const f of tempFiles) {
133
+ // try { if (fs.existsSync(f)) fs.unlinkSync(f); } catch (_) { }
134
  }
135
 
136
  return resultFile;
utils/bubble/Bubble.js ADDED
@@ -0,0 +1,345 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import _ from 'lodash';
4
+ import { fileURLToPath } from 'url';
5
+ import { FFMpegUtils } from 'common-utils';
6
+ import os from 'os';
7
+ import sharp from 'sharp';
8
+ import crypto from 'crypto';
9
+ import { tempPath, escapeXml, escapeText, resolveAudioPath } from './helpers.js';
10
+ import { createTextBackgroundPng, processImageWithBg } from './bg-utils.js';
11
+ import { computeXY } from './layout.js';
12
+
13
+
14
+ const DEFAULT_FONT_FILE = (() => {
15
+ const winFonts = process.env.WINDIR ? path.join(process.env.WINDIR, 'Fonts') : 'C:\\Windows\\Fonts';
16
+ const preferred = path.join(winFonts, 'CascadiaCode.ttf');
17
+ const fallback = path.join(winFonts, 'Arial.ttf');
18
+ if (fs.existsSync(preferred)) return preferred.replace(/\\/g, '/');
19
+ if (fs.existsSync(fallback)) return fallback.replace(/\\/g, '/');
20
+ return preferred.replace(/\\/g, '/');
21
+ })();
22
+
23
+
24
+ // Base bubble templates (defaults). Templates prefill everything except bubbleText.text for text templates.
25
+ const BaseBubbleTemplates = {
26
+ // Simple text box: black text on white rounded box at top-center
27
+ 'simple-top-center': {
28
+ bubbleText: {
29
+ fontSize: 48,
30
+ fontColor: '#000000',
31
+ fontName: 'Arial',
32
+ // drawtext box properties - we prefer generated rounded png background, so keep box flag to indicate background
33
+ box: 1,
34
+ boxColor: 'white',
35
+ boxBorderW: 10
36
+ },
37
+ bubbleExtra: {
38
+ positionX: 'center',
39
+ positionY: 'top',
40
+ size: 'full',
41
+ paddingX: 2,
42
+ paddingY: 2
43
+ }
44
+ }
45
+ };
46
+
47
+
48
+ class BubbleMaker {
49
+ constructor() { }
50
+
51
+ /**
52
+ * Create an animated bubble (image or text) over a video.
53
+ * @param {string} videoPath - path to base video
54
+ * @param {Object} bubble - bubble config (see OriginalManuscriptModel.Bubble)
55
+ * @param {string} outputPath - output video path
56
+ * @param {Function} onLog - optional logger
57
+ */
58
+ async makeBubble(videoPath, bubble, outputPath, onLog) {
59
+ if (!fs.existsSync(videoPath)) throw new Error('video not found: ' + videoPath);
60
+ const meta = await FFMpegUtils.getMediaMetadata(videoPath);
61
+ const vw = meta.video.width;
62
+ const vh = meta.video.height;
63
+
64
+ // Apply template defaults if templateName provided
65
+ if (bubble && bubble.templateName) {
66
+ const tpl = BaseBubbleTemplates[bubble.templateName];
67
+ if (tpl) {
68
+ // deep merge: template provides defaults, bubble overrides
69
+ bubble = _.merge({}, tpl, bubble);
70
+ // Scale pixel-based template values defined for a 1080x1920 reference to the actual video size
71
+ const REF_W = 1080, REF_H = 1920;
72
+ const scale = Math.min(vw / REF_W, vh / REF_H) || 1;
73
+ if (bubble.bubbleText) {
74
+ const bt = bubble.bubbleText;
75
+ if (typeof bt.fontSize === 'number') bt.fontSize = Math.max(1, Math.round(bt.fontSize * scale));
76
+ if (typeof bt.boxBorderW === 'number') bt.boxBorderW = Math.max(1, Math.round(bt.boxBorderW * scale));
77
+ if (typeof bt.shadowSize === 'number') bt.shadowSize = Math.max(0, Math.round(bt.shadowSize * scale));
78
+ }
79
+ } else {
80
+ (onLog || console.log)(`Bubble template ${bubble.templateName} not found`);
81
+ }
82
+ }
83
+
84
+ const extra = bubble.bubbleExtra || {};
85
+ // animation config may now live on bubble.bubbleText.animExtra per model changes
86
+ const anim = (bubble.bubbleText && bubble.bubbleText.animExtra) || bubble.animExtra || {};
87
+
88
+
89
+ const from = typeof bubble.fromSec === 'number' ? bubble.fromSec : 0;
90
+ const to = typeof bubble.toSec === 'number' ? bubble.toSec : (from + (bubble.durationSec || 3));
91
+
92
+ // determine overlay source (image/video) or text
93
+ let isMedia = false;
94
+ let overlayPath = null;
95
+ // accept either mediaAbsPaths (plural, template) or mediaAbsPath (singular) for compatibility
96
+ const mediaField = bubble.mediaAbsPaths || bubble.mediaAbsPath || bubble.mediaAbs;
97
+ if (mediaField) {
98
+ const m = Array.isArray(mediaField) ? mediaField[0] : mediaField;
99
+ overlayPath = m?.path;
100
+ isMedia = !!overlayPath;
101
+ }
102
+
103
+ // compute target overlay size
104
+ const padX = extra.paddingX || 5; // percent
105
+ const padY = extra.paddingY || 5;
106
+
107
+ let ow = Math.round(vw * (extra.size === 'half' ? 0.45 : 0.30));
108
+ let oh = null; // keep aspect if image
109
+
110
+
111
+ // build ffmpeg command
112
+ let cmd;
113
+
114
+ if (isMedia && fs.existsSync(overlayPath)) {
115
+ // determine if overlay is image by extension
116
+ const imgExts = ['.png', '.jpg', '.jpeg', '.webp', '.bmp', '.gif'];
117
+ const overlayExt = path.extname(overlayPath).toLowerCase();
118
+ const isImageFile = imgExts.includes(overlayExt);
119
+
120
+ // get overlay dimensions if available
121
+ let overlayMeta;
122
+ try { overlayMeta = await FFMpegUtils.getMediaMetadata(overlayPath); } catch (e) { overlayMeta = null; }
123
+ if (overlayMeta && overlayMeta.video && overlayMeta.video.width && overlayMeta.video.height) {
124
+ const iw = overlayMeta.video.width;
125
+ const ih = overlayMeta.video.height;
126
+ const ar = iw / ih;
127
+ if (!oh) oh = Math.round(ow / ar);
128
+ } else {
129
+ // fallback to square-ish
130
+ if (!oh) oh = Math.round(ow * 0.75);
131
+ }
132
+
133
+ const { x, y } = computeXY(ow, oh, extra, vw, vh);
134
+
135
+ // fade durations
136
+ const fadeDur = Math.min(0.5, (to - from) / 2);
137
+ const fadeInStart = from;
138
+ const fadeOutStart = Math.max(from, to - fadeDur);
139
+
140
+ // Build video part of filter_complex
141
+ // If image and needs bg/rounded corners, pre-process the image to bake background and rounding into a PNG
142
+ if (isImageFile && (bubble.backgroundColor || (typeof bubble.borderRadius === 'number'))) {
143
+ try {
144
+ const processed = await processImageWithBg(overlayPath, ow, oh, bubble.backgroundColor, bubble.borderRadius || 0);
145
+ overlayPath = processed;
146
+ // update isImageFile extension to .png so we handle loop flags correctly
147
+ } catch (e) {
148
+ (onLog || console.log)('Failed to process image for rounded bg:', e);
149
+ }
150
+ }
151
+
152
+ const videoFilter = `[1:v]scale=${ow}:${oh},format=rgba,fade=t=in:st=${fadeInStart}:d=${fadeDur}:alpha=1,fade=t=out:st=${fadeOutStart}:d=${fadeDur}:alpha=1[ov];[0:v][ov]overlay=${x}:${y}:enable='between(t,${from},${to})'[vout]`;
153
+
154
+ // Inputs: 0 = main video, 1 = overlay (image/video) (looped for images)
155
+ // Limit looped image input to main video duration to avoid encoding hanging
156
+ const mainDuration = meta.duration || meta.video?.duration || 0;
157
+ const overlayInputFlag = isImageFile
158
+ ? (mainDuration > 0 ? `-loop 1 -t ${mainDuration} -i "${overlayPath}"` : `-loop 1 -i "${overlayPath}"`)
159
+ : `-i "${overlayPath}"`;
160
+
161
+ // Audio handling: optionally mix in audioEffectFile
162
+ let audioFilter = '';
163
+ let audioMap = '-map "[aout]" -c:a aac';
164
+ let extraAudioInput = '';
165
+ if (bubble.audioEffectFile) {
166
+ let audioPath = bubble.audioEffectFile;
167
+ // support stock names mapped to public/assets/audio/<name>.mp3
168
+ const stock = ['click', 'pop', 'whoosh'];
169
+ // Resolve stock or relative audio names dynamically
170
+ // prefer audio-effects folder
171
+ const assetsDir = path.join(process.cwd(), 'public', 'assets', 'audio-effects');
172
+ if (audioPath === 'typewriter') audioPath = 'click';
173
+ if (fs.existsSync(assetsDir)) {
174
+ const files = fs.readdirSync(assetsDir);
175
+ const match = files.find(f => path.parse(f).name === audioPath);
176
+ if (match) {
177
+ audioPath = path.join(assetsDir, match);
178
+ }
179
+ }
180
+
181
+ // If still not an absolute path, check relative to cwd
182
+ if (audioPath && !path.isAbsolute(audioPath)) {
183
+ const candidate = path.join(process.cwd(), audioPath);
184
+ if (fs.existsSync(candidate)) {
185
+ audioPath = candidate;
186
+ } else {
187
+ (onLog || console.log)(`Audio effect file not found at ${candidate}, skipping audio effect`);
188
+ audioPath = null;
189
+ }
190
+ }
191
+
192
+ if (audioPath) {
193
+ extraAudioInput = `-i "${audioPath}"`;
194
+ const aVol = typeof bubble.audioEffectVolume === 'number' ? bubble.audioEffectVolume : 1.0;
195
+ const delayMs = Math.round(from * 1000);
196
+ // overlay is input 1, so effect audio will be input 2
197
+ const audioInputIndex = 2;
198
+ const clipSec = Math.min((typeof bubble.audioEffectDurationSec === 'number' ? bubble.audioEffectDurationSec : (to - from)), (to - from));
199
+ audioFilter = `[0:a]aresample=async=1[a0];[${audioInputIndex}:a]atrim=0:${clipSec},asetpts=PTS-STARTPTS,volume=${aVol}[aeff];[aeff]adelay=${delayMs}|${delayMs}[aeffd];[a0][aeffd]amix=inputs=2:duration=first:dropout_transition=0[aout]`;
200
+ } else {
201
+ // fallback to mapping main audio
202
+ audioFilter = `[0:a]aresample=async=1[aout]`;
203
+ }
204
+ } else {
205
+ // map main audio through as aout
206
+ audioFilter = `[0:a]aresample=async=1[aout]`;
207
+ }
208
+
209
+ const fc = `${videoFilter};${audioFilter}`;
210
+
211
+ // Build final command
212
+ // place extraAudioInput after overlayInputFlag
213
+ const inputs = `${overlayInputFlag} ${extraAudioInput}`.trim();
214
+ cmd = `ffmpeg -i "${videoPath}" ${inputs} -filter_complex "${fc}" -map "[vout]" ${audioMap} -c:v libx264 -preset veryfast -crf 23 -shortest "${outputPath}" -y`;
215
+
216
+ } else if (bubble.bubbleText && bubble.bubbleText.text) {
217
+ const t = bubble.bubbleText;
218
+ const fontSize = t.fontSize || 40;
219
+ const fontColor = t.fontColor || '#FFFFFF';
220
+ const posX = extra.positionX || 'center';
221
+ const posY = extra.positionY || 'center';
222
+ // compute x,y expressions for drawtext
223
+ let xExpr = `(w-text_w)/2`;
224
+ if (posX === 'left') xExpr = `(w*${padX / 100})`;
225
+ else if (posX === 'right') xExpr = `(w-text_w-w*${padX / 100})`;
226
+ let yExpr = `(h-text_h)/2`;
227
+ if (posY === 'top') yExpr = `(h*${padY / 100})`;
228
+ else if (posY === 'bottom') yExpr = `(h-text_h-h*${padY / 100})`;
229
+
230
+ // If typing animation requested, create multiple drawtext filters showing progressively longer substring
231
+ if (false) {
232
+ // typewriter removed
233
+ } else {
234
+ // enable between
235
+ const enable = `between(t,${from},${to})`;
236
+ const needsBg = !!(bubble.backgroundColor || (typeof bubble.borderRadius === 'number') || t.box);
237
+ const fontfileEsc = DEFAULT_FONT_FILE.replace(/\\/g, '\\\\').replace(/:/g, '\\:');
238
+
239
+ let bgPath = null;
240
+ let videoFilter;
241
+ if (needsBg) {
242
+ const paddingXpx = t.boxBorderW || 14;
243
+ const paddingYpx = Math.max(8, Math.round(fontSize * 0.6));
244
+ const bgInfo = await createTextBackgroundPng(t.text || '', fontSize, t.fontName, t.fontColor || fontColor, bubble.backgroundColor || t.boxColor || 'white', bubble.borderRadius || t.boxBorderW || 0, paddingXpx, paddingYpx);
245
+ bgPath = bgInfo.path;
246
+ const { x: bgX, y: bgY } = computeXY(bgInfo.width, bgInfo.height, extra, vw, vh);
247
+ // center text within bg box
248
+ const xFor = `${bgX}+(${bgInfo.width}-text_w)/2`;
249
+ const yFor = `${bgY}+(${bgInfo.height}-text_h)/2`;
250
+ const drawTextBoxPart = '';
251
+ const draw = `drawtext=text='${escapeText(t.text)}':font=${t.fontName || 'Arial'}:fontfile='${fontfileEsc}':fontsize=${fontSize}:fontcolor=${t.fontColor || '#FFFFFF'}:x=${xFor}:y=${yFor}:enable='${enable}'${drawTextBoxPart}`;
252
+ videoFilter = `[1:v]scale=${bgInfo.width}:${bgInfo.height},format=rgba[bg];[0:v][bg]overlay=${bgX}:${bgY}:enable='between(t,${from},${to})'[mid];[mid]${draw}[vout]`;
253
+ } else {
254
+ const drawTextBoxPart = t.box ? `:box=1:boxcolor=${t.boxColor || 'white'}@1:boxborderw=${t.boxBorderW || 10}` : '';
255
+ const draw = `drawtext=text='${escapeText(t.text)}':font=${t.fontName || 'Arial'}:fontfile='${fontfileEsc}':fontsize=${fontSize}:fontcolor=${t.fontColor || '#FFFFFF'}:x=${xExpr}:y=${yExpr}:enable='${enable}'${drawTextBoxPart}`;
256
+ videoFilter = `[0:v]${draw}[vout]`;
257
+ }
258
+
259
+ if (bubble.audioEffectFile) {
260
+ // Use filter_complex to mix audio
261
+ let audioPath = bubble.audioEffectFile;
262
+ // prefer audio-effects folder
263
+ const assetsDir = path.join(process.cwd(), 'public', 'assets', 'audio-effects');
264
+ if (audioPath === 'typewriter') audioPath = 'click';
265
+ if (fs.existsSync(assetsDir)) {
266
+ const files = fs.readdirSync(assetsDir);
267
+ const match = files.find(f => path.parse(f).name === audioPath);
268
+ if (match) audioPath = path.join(assetsDir, match);
269
+ }
270
+ if (audioPath && !path.isAbsolute(audioPath)) {
271
+ const candidate = path.join(process.cwd(), audioPath);
272
+ if (fs.existsSync(candidate)) audioPath = candidate;
273
+ else audioPath = null;
274
+ }
275
+
276
+ if (audioPath) {
277
+ const aVol = typeof bubble.audioEffectVolume === 'number' ? bubble.audioEffectVolume : 1.0;
278
+ const delayMs = Math.round(from * 1000);
279
+ const audioInputIndex = bgPath ? 2 : 1;
280
+ const clipSec = Math.min((typeof bubble.audioEffectDurationSec === 'number' ? bubble.audioEffectDurationSec : (to - from)), (to - from));
281
+ const fc = `${videoFilter};[0:a]aresample=async=1[a0];[${audioInputIndex}:a]atrim=0:${clipSec},asetpts=PTS-STARTPTS,volume=${aVol}[aeff];[aeff]adelay=${delayMs}|${delayMs}[aeffd];[a0][aeffd]amix=inputs=2:duration=first:dropout_transition=0[aout]`;
282
+ if (bgPath) {
283
+ cmd = `ffmpeg -i "${videoPath}" -i "${bgPath}" -i "${audioPath}" -filter_complex "${fc}" -map "[vout]" -map "[aout]" -c:v libx264 -preset veryfast -crf 23 -c:a aac -shortest "${outputPath}" -y`;
284
+ } else {
285
+ cmd = `ffmpeg -i "${videoPath}" -i "${audioPath}" -filter_complex "${fc}" -map "[vout]" -map "[aout]" -c:v libx264 -preset veryfast -crf 23 -c:a aac -shortest "${outputPath}" -y`;
286
+ }
287
+ } else {
288
+ // fallback
289
+ const fc = `${videoFilter};[0:a]aresample=async=1[aout]`;
290
+ if (bgPath) {
291
+ cmd = `ffmpeg -i "${videoPath}" -i "${bgPath}" -filter_complex "${fc}" -map "[vout]" -map "[aout]" -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -shortest "${outputPath}" -y`;
292
+ } else {
293
+ cmd = `ffmpeg -i "${videoPath}" -filter_complex "${fc}" -map "[vout]" -map "[aout]" -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -shortest "${outputPath}" -y`;
294
+ }
295
+ }
296
+ } else {
297
+ // no audio mixing; if bg present include it as input
298
+ const fc = `${videoFilter};[0:a]aresample=async=1[aout]`;
299
+ if (bgPath) {
300
+ cmd = `ffmpeg -i "${videoPath}" -i "${bgPath}" -filter_complex "${fc}" -map "[vout]" -map "[aout]" -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -shortest "${outputPath}" -y`;
301
+ } else {
302
+ cmd = `ffmpeg -i "${videoPath}" -filter_complex "${fc}" -map "[vout]" -map "[aout]" -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -shortest "${outputPath}" -y`;
303
+ }
304
+ }
305
+ }
306
+ } else {
307
+ throw new Error('No valid bubble source (media or text) found');
308
+ }
309
+
310
+ onLog && onLog('Running ffmpeg: ' + cmd);
311
+ await FFMpegUtils.execute(cmd);
312
+ return outputPath;
313
+ }
314
+ }
315
+
316
// Module-level singleton: every importer shares this one BubbleMaker instance.
export default new BubbleMaker();
317
+
318
+
319
+ // --- Test runner (executes when run directly) ---
320
+
321
// Smoke test: stamps the 'typing-top-center' text template onto the sample
// video at ./public/media.mp4 and writes the result under ./out.
// Failures are caught and logged (best effort), never thrown to the caller.
export async function test() {
  const bubbleMaker = new BubbleMaker();
  const cwd = process.cwd();
  const baseVideo = path.join(cwd, 'public', 'media.mp4');
  const outDir = path.join(cwd, 'out');
  if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });

  // Single sample: typing-top-center template. Only provide the text content.
  const typingSample = {
    templateName: 'typing-top-center',
    bubbleText: { text: 'TYPING TITLE' },
    fromSec: 0.5,
    toSec: 5.0
  };

  try {
    const outPath = path.join(outDir, 'media_typing_template_bubble.mp4');
    await bubbleMaker.makeBubble(baseVideo, typingSample, outPath, console.log);
    console.log('Created:', outPath);
  } catch (e) {
    console.error('Test failed:', e);
  }
}

// Run the smoke test only when this file is executed directly
// (`node utils/bubble/Bubble.js`), not on every import — the previous
// unconditional `test()` call spawned ffmpeg whenever render-server.js
// imported this module.
// NOTE(review): basename comparison is a heuristic; confirm the entry-point
// filename, or switch to an `import.meta.url` comparison if the build
// supports it.
if (process.argv[1] && path.basename(process.argv[1]) === 'Bubble.js') {
  test();
}
utils/bubble/bg-utils.js ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sharp from 'sharp';
2
+ import fs from 'fs';
3
+ import { tempPath, escapeXml } from './helpers.js';
4
+
5
+ // Accept positional args for compatibility with Bubble.js caller
6
// Render a text label onto a rounded-rectangle background and write it to a
// temporary PNG. The box size is *estimated* from character count (~0.6em per
// character), not measured, so wide glyphs or non-Latin text may overflow.
// Returns { path, width, height } where width/height are read back from the
// rasterized file.
//
// NOTE(review): the declared order (boxColor, boxBorderW, paddingX, paddingY,
// radius, fontColor) does NOT match the visible Bubble.js call site, which
// passes (fontColor, boxColor, radius, paddingX, paddingY) after fontName —
// as called, fontColor lands in boxColor, radius in paddingX, etc. Confirm
// the intended order before relying on colors/radius.
// NOTE(review): boxBorderW is accepted but never used in this body.
// Accept positional args for compatibility with Bubble.js caller
export async function createTextBackgroundPng(text, fontSize=40, fontName='Arial', boxColor='#ffffff', boxBorderW=0, paddingX=20, paddingY=8, radius=10, fontColor='#000000'){
  // estimate size and create an SVG with rounded rect and centered text
  const paddingXPx = Math.round(paddingX || 20);
  const paddingYPx = Math.round(paddingY || 8);
  // Coerce null/undefined to '' so .length below is safe.
  const safeText = (text == null) ? '' : String(text);
  // Width heuristic: ~0.6 * fontSize per char (min 1 char, min 60px total).
  const estimatedWidth = Math.max(60, Math.round((fontSize || 40) * Math.max(1, safeText.length) * 0.6) + paddingXPx * 2);
  const estimatedHeight = Math.max(24, Math.round((fontSize || 40) * 1.4) + paddingYPx * 2);
  const rx = Math.max(0, Math.round(radius || 0));
  // All dynamic SVG content goes through escapeXml to keep the markup valid.
  const svg = `<?xml version="1.0" encoding="utf-8"?>\n<svg xmlns='http://www.w3.org/2000/svg' width='${estimatedWidth}' height='${estimatedHeight}'>\n <rect x='0' y='0' width='100%' height='100%' rx='${rx}' ry='${rx}' fill='${boxColor || '#ffffff'}' stroke='none' />\n <text x='50%' y='50%' dominant-baseline='middle' text-anchor='middle' font-family='${escapeXml(fontName)}' font-size='${fontSize}' fill='${escapeXml(fontColor)}'>${escapeXml(safeText)}</text>\n</svg>`;
  const tmp = tempPath('text-bg','png');
  // Rasterize the SVG via sharp, then read back the actual dimensions.
  await sharp(Buffer.from(svg)).png().toFile(tmp);
  const meta = await sharp(tmp).metadata();
  return { path: tmp, width: meta.width, height: meta.height };
}
20
+
21
+ // Accept positional args for compatibility
22
// Resize an image to (width x height) with cover-crop, optionally round its
// corners and/or place it centred on a solid background colour. Returns the
// path of the temporary PNG that was written.
// Accept positional args for compatibility
export async function processImageWithBg(srcPath, width, height, backgroundColor=null, radius=0){
  const plainOut = tempPath('img-out','png');
  const cornerRadius = Math.max(0, Math.round(radius || 0));

  // Cover-crop to the requested dimensions first.
  const resizedBuf = await sharp(srcPath)
    .resize(width, height, { fit: 'cover', position: 'centre' })
    .png()
    .toBuffer();

  // Apply rounded corners by masking with an SVG rect (dest-in keeps only
  // the pixels inside the rounded rectangle).
  let overlayBuf = resizedBuf;
  if (cornerRadius > 0) {
    const maskSvg = `<?xml version="1.0" encoding="utf-8"?><svg xmlns="http://www.w3.org/2000/svg" width="${width}" height="${height}"><rect x="0" y="0" rx="${cornerRadius}" ry="${cornerRadius}" width="${width}" height="${height}" fill="#fff"/></svg>`;
    overlayBuf = await sharp(resizedBuf)
      .composite([{ input: Buffer.from(maskSvg), blend: 'dest-in' }])
      .png()
      .toBuffer();
  }

  // With a background colour, composite the (possibly rounded) image onto a
  // solid canvas; otherwise just write the overlay out directly.
  if (backgroundColor) {
    const compositeOut = tempPath('img-composite','png');
    await sharp({ create: { width, height, channels: 4, background: backgroundColor } })
      .png()
      .composite([{ input: overlayBuf, gravity: 'centre' }])
      .png()
      .toFile(compositeOut);
    return compositeOut;
  }

  await sharp(overlayBuf).png().toFile(plainOut);
  return plainOut;
}
utils/bubble/helpers.js ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os from 'os';
2
+ import path from 'path';
3
+ import fs from 'fs';
4
+ import crypto from 'crypto';
5
+
6
// Build a unique-ish path in the OS temp dir: "<prefix>-<ts36>-<hex8>.<ext>".
// Uniqueness comes from a base-36 timestamp plus 4 random bytes.
export function tempPath(prefix, ext) {
  const stamp = Date.now().toString(36);
  const nonce = crypto.randomBytes(4).toString('hex');
  return path.join(os.tmpdir(), `${prefix}-${stamp}-${nonce}.${ext}`);
}
10
+
11
// Escape the five XML-reserved characters for safe embedding in SVG/XML.
// A single regex pass means '&' in the input is escaped exactly once and
// the entities we insert are never re-escaped.
export function escapeXml(s) {
  const entity = (ch) => (
    ch === '&' ? '&amp;'
      : ch === '<' ? '&lt;'
        : ch === '>' ? '&gt;'
          : ch === '"' ? '&quot;'
            : '&apos;'
  );
  return ('' + s).replace(/[&<>"']/g, entity);
}
14
+
15
// Escape a string for use inside an ffmpeg drawtext `text='…'` option.
// Backslashes MUST be escaped first: the original order (':' and '\'' first,
// '\\' last) re-doubled the backslashes it had just inserted, producing
// broken filter strings like "a\\:b" instead of "a\:b".
export function escapeText(s) {
  return ('' + s)
    .replace(/\\/g, '\\\\')
    .replace(/:/g, '\\:')
    .replace(/'/g, "\\'");
}
18
+
19
// Resolve an audio-effect key or path to an existing file, or null.
// Lookup order: bundled effects dir (matched by basename, any extension),
// then an existing absolute path, then a path relative to cwd.
export function resolveAudioPath(audioKey) {
  if (!audioKey) return null;

  // 'typewriter' is an alias for the 'click' sample.
  const key = audioKey === 'typewriter' ? 'click' : audioKey;

  const assetsDir = path.join(process.cwd(), 'public', 'assets', 'audio-effects');
  if (fs.existsSync(assetsDir)) {
    for (const file of fs.readdirSync(assetsDir)) {
      if (path.parse(file).name === key) {
        return path.join(assetsDir, file);
      }
    }
  }

  // fallback: if key is already a path
  if (path.isAbsolute(key) && fs.existsSync(key)) return key;

  const candidate = path.join(process.cwd(), key);
  return fs.existsSync(candidate) ? candidate : null;
}
utils/bubble/layout.js ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// Scale a template dimension value. Numbers scale directly; strings that are
// a plain integer with an optional 'px' suffix are parsed and scaled; any
// other value (e.g. '50%', 'auto') passes through untouched.
export function scaleTemplate(value, scale) {
  if (typeof value === 'number') {
    return Math.round(value * scale);
  }
  const isPixelString = typeof value === 'string' && /^\d+(px)?$/.test(value);
  if (isPixelString) {
    return Math.round(parseInt(value, 10) * scale);
  }
  return value;
}
6
+
7
+ // compute overlay x,y from overlay width/height, extra positioning and video size
8
// compute overlay x,y from overlay width/height, extra positioning and video size
// Padding values are percentages of the video dimensions (default 5%);
// unrecognised positions fall back to centring on that axis.
export function computeXY(overlayW, overlayH, extra = {}, vw = 0, vh = 0) {
  const padX = extra['paddingX'] || 5;
  const padY = extra['paddingY'] || 5;
  const posX = extra['positionX'] || 'center';
  const posY = extra['positionY'] || 'center';

  const padXPixels = (padX / 100) * vw;
  const padYPixels = (padY / 100) * vh;

  // NOTE(review): 'top'/'bottom' are also accepted on the X axis here —
  // looks copy-pasted from the Y handling; confirm whether that is intended.
  let x;
  if (posX === 'left' || posX === 'top') {
    x = Math.round(padXPixels);
  } else if (posX === 'right' || posX === 'bottom') {
    x = Math.round(vw - overlayW - padXPixels);
  } else {
    x = Math.round((vw - overlayW) / 2);
  }

  let y;
  if (posY === 'top') {
    y = Math.round(padYPixels);
  } else if (posY === 'bottom') {
    y = Math.round(vh - overlayH - padYPixels);
  } else {
    y = Math.round((vh - overlayH) / 2);
  }

  return { x, y };
}
utils/bubble/typewriter.js ADDED
File without changes