diff --git a/src/app/api/video-export/route.ts b/src/app/api/video-export/route.ts index dde43dd..1dafa33 100644 --- a/src/app/api/video-export/route.ts +++ b/src/app/api/video-export/route.ts @@ -31,9 +31,9 @@ export async function POST(request: NextRequest) { console.log(`πŸ“Š Project: ${project.title}, tracks: ${tracks?.length || 0}`); // Check if there's content - const hasContent = tracks?.length > 0 && + const hasContent = tracks?.length > 0 && Object.values(keyframes || {}).some((kfs: any) => kfs?.length > 0); - + if (!hasContent) { return NextResponse.json({ success: true, @@ -77,26 +77,26 @@ export async function POST(request: NextRequest) { // Convert relative URLs to absolute file paths for Remotion const processedKeyframes: Record = {}; - + for (const [trackId, kfs] of Object.entries(keyframes || {})) { processedKeyframes[trackId] = (kfs as any[]).map((kf: any) => { let url = kf.data?.url || kf.url; - + // Skip blob URLs - they won't work in server-side rendering if (url?.startsWith('blob:')) { console.warn(`⚠️ Skipping blob URL for keyframe ${kf.id}`); return { ...kf, data: { ...kf.data, url: null } }; } - + // Convert relative URLs to absolute http:// URLs using localhost if (url && !url.startsWith('http')) { // Ensure URL starts with / const urlPath = url.startsWith('/') ? 
url : `/${url}`; url = `http://localhost:3000${urlPath}`; } - + console.log(`πŸ“ URL for keyframe ${kf.id}: ${url}`); - + return { ...kf, data: { ...kf.data, url }, @@ -117,7 +117,7 @@ export async function POST(request: NextRequest) { // Use project's width and height if available, otherwise calculate from aspect ratio let outputWidth = project.width; let outputHeight = project.height; - + if (!outputWidth || !outputHeight) { // Fallback to aspect ratio calculation switch (project.aspectRatio) { @@ -138,7 +138,7 @@ export async function POST(request: NextRequest) { outputHeight = 1080; } } - + console.log(`πŸ“ Output resolution: ${outputWidth}x${outputHeight} (${project.aspectRatio})`); // Select composition with explicit dimensions @@ -147,43 +147,65 @@ export async function POST(request: NextRequest) { id: 'MainComposition', inputProps, }); - + // Override composition dimensions with project settings composition.width = outputWidth; composition.height = outputHeight; + // Calculate duration based on content (latest keyframe end time) + let maxDurationMs = 0; + for (const [trackId, kfs] of Object.entries(processedKeyframes)) { + for (const kf of kfs as any[]) { + // Use visual duration (kf.duration) not original data duration + const duration = kf.duration || 0; + const endTime = (kf.timestamp || 0) + duration; + if (endTime > maxDurationMs) { + maxDurationMs = endTime; + } + } + } + + // Convert to frames (30 FPS default) + const fps = composition.fps || 30; + const durationInFrames = Math.max(Math.ceil((maxDurationMs / 1000) * fps), 1); + composition.durationInFrames = durationInFrames; + + console.log(`⏱️ Auto-calculated duration: ${maxDurationMs}ms (${durationInFrames} frames)`); + console.log('πŸŽ₯ Rendering video...'); // Collect audio files for later merging with volume info // Audio keyframes have their own timestamps, which are used directly for synchronization - const audioFiles: { url: string; startTime: number; volume: number }[] = []; + const 
audioFiles: { url: string; startTime: number; duration: number; volume: number }[] = []; for (const [trackId, kfs] of Object.entries(processedKeyframes)) { const track = tracks.find((t: any) => t.id === trackId); if (track?.type === 'music' || track?.type === 'voiceover') { // Skip if track is muted if (track.muted) continue; - + const trackVolume = track.volume ?? 100; - + for (const kf of kfs as any[]) { if (kf.data?.url) { // Use keyframe volume if set, otherwise use track volume const keyframeVolume = kf.data.volume ?? null; const finalVolume = keyframeVolume !== null ? keyframeVolume : trackVolume; - + // Use the audio keyframe's own timestamp directly // This ensures audio starts at the correct time relative to the video timeline const audioStartTime = kf.timestamp / 1000; // Convert milliseconds to seconds - + const audioDuration = kf.duration / 1000; // keyframe duration in seconds + audioFiles.push({ url: kf.data.url, startTime: audioStartTime, // Audio keyframe's timestamp in seconds + duration: audioDuration, // Audio duration (trimmed) volume: finalVolume / 100, // Convert to 0-2 range (100% = 1.0) }); } } } - } + } // Render the video (without audio from AudioKeyFrame since it uses HTML5 Audio) await renderMedia({ @@ -196,51 +218,66 @@ export async function POST(request: NextRequest) { console.log('βœ… Video rendered to:', outputPath); - // If there are audio files, merge them with FFmpeg - if (audioFiles.length > 0) { + // If there are audio files, merge them with FFmpeg + if (audioFiles.length > 0) { - const { execSync } = await import('child_process'); + const { execSync } = await import('child_process'); const tempVideoPath = outputPath.replace(`.${format}`, `_temp.${format}`); - + // Rename original video to temp const fs = await import('fs/promises'); await fs.rename(outputPath, tempVideoPath); - + try { // Convert URLs to file paths const audioPaths = audioFiles.map(af => { const audioPath = af.url.startsWith('http://localhost:3000/') ? 
path.join(publicDir, af.url.replace('http://localhost:3000/', '')) : af.url; - return { path: audioPath, startTime: af.startTime, volume: af.volume }; - }); + return { path: audioPath, startTime: af.startTime, duration: af.duration, volume: af.volume }; + }); if (audioPaths.length === 1) { // Single audio file - merge with volume and delay const ap = audioPaths[0]; const delayMs = Math.round(ap.startTime * 1000); - // Apply delay and volume using filter_complex - const filterComplex = delayMs > 0 || ap.volume !== 1.0 - ? `-filter_complex "[1:a]volume=${ap.volume}${delayMs > 0 ? `,adelay=${delayMs}|${delayMs}` : ''}[aout]"` - : ''; - const mapAudio = delayMs > 0 || ap.volume !== 1.0 ? '-map "[aout]"' : '-map 1:a:0'; - const ffmpegCmd = `ffmpeg -y -i "${tempVideoPath}" -i "${ap.path}" ${filterComplex} -c:v copy -c:a aac -map 0:v:0 ${mapAudio} -shortest "${outputPath}"`; + + // Build filter chain: trim -> reset timestamps -> volume -> delay + const filters = []; + if (ap.duration) filters.push(`atrim=duration=${ap.duration}`); + filters.push('asetpts=PTS-STARTPTS'); + if (ap.volume !== 1.0) filters.push(`volume=${ap.volume}`); + if (delayMs > 0) filters.push(`adelay=${delayMs}|${delayMs}`); + + const filterString = filters.join(','); + const filterComplex = `-filter_complex "[1:a]${filterString}[aout]"`; + + const ffmpegCmd = `ffmpeg -y -i "${tempVideoPath}" -i "${ap.path}" ${filterComplex} -c:v copy -c:a aac -map 0:v:0 -map "[aout]" -shortest "${outputPath}"`; execSync(ffmpegCmd, { stdio: 'pipe' }); } else { // Multiple audio files - mix them together with individual volumes // Build FFmpeg filter complex to mix all audio tracks with volume control const inputs = audioPaths.map((ap, i) => `-i "${ap.path}"`).join(' '); - // Apply volume and delay to each audio track - const volumeAndDelays = audioPaths.map((ap, i) => - `[${i + 1}:a]volume=${ap.volume},adelay=${Math.round(ap.startTime * 1000)}|${Math.round(ap.startTime * 1000)}[a${i}]` - ).join(';'); + + // Apply trim, 
reset pts, volume, and delay to each audio track + const processedInputs = audioPaths.map((ap, i) => { + const filters = []; + if (ap.duration) filters.push(`atrim=duration=${ap.duration}`); + filters.push('asetpts=PTS-STARTPTS'); + filters.push(`volume=${ap.volume}`); + const delayMs = Math.round(ap.startTime * 1000); + filters.push(`adelay=${delayMs}|${delayMs}`); + + return `[${i + 1}:a]${filters.join(',')}[a${i}]`; + }).join(';'); + const mixInputs = audioPaths.map((_, i) => `[a${i}]`).join(''); - const filterComplex = `${volumeAndDelays};${mixInputs}amix=inputs=${audioPaths.length}:duration=longest[aout]`; + const filterComplex = `${processedInputs};${mixInputs}amix=inputs=${audioPaths.length}:duration=longest[aout]`; const ffmpegCmd = `ffmpeg -y -i "${tempVideoPath}" ${inputs} -filter_complex "${filterComplex}" -map 0:v:0 -map "[aout]" -c:v copy -c:a aac "${outputPath}"`; execSync(ffmpegCmd, { stdio: 'pipe' }); } - + // Clean up temp file await fs.unlink(tempVideoPath); console.log('βœ… Audio merged successfully'); diff --git a/src/components/video-editor/TimelineControls.tsx b/src/components/video-editor/TimelineControls.tsx index 85c02e5..d04ae62 100644 --- a/src/components/video-editor/TimelineControls.tsx +++ b/src/components/video-editor/TimelineControls.tsx @@ -12,7 +12,17 @@ import { SkipForward, ChevronsLeft, ChevronsRight, + Plus, + Video, + Music, + Mic, } from 'lucide-react'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '@/components/ui/dropdown-menu'; const MIN_ZOOM = 0.1; const MAX_ZOOM = 10; @@ -33,9 +43,23 @@ export const TimelineControls = React.memo(function TimelineControls({ className, ...props }: TimelineControlsProps) { - const { player, playerState, setPlayerState, setCurrentTimestamp } = useStudio(); + const { player, playerState, setPlayerState, setCurrentTimestamp, addTrack, currentProject } = useStudio(); const { t } = useI18n(); + const handleAddTrack = useCallback(async (type: 
'video' | 'audio') => { + if (!currentProject) return; + + await addTrack({ + projectId: currentProject.id, + type, + label: type === 'video' ? 'Video Track' : 'Audio Track', + locked: false, + muted: false, + order: 0, // Order is handled by backend or reducer usually, but providing 0 is safe + volume: 100 + }); + }, [addTrack, currentProject]); + // Format time as MM:SS.ms - memoized const formatTime = useCallback((seconds: number): string => { const mins = Math.floor(seconds / 60); @@ -254,7 +278,33 @@ export const TimelineControls = React.memo(function TimelineControls({ {/* Empty spacer for balance */} -
+ {/* Track Controls */} +
+ + + + + + handleAddTrack('video')} + > + + handleAddTrack('audio')} + > + + {t('videoEditor.tracks.addAudioTrack')} + + + +
); }); diff --git a/src/components/video-editor/TimelineRuler.tsx b/src/components/video-editor/TimelineRuler.tsx index 6041598..de38dfe 100644 --- a/src/components/video-editor/TimelineRuler.tsx +++ b/src/components/video-editor/TimelineRuler.tsx @@ -32,40 +32,25 @@ const MIN_MAJOR_SPACING_PX = 96; const EPSILON = 0.000_01; function formatTickLabel(seconds: number, majorInterval: number) { - if (seconds < EPSILON) { - return "0s"; + if (Math.abs(seconds) < EPSILON) { + return "0:00"; } - if (seconds >= 3600) { - const hours = seconds / 3600; - const decimals = majorInterval < 3600 ? 1 : 0; - return `${hours.toFixed(decimals)}h`; - } - - if (seconds >= 60) { - const minutes = seconds / 60; - const decimals = majorInterval < 60 ? 1 : 0; - return `${minutes.toFixed(decimals)}m`; - } + const minutes = Math.floor(seconds / 60); + const remainingSeconds = seconds % 60; - if (seconds >= 1) { - const isNearInteger = Math.abs(seconds - Math.round(seconds)) < 0.005; - const decimals = - isNearInteger && majorInterval >= 1 - ? 0 - : majorInterval < 1 - ? Math.ceil(-Math.log10(majorInterval)) - : Math.min( - 2, - Math.max( - 1, - Math.ceil(-Math.log10(seconds - Math.floor(seconds))), - ), - ); - return `${seconds.toFixed(decimals)}s`; + // Decide on decimal places based on interval + // If interval is less than 1 second, we likely need decimals + if (majorInterval < 1) { + // Show decimals - e.g. 0:00.50 + // Keep seconds part fixed width if possible, or just standard numeric + const s = remainingSeconds.toFixed(2).padStart(5, '0'); + return `${minutes}:${s}`; } - return `${Math.round(seconds * 1000)}ms`; + // Integer seconds - e.g. 
0:10, 1:05 + const s = Math.floor(remainingSeconds).toString().padStart(2, '0'); + return `${minutes}:${s}`; } function chooseMajorInterval(pixelsPerSecond: number) { diff --git a/src/components/video-editor/VideoComposition.tsx b/src/components/video-editor/VideoComposition.tsx index cca512c..48928db 100644 --- a/src/components/video-editor/VideoComposition.tsx +++ b/src/components/video-editor/VideoComposition.tsx @@ -52,11 +52,11 @@ interface MediaKeyFrameProps { function MediaKeyFrame({ keyframe, canvasWidth, canvasHeight }: MediaKeyFrameProps) { // Get fit mode from keyframe data, default to 'contain' const fitMode: FitMode = keyframe.data.fitMode || 'contain'; - + // For now, we'll use a placeholder for media dimensions // In a real implementation, we would need to load the media to get its actual dimensions // For this implementation, we'll use CSS object-fit which handles this automatically - + const getObjectFit = (mode: FitMode): 'contain' | 'cover' | 'fill' => { switch (mode) { case 'contain': @@ -69,17 +69,17 @@ function MediaKeyFrame({ keyframe, canvasWidth, canvasHeight }: MediaKeyFramePro return 'contain'; } }; - + const objectFit = getObjectFit(fitMode); - + // Normalize URL to handle relative paths (especially on Windows) const normalizedUrl = keyframe.data.url ? 
normalizeUrl(keyframe.data.url) : ''; - + // Debug log for video URLs if (keyframe.data.type === 'video' && normalizedUrl) { console.log('[VideoComposition] Video URL - original:', keyframe.data.url, 'normalized:', normalizedUrl); } - + if (keyframe.data.type === 'video') { return ( ); } - + if (keyframe.data.type === 'image') { return ( ); } - + return null; } @@ -124,7 +124,7 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: AudioKeyFrameProps const lastFrameTimeRef = useRef(Date.now()); const pauseCheckIntervalRef = useRef(null); const detectedDurationRef = useRef(null); - + // Calculate the original audio duration (in seconds) // Priority: detected duration > originalDuration > keyframe duration const getEffectiveDuration = () => { @@ -136,15 +136,15 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: AudioKeyFrameProps } return keyframe.duration / 1000; }; - + const originalDurationSec = getEffectiveDuration(); - + // Set up interval to check if playback has stopped (frame not changing) useEffect(() => { if (keyframe.data.type !== 'music' && keyframe.data.type !== 'voiceover') { return; } - + // Check every 100ms if frame has stopped changing (player paused) pauseCheckIntervalRef.current = setInterval(() => { const timeSinceLastFrame = Date.now() - lastFrameTimeRef.current; @@ -153,29 +153,29 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: AudioKeyFrameProps audioRef.current.pause(); } }, 100); - + return () => { if (pauseCheckIntervalRef.current) { clearInterval(pauseCheckIntervalRef.current); } }; }, [keyframe.data.type]); - + useEffect(() => { if (keyframe.data.type !== 'music' && keyframe.data.type !== 'voiceover') { return; } - + // Update last frame time lastFrameTimeRef.current = Date.now(); - + // Create audio element if not exists if (!audioRef.current && keyframe.data.url) { // Normalize URL to handle relative paths (especially on Windows) const normalizedUrl = normalizeUrl(keyframe.data.url); 
audioRef.current = new Audio(normalizedUrl); audioRef.current.preload = 'auto'; - + // Detect actual audio duration when loaded audioRef.current.addEventListener('loadedmetadata', () => { if (audioRef.current && audioRef.current.duration && isFinite(audioRef.current.duration)) { @@ -183,33 +183,33 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: AudioKeyFrameProps console.log(`AudioKeyFrame: Detected duration ${audioRef.current.duration}s for ${keyframe.data.url?.substring(0, 50)}...`); } }); - + audioRef.current.addEventListener('durationchange', () => { if (audioRef.current && audioRef.current.duration && isFinite(audioRef.current.duration)) { detectedDurationRef.current = audioRef.current.duration; } }); } - + const audio = audioRef.current; - + if (!audio) return; // Calculate and apply final volume const keyframeVolume = keyframe.data.volume; const finalVolume = calculateFinalVolume(trackVolume, keyframeVolume, trackMuted); const gain = volumeToGain(finalVolume); audio.volume = Math.max(0, Math.min(1, gain)); // Clamp to 0-1 for HTML5 Audio - + const currentTimeSec = frame / fps; - + // Only update if frame changed significantly (avoid micro-updates) if (Math.abs(frame - lastFrameRef.current) < 2 && lastFrameRef.current !== -1) { return; } lastFrameRef.current = frame; - + // Use detected duration if available, otherwise fall back to stored duration const effectiveDuration = detectedDurationRef.current || originalDurationSec; - + // Check if current time is within the audio's actual duration if (currentTimeSec >= 0 && currentTimeSec < effectiveDuration) { // Sync audio position if it drifted too much (more than 0.3 seconds) @@ -217,7 +217,7 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: AudioKeyFrameProps if (drift > 0.3 || audio.paused) { audio.currentTime = currentTimeSec; } - + // Play if paused if (audio.paused) { audio.play().catch(() => { @@ -231,7 +231,7 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: 
AudioKeyFrameProps } } }, [frame, fps, keyframe.data.type, keyframe.data.url, originalDurationSec]); - + // Cleanup on unmount useEffect(() => { return () => { @@ -245,7 +245,7 @@ function AudioKeyFrame({ keyframe, trackVolume, trackMuted }: AudioKeyFrameProps } }; }, []); - + // This component doesn't render anything visible return null; } @@ -261,13 +261,13 @@ interface TrackSequenceProps { function VideoTrackSequence({ track, keyframes, canvasWidth, canvasHeight }: TrackSequenceProps) { // Sort keyframes by timestamp const sortedKeyframes = [...keyframes].sort((a, b) => a.timestamp - b.timestamp); - + return ( <> {sortedKeyframes.map((keyframe) => { const startFrame = msToFrames(keyframe.timestamp); const durationFrames = msToFrames(keyframe.duration); - + return ( - @@ -291,28 +291,28 @@ function VideoTrackSequence({ track, keyframes, canvasWidth, canvasHeight }: Tra function AudioTrackSequence({ track, keyframes, canvasWidth, canvasHeight }: TrackSequenceProps) { // Sort keyframes by timestamp const sortedKeyframes = [...keyframes].sort((a, b) => a.timestamp - b.timestamp); - + // Get track volume and muted state, with defaults const trackVolume = track.volume ?? 100; const trackMuted = track.muted ?? 
false; - + return ( <> {sortedKeyframes.map((keyframe) => { const startFrame = msToFrames(keyframe.timestamp); - // For audio, use originalDuration if available (actual audio length) - // This ensures the Sequence doesn't end before the audio finishes - const audioDuration = keyframe.data.originalDuration || keyframe.duration; + // Use the keyframe's duration (visual duration on timeline) for the Sequence length + // This ensures the component unmounts (and audio stops) when the keyframe ends + const audioDuration = keyframe.duration; const durationFrames = msToFrames(audioDuration); - - console.log(`AudioTrackSequence: keyframe ${keyframe.id}, duration=${keyframe.duration}ms, originalDuration=${keyframe.data.originalDuration}ms, using=${audioDuration}ms, frames=${durationFrames}`); - + + console.log(`AudioTrackSequence: keyframe ${keyframe.id}, duration=${keyframe.duration}ms, frames=${durationFrames}`); + // Skip if no valid URL if (!keyframe.data.url) { console.warn(`Skipping audio keyframe ${keyframe.id} - no URL`); return null; } - + return ( {/* HTML5 audio for preview playback */} - @@ -339,11 +339,11 @@ export function MainComposition({ project, tracks, keyframes }: VideoComposition const { width: canvasWidth, height: canvasHeight } = project.width && project.height ? 
{ width: project.width, height: project.height } : getAspectRatioDimensions(project.aspectRatio); - + // Separate tracks by type const videoTracks = tracks.filter(t => t.type === 'video'); const audioTracks = tracks.filter(t => t.type === 'music' || t.type === 'voiceover'); - + return ( {/* Render video tracks (layered) */} @@ -359,7 +359,7 @@ export function MainComposition({ project, tracks, keyframes }: VideoComposition /> ); })} - + {/* Render audio tracks */} {audioTracks.map((track) => { const trackKeyframes = keyframes[track.id] || []; diff --git a/src/components/video-editor/VideoEditorView.tsx b/src/components/video-editor/VideoEditorView.tsx index adaa820..e7f763b 100644 --- a/src/components/video-editor/VideoEditorView.tsx +++ b/src/components/video-editor/VideoEditorView.tsx @@ -27,6 +27,8 @@ const VideoEditorViewInternal = React.memo(function VideoEditorViewInternal({ pr setPlayer, setPlayerState, setCurrentTimestamp, + undo, + redo, } = useStudio(); const [isLoading, setIsLoading] = useState(true); @@ -49,14 +51,32 @@ const VideoEditorViewInternal = React.memo(function VideoEditorViewInternal({ pr loadProjectData(); // eslint-disable-next-line react-hooks/exhaustive-deps }, [projectId]); // Only re-run when projectId changes, not when loadProject reference changes + // Undo/Redo Keyboard Shortcuts + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + // Check for Ctrl+Z or Cmd+Z + if ((e.ctrlKey || e.metaKey) && e.key.toLowerCase() === 'z') { + e.preventDefault(); + if (e.shiftKey) { + redo(); + } else { + undo(); + } + } + }; + + window.addEventListener('keydown', handleKeyDown); + return () => window.removeEventListener('keydown', handleKeyDown); + }, [undo, redo]); + // Note: Tracks are no longer auto-created. Use Import or Add Media to create tracks. // Loading state - show first if (isLoading) { return ( -
(null); const [validationError, setValidationError] = useState(null); @@ -47,6 +47,14 @@ export const VideoTimeline = React.memo(function VideoTimeline({ const timelineWidth = useMemo(() => pixelsPerSecond * durationSeconds, [pixelsPerSecond, durationSeconds]); const pixelsPerMs = useMemo(() => pixelsPerSecond / 1000, [pixelsPerSecond]); + const [containerWidth, setContainerWidth] = useState(0); + + // Calculate actual display width (max of content and container) + const displayWidth = useMemo(() => Math.max(timelineWidth, containerWidth), [timelineWidth, containerWidth]); + + // Calculate visual duration for Ruler to match the stretched width + const visualDuration = useMemo(() => displayWidth / pixelsPerSecond, [displayWidth, pixelsPerSecond]); + // Sort tracks by type order - memoized const sortedTracks = useMemo(() => { return [...tracks].sort((a, b) => { @@ -65,6 +73,32 @@ export const VideoTimeline = React.memo(function VideoTimeline({ [currentTimestamp, pixelsPerSecond] ); + // Auto-expand timeline duration based on content + useEffect(() => { + if (!project) return; + + const bufferMs = 60000; // 60 seconds buffer to allow "infinite" feel + const minDurationMs = 60000; // 60 seconds minimum base + + // Find the end time of the last keyframe + let maxContentTime = 0; + Object.values(keyframes).flat().forEach(kf => { + const outputEnd = kf.timestamp + kf.duration; + if (outputEnd > maxContentTime) { + maxContentTime = outputEnd; + } + }); + + // Desired duration is content end + buffer + const desiredDuration = Math.max(minDurationMs, maxContentTime + bufferMs); + + // Only update if difference is significant (> 1s) to prevent jitter/loops + if (Math.abs(project.duration - desiredDuration) > 1000) { + // console.log('VideoTimeline: Auto-expanding duration from', project.duration, 'to', desiredDuration); + updateProject(project.id, { duration: desiredDuration }); + } + }, [keyframes, project.duration, project.id, updateProject]); + const handleTimelineClick = 
useCallback((event: MouseEvent) => { const rect = timelineRef.current?.getBoundingClientRect(); if (!rect) return; @@ -72,16 +106,17 @@ export const VideoTimeline = React.memo(function VideoTimeline({ // Check if click was on empty space (not on a keyframe) const target = event.target as HTMLElement; const isKeyframeClick = target.closest('[aria-checked]'); - + // Clear selection when clicking on empty space if (!isKeyframeClick) { clearSelection(); } - const relativeX = event.clientX - rect.left; + const scrollLeft = timelineRef.current?.scrollLeft || 0; + const relativeX = (event.clientX - rect.left) + scrollLeft; // Convert pixels to seconds const timestamp = relativeX / pixelsPerSecond; - + // Clamp timestamp to valid range const clampedTimestamp = Math.max(0, Math.min(durationSeconds, timestamp)); setCurrentTimestamp(clampedTimestamp); @@ -92,19 +127,36 @@ export const VideoTimeline = React.memo(function VideoTimeline({ } }, [pixelsPerSecond, durationSeconds, setCurrentTimestamp, player, clearSelection]); + // Observer for container width + useEffect(() => { + if (!timelineRef.current) return; + + const observer = new ResizeObserver((entries) => { + for (const entry of entries) { + setContainerWidth(entry.contentRect.width); + } + }); + + observer.observe(timelineRef.current); + return () => observer.disconnect(); + }, []); + const getTrackIdForMediaType = useCallback(async (mediaType: string): Promise => { // Map media type to track type - let trackType: 'video' | 'music' | 'voiceover'; - if (mediaType === 'image' || mediaType === 'video') { - trackType = 'video'; - } else if (mediaType === 'music') { - trackType = 'music'; - } else { - trackType = 'voiceover'; - } + // Map media type to track type + const isVideoType = mediaType === 'image' || mediaType === 'video'; + const targetTrackType = isVideoType ? 
'video' : 'audio'; // Find existing track of this type - const existingTrack = tracks.find(t => t.type === trackType); + // For audio, we allow dropping on audio, music, or voiceover tracks + const existingTrack = tracks.find(t => { + if (isVideoType) { + return t.type === 'video'; + } else { + return t.type === 'audio' || t.type === 'music' || t.type === 'voiceover'; + } + }); + if (existingTrack) { return existingTrack.id; } @@ -112,8 +164,8 @@ export const VideoTimeline = React.memo(function VideoTimeline({ // Create new track const trackId = await addTrack({ projectId: project.id, - type: trackType, - label: `${trackType.charAt(0).toUpperCase() + trackType.slice(1)} Track`, + type: targetTrackType, + label: isVideoType ? 'Video Track' : 'Audio Track', locked: false, order: tracks.length, volume: 100, // Default volume @@ -128,10 +180,10 @@ export const VideoTimeline = React.memo(function VideoTimeline({ return new Promise((resolve) => { const TIMEOUT_MS = 15000; // Increased timeout for blob URLs let resolved = false; - + // Normalize URL to handle relative paths (especially on Windows) const normalizedUrl = normalizeUrl(url); - + const resolveOnce = (duration: number, source: string) => { if (!resolved) { resolved = true; @@ -139,17 +191,17 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolve(duration); } }; - + // Timeout fallback const timeoutId = setTimeout(() => { console.warn(`Media duration detection timed out for: ${normalizedUrl}`); resolveOnce(5000, 'timeout'); }, TIMEOUT_MS); - + if (type === 'music' || type === 'voiceover') { const audio = new Audio(); audio.preload = 'auto'; // Changed from 'metadata' to 'auto' for better blob URL support - + // Try multiple events for duration detection const handleDurationChange = () => { if (audio.duration && isFinite(audio.duration) && audio.duration > 0) { @@ -158,7 +210,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolveOnce(durationMs, 'durationchange'); } }; 
- + const handleLoadedMetadata = () => { if (audio.duration && isFinite(audio.duration) && audio.duration > 0) { clearTimeout(timeoutId); @@ -166,7 +218,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolveOnce(durationMs, 'loadedmetadata'); } }; - + const handleCanPlayThrough = () => { if (audio.duration && isFinite(audio.duration) && audio.duration > 0) { clearTimeout(timeoutId); @@ -174,18 +226,18 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolveOnce(durationMs, 'canplaythrough'); } }; - + audio.addEventListener('durationchange', handleDurationChange); audio.addEventListener('loadedmetadata', handleLoadedMetadata); audio.addEventListener('canplaythrough', handleCanPlayThrough); audio.addEventListener('loadeddata', handleCanPlayThrough); - + audio.addEventListener('error', (e) => { clearTimeout(timeoutId); console.error('Audio duration detection error:', e); resolveOnce(5000, 'error'); }); - + audio.src = normalizedUrl; // Force load for some browsers (skip in test environment where load() is not implemented) if (typeof audio.load === 'function') { @@ -198,7 +250,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ } else if (type === 'video') { const video = document.createElement('video'); video.preload = 'auto'; // Changed from 'metadata' to 'auto' - + const handleDurationChange = () => { if (video.duration && isFinite(video.duration) && video.duration > 0) { clearTimeout(timeoutId); @@ -206,7 +258,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolveOnce(durationMs, 'durationchange'); } }; - + const handleLoadedMetadata = () => { if (video.duration && isFinite(video.duration) && video.duration > 0) { clearTimeout(timeoutId); @@ -214,7 +266,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolveOnce(durationMs, 'loadedmetadata'); } }; - + const handleCanPlayThrough = () => { if (video.duration && isFinite(video.duration) && video.duration > 0) { 
clearTimeout(timeoutId); @@ -222,18 +274,18 @@ export const VideoTimeline = React.memo(function VideoTimeline({ resolveOnce(durationMs, 'canplaythrough'); } }; - + video.addEventListener('durationchange', handleDurationChange); video.addEventListener('loadedmetadata', handleLoadedMetadata); video.addEventListener('canplaythrough', handleCanPlayThrough); video.addEventListener('loadeddata', handleCanPlayThrough); - + video.addEventListener('error', (e) => { clearTimeout(timeoutId); console.error('Video duration detection error:', e); resolveOnce(5000, 'error'); }); - + video.src = normalizedUrl; // Force load for some browsers (skip in test environment where load() is not implemented) if (typeof video.load === 'function') { @@ -254,7 +306,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ event.preventDefault(); setValidationError(null); setNotification(null); - + try { const mediaDataStr = event.dataTransfer.getData('application/json'); if (!mediaDataStr) return; @@ -272,17 +324,20 @@ export const VideoTimeline = React.memo(function VideoTimeline({ const mediaUrl = rawMediaUrl ? 
normalizeUrl(rawMediaUrl) : null; const mediaId = rawMediaData.id || rawMediaData.jobId || `media-${Date.now()}`; const mediaName = rawMediaData.prompt || rawMediaData.audioName || rawMediaData.name || ''; - - // Map 'audio' type to 'music' - user can drag to voiceover track later + + // Map 'audio' to 'music', and 'tts' to 'voiceover' let normalizedType: 'image' | 'video' | 'music' | 'voiceover' = mediaType; if (mediaType === 'audio') { normalizedType = 'music'; + } else if (mediaType === 'tts') { + normalizedType = 'voiceover'; } - + console.log('Drop data normalized:', { mediaType, normalizedType, mediaUrl, rawMediaUrl, mediaId, mediaName }); - const relativeX = event.clientX - rect.left; - const timestamp = Math.max(0, (relativeX / timelineWidth) * durationSeconds * 1000); + const scrollLeft = timelineRef.current?.scrollLeft || 0; + const relativeX = (event.clientX - rect.left) + scrollLeft; + const timestamp = Math.max(0, (relativeX / displayWidth) * visualDuration * 1000); // Get or create appropriate track const trackId = await getTrackIdForMediaType(normalizedType); @@ -291,7 +346,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ // This ensures the keyframe duration matches the actual media length let duration: number; let originalDuration: number; - + if (mediaUrl && (normalizedType === 'music' || normalizedType === 'voiceover' || normalizedType === 'video')) { // For audio and video, always detect duration from the actual file console.log(`Detecting duration for ${normalizedType} from: ${mediaUrl}`); @@ -316,7 +371,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ data: { type: normalizedType, mediaId: mediaId, - url: mediaUrl, + url: mediaUrl || '', prompt: rawMediaData.prompt, originalDuration, // Store original duration for waveform scaling }, @@ -336,7 +391,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ if (normalizedType === 'video' && mediaUrl) { // Skip audio processing for blob URLs 
(browser-only, can't be processed server-side) const isBlobUrl = mediaUrl.startsWith('blob:'); - + if (isBlobUrl) { console.log('Blob URL detected, skipping server-side audio processing:', mediaUrl); // For blob URLs, use client-side detection only @@ -373,88 +428,88 @@ export const VideoTimeline = React.memo(function VideoTimeline({ // Fallback to client-side detection hasAudio = await hasAudioTrack(mediaUrl); } - - if (hasAudio) { - console.log('Video has audio, creating muted version...'); - // Create muted version of the video for the video track - const mutedResponse = await fetch('/api/video-tracks/create-muted', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ videoPath: mediaUrl }), - }); - - if (mutedResponse.ok) { - const { mutedVideoPath } = await mutedResponse.json(); - console.log('[VideoTimeline] Received mutedVideoPath from API:', mutedVideoPath); - // Normalize URL to handle relative paths (especially on Windows) - finalVideoUrl = normalizeUrl(mutedVideoPath); - console.log('[VideoTimeline] Normalized mutedVideoPath:', finalVideoUrl); - console.log('βœ“ Using muted video for video track:', finalVideoUrl); - } else { - const errorData = await mutedResponse.json(); - console.error('Failed to create muted video:', errorData); - console.warn('Using original video with audio'); - } - console.log('Extracting audio from video...'); + if (hasAudio) { + console.log('Video has audio, creating muted version...'); + // Create muted version of the video for the video track + const mutedResponse = await fetch('/api/video-tracks/create-muted', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ videoPath: mediaUrl }), + }); - // Extract audio asynchronously - // Note: This requires server-side API call since FFmpeg runs on server - const audioResponse = await fetch('/api/video-tracks/extract-audio', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - 
body: JSON.stringify({ videoPath: mediaUrl }), - }); + if (mutedResponse.ok) { + const { mutedVideoPath } = await mutedResponse.json(); + console.log('[VideoTimeline] Received mutedVideoPath from API:', mutedVideoPath); + // Normalize URL to handle relative paths (especially on Windows) + finalVideoUrl = normalizeUrl(mutedVideoPath); + console.log('[VideoTimeline] Normalized mutedVideoPath:', finalVideoUrl); + console.log('βœ“ Using muted video for video track:', finalVideoUrl); + } else { + const errorData = await mutedResponse.json(); + console.error('Failed to create muted video:', errorData); + console.warn('Using original video with audio'); + } + + console.log('Extracting audio from video...'); + + // Extract audio asynchronously + // Note: This requires server-side API call since FFmpeg runs on server + const audioResponse = await fetch('/api/video-tracks/extract-audio', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ videoPath: mediaUrl }), + }); - if (!audioResponse.ok) { - throw new Error('Failed to extract audio from video'); - } + if (!audioResponse.ok) { + throw new Error('Failed to extract audio from video'); + } - const { audioPath } = await audioResponse.json(); - console.log('[VideoTimeline] Received audioPath from API:', audioPath); - // Normalize URL to handle relative paths (especially on Windows) - const normalizedAudioPath = normalizeUrl(audioPath); - console.log('[VideoTimeline] Normalized audioPath:', normalizedAudioPath); - console.log('βœ“ Audio extracted:', normalizedAudioPath); - - // Find available audio track - const audioTrackId = findAvailableAudioTrack( - tracks, - keyframes, - timestamp, - duration - ); - - console.log('Available audio track:', audioTrackId); - - if (audioTrackId) { - // Determine track type for audio keyframe - const audioTrack = tracks.find(t => t.id === audioTrackId); - const audioType = audioTrack?.type === 'voiceover' ? 
'voiceover' : 'music'; - - // Add synchronized audio keyframe - await addKeyframe({ - trackId: audioTrackId, + const { audioPath } = await audioResponse.json(); + console.log('[VideoTimeline] Received audioPath from API:', audioPath); + // Normalize URL to handle relative paths (especially on Windows) + const normalizedAudioPath = normalizeUrl(audioPath); + console.log('[VideoTimeline] Normalized audioPath:', normalizedAudioPath); + console.log('βœ“ Audio extracted:', normalizedAudioPath); + + // Find available audio track + const audioTrackId = findAvailableAudioTrack( + tracks, + keyframes, timestamp, - duration, - data: { - type: audioType, - mediaId: `${mediaId}-audio`, - url: normalizedAudioPath, - prompt: `${mediaName} (audio)`, - originalDuration: duration, - }, - }); - - console.log('βœ“ Audio keyframe added to track:', audioTrackId); - } else { - console.warn('No available audio track found'); - // Show warning notification - setNotification({ - message: 'No available audio track for extracted audio', - type: 'warning', - }); - } + duration + ); + + console.log('Available audio track:', audioTrackId); + + if (audioTrackId) { + // Determine track type for audio keyframe + const audioTrack = tracks.find(t => t.id === audioTrackId); + const audioType = audioTrack?.type === 'voiceover' ? 
'voiceover' : 'music'; + + // Add synchronized audio keyframe + await addKeyframe({ + trackId: audioTrackId, + timestamp, + duration, + data: { + type: audioType, + mediaId: `${mediaId}-audio`, + url: normalizedAudioPath, + prompt: `${mediaName} (audio)`, + originalDuration: duration, + }, + }); + + console.log('βœ“ Audio keyframe added to track:', audioTrackId); + } else { + console.warn('No available audio track found'); + // Show warning notification + setNotification({ + message: 'No available audio track for extracted audio', + type: 'warning', + }); + } } else { console.log('Video has no audio, skipping extraction'); } @@ -470,7 +525,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ } // Update keyframe data with final video URL (muted if audio was present) - keyframeData.data.url = finalVideoUrl; + keyframeData.data.url = finalVideoUrl || ''; // Add video keyframe with muted video URL if audio was present await addKeyframe(keyframeData); @@ -479,7 +534,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ setValidationError(errorMessage); console.error('Failed to handle drop:', error); } - }, [timelineWidth, durationSeconds, getTrackIdForMediaType, addKeyframe, getMediaDuration, tracks, keyframes]); + }, [displayWidth, visualDuration, getTrackIdForMediaType, addKeyframe, getMediaDuration, tracks, keyframes]); const handleDragOver = useCallback((event: DragEvent) => { event.preventDefault(); @@ -491,7 +546,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ // Find the keyframe in all tracks let sourceKeyframe: VideoKeyFrame | null = null; let sourceTrackId: string | null = null; - + for (const [trackId, trackKeyframes] of Object.entries(keyframes)) { const found = trackKeyframes.find(kf => kf.id === keyframeId); if (found) { @@ -500,12 +555,12 @@ export const VideoTimeline = React.memo(function VideoTimeline({ break; } } - + if (!sourceKeyframe || !sourceTrackId) { console.warn('Source keyframe not 
found:', keyframeId); return; } - + // If same track, just update timestamp (position change within track) if (sourceTrackId === targetTrackId) { if (Math.round(timestamp) !== sourceKeyframe.timestamp) { @@ -514,17 +569,17 @@ export const VideoTimeline = React.memo(function VideoTimeline({ } return; } - + // Get target track to update media type const targetTrack = tracks.find(t => t.id === targetTrackId); if (!targetTrack) { console.warn('Target track not found:', targetTrackId); return; } - + // Update the keyframe's trackId and media type const newMediaType = targetTrack.type === 'voiceover' ? 'voiceover' : 'music'; - + // Remove from source and add to target await removeKeyframe(keyframeId); await addKeyframe({ @@ -536,7 +591,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ type: newMediaType, }, }); - + console.log(`Moved keyframe ${keyframeId} from ${sourceTrackId} to ${targetTrackId} at ${timestamp}ms`); }, [keyframes, tracks, removeKeyframe, addKeyframe, updateKeyframe]); @@ -553,18 +608,56 @@ export const VideoTimeline = React.memo(function VideoTimeline({ } }, []); + // Handle playhead dragging + const handlePlayheadMouseDown = useCallback((e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + + const timelineEl = timelineRef.current; + if (!timelineEl) return; + + const startX = e.clientX; + const rect = timelineEl.getBoundingClientRect(); + + // We want to continue tracking mouse anywhere + const handleMouseMove = (moveEvent: globalThis.MouseEvent) => { + // Calculate relative position including scroll + const scrollLeft = timelineEl.scrollLeft || 0; + const relativeX = (moveEvent.clientX - rect.left) + scrollLeft; + + // Convert to timestamp + const timestamp = Math.max(0, relativeX / pixelsPerSecond); + + // Update timestamp + setCurrentTimestamp(timestamp); + + // Seek player + if (player && typeof player.seekTo === 'function') { + player.seekTo(Math.floor(timestamp * 30)); // 30fps assumption + } + }; + + 
const handleMouseUp = () => { + document.removeEventListener('mousemove', handleMouseMove); + document.removeEventListener('mouseup', handleMouseUp); + }; + + document.addEventListener('mousemove', handleMouseMove); + document.addEventListener('mouseup', handleMouseUp); + }, [pixelsPerSecond, player, setCurrentTimestamp]); + // Handle wheel zoom (Alt+Wheel or Ctrl+Wheel for mouse, pinch for trackpad) const handleWheel = useCallback((event: WheelEvent) => { // Check if Alt or Ctrl is pressed (for mouse wheel zoom) // Or if it's a pinch gesture (ctrlKey is true for trackpad pinch on macOS) if (event.altKey || event.ctrlKey) { event.preventDefault(); - + // Calculate zoom delta // For trackpad pinch, deltaY is typically smaller and smoother // For mouse wheel, deltaY is larger (usually 100 or -100) const delta = -event.deltaY * ZOOM_SENSITIVITY; - + // Apply zoom with exponential scaling for smoother feel const newZoom = Math.max(MIN_ZOOM, Math.min(MAX_ZOOM, zoom * (1 + delta))); setZoom(newZoom); @@ -647,7 +740,7 @@ export const VideoTimeline = React.memo(function VideoTimeline({ > {/* Validation Error Display */} {validationError && ( -
-