Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 92 additions & 0 deletions packages/core/src/runtime/init.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -123,4 +123,96 @@ describe("initSandboxRuntimeModular", () => {

expect(child.style.visibility).toBe("hidden");
});

// Regression test: a <video> nested inside a child composition carries no
// data-start of its own, so it falls outside the old timed-media cache.
// After a seek the runtime must still pause it and position it at the
// composition-local time (host time 29 minus child start 20 => 9).
it("pauses nested media that is outside the timed-media cache after a seek", () => {
// Host (root) composition element with authored dimensions.
const root = document.createElement("div");
root.setAttribute("data-composition-id", "main");
root.setAttribute("data-root", "true");
root.setAttribute("data-width", "1920");
root.setAttribute("data-height", "1080");
document.body.appendChild(root);

// Nested child composition: authored window starts at 20 with duration 16.
const child = document.createElement("div");
child.setAttribute("data-composition-id", "slide-translation");
child.setAttribute("data-start", "20");
child.setAttribute("data-duration", "16");
root.appendChild(child);

// Video placed inside the child composition; note it has no timing
// attributes of its own — timing must come from the enclosing composition.
const video = document.createElement("video");
child.appendChild(video);
// jsdom media elements don't implement playback, so stub the read-only
// properties the runtime inspects. `paused: false` simulates a playing video.
Object.defineProperty(video, "duration", { value: 20, writable: true, configurable: true });
Object.defineProperty(video, "paused", { value: false, writable: true, configurable: true });
Object.defineProperty(video, "readyState", { value: 4, writable: true, configurable: true });
Object.defineProperty(video, "currentTime", { value: 0, writable: true, configurable: true });
// Stubbed pause() flips the `paused` flag so the assertion below can observe it.
const pause = () => {
Object.defineProperty(video, "paused", { value: true, writable: true, configurable: true });
};
video.load = () => {};
video.pause = pause;

// Provide timelines on the global the runtime reads: host is 40s, child 16s.
(window as Window & { __timelines?: Record<string, RuntimeTimelineLike> }).__timelines = {
main: createMockTimeline(40),
"slide-translation": createMockTimeline(16),
};

initSandboxRuntimeModular();

// Init is expected to expose the player handle on window.__player.
const player = (
window as Window & {
__player?: { seek: (timeSeconds: number) => void };
}
).__player;
expect(player).toBeDefined();

// Seek to host time 29 — 9 seconds into the child's authored window.
player?.seek(29);

expect(video.paused).toBe(true);
// 29 (host) - 20 (child start) = 9 (composition-local media time).
expect(video.currentTime).toBe(9);
});

// Regression test: seeking past the child composition's authored end
// (start 20 + duration 16 = 36) must not advance the nested video past its
// clip — the runtime is expected to pause it and reset currentTime to 0.
it("clamps nested media to the authored host window on seek", () => {
// Host (root) composition element with authored dimensions.
const root = document.createElement("div");
root.setAttribute("data-composition-id", "main");
root.setAttribute("data-root", "true");
root.setAttribute("data-width", "1920");
root.setAttribute("data-height", "1080");
document.body.appendChild(root);

// Nested child composition: authored window starts at 20 with duration 16.
const child = document.createElement("div");
child.setAttribute("data-composition-id", "slide-translation");
child.setAttribute("data-start", "20");
child.setAttribute("data-duration", "16");
root.appendChild(child);

// Video inside the child composition, with no timing attributes of its own.
const video = document.createElement("video");
child.appendChild(video);
// jsdom media elements don't implement playback, so stub the read-only
// properties the runtime inspects. `paused: false` simulates a playing video.
Object.defineProperty(video, "duration", { value: 20, writable: true, configurable: true });
Object.defineProperty(video, "paused", { value: false, writable: true, configurable: true });
Object.defineProperty(video, "readyState", { value: 4, writable: true, configurable: true });
Object.defineProperty(video, "currentTime", { value: 0, writable: true, configurable: true });
// Stubbed pause() flips the `paused` flag so the assertion below can observe it.
const pause = () => {
Object.defineProperty(video, "paused", { value: true, writable: true, configurable: true });
};
video.load = () => {};
video.pause = pause;

// Provide timelines on the global the runtime reads: host is 40s, child 16s.
(window as Window & { __timelines?: Record<string, RuntimeTimelineLike> }).__timelines = {
main: createMockTimeline(40),
"slide-translation": createMockTimeline(16),
};

initSandboxRuntimeModular();

// Init is expected to expose the player handle on window.__player.
const player = (
window as Window & {
__player?: { seek: (timeSeconds: number) => void };
}
).__player;
expect(player).toBeDefined();

// Seek to host time 37 — one second past the child's authored end (36).
player?.seek(37);

expect(video.paused).toBe(true);
// Outside the authored window the media is reset to 0 rather than left
// (or advanced) beyond its clip end.
expect(video.currentTime).toBe(0);
});
});
43 changes: 42 additions & 1 deletion packages/core/src/runtime/init.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1199,8 +1199,49 @@ export function initSandboxRuntimeModular(): void {
};

const syncMediaForCurrentState = () => {
const resolveMediaCompositionContext = (element: HTMLVideoElement | HTMLAudioElement) => {
const compositionRoot = element.closest("[data-composition-id]");
const inheritedStart = compositionRoot ? resolveStartForElement(compositionRoot, 0) : null;
// Media sync intentionally uses the authored host window here instead of
// the live child timeline duration. Visibility prefers live truth so a
// shrinking child composition hides early, but nested media needs a
// stable authored window so seeks clamp against the host clip timing.
const inheritedDuration = compositionRoot
? resolveDurationForElement(compositionRoot, { includeAuthoredTimingAttrs: true })
: null;
return { compositionRoot, inheritedStart, inheritedDuration };
};
const cache = refreshRuntimeMediaCache({
resolveStartSeconds: (element) => resolveStartForElement(element, 0),
shouldIncludeElement: (element) =>
element.hasAttribute("data-start") ||
Boolean(resolveMediaCompositionContext(element).compositionRoot),
resolveStartSeconds: (element) => {
const context = resolveMediaCompositionContext(
element as HTMLVideoElement | HTMLAudioElement,
);
return resolveStartForElement(element, context.inheritedStart ?? 0);
},
resolveDurationSeconds: (element) => {
const context = resolveMediaCompositionContext(element);
const start = resolveStartForElement(element, context.inheritedStart ?? 0);
const mediaStart =
Number.parseFloat(element.dataset.playbackStart ?? element.dataset.mediaStart ?? "0") ||
0;
const hostRemaining =
context.inheritedStart != null &&
context.inheritedDuration != null &&
context.inheritedDuration > 0
? Math.max(0, context.inheritedStart + context.inheritedDuration - start)
: null;
const sourceDuration =
Number.isFinite(element.duration) && element.duration > mediaStart
? Math.max(0, element.duration - mediaStart)
: null;
if (sourceDuration != null && hostRemaining != null) {
return Math.min(sourceDuration, hostRemaining);
}
return sourceDuration ?? hostRemaining;
},
});
syncRuntimeMedia({
clips: cache.mediaClips,
Expand Down
18 changes: 12 additions & 6 deletions packages/core/src/runtime/media.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,24 @@ export type RuntimeMediaClip = {

export function refreshRuntimeMediaCache(params?: {
resolveStartSeconds?: (element: Element) => number;
resolveDurationSeconds?: (element: HTMLVideoElement | HTMLAudioElement) => number | null;
shouldIncludeElement?: (element: HTMLVideoElement | HTMLAudioElement) => boolean;
}): {
timedMediaEls: Array<HTMLVideoElement | HTMLAudioElement>;
mediaClips: RuntimeMediaClip[];
videoClips: RuntimeMediaClip[];
maxMediaEnd: number;
} {
const mediaEls = Array.from(
document.querySelectorAll("video[data-start], audio[data-start]"),
) as Array<HTMLVideoElement | HTMLAudioElement>;
const mediaEls = Array.from(document.querySelectorAll("video, audio")) as Array<
HTMLVideoElement | HTMLAudioElement
>;
const timedMediaEls = params?.shouldIncludeElement
? mediaEls.filter((el) => params.shouldIncludeElement?.(el))
: mediaEls.filter((el) => el.hasAttribute("data-start"));
const mediaClips: RuntimeMediaClip[] = [];
const videoClips: RuntimeMediaClip[] = [];
let maxMediaEnd = 0;
for (const el of mediaEls) {
for (const el of timedMediaEls) {
const start = params?.resolveStartSeconds
? params.resolveStartSeconds(el)
: Number.parseFloat(el.dataset.start ?? "0");
Expand All @@ -39,7 +44,8 @@ export function refreshRuntimeMediaCache(params?: {
Number.isFinite(rawRate) && rawRate > 0 ? Math.max(0.1, Math.min(5, rawRate)) : 1;
const loop = el.loop;
const sourceDuration = Number.isFinite(el.duration) && el.duration > 0 ? el.duration : null;
let duration = Number.parseFloat(el.dataset.duration ?? "");
let duration =
params?.resolveDurationSeconds?.(el) ?? Number.parseFloat(el.dataset.duration ?? "");
if ((!Number.isFinite(duration) || duration <= 0) && sourceDuration != null) {
// Effective duration accounts for playback rate:
// at 0.5x, a 10s source plays for 20s on the timeline
Expand All @@ -63,7 +69,7 @@ export function refreshRuntimeMediaCache(params?: {
if (el.tagName === "VIDEO") videoClips.push(clip);
if (Number.isFinite(end)) maxMediaEnd = Math.max(maxMediaEnd, end);
}
return { timedMediaEls: mediaEls, mediaClips, videoClips, maxMediaEnd };
return { timedMediaEls, mediaClips, videoClips, maxMediaEnd };
}

// Per-element timeline→media offset from the previous tick. Used to tell a
Expand Down
Loading