Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
93 changes: 92 additions & 1 deletion packages/core/src/runtime/analytics.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { initRuntimeAnalytics, emitAnalyticsEvent } from "./analytics";
import { initRuntimeAnalytics, emitAnalyticsEvent, emitPerformanceMetric } from "./analytics";

describe("runtime analytics", () => {
let postMessage: ReturnType<typeof vi.fn>;
Expand Down Expand Up @@ -58,3 +58,94 @@ describe("runtime analytics", () => {
expect(postMessage).toHaveBeenCalledTimes(events.length);
});
});

describe("runtime performance metrics", () => {
  let bridgePost: ReturnType<typeof vi.fn>;

  beforeEach(() => {
    bridgePost = vi.fn();
    initRuntimeAnalytics(bridgePost);
    // Wipe any User Timing marks left behind so tests stay independent.
    if (typeof performance !== "undefined" && typeof performance.clearMarks === "function") {
      performance.clearMarks();
    }
  });

  it("emits a perf metric via postMessage", () => {
    emitPerformanceMetric("player_scrub_latency", 12.5);

    expect(bridgePost).toHaveBeenCalledWith({
      source: "hf-preview",
      type: "perf",
      name: "player_scrub_latency",
      value: 12.5,
      tags: {},
    });
  });

  it("passes tags through", () => {
    const tags = { composition_id: "abc123", mode: "isolated" };
    emitPerformanceMetric("player_decoder_count", 3, tags);

    expect(bridgePost).toHaveBeenCalledWith({
      source: "hf-preview",
      type: "perf",
      name: "player_decoder_count",
      value: 3,
      tags: { composition_id: "abc123", mode: "isolated" },
    });
  });

  it("normalizes missing tags to an empty object", () => {
    emitPerformanceMetric("player_playback_fps", 60);
    expect(bridgePost).toHaveBeenCalledWith(expect.objectContaining({ tags: {} }));
  });

  it("supports zero and negative values", () => {
    emitPerformanceMetric("player_dropped_frames", 0);
    emitPerformanceMetric("player_media_sync_drift", -8.3);

    expect(bridgePost).toHaveBeenNthCalledWith(1, expect.objectContaining({ value: 0 }));
    expect(bridgePost).toHaveBeenNthCalledWith(2, expect.objectContaining({ value: -8.3 }));
  });

  it("does not throw when postMessage is not set", () => {
    initRuntimeAnalytics(null as unknown as (payload: unknown) => void);
    expect(() => emitPerformanceMetric("player_load_time", 250)).not.toThrow();
  });

  it("does not throw when postMessage throws", () => {
    bridgePost.mockImplementation(() => {
      throw new Error("channel closed");
    });
    expect(() => emitPerformanceMetric("player_scrub_latency", 12)).not.toThrow();
  });

  it("does not throw when performance.mark throws", () => {
    const realMark = performance.mark;
    // Swap in a throwing mark for this test; restored in the finally below.
    performance.mark = vi.fn(() => {
      throw new Error("mark failed");
    }) as typeof performance.mark;

    try {
      expect(() => emitPerformanceMetric("player_load_time", 100)).not.toThrow();
      // A failing DevTools breadcrumb must not stop the bridge message.
      expect(bridgePost).toHaveBeenCalledWith(
        expect.objectContaining({ type: "perf", name: "player_load_time", value: 100 }),
      );
    } finally {
      performance.mark = realMark;
    }
  });

  it("writes a User Timing mark with detail for DevTools visibility", () => {
    if (typeof performance.getEntriesByName !== "function") {
      // Environment lacks the entries API — nothing to assert here.
      return;
    }

    emitPerformanceMetric("player_composition_switch", 42, { from: "a", to: "b" });

    const marks = performance.getEntriesByName("player_composition_switch", "mark");
    expect(marks.length).toBeGreaterThan(0);

    const newest = marks[marks.length - 1] as PerformanceMark;
    expect(newest.detail).toEqual({ value: 42, tags: { from: "a", to: "b" } });
  });
});
80 changes: 70 additions & 10 deletions packages/core/src/runtime/analytics.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/**
* Runtime analytics — vendor-agnostic event emission.
* Runtime analytics & performance telemetry — vendor-agnostic event emission.
*
* The runtime emits structured events via postMessage. The host application
* decides what to do with them: forward to PostHog, Mixpanel, Amplitude,
Expand All @@ -13,15 +13,18 @@
*
* ```javascript
* window.addEventListener("message", (e) => {
* if (e.data?.source !== "hf-preview" || e.data?.type !== "analytics") return;
* const { event, properties } = e.data;
* if (e.data?.source !== "hf-preview") return;
*
* // PostHog:
* posthog.capture(event, properties);
* // Mixpanel:
* mixpanel.track(event, properties);
* // Custom:
* myLogger.track(event, properties);
* if (e.data.type === "analytics") {
* // discrete lifecycle events: composition_loaded, played, seeked, etc.
* posthog.capture(e.data.event, e.data.properties);
* }
*
* if (e.data.type === "perf") {
* // numeric performance metrics: scrub latency, fps, decoder count, etc.
* // Aggregate per-session (p50/p95) and forward on flush.
* myMetrics.observe(e.data.name, e.data.value, e.data.tags);
* }
* });
* ```
*/
Expand All @@ -36,10 +39,22 @@ export type RuntimeAnalyticsEvent =

/** Flat, primitive-valued property bag attached to an analytics event. */
export type RuntimeAnalyticsProperties = Record<string, string | number | boolean | null>;

/**
 * Tags attached to a performance metric: small, low-cardinality identifiers
 * such as a composition id hash, media count bucket, or browser version.
 * Deliberately the same shape as analytics properties so hosts can forward
 * both telemetry channels through a single pipeline.
 */
export type RuntimePerformanceTags = { [tag: string]: string | number | boolean | null };

// Stored reference to the postRuntimeMessage function, set during init.
// Avoids a circular import between analytics ↔ bridge. Shared by both
// emitAnalyticsEvent and emitPerformanceMetric — one bridge, two channels.
// `null` until initRuntimeAnalytics supplies the transport.
let _postMessage: ((payload: unknown) => void) | null = null;

/**
 * Wire the analytics + performance bridge to the runtime's postMessage
 * transport. Called once during runtime bootstrap from `init.ts`.
 *
 * Accepting `null` lets hosts and tests detach the bridge without an unsafe
 * cast (the test suite previously had to pass
 * `null as unknown as (payload: unknown) => void`). While the transport is
 * unset, `emitPerformanceMetric` returns early via its `!_postMessage` guard.
 *
 * @param postMessage Transport for all outbound telemetry, or `null` to
 *   disconnect the bridge.
 */
export function initRuntimeAnalytics(postMessage: ((payload: unknown) => void) | null): void {
  _postMessage = postMessage;
}
Expand All @@ -64,3 +79,48 @@ export function emitAnalyticsEvent(
// Never let analytics failures affect the runtime
}
}

/**
 * Emit a numeric performance metric through the runtime bridge.
 *
 * Carries player-perf telemetry — scrub latency, sustained fps, dropped
 * frames, decoder count, composition load time, media sync drift — to the
 * host, which aggregates per-session values (p50/p95) and forwards them to
 * its observability pipeline on flush.
 *
 * As a local-debugging aid, the metric is also recorded via
 * `performance.mark()` so it appears on the DevTools Performance panel's
 * "User Timing" track, with `value` and `tags` available on the entry's
 * `detail` field.
 *
 * @param name Metric name, e.g. "player_scrub_latency", "player_playback_fps"
 * @param value Numeric value (units are metric-specific: ms for latency, fps for rate, etc.)
 * @param tags Optional low-cardinality tags (composition id, media count bucket, etc.)
 */
export function emitPerformanceMetric(
  name: string,
  value: number,
  tags?: RuntimePerformanceTags,
): void {
  // DevTools breadcrumb first. performance.mark() can throw under strict CSP,
  // before the document is ready, or when `detail` is non-cloneable — so it is
  // guarded, and any failure must not stop the bridge message below.
  const markAvailable =
    typeof performance !== "undefined" && typeof performance.mark === "function";
  if (markAvailable) {
    try {
      performance.mark(name, { detail: { value, tags: tags ?? {} } });
    } catch {
      // performance API unavailable or rejected — keep going
    }
  }

  const post = _postMessage;
  if (post === null) return;

  try {
    post({
      source: "hf-preview",
      type: "perf",
      name,
      value,
      tags: tags ?? {},
    });
  } catch {
    // Telemetry failures must never affect the runtime.
  }
}
18 changes: 17 additions & 1 deletion packages/core/src/runtime/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,21 @@ export type RuntimeAnalyticsMessage = {
properties: Record<string, string | number | boolean | null>;
};

/**
 * A numeric performance metric emitted by the runtime: scrub latency,
 * sustained fps, dropped frames, decoder count, composition load time,
 * media sync drift. The host aggregates per-session values (p50/p95) and
 * forwards them to its observability pipeline. Kept separate from
 * `analytics` messages because perf data is continuous and numeric rather
 * than discrete events.
 */
export type RuntimePerformanceMessage = {
  source: "hf-preview";
  type: "perf";
  name: string;
  value: number;
  tags: { [tag: string]: string | number | boolean | null };
};

export type RuntimeOutboundMessage =
| RuntimeStateMessage
| RuntimeTimelineMessage
Expand All @@ -181,7 +196,8 @@ export type RuntimeOutboundMessage =
| RuntimePickerCancelledMessage
| RuntimeStageSizeMessage
| RuntimeMediaAutoplayBlockedMessage
| RuntimeAnalyticsMessage;
| RuntimeAnalyticsMessage
| RuntimePerformanceMessage;

export type RuntimePlayer = {
_timeline: RuntimeTimelineLike | null;
Expand Down
Loading