Added health metrics for application load and database load (#2257)

* Added health metrics for application load

* Added health metrics for application load

* Fix unit tests

* Added more metrics

* Added a few comments

* Added DatabaseLoad scenario and addressed comments

* Fix unit tests

* Fix unit tests

* Fix unit tests

* Fix unit tests

* Fix the mock

* Fix unit tests

Author: sunghyunkang1111
Date: 2025-12-09 14:14:35 -06:00 (committed by GitHub)
Parent: 8c0e6da377
Commit: 5b7d1a74af
19 changed files with 701 additions and 2 deletions

package-lock.json (generated)
View File

@@ -116,6 +116,7 @@
"tinykeys": "2.1.0",
"underscore": "1.12.1",
"utility-types": "3.10.0",
"web-vitals": "4.2.4",
"uuid": "9.0.0",
"zustand": "3.5.0"
},
@@ -35930,6 +35931,11 @@
"defaults": "^1.0.3"
}
},
"node_modules/web-vitals": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/web-vitals/-/web-vitals-4.2.4.tgz",
"integrity": "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw=="
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"license": "BSD-2-Clause"

View File

@@ -111,6 +111,7 @@
"tinykeys": "2.1.0",
"underscore": "1.12.1",
"utility-types": "3.10.0",
"web-vitals": "4.2.4",
"uuid": "9.0.0",
"zustand": "3.5.0"
},

View File

@@ -38,6 +38,9 @@ import { ContainerConnectionInfo, IPhoenixServiceInfo, IProvisionData, IResponse
import * as ViewModels from "../Contracts/ViewModels";
import { UploadDetailsRecord } from "../Contracts/ViewModels";
import { GitHubOAuthService } from "../GitHub/GitHubOAuthService";
import MetricScenario from "../Metrics/MetricEvents";
import { ApplicationMetricPhase } from "../Metrics/ScenarioConfig";
import { scenarioMonitor } from "../Metrics/ScenarioMonitor";
import { PhoenixClient } from "../Phoenix/PhoenixClient";
import * as ExplorerSettings from "../Shared/ExplorerSettings";
import { Action, ActionModifiers } from "../Shared/Telemetry/TelemetryConstants";
@@ -402,7 +405,9 @@ export default class Explorer {
updatedDatabases = [...updatedDatabases, ...deltaDatabases.toAdd].sort((db1, db2) =>
db1.id().localeCompare(db2.id()),
);
-useDatabases.setState({ databases: updatedDatabases });
+useDatabases.setState({ databases: updatedDatabases, databasesFetchedSuccessfully: true });
+scenarioMonitor.completePhase(MetricScenario.DatabaseLoad, ApplicationMetricPhase.DatabasesFetched);
await this.refreshAndExpandNewDatabases(deltaDatabases.toAdd, updatedDatabases);
} catch (error) {
const errorMessage = getErrorMessage(error);
@@ -416,6 +421,8 @@ export default class Explorer {
startKey,
);
logConsoleError(`Error while refreshing databases: ${errorMessage}`);
useDatabases.setState({ databasesFetchedSuccessfully: false });
scenarioMonitor.failPhase(MetricScenario.DatabaseLoad, ApplicationMetricPhase.DatabasesFetched);
}
}
@@ -1183,6 +1190,11 @@ export default class Explorer {
}
public async refreshExplorer(): Promise<void> {
// Start DatabaseLoad scenario before fetching databases
if (userContext.apiType !== "Postgres" && userContext.apiType !== "VCoreMongo") {
scenarioMonitor.start(MetricScenario.DatabaseLoad);
}
if (userContext.apiType !== "Postgres" && userContext.apiType !== "VCoreMongo") {
userContext.authType === AuthType.ResourceToken
? this.refreshDatabaseForResourceToken()

View File

@@ -35,6 +35,15 @@ import * as ViewModels from "../../../Contracts/ViewModels";
import { updateUserContext } from "../../../UserContext";
import Explorer from "../../Explorer";
jest.mock("rx-jupyter", () => ({
sessions: {
create: jest.fn(),
},
contents: {
JupyterContentProvider: jest.fn().mockImplementation(() => ({})),
},
}));
jest.mock("Common/dataAccess/queryDocuments", () => ({
queryDocuments: jest.fn(() => ({
// Omit headers, because we can't mock a private field and we don't need to test it

View File

@@ -19,6 +19,15 @@ import { act } from "react-dom/test-utils";
import * as ViewModels from "../../../Contracts/ViewModels";
import Explorer from "../../Explorer";
jest.mock("rx-jupyter", () => ({
sessions: {
create: jest.fn(),
},
contents: {
JupyterContentProvider: jest.fn().mockImplementation(() => ({})),
},
}));
jest.requireActual("Explorer/Controls/Editor/EditorReact");
const PROPERTY_VALUE = "__SOME_PROPERTY_VALUE__";

View File

@@ -6,6 +6,15 @@ import { updateUserContext, userContext } from "../../UserContext";
import Explorer from "../Explorer";
import Database from "./Database";
jest.mock("rx-jupyter", () => ({
sessions: {
create: jest.fn(),
},
contents: {
JupyterContentProvider: jest.fn().mockImplementation(() => ({})),
},
}));
const createMockContainer = (): Explorer => {
const mockContainer = new Explorer();
return mockContainer;

View File

@@ -17,6 +17,7 @@ import { ReactTabKind, useTabs } from "hooks/useTabs";
import * as React from "react";
import { useEffect, useMemo } from "react";
import shallow from "zustand/shallow";
import { useDatabaseLoadScenario } from "../../Metrics/useMetricPhases";
import Explorer from "../Explorer";
import { useNotebook } from "../Notebook/useNotebook";
@@ -53,6 +54,7 @@ export const ResourceTree: React.FC<ResourceTreeProps> = ({ explorer }: Resource
resourceTokenCollection: state.resourceTokenCollection,
sampleDataResourceTokenCollection: state.sampleDataResourceTokenCollection,
}));
const databasesFetchedSuccessfully = useDatabases((state) => state.databasesFetchedSuccessfully);
const { isCopilotEnabled, isCopilotSampleDBEnabled } = useQueryCopilot((state) => ({
isCopilotEnabled: state.copilotEnabled,
isCopilotSampleDBEnabled: state.copilotSampleDBEnabled,
@@ -114,6 +116,9 @@ export const ResourceTree: React.FC<ResourceTreeProps> = ({ explorer }: Resource
}
}, [databaseTreeNodes, sampleDataNodes]);
// Track complete DatabaseLoad scenario (start, tree rendered, interactive)
useDatabaseLoadScenario(databaseTreeNodes, databasesFetchedSuccessfully);
useEffect(() => {
// Compute open items based on node.isExpanded
const updateOpenItems = (node: TreeNode, parentNodeId: string): void => {

View File

@@ -9,6 +9,7 @@ interface DatabasesState {
databases: ViewModels.Database[];
resourceTokenCollection: ViewModels.CollectionBase;
sampleDataResourceTokenCollection: ViewModels.CollectionBase;
databasesFetchedSuccessfully: boolean; // Track if last database fetch was successful
updateDatabase: (database: ViewModels.Database) => void;
addDatabases: (databases: ViewModels.Database[]) => void;
deleteDatabase: (database: ViewModels.Database) => void;
@@ -30,6 +31,7 @@ export const useDatabases: UseStore<DatabasesState> = create((set, get) => ({
databases: [],
resourceTokenCollection: undefined,
sampleDataResourceTokenCollection: undefined,
databasesFetchedSuccessfully: false,
updateDatabase: (updatedDatabase: ViewModels.Database) =>
set((state) => {
const updatedDatabases = state.databases.map((database: ViewModels.Database) => {

View File

@@ -60,6 +60,10 @@ import "./Explorer/Panes/PanelComponent.less";
import { SidePanel } from "./Explorer/Panes/PanelContainerComponent";
import "./Explorer/SplashScreen/SplashScreen.less";
import "./Libs/jquery";
import MetricScenario from "./Metrics/MetricEvents";
import { MetricScenarioProvider, useMetricScenario } from "./Metrics/MetricScenarioProvider";
import { ApplicationMetricPhase } from "./Metrics/ScenarioConfig";
import { useInteractive } from "./Metrics/useMetricPhases";
import { appThemeFabric } from "./Platform/Fabric/FabricTheme";
import "./Shared/appInsights";
import { useConfig } from "./hooks/useConfig";
@@ -79,6 +83,20 @@ const App: React.FunctionComponent = () => {
StyleConstants.updateStyles();
const explorer = useKnockoutExplorer(config?.platform);
// Scenario-based health tracking: start ApplicationLoad and complete phases.
const { startScenario, completePhase } = useMetricScenario();
React.useEffect(() => {
startScenario(MetricScenario.ApplicationLoad);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
React.useEffect(() => {
if (explorer) {
completePhase(MetricScenario.ApplicationLoad, ApplicationMetricPhase.ExplorerInitialized);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [explorer]);
if (!explorer) {
return <LoadingExplorer />;
}
@@ -104,9 +122,16 @@ const App: React.FunctionComponent = () => {
};
const mainElement = document.getElementById("Main");
-ReactDOM.render(<App />, mainElement);
+ReactDOM.render(
+  <MetricScenarioProvider>
+    <App />
+  </MetricScenarioProvider>,
+  mainElement,
+);
function DivExplorer({ explorer }: { explorer: Explorer }): JSX.Element {
useInteractive(MetricScenario.ApplicationLoad);
return (
<div id="divExplorer" className="flexContainer hideOverflows">
<div id="freeTierTeachingBubble"> </div>

src/Metrics/Constants.ts (new file)
View File

@@ -0,0 +1,16 @@
import { ApiType } from "Common/Constants";
import { Platform } from "ConfigContext";
// Metric scenarios represent lifecycle checkpoints we measure.
export enum MetricScenario {
ApplicationLoad = "ApplicationLoad",
DatabaseLoad = "DatabaseLoad",
}
// Generic metric emission event describing scenario outcome.
export interface MetricEvent {
readonly platform: Platform;
readonly api: ApiType;
readonly scenario: MetricScenario;
readonly healthy: boolean;
}

View File

@@ -0,0 +1,104 @@
import { configContext, Platform } from "../ConfigContext";
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
import { fetchWithTimeout } from "../Utils/FetchWithTimeout";
import MetricScenario, { reportHealthy, reportUnhealthy } from "./MetricEvents";
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { Response } = require("node-fetch");
jest.mock("../Utils/AuthorizationUtils", () => ({
getAuthorizationHeader: jest.fn().mockReturnValue({ header: "authorization", token: "Bearer test-token" }),
}));
jest.mock("../Utils/FetchWithTimeout", () => ({
fetchWithTimeout: jest.fn(),
}));
describe("MetricEvents", () => {
const mockFetchWithTimeout = fetchWithTimeout as jest.MockedFunction<typeof fetchWithTimeout>;
afterEach(() => {
jest.clearAllMocks();
});
test("reportHealthy success includes auth header", async () => {
const mockResponse = new Response(null, { status: 200 });
mockFetchWithTimeout.mockResolvedValue(mockResponse);
const result = await reportHealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL");
expect(result).toBeInstanceOf(Response);
expect(result.ok).toBe(true);
expect(result.status).toBe(200);
expect(mockFetchWithTimeout).toHaveBeenCalledTimes(1);
const callArgs = mockFetchWithTimeout.mock.calls[0];
expect(callArgs[0]).toContain("/api/dataexplorer/metrics/health");
expect(callArgs[1]?.headers).toEqual({
"Content-Type": "application/json",
authorization: "Bearer test-token",
});
const body = JSON.parse(callArgs[1]?.body as string);
expect(body.scenario).toBe(MetricScenario.ApplicationLoad);
expect(body.platform).toBe(Platform.Portal);
expect(body.api).toBe("SQL");
expect(body.healthy).toBe(true);
expect(getAuthorizationHeader).toHaveBeenCalled();
});
test("reportUnhealthy failure status", async () => {
const mockResponse = new Response("Failure", { status: 500 });
mockFetchWithTimeout.mockResolvedValue(mockResponse);
const result = await reportUnhealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL");
expect(result).toBeInstanceOf(Response);
expect(result.ok).toBe(false);
expect(result.status).toBe(500);
const callArgs = mockFetchWithTimeout.mock.calls[0];
const body = JSON.parse(callArgs[1]?.body as string);
expect(body.healthy).toBe(false);
});
test("helpers healthy/unhealthy", async () => {
mockFetchWithTimeout.mockResolvedValue(new Response(null, { status: 201 }));
const healthyResult = await reportHealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL");
const unhealthyResult = await reportUnhealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL");
expect(healthyResult.status).toBe(201);
expect(unhealthyResult.status).toBe(201);
expect(mockFetchWithTimeout).toHaveBeenCalledTimes(2);
});
test("throws when backend endpoint missing", async () => {
const original = configContext.PORTAL_BACKEND_ENDPOINT;
(configContext as { PORTAL_BACKEND_ENDPOINT: string }).PORTAL_BACKEND_ENDPOINT = "";
await expect(reportHealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL")).rejects.toThrow(
"baseUri is null or empty",
);
expect(mockFetchWithTimeout).not.toHaveBeenCalled();
(configContext as { PORTAL_BACKEND_ENDPOINT: string }).PORTAL_BACKEND_ENDPOINT = original;
});
test("propagates fetch errors", async () => {
mockFetchWithTimeout.mockRejectedValue(new Error("Network error"));
await expect(reportHealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL")).rejects.toThrow(
"Network error",
);
});
test("propagates timeout errors", async () => {
const abortError = new DOMException("The operation was aborted", "AbortError");
mockFetchWithTimeout.mockRejectedValue(abortError);
await expect(reportUnhealthy(MetricScenario.ApplicationLoad, Platform.Portal, "SQL")).rejects.toThrow(
"The operation was aborted",
);
});
});

View File

@@ -0,0 +1,28 @@
// Metrics module: scenario metric emission logic.
import { MetricEvent, MetricScenario } from "Metrics/Constants";
import { createUri } from "../Common/UrlUtility";
import { configContext, Platform } from "../ConfigContext";
import { ApiType } from "../UserContext";
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
import { fetchWithTimeout } from "../Utils/FetchWithTimeout";
const RELATIVE_PATH = "/api/dataexplorer/metrics/health"; // Endpoint retains 'health' for backend compatibility.
export const reportHealthy = (scenario: MetricScenario, platform: Platform, api: ApiType): Promise<Response> =>
send({ platform, api, scenario, healthy: true });
export const reportUnhealthy = (scenario: MetricScenario, platform: Platform, api: ApiType): Promise<Response> =>
send({ platform, api, scenario, healthy: false });
const send = async (event: MetricEvent): Promise<Response> => {
const url = createUri(configContext?.PORTAL_BACKEND_ENDPOINT, RELATIVE_PATH);
const authHeader = getAuthorizationHeader();
return await fetchWithTimeout(url, {
method: "POST",
headers: { "Content-Type": "application/json", [authHeader.header]: authHeader.token },
body: JSON.stringify(event),
});
};
export default MetricScenario;
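
For reference, a minimal calling sketch (not part of the diff; the wrapper name is hypothetical), using the same platform/api values ScenarioMonitor passes to these helpers:

import { configContext } from "../ConfigContext";
import { userContext } from "../UserContext";
import MetricScenario, { reportHealthy } from "./MetricEvents";

// Hypothetical wrapper: posts a healthy ApplicationLoad event to the portal backend.
// fetch and timeout errors propagate to the caller, as the tests above exercise.
export async function emitApplicationLoadHealthy(): Promise<Response> {
  return reportHealthy(MetricScenario.ApplicationLoad, configContext.platform, userContext.apiType);
}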

View File

@@ -0,0 +1,17 @@
import MetricScenario from "./MetricEvents";
import { ApplicationMetricPhase, CommonMetricPhase, ScenarioConfig } from "./ScenarioConfig";
export const scenarioConfigs: Record<MetricScenario, ScenarioConfig> = {
[MetricScenario.ApplicationLoad]: {
requiredPhases: [ApplicationMetricPhase.ExplorerInitialized, CommonMetricPhase.Interactive],
timeoutMs: 10000,
},
[MetricScenario.DatabaseLoad]: {
requiredPhases: [
ApplicationMetricPhase.DatabasesFetched,
ApplicationMetricPhase.DatabaseTreeRendered,
CommonMetricPhase.Interactive,
],
timeoutMs: 10000,
},
};

View File

@@ -0,0 +1,29 @@
import React, { useContext } from "react";
import MetricScenario from "./MetricEvents";
import { MetricPhase } from "./ScenarioConfig";
import { scenarioMonitor } from "./ScenarioMonitor";
interface MetricScenarioContextValue {
startScenario: (scenario: MetricScenario) => void;
startPhase: (scenario: MetricScenario, phase: MetricPhase) => void;
completePhase: (scenario: MetricScenario, phase: MetricPhase) => void;
}
const MetricScenarioContext = React.createContext<MetricScenarioContextValue | undefined>(undefined);
export const MetricScenarioProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
const value: MetricScenarioContextValue = {
startScenario: (s: MetricScenario) => scenarioMonitor.start(s),
startPhase: (s: MetricScenario, p: MetricPhase) => scenarioMonitor.startPhase(s, p),
completePhase: (s: MetricScenario, p: MetricPhase) => scenarioMonitor.completePhase(s, p),
};
return <MetricScenarioContext.Provider value={value}>{children}</MetricScenarioContext.Provider>;
};
export function useMetricScenario(): MetricScenarioContextValue {
const ctx = useContext(MetricScenarioContext);
if (!ctx) {
throw new Error("useMetricScenario must be used within MetricScenarioProvider");
}
return ctx;
}

View File

@@ -0,0 +1,47 @@
import MetricScenario from "./MetricEvents";
// Common phases shared across all scenarios
export enum CommonMetricPhase {
Interactive = "Interactive",
}
// Application-specific phases
export enum ApplicationMetricPhase {
ExplorerInitialized = "ExplorerInitialized",
DatabasesFetched = "DatabasesFetched",
DatabaseTreeRendered = "DatabaseTreeRendered",
}
// Combined type for all metric phases
export type MetricPhase = CommonMetricPhase | ApplicationMetricPhase;
export interface WebVitals {
lcp?: number; // Largest Contentful Paint
inp?: number; // Interaction to Next Paint
cls?: number; // Cumulative Layout Shift
fcp?: number; // First Contentful Paint
ttfb?: number; // Time to First Byte
}
export interface ScenarioConfig<TPhase extends string = MetricPhase> {
requiredPhases: TPhase[];
timeoutMs: number;
validate?: (ctx: ScenarioContextSnapshot<TPhase>) => boolean; // Optional custom validation
}
export interface PhaseTimings {
endTimeISO: string; // When the phase completed
durationMs: number; // Duration from scenario start to phase completion
}
export interface ScenarioContextSnapshot<TPhase extends string = MetricPhase> {
scenario: MetricScenario;
startTimeISO: string; // Human-readable ISO timestamp
endTimeISO: string; // Human-readable end timestamp
durationMs: number; // Total scenario duration from start to end
completed: TPhase[]; // Array for JSON serialization
failedPhases?: TPhase[]; // Phases that failed
timedOut: boolean;
vitals?: WebVitals;
phaseTimings?: Record<string, PhaseTimings>; // Start/end times for each phase
}
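
The optional validate hook is not exercised by the configs added in this commit; a hypothetical config showing how it could gate health on total duration, in addition to phase completion:

import { ApplicationMetricPhase, CommonMetricPhase, ScenarioConfig } from "./ScenarioConfig";

// Hypothetical: report ApplicationLoad as unhealthy if it took longer than 5 seconds,
// even when every required phase completed before the 10s timeout.
export const strictApplicationLoadConfig: ScenarioConfig = {
  requiredPhases: [ApplicationMetricPhase.ExplorerInitialized, CommonMetricPhase.Interactive],
  timeoutMs: 10000,
  validate: (snapshot) => snapshot.durationMs < 5000,
};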

View File

@@ -0,0 +1,267 @@
import { Metric, onCLS, onFCP, onINP, onLCP, onTTFB } from "web-vitals";
import { configContext } from "../ConfigContext";
import { trackEvent } from "../Shared/appInsights";
import { userContext } from "../UserContext";
import MetricScenario, { reportHealthy, reportUnhealthy } from "./MetricEvents";
import { scenarioConfigs } from "./MetricScenarioConfigs";
import { MetricPhase, PhaseTimings, ScenarioConfig, ScenarioContextSnapshot, WebVitals } from "./ScenarioConfig";
interface PhaseContext {
startMarkName: string; // Performance mark name for phase start
endMarkName?: string; // Performance mark name for phase end
}
interface InternalScenarioContext {
scenario: MetricScenario;
config: ScenarioConfig;
startMarkName: string;
completed: Set<MetricPhase>;
failed: Set<MetricPhase>;
phases: Map<MetricPhase, PhaseContext>; // Track start/end for each phase
timeoutId?: number;
emitted: boolean;
}
class ScenarioMonitor {
private contexts = new Map<MetricScenario, InternalScenarioContext>();
private vitals: WebVitals = {};
private vitalsInitialized = false;
constructor() {
this.initializeVitals();
}
private initializeVitals() {
if (this.vitalsInitialized) {
return;
}
this.vitalsInitialized = true;
onLCP((metric: Metric) => {
this.vitals.lcp = metric.value;
});
onINP((metric: Metric) => {
this.vitals.inp = metric.value;
});
onCLS((metric: Metric) => {
this.vitals.cls = metric.value;
});
onFCP((metric: Metric) => {
this.vitals.fcp = metric.value;
});
onTTFB((metric: Metric) => {
this.vitals.ttfb = metric.value;
});
}
start(scenario: MetricScenario) {
if (this.contexts.has(scenario)) {
return;
}
const config = scenarioConfigs[scenario];
if (!config) {
throw new Error(`Missing scenario config for ${scenario}`);
}
const startMarkName = `scenario_${scenario}_start`;
performance.mark(startMarkName);
const ctx: InternalScenarioContext = {
scenario,
config,
startMarkName,
completed: new Set<MetricPhase>(),
failed: new Set<MetricPhase>(),
phases: new Map<MetricPhase, PhaseContext>(),
emitted: false,
};
// Start all required phases at scenario start time
config.requiredPhases.forEach((phase) => {
const phaseStartMarkName = `scenario_${scenario}_${phase}_start`;
performance.mark(phaseStartMarkName);
ctx.phases.set(phase, { startMarkName: phaseStartMarkName });
});
ctx.timeoutId = window.setTimeout(() => this.emit(ctx, false, true), config.timeoutMs);
this.contexts.set(scenario, ctx);
}
startPhase(scenario: MetricScenario, phase: MetricPhase) {
const ctx = this.contexts.get(scenario);
if (!ctx || ctx.emitted || !ctx.config.requiredPhases.includes(phase) || ctx.phases.has(phase)) {
return;
}
const startMarkName = `scenario_${scenario}_${phase}_start`;
performance.mark(startMarkName);
ctx.phases.set(phase, { startMarkName });
}
completePhase(scenario: MetricScenario, phase: MetricPhase) {
const ctx = this.contexts.get(scenario);
const phaseCtx = ctx?.phases.get(phase);
if (!ctx || ctx.emitted || !ctx.config.requiredPhases.includes(phase) || !phaseCtx) {
return;
}
const endMarkName = `scenario_${scenario}_${phase}_end`;
performance.mark(endMarkName);
phaseCtx.endMarkName = endMarkName;
ctx.completed.add(phase);
this.tryEmitIfReady(ctx);
}
failPhase(scenario: MetricScenario, phase: MetricPhase) {
const ctx = this.contexts.get(scenario);
if (!ctx || ctx.emitted) {
return;
}
// Mark the explicitly failed phase
performance.mark(`scenario_${scenario}_${phase}_failed`);
ctx.failed.add(phase);
// Mark all remaining incomplete required phases as failed
ctx.config.requiredPhases.forEach((requiredPhase) => {
if (!ctx.completed.has(requiredPhase) && !ctx.failed.has(requiredPhase)) {
ctx.failed.add(requiredPhase);
}
});
// Build a snapshot with failure info
const failureSnapshot = this.buildSnapshot(ctx, { final: false, timedOut: false });
// Emit unhealthy immediately
this.emit(ctx, false, false, failureSnapshot);
}
private tryEmitIfReady(ctx: InternalScenarioContext) {
const allDone = ctx.config.requiredPhases.every((p) => ctx.completed.has(p));
if (!allDone) {
return;
}
const finalSnapshot = this.buildSnapshot(ctx, { final: true, timedOut: false });
const healthy = ctx.config.validate ? ctx.config.validate(finalSnapshot) : true;
this.emit(ctx, healthy, false, finalSnapshot);
}
private getPhaseTimings(ctx: InternalScenarioContext): Record<string, PhaseTimings> {
const result: Record<string, PhaseTimings> = {};
const navigationStart = performance.timeOrigin;
ctx.phases.forEach((phaseCtx, phase) => {
// Only include completed phases (those with endMarkName)
if (phaseCtx.endMarkName) {
const endEntry = performance.getEntriesByName(phaseCtx.endMarkName)[0];
if (endEntry) {
const endTimeISO = new Date(navigationStart + endEntry.startTime).toISOString();
// Use Performance API measure to calculate duration
const measureName = `scenario_${ctx.scenario}_${phase}_duration`;
performance.measure(measureName, phaseCtx.startMarkName, phaseCtx.endMarkName);
const measure = performance.getEntriesByName(measureName)[0];
if (measure) {
result[phase] = {
endTimeISO,
durationMs: measure.duration,
};
}
}
}
});
return result;
}
private emit(ctx: InternalScenarioContext, healthy: boolean, timedOut: boolean, snapshot?: ScenarioContextSnapshot) {
if (ctx.emitted) {
return;
}
ctx.emitted = true;
if (ctx.timeoutId) {
clearTimeout(ctx.timeoutId);
ctx.timeoutId = undefined;
}
const platform = configContext.platform;
const api = userContext.apiType;
// Build snapshot if not provided
const finalSnapshot = snapshot || this.buildSnapshot(ctx, { final: false, timedOut });
// Emit enriched telemetry with performance data
// TODO: Call portal backend metrics endpoint
trackEvent(
{ name: "MetricScenarioComplete" },
{
scenario: ctx.scenario,
healthy: healthy.toString(),
timedOut: timedOut.toString(),
platform,
api,
durationMs: finalSnapshot.durationMs.toString(),
completedPhases: finalSnapshot.completed.join(","),
failedPhases: finalSnapshot.failedPhases?.join(","),
lcp: finalSnapshot.vitals?.lcp?.toString(),
inp: finalSnapshot.vitals?.inp?.toString(),
cls: finalSnapshot.vitals?.cls?.toString(),
fcp: finalSnapshot.vitals?.fcp?.toString(),
ttfb: finalSnapshot.vitals?.ttfb?.toString(),
phaseTimings: JSON.stringify(finalSnapshot.phaseTimings),
},
);
// Call portal backend health metrics endpoint
if (healthy && !timedOut) {
reportHealthy(ctx.scenario, platform, api);
} else {
reportUnhealthy(ctx.scenario, platform, api);
}
// Cleanup performance entries
this.cleanupPerformanceEntries(ctx);
}
private cleanupPerformanceEntries(ctx: InternalScenarioContext) {
performance.clearMarks(ctx.startMarkName);
ctx.config.requiredPhases.forEach((phase) => {
// Clear the per-phase marks and measures actually created above (start/end/failed/duration).
performance.clearMarks(`scenario_${ctx.scenario}_${phase}_start`);
performance.clearMarks(`scenario_${ctx.scenario}_${phase}_end`);
performance.clearMarks(`scenario_${ctx.scenario}_${phase}_failed`);
performance.clearMeasures(`scenario_${ctx.scenario}_${phase}_duration`);
});
}
private buildSnapshot(
ctx: InternalScenarioContext,
opts: { final: boolean; timedOut: boolean },
): ScenarioContextSnapshot {
const phaseTimings = this.getPhaseTimings(ctx);
// Capture current time once for consistency
const currentTime = performance.now();
// Convert performance timestamps (relative to navigationStart) to absolute timestamps
const navigationStart = performance.timeOrigin;
const startEntry = performance.getEntriesByName(ctx.startMarkName)[0];
const startTimeISO = new Date(navigationStart + (startEntry?.startTime || 0)).toISOString();
const endTimeISO = new Date(navigationStart + currentTime).toISOString();
// Calculate overall scenario duration directly from the timestamps
const durationMs = currentTime - (startEntry?.startTime || 0);
return {
scenario: ctx.scenario,
startTimeISO,
endTimeISO,
durationMs,
completed: Array.from(ctx.completed),
failedPhases: ctx.failed.size > 0 ? Array.from(ctx.failed) : undefined,
timedOut: opts.timedOut,
vitals: { ...this.vitals },
phaseTimings,
};
}
}
export const scenarioMonitor = new ScenarioMonitor();
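
To summarize the lifecycle the monitor expects, a sketch of the DatabaseLoad flow as wired up in this commit (Interactive is normally completed by useInteractive in the UI):

import MetricScenario from "./MetricEvents";
import { ApplicationMetricPhase, CommonMetricPhase } from "./ScenarioConfig";
import { scenarioMonitor } from "./ScenarioMonitor";

// 1. Start before the work begins; this marks the scenario and all required phases as started.
scenarioMonitor.start(MetricScenario.DatabaseLoad);

// 2. Complete phases as milestones are reached. A healthy event is emitted only after every
//    required phase (DatabasesFetched, DatabaseTreeRendered, Interactive) has completed.
scenarioMonitor.completePhase(MetricScenario.DatabaseLoad, ApplicationMetricPhase.DatabasesFetched);
scenarioMonitor.completePhase(MetricScenario.DatabaseLoad, ApplicationMetricPhase.DatabaseTreeRendered);
scenarioMonitor.completePhase(MetricScenario.DatabaseLoad, CommonMetricPhase.Interactive);

// 3. On error, failing any phase emits an unhealthy event immediately; otherwise the
//    configured 10s timeout emits an unhealthy, timed-out event.
// scenarioMonitor.failPhase(MetricScenario.DatabaseLoad, ApplicationMetricPhase.DatabasesFetched);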

View File

@@ -0,0 +1,40 @@
import React from "react";
import MetricScenario from "./MetricEvents";
import { useMetricScenario } from "./MetricScenarioProvider";
import { ApplicationMetricPhase, CommonMetricPhase } from "./ScenarioConfig";
/**
* Hook to automatically complete the Interactive phase when the component becomes interactive.
* Uses requestAnimationFrame to complete after the browser has painted.
*/
export function useInteractive(scenario: MetricScenario) {
const { completePhase } = useMetricScenario();
React.useEffect(() => {
requestAnimationFrame(() => {
completePhase(scenario, CommonMetricPhase.Interactive);
});
}, [scenario, completePhase]);
}
/**
* Hook to manage DatabaseLoad scenario phase completions.
* Tracks tree rendering and completes Interactive phase.
* Only completes DatabaseTreeRendered if the database fetch was successful.
* Note: Scenario must be started before databases are fetched (in refreshExplorer).
*/
export function useDatabaseLoadScenario(databaseTreeNodes: unknown[], fetchSucceeded: boolean) {
const { completePhase } = useMetricScenario();
const hasCompletedTreeRenderRef = React.useRef(false);
// Track DatabaseTreeRendered phase (only if fetch succeeded)
React.useEffect(() => {
if (!hasCompletedTreeRenderRef.current && fetchSucceeded) {
hasCompletedTreeRenderRef.current = true;
completePhase(MetricScenario.DatabaseLoad, ApplicationMetricPhase.DatabaseTreeRendered);
}
}, [databaseTreeNodes, fetchSucceeded, completePhase]);
// Track Interactive phase
useInteractive(MetricScenario.DatabaseLoad);
}
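
As with DivExplorer in Main.tsx, useInteractive only needs to run inside MetricScenarioProvider; a minimal sketch with a hypothetical component:

import React from "react";
import MetricScenario from "./MetricEvents";
import { useInteractive } from "./useMetricPhases";

// Hypothetical view: completes the Interactive phase one animation frame after it first renders.
// Must be rendered under MetricScenarioProvider, otherwise useMetricScenario throws.
export const ExampleView: React.FC = () => {
  useInteractive(MetricScenario.ApplicationLoad);
  return <div id="divExample">Ready</div>;
};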

View File

@@ -0,0 +1,37 @@
/**
* Perform a fetch with an AbortController-based timeout. Returns the Response or throws (including AbortError on timeout).
*
* Usage: await fetchWithTimeout(url, { method: 'GET', headers: {...} }, 10000);
*
* A shared helper to remove duplicated inline implementations across the codebase.
*/
export async function fetchWithTimeout(
url: string,
init: RequestInit = {},
timeoutMs: number = 5000,
): Promise<Response> {
const controller = new AbortController();
const id = setTimeout(() => controller.abort(), timeoutMs);
try {
const response = await fetch(url, { ...init, signal: controller.signal });
return response;
} finally {
clearTimeout(id);
}
}
/**
* Convenience wrapper that returns null instead of throwing on timeout / network error.
* Useful for feature probing scenarios where failure should be treated as absence.
*/
export async function tryFetchWithTimeout(
url: string,
init: RequestInit = {},
timeoutMs: number = 5000,
): Promise<Response | null> {
try {
return await fetchWithTimeout(url, init, timeoutMs);
} catch {
return null;
}
}
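
A small usage sketch of the two helpers (the URL handling and function names are illustrative; the import path assumes a caller next to this file in src/Utils):

import { fetchWithTimeout, tryFetchWithTimeout } from "./FetchWithTimeout";

// Throws an AbortError if the request takes longer than 3 seconds.
export async function getStrict(url: string): Promise<Response> {
  return fetchWithTimeout(url, { method: "GET" }, 3000);
}

// Treats timeout or network failure as "not available" instead of throwing.
export async function isAvailable(url: string): Promise<boolean> {
  const response = await tryFetchWithTimeout(url, { method: "GET" }, 3000);
  return response?.ok ?? false;
}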

View File

@@ -50,3 +50,39 @@ require("jquery-ui-dist/jquery-ui");
// The test environment Data Explorer uses does not have crypto.subtle implementation
(<any>global).crypto.subtle = {};
// Mock Performance API for scenario monitoring
const performanceMock = {
...(typeof performance !== "undefined" ? performance : {}),
mark: jest.fn(),
measure: jest.fn(),
clearMarks: jest.fn(),
clearMeasures: jest.fn(),
getEntriesByName: jest.fn().mockReturnValue([]),
getEntriesByType: jest.fn().mockReturnValue([]),
now: jest.fn(() => Date.now()),
timeOrigin: Date.now(),
};
// Assign to both global and window
Object.defineProperty(global, "performance", {
writable: true,
configurable: true,
value: performanceMock,
});
Object.defineProperty(window, "performance", {
writable: true,
configurable: true,
value: performanceMock,
});
// Mock fetch API - minimal mock to prevent errors
(<any>global).fetch = jest.fn(() =>
Promise.resolve({
ok: true,
status: 200,
json: () => Promise.resolve({}),
text: () => Promise.resolve(""),
}),
);
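
Because getEntriesByName is mocked to return an empty array by default, code that reads scenario marks sees no entries in tests; a test that needs one can override the mock per call (a sketch, with an illustrative mark name):

// In a test file: have the mocked Performance API return a mark entry for one call.
(performance.getEntriesByName as jest.Mock).mockReturnValueOnce([
  { name: "scenario_ApplicationLoad_start", entryType: "mark", startTime: 0, duration: 0, toJSON: () => ({}) },
]);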