Mirror of https://github.com/Azure/cosmos-explorer.git
Synced 2026-01-23 11:44:03 +00:00

Compare commits: master...users/sind (18 commits)
Commits (SHA1):
1f1ab078c6
abd41485e8
151ce0ac3a
c89c878636
1a2d2b0e4e
36e8fd3519
5c83b36fd2
79dbdbbe7f
3f977df00d
865e9c906b
1c34425dd8
50a244e6f9
9dad75c2f9
876b531248
28fe5846b3
f8533abb64
2921294a3d
a03c289da0
@@ -7,27 +7,16 @@ import { HttpStatusCodes } from "./Constants";
 import { logError } from "./Logger";
 import { sendMessage } from "./MessageHandler";
 
-export interface HandleErrorOptions {
-  /** Optional redacted error to use for telemetry logging instead of the original error */
-  redactedError?: string | ARMError | Error;
-}
-
-export const handleError = (
-  error: string | ARMError | Error,
-  area: string,
-  consoleErrorPrefix?: string,
-  options?: HandleErrorOptions,
-): void => {
+export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
   const errorMessage = getErrorMessage(error);
   const errorCode = error instanceof ARMError ? error.code : undefined;
 
-  // logs error to data explorer console (always shows original, non-redacted message)
+  // logs error to data explorer console
   const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
   logConsoleError(consoleErrorMessage);
 
-  // logs error to both app insight and kusto (use redacted message if provided)
-  const telemetryErrorMessage = options?.redactedError ? getErrorMessage(options.redactedError) : errorMessage;
-  logError(telemetryErrorMessage, area, errorCode);
+  // logs error to both app insight and kusto
+  logError(errorMessage, area, errorCode);
 
   // checks for errors caused by firewall and sends them to portal to handle
   sendNotificationForError(errorMessage, errorCode);
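For reference, a minimal sketch of the two calling conventions on either side of this hunk. The names come from the hunk itself; the error value, area string, and prefix are illustrative, not from the diff:

declare const rawError: Error; // stand-in for a caught error

// Old (-) signature: telemetry can be handed a scrubbed copy of the error,
// while the console still receives the original message.
handleError(rawError, "QueryDocumentsPage", "Failed to query items", {
  redactedError: new Error("__REDACTED__"),
});

// New (+) signature: console and telemetry both log the original message.
handleError(rawError, "QueryDocumentsPage", "Failed to query items");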
@@ -44,8 +44,7 @@ export const deleteDocuments = async (
   documentIds: DocumentId[],
   abortSignal: AbortSignal,
 ): Promise<IBulkDeleteResult[]> => {
-  const totalCount = documentIds.length;
-  const clearMessage = logConsoleProgress(`Deleting ${totalCount} ${getEntityName(true)}`);
+  const clearMessage = logConsoleProgress(`Deleting ${documentIds.length} ${getEntityName(true)}`);
   try {
     const v2Container = await client().database(collection.databaseId).container(collection.id());
 
@@ -84,7 +83,11 @@ export const deleteDocuments = async (
     const flatAllResult = Array.prototype.concat.apply([], allResult);
     return flatAllResult;
   } catch (error) {
-    handleError(error, "DeleteDocuments", `Error while deleting ${totalCount} ${getEntityName(totalCount > 1)}`);
+    handleError(
+      error,
+      "DeleteDocuments",
+      `Error while deleting ${documentIds.length} ${getEntityName(documentIds.length > 1)}`,
+    );
     throw error;
   } finally {
     clearMessage();
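The context line above flattens an array of per-batch results into a single array. A self-contained sketch of that pattern (the element type is a placeholder for the repo's IBulkDeleteResult):

type BulkDeleteResult = { statusCode: number }; // placeholder shape, not the repo's interface
const allResult: BulkDeleteResult[][] = [
  [{ statusCode: 204 }],
  [{ statusCode: 204 }, { statusCode: 404 }],
];
// Array.prototype.concat.apply([], arr) predates Array.prototype.flat() and is equivalent here:
const flatAllResult: BulkDeleteResult[] = Array.prototype.concat.apply([], allResult);
console.log(flatAllResult.length); // 3 — same result as allResult.flat()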
@@ -1,171 +0,0 @@
-import { redactSyntaxErrorMessage } from "./queryDocumentsPage";
-
-/* Typical error to redact looks like this (the message property contains a JSON string with nested structure):
-{
-  "message": "{\"code\":\"BadRequest\",\"message\":\"{\\\"errors\\\":[{\\\"severity\\\":\\\"Error\\\",\\\"location\\\":{\\\"start\\\":0,\\\"end\\\":5},\\\"code\\\":\\\"SC1001\\\",\\\"message\\\":\\\"Syntax error, incorrect syntax near 'Crazy'.\\\"}]}\\r\\nActivityId: d5424e10-51bd-46f7-9aec-7b40bed36f17, Windows/10.0.20348 cosmos-netstandard-sdk/3.18.0\"}"
-}
-*/
-
-// Helper to create the nested error structure that matches what the SDK returns
-const createNestedError = (
-  errors: Array<{ severity?: string; location?: { start: number; end: number }; code: string; message: string }>,
-  activityId: string = "test-activity-id",
-): { message: string } => {
-  const innerErrorsJson = JSON.stringify({ errors });
-  const innerMessage = `${innerErrorsJson}\r\n${activityId}`;
-  const outerJson = JSON.stringify({ code: "BadRequest", message: innerMessage });
-  return { message: outerJson };
-};
-
-// Helper to parse the redacted result
-const parseRedactedResult = (result: { message: string }) => {
-  const outerParsed = JSON.parse(result.message);
-  const [innerErrorsJson, activityIdPart] = outerParsed.message.split("\r\n");
-  const innerErrors = JSON.parse(innerErrorsJson);
-  return { outerParsed, innerErrors, activityIdPart };
-};
-
-describe("redactSyntaxErrorMessage", () => {
-  it("should redact SC1001 error message", () => {
-    const error = createNestedError(
-      [
-        {
-          severity: "Error",
-          location: { start: 0, end: 5 },
-          code: "SC1001",
-          message: "Syntax error, incorrect syntax near 'Crazy'.",
-        },
-      ],
-      "ActivityId: d5424e10-51bd-46f7-9aec-7b40bed36f17",
-    );
-
-    const result = redactSyntaxErrorMessage(error) as { message: string };
-    const { outerParsed, innerErrors, activityIdPart } = parseRedactedResult(result);
-
-    expect(outerParsed.code).toBe("BadRequest");
-    expect(innerErrors.errors[0].message).toBe("__REDACTED__");
-    expect(activityIdPart).toContain("ActivityId: d5424e10-51bd-46f7-9aec-7b40bed36f17");
-  });
-
-  it("should redact SC2001 error message", () => {
-    const error = createNestedError(
-      [
-        {
-          severity: "Error",
-          location: { start: 0, end: 10 },
-          code: "SC2001",
-          message: "Some sensitive syntax error message.",
-        },
-      ],
-      "ActivityId: abc123",
-    );
-
-    const result = redactSyntaxErrorMessage(error) as { message: string };
-    const { outerParsed, innerErrors, activityIdPart } = parseRedactedResult(result);
-
-    expect(outerParsed.code).toBe("BadRequest");
-    expect(innerErrors.errors[0].message).toBe("__REDACTED__");
-    expect(activityIdPart).toContain("ActivityId: abc123");
-  });
-
-  it("should redact multiple errors with SC1001 and SC2001 codes", () => {
-    const error = createNestedError(
-      [
-        { severity: "Error", code: "SC1001", message: "First error" },
-        { severity: "Error", code: "SC2001", message: "Second error" },
-      ],
-      "ActivityId: xyz",
-    );
-
-    const result = redactSyntaxErrorMessage(error) as { message: string };
-    const { innerErrors } = parseRedactedResult(result);
-
-    expect(innerErrors.errors[0].message).toBe("__REDACTED__");
-    expect(innerErrors.errors[1].message).toBe("__REDACTED__");
-  });
-
-  it("should not redact errors with other codes", () => {
-    const error = createNestedError(
-      [{ severity: "Error", code: "SC9999", message: "This should not be redacted." }],
-      "ActivityId: test123",
-    );
-
-    const result = redactSyntaxErrorMessage(error);
-
-    expect(result).toBe(error); // Should return original error unchanged
-  });
-
-  it("should not modify non-BadRequest errors", () => {
-    const innerMessage = JSON.stringify({ errors: [{ code: "SC1001", message: "Should not be redacted" }] });
-    const error = {
-      message: JSON.stringify({ code: "NotFound", message: innerMessage }),
-    };
-
-    const result = redactSyntaxErrorMessage(error);
-
-    expect(result).toBe(error);
-  });
-
-  it("should handle errors without message property", () => {
-    const error = { code: "BadRequest" };
-
-    const result = redactSyntaxErrorMessage(error);
-
-    expect(result).toBe(error);
-  });
-
-  it("should handle non-object errors", () => {
-    const stringError = "Simple string error";
-    const nullError: null = null;
-    const undefinedError: undefined = undefined;
-
-    expect(redactSyntaxErrorMessage(stringError)).toBe(stringError);
-    expect(redactSyntaxErrorMessage(nullError)).toBe(nullError);
-    expect(redactSyntaxErrorMessage(undefinedError)).toBe(undefinedError);
-  });
-
-  it("should handle malformed JSON in message", () => {
-    const error = {
-      message: "not valid json",
-    };
-
-    const result = redactSyntaxErrorMessage(error);
-
-    expect(result).toBe(error);
-  });
-
-  it("should handle message without ActivityId suffix", () => {
-    const innerErrorsJson = JSON.stringify({
-      errors: [{ severity: "Error", code: "SC1001", message: "Syntax error near something." }],
-    });
-    const error = {
-      message: JSON.stringify({ code: "BadRequest", message: innerErrorsJson + "\r\n" }),
-    };
-
-    const result = redactSyntaxErrorMessage(error) as { message: string };
-    const { innerErrors } = parseRedactedResult(result);
-
-    expect(innerErrors.errors[0].message).toBe("__REDACTED__");
-  });
-
-  it("should preserve other error properties", () => {
-    const baseError = createNestedError([{ code: "SC1001", message: "Error" }], "ActivityId: test");
-    const error = {
-      ...baseError,
-      statusCode: 400,
-      additionalInfo: "extra data",
-    };
-
-    const result = redactSyntaxErrorMessage(error) as {
-      message: string;
-      statusCode: number;
-      additionalInfo: string;
-    };
-
-    expect(result.statusCode).toBe(400);
-    expect(result.additionalInfo).toBe("extra data");
-
-    const { innerErrors } = parseRedactedResult(result);
-    expect(innerErrors.errors[0].message).toBe("__REDACTED__");
-  });
-});
@@ -4,51 +4,6 @@ import { getEntityName } from "../DocumentUtility";
 import { handleError } from "../ErrorHandlingUtils";
 import { MinimalQueryIterator, nextPage } from "../IteratorUtilities";
 
-// Redact sensitive information from BadRequest errors with specific codes
-export const redactSyntaxErrorMessage = (error: unknown): unknown => {
-  const codesToRedact = ["SC1001", "SC2001"];
-
-  try {
-    // Handle error objects with a message property
-    if (error && typeof error === "object" && "message" in error) {
-      const errorObj = error as { code?: string; message?: string };
-      if (typeof errorObj.message === "string") {
-        // Parse the inner JSON from the message
-        const innerJson = JSON.parse(errorObj.message);
-        if (innerJson.code === "BadRequest" && typeof innerJson.message === "string") {
-          const [innerErrorsJson, activityIdPart] = innerJson.message.split("\r\n");
-          const innerErrorsObj = JSON.parse(innerErrorsJson);
-          if (Array.isArray(innerErrorsObj.errors)) {
-            let modified = false;
-            innerErrorsObj.errors = innerErrorsObj.errors.map((err: { code?: string; message?: string }) => {
-              if (err.code && codesToRedact.includes(err.code)) {
-                modified = true;
-                return { ...err, message: "__REDACTED__" };
-              }
-              return err;
-            });
-
-            if (modified) {
-              // Reconstruct the message with the redacted content
-              const redactedMessage = JSON.stringify(innerErrorsObj) + `\r\n${activityIdPart}`;
-              const redactedError = {
-                ...error,
-                message: JSON.stringify({ ...innerJson, message: redactedMessage }),
-                body: undefined as unknown, // Clear body to avoid sensitive data
-              };
-              return redactedError;
-            }
-          }
-        }
-      }
-    }
-  } catch {
-    // If parsing fails, return the original error
-  }
-
-  return error;
-};
-
 export const queryDocumentsPage = async (
   resourceName: string,
   documentsIterator: MinimalQueryIterator,
@@ -63,12 +18,7 @@ export const queryDocumentsPage = async (
     logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
     return result;
   } catch (error) {
-    // Redact sensitive information for telemetry while showing original in console
-    const redactedError = redactSyntaxErrorMessage(error);
-
-    handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`, {
-      redactedError: redactedError as Error,
-    });
+    handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
    throw error;
   } finally {
     clearMessage();
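For reference, a standalone sketch of the parse-redact-reserialize round trip the deleted function performed. The sample payload is modeled on the comment in the deleted test file; it runs on its own:

const innerErrors = JSON.stringify({ errors: [{ code: "SC1001", message: "Syntax error near 'Crazy'." }] });
const sample = {
  message: JSON.stringify({
    code: "BadRequest",
    message: `${innerErrors}\r\nActivityId: d5424e10-51bd-46f7-9aec-7b40bed36f17`,
  }),
};

const outer = JSON.parse(sample.message);                     // { code: "BadRequest", message: "..." }
const [errorsJson, activityIdPart] = outer.message.split("\r\n"); // payload vs. ActivityId suffix
const parsed = JSON.parse(errorsJson);                        // { errors: [...] }
parsed.errors = parsed.errors.map((e: { code?: string; message?: string }) =>
  e.code && ["SC1001", "SC2001"].includes(e.code) ? { ...e, message: "__REDACTED__" } : e,
);
const redacted = {
  ...sample,
  message: JSON.stringify({ ...outer, message: `${JSON.stringify(parsed)}\r\n${activityIdPart}` }),
};
console.log(redacted.message.includes("Crazy")); // false — the user-written query fragment is gone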
@@ -94,7 +94,7 @@ export function extractFeatures(given = new URLSearchParams(window.location.search
     notebookBasePath: get("notebookbasepath"),
     notebookServerToken: get("notebookservertoken"),
     notebookServerUrl: get("notebookserverurl"),
-    sandboxNotebookOutputs: true,
+    sandboxNotebookOutputs: "true" === get("sandboxnotebookoutputs", "true"),
     selfServeType: get("selfservetype"),
     showMinRUSurvey: "true" === get("showminrusurvey"),
     ttl90Days: "true" === get("ttl90days"),
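A standalone sketch of the query-flag pattern this hunk adopts. Here `get` is a local stand-in for the file's helper, assumed to return a parameter's value or a supplied default:

const given = new URLSearchParams("?ttl90days=true");
const get = (name: string, defaultValue?: string): string | undefined => given.get(name) ?? defaultValue;

const sandboxNotebookOutputs = "true" === get("sandboxnotebookoutputs", "true"); // true: absent, defaults on
const ttl90Days = "true" === get("ttl90days");                                   // true: explicitly set
const showMinRUSurvey = "true" === get("showminrusurvey");                       // false: absent, no default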
@@ -27,7 +27,7 @@ describe("AuthorizationUtils", () => {
|
|||||||
enableKoResourceTree: false,
|
enableKoResourceTree: false,
|
||||||
enableThroughputBuckets: false,
|
enableThroughputBuckets: false,
|
||||||
hostedDataExplorer: false,
|
hostedDataExplorer: false,
|
||||||
sandboxNotebookOutputs: true,
|
sandboxNotebookOutputs: false,
|
||||||
showMinRUSurvey: false,
|
showMinRUSurvey: false,
|
||||||
ttl90Days: false,
|
ttl90Days: false,
|
||||||
enableThroughputCap: false,
|
enableThroughputCap: false,
|
||||||
|
|||||||
@@ -58,7 +58,9 @@ export const defaultAccounts: Record<TestAccount, string> = {
 export const resourceGroupName = process.env.DE_TEST_RESOURCE_GROUP ?? "de-e2e-tests";
 export const subscriptionId = process.env.DE_TEST_SUBSCRIPTION_ID ?? "69e02f2d-f059-4409-9eac-97e8a276ae2c";
 export const TEST_AUTOSCALE_THROUGHPUT_RU = 1000;
+export const TEST_MANUAL_THROUGHPUT_RU = 800;
 export const TEST_AUTOSCALE_MAX_THROUGHPUT_RU_2K = 2000;
+export const TEST_AUTOSCALE_MAX_THROUGHPUT_RU_4K = 4000;
 export const TEST_MANUAL_THROUGHPUT_RU_2K = 2000;
 export const ONE_MINUTE_MS: number = 60 * 1000;
 
test/sql/scaleAndSettings/sharedThroughput.spec.ts (new file, 232 lines)
@@ -0,0 +1,232 @@
+import { Locator, expect, test } from "@playwright/test";
+import {
+  CommandBarButton,
+  DataExplorer,
+  ONE_MINUTE_MS,
+  TEST_AUTOSCALE_MAX_THROUGHPUT_RU_4K,
+  TEST_MANUAL_THROUGHPUT_RU,
+  TestAccount,
+} from "../../fx";
+import { TestDatabaseContext, createTestDB } from "../../testData";
+
+test.describe("Database with Shared Throughput", () => {
+  let dbContext: TestDatabaseContext = null!;
+  let explorer: DataExplorer = null!;
+  const containerId = "sharedcontainer";
+
+  // Helper methods
+  const getThroughputInput = (type: "manual" | "autopilot"): Locator => {
+    return explorer.frame.getByTestId(`${type}-throughput-input`);
+  };
+
+  // Delete database only if not running in CI
+  if (!process.env.CI) {
+    test.afterEach("Delete Test Database", async () => {
+      await dbContext?.dispose();
+    });
+  }
+
+  test.describe("Manual Throughput Tests", () => {
+    test.beforeEach(async ({ page }) => {
+      explorer = await DataExplorer.open(page, TestAccount.SQL);
+    });
+
+    test("Create database with shared manual throughput and verify Scale node in UI", async () => {
+      test.setTimeout(120000); // 2 minutes timeout
+      // Create database with shared manual throughput (400 RU/s)
+      dbContext = await createTestDB({ throughput: 400 });
+
+      // Verify database node appears in the tree
+      const databaseNode = await explorer.waitForNode(dbContext.database.id);
+      expect(databaseNode).toBeDefined();
+
+      // Expand the database node to see child nodes
+      await databaseNode.expand();
+
+      // Verify that "Scale" node appears under the database
+      const scaleNode = await explorer.waitForNode(`${dbContext.database.id}/Scale`);
+      expect(scaleNode).toBeDefined();
+      await expect(scaleNode.element).toBeVisible();
+    });
+
+    test("Add container to shared database without dedicated throughput", async () => {
+      // Create database with shared manual throughput
+      dbContext = await createTestDB({ throughput: 400 });
+
+      // Wait for the database to appear in the tree
+      await explorer.waitForNode(dbContext.database.id);
+
+      // Add a container to the shared database via UI
+      const newContainerButton = await explorer.globalCommandButton("New Container");
+      await newContainerButton.click();
+
+      await explorer.whilePanelOpen(
+        "New Container",
+        async (panel, okButton) => {
+          // Select "Use existing" database
+          const useExistingRadio = panel.getByRole("radio", { name: /Use existing/i });
+          await useExistingRadio.click();
+
+          // Select the database from dropdown using the new data-testid
+          const databaseDropdown = panel.getByRole("combobox", { name: "Choose an existing database" });
+          await databaseDropdown.click();
+
+          await explorer.frame.getByRole("option", { name: dbContext.database.id }).click();
+          // Now you can target the specific database option by its data-testid
+          //await panel.getByTestId(`database-option-${dbContext.database.id}`).click();
+          // Fill container id
+          await panel.getByRole("textbox", { name: "Container id, Example Container1" }).fill(containerId);
+
+          // Fill partition key
+          await panel.getByRole("textbox", { name: "Partition key" }).fill("/pk");
+
+          // Ensure "Provision dedicated throughput" is NOT checked
+          const dedicatedThroughputCheckbox = panel.getByRole("checkbox", {
+            name: /Provision dedicated throughput for this container/i,
+          });
+
+          if (await dedicatedThroughputCheckbox.isVisible()) {
+            const isChecked = await dedicatedThroughputCheckbox.isChecked();
+            if (isChecked) {
+              await dedicatedThroughputCheckbox.uncheck();
+            }
+          }
+
+          await okButton.click();
+        },
+        { closeTimeout: 5 * ONE_MINUTE_MS },
+      );
+
+      // Verify container was created under the database
+      const containerNode = await explorer.waitForContainerNode(dbContext.database.id, containerId);
+      expect(containerNode).toBeDefined();
+    });
+
+    test("Scale shared database manual throughput", async () => {
+      // Create database with shared manual throughput (400 RU/s)
+      dbContext = await createTestDB({ throughput: 400 });
+
+      // Navigate to the scale settings by clicking the "Scale" node in the tree
+      const databaseNode = await explorer.waitForNode(dbContext.database.id);
+      await databaseNode.expand();
+      const scaleNode = await explorer.waitForNode(`${dbContext.database.id}/Scale`);
+      await scaleNode.element.click();
+
+      // Update manual throughput from 400 to 800
+      await getThroughputInput("manual").fill(TEST_MANUAL_THROUGHPUT_RU.toString());
+
+      // Save changes
+      await explorer.commandBarButton(CommandBarButton.Save).click();
+
+      // Verify success message
+      await expect(explorer.getConsoleHeaderStatus()).toContainText(
+        `Successfully updated offer for database ${dbContext.database.id}`,
+        {
+          timeout: 2 * ONE_MINUTE_MS,
+        },
+      );
+    });
+
+    test("Scale shared database from manual to autoscale", async () => {
+      // Create database with shared manual throughput (400 RU/s)
+      dbContext = await createTestDB({ throughput: 400 });
+
+      // Open database settings by clicking the "Scale" node
+      const databaseNode = await explorer.waitForNode(dbContext.database.id);
+      await databaseNode.expand();
+      const scaleNode = await explorer.waitForNode(`${dbContext.database.id}/Scale`);
+      await scaleNode.element.click();
+
+      // Switch to Autoscale
+      const autoscaleRadio = explorer.frame.getByText("Autoscale", { exact: true });
+      await autoscaleRadio.click();
+
+      // Set autoscale max throughput to 1000
+      //await getThroughputInput("autopilot").fill(TEST_AUTOSCALE_THROUGHPUT_RU.toString());
+
+      // Save changes
+      await explorer.commandBarButton(CommandBarButton.Save).click();
+
+      await expect(explorer.getConsoleHeaderStatus()).toContainText(
+        `Successfully updated offer for database ${dbContext.database.id}`,
+        {
+          timeout: 2 * ONE_MINUTE_MS,
+        },
+      );
+    });
+  });
+
+  test.describe("Autoscale Throughput Tests", () => {
+    test.beforeEach(async ({ page }) => {
+      explorer = await DataExplorer.open(page, TestAccount.SQL);
+    });
+
+    test("Create database with shared autoscale throughput and verify Scale node in UI", async () => {
+      test.setTimeout(120000); // 2 minutes timeout
+
+      // Create database with shared autoscale throughput (max 1000 RU/s)
+      dbContext = await createTestDB({ maxThroughput: 1000 });
+
+      // Verify database node appears
+      const databaseNode = await explorer.waitForNode(dbContext.database.id);
+      expect(databaseNode).toBeDefined();
+
+      // Expand the database node to see child nodes
+      await databaseNode.expand();
+
+      // Verify that "Scale" node appears under the database
+      const scaleNode = await explorer.waitForNode(`${dbContext.database.id}/Scale`);
+      expect(scaleNode).toBeDefined();
+      await expect(scaleNode.element).toBeVisible();
+    });
+
+    test("Scale shared database autoscale throughput", async () => {
+      // Create database with shared autoscale throughput (max 1000 RU/s)
+      dbContext = await createTestDB({ maxThroughput: 1000 });
+
+      // Open database settings
+      const databaseNode = await explorer.waitForNode(dbContext.database.id);
+      await databaseNode.expand();
+      const scaleNode = await explorer.waitForNode(`${dbContext.database.id}/Scale`);
+      await scaleNode.element.click();
+
+      // Update autoscale max throughput from 1000 to 4000
+      await getThroughputInput("autopilot").fill(TEST_AUTOSCALE_MAX_THROUGHPUT_RU_4K.toString());
+
+      // Save changes
+      await explorer.commandBarButton(CommandBarButton.Save).click();
+
+      // Verify success message
+      await expect(explorer.getConsoleHeaderStatus()).toContainText(
+        `Successfully updated offer for database ${dbContext.database.id}`,
+        {
+          timeout: 2 * ONE_MINUTE_MS,
+        },
+      );
+    });
+
+    test("Scale shared database from autoscale to manual", async () => {
+      // Create database with shared autoscale throughput (max 1000 RU/s)
+      dbContext = await createTestDB({ maxThroughput: 1000 });
+
+      // Open database settings
+      const databaseNode = await explorer.waitForNode(dbContext.database.id);
+      await databaseNode.expand();
+      const scaleNode = await explorer.waitForNode(`${dbContext.database.id}/Scale`);
+      await scaleNode.element.click();
+
+      // Switch to Manual
+      const manualRadio = explorer.frame.getByText("Manual", { exact: true });
+      await manualRadio.click();
+
+      // Save changes
+      await explorer.commandBarButton(CommandBarButton.Save).click();
+
+      // Verify success message
+      await expect(explorer.getConsoleHeaderStatus()).toContainText(
+        `Successfully updated offer for database ${dbContext.database.id}`,
+        { timeout: 2 * ONE_MINUTE_MS },
+      );
+    });
+  });
+});
test/testData.ts (121 lines changed)
@@ -82,6 +82,75 @@ export class TestContainerContext {
   }
 }
+
+export class TestDatabaseContext {
+  constructor(
+    public armClient: CosmosDBManagementClient,
+    public client: CosmosClient,
+    public database: Database,
+  ) {}
+
+  async dispose() {
+    await this.database.delete();
+  }
+}
+
+export interface CreateTestDBOptions {
+  throughput?: number;
+  maxThroughput?: number; // For autoscale
+}
+
+// Helper function to create ARM client and Cosmos client for SQL account
+async function createCosmosClientForSQLAccount(
+  accountType: TestAccount.SQL | TestAccount.SQLContainerCopyOnly = TestAccount.SQL,
+): Promise<{ armClient: CosmosDBManagementClient; client: CosmosClient }> {
+  const credentials = getAzureCLICredentials();
+  const adaptedCredentials = new AzureIdentityCredentialAdapter(credentials);
+  const armClient = new CosmosDBManagementClient(adaptedCredentials, subscriptionId);
+  const accountName = getAccountName(accountType);
+  const account = await armClient.databaseAccounts.get(resourceGroupName, accountName);
+
+  const clientOptions: CosmosClientOptions = {
+    endpoint: account.documentEndpoint!,
+  };
+
+  const rbacToken =
+    accountType === TestAccount.SQL
+      ? process.env.NOSQL_TESTACCOUNT_TOKEN
+      : accountType === TestAccount.SQLContainerCopyOnly
+        ? process.env.NOSQL_CONTAINERCOPY_TESTACCOUNT_TOKEN
+        : "";
+
+  if (rbacToken) {
+    clientOptions.tokenProvider = async (): Promise<string> => {
+      const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
+      const authorizationToken = `${AUTH_PREFIX}${rbacToken}`;
+      return authorizationToken;
+    };
+  } else {
+    const keys = await armClient.databaseAccounts.listKeys(resourceGroupName, accountName);
+    clientOptions.key = keys.primaryMasterKey;
+  }
+
+  const client = new CosmosClient(clientOptions);
+
+  return { armClient, client };
+}
+
+export async function createTestDB(options?: CreateTestDBOptions): Promise<TestDatabaseContext> {
+  const databaseId = generateUniqueName("db");
+  const { armClient, client } = await createCosmosClientForSQLAccount();
+
+  // Create database with provisioned throughput (shared throughput)
+  // This checks the "Provision database throughput" option
+  const { database } = await client.databases.create({
+    id: databaseId,
+    throughput: options?.throughput, // Manual throughput (e.g., 400)
+    maxThroughput: options?.maxThroughput, // Autoscale max throughput (e.g., 1000)
+  });
+
+  return new TestDatabaseContext(armClient, client, database);
+}
+
 type createTestSqlContainerConfig = {
   includeTestData?: boolean;
   partitionKey?: string;
@@ -104,34 +173,7 @@ export async function createMultipleTestContainers({
   const creationPromises: Promise<TestContainerContext>[] = [];
 
   const databaseId = databaseName ? databaseName : generateUniqueName("db");
-  const credentials = getAzureCLICredentials();
-  const adaptedCredentials = new AzureIdentityCredentialAdapter(credentials);
-  const armClient = new CosmosDBManagementClient(adaptedCredentials, subscriptionId);
-  const accountName = getAccountName(accountType);
-  const account = await armClient.databaseAccounts.get(resourceGroupName, accountName);
-
-  const clientOptions: CosmosClientOptions = {
-    endpoint: account.documentEndpoint!,
-  };
-
-  const rbacToken =
-    accountType === TestAccount.SQL
-      ? process.env.NOSQL_TESTACCOUNT_TOKEN
-      : accountType === TestAccount.SQLContainerCopyOnly
-        ? process.env.NOSQL_CONTAINERCOPY_TESTACCOUNT_TOKEN
-        : "";
-  if (rbacToken) {
-    clientOptions.tokenProvider = async (): Promise<string> => {
-      const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
-      const authorizationToken = `${AUTH_PREFIX}${rbacToken}`;
-      return authorizationToken;
-    };
-  } else {
-    const keys = await armClient.databaseAccounts.listKeys(resourceGroupName, accountName);
-    clientOptions.key = keys.primaryMasterKey;
-  }
-
-  const client = new CosmosClient(clientOptions);
+  const { armClient, client } = await createCosmosClientForSQLAccount(accountType);
   const { database } = await client.databases.createIfNotExists({ id: databaseId });
 
   try {
@@ -158,29 +200,8 @@ export async function createTestSQLContainer({
 }: createTestSqlContainerConfig = {}) {
   const databaseId = databaseName ? databaseName : generateUniqueName("db");
   const containerId = "testcontainer"; // A unique container name isn't needed because the database is unique
-  const credentials = getAzureCLICredentials();
-  const adaptedCredentials = new AzureIdentityCredentialAdapter(credentials);
-  const armClient = new CosmosDBManagementClient(adaptedCredentials, subscriptionId);
-  const accountName = getAccountName(TestAccount.SQL);
-  const account = await armClient.databaseAccounts.get(resourceGroupName, accountName);
-
-  const clientOptions: CosmosClientOptions = {
-    endpoint: account.documentEndpoint!,
-  };
-
-  const nosqlAccountRbacToken = process.env.NOSQL_TESTACCOUNT_TOKEN;
-  if (nosqlAccountRbacToken) {
-    clientOptions.tokenProvider = async (): Promise<string> => {
-      const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
-      const authorizationToken = `${AUTH_PREFIX}${nosqlAccountRbacToken}`;
-      return authorizationToken;
-    };
-  } else {
-    const keys = await armClient.databaseAccounts.listKeys(resourceGroupName, accountName);
-    clientOptions.key = keys.primaryMasterKey;
-  }
-
-  const client = new CosmosClient(clientOptions);
+  const { armClient, client } = await createCosmosClientForSQLAccount();
   const { database } = await client.databases.createIfNotExists({ id: databaseId });
   try {
     const { container } = await database.containers.createIfNotExists({
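For reference, a condensed sketch of the credential branch that createCosmosClientForSQLAccount now centralizes. `buildClient` is a name invented for this sketch; the AAD signature format is taken verbatim from the diff:

import { CosmosClient, CosmosClientOptions } from "@azure/cosmos";

function buildClient(endpoint: string, rbacToken?: string, masterKey?: string): CosmosClient {
  const clientOptions: CosmosClientOptions = { endpoint };
  if (rbacToken) {
    // AAD path: hand the SDK a pre-acquired token in the header-style signature the tests use.
    clientOptions.tokenProvider = async (): Promise<string> => `type=aad&ver=1.0&sig=${rbacToken}`;
  } else {
    // Key path: fall back to the account's primary master key.
    clientOptions.key = masterKey;
  }
  return new CosmosClient(clientOptions);
}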