Mirror of https://github.com/Azure/cosmos-explorer.git, synced 2025-12-25 11:51:07 +00:00

Compare commits: release/ma... → before_opt (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 9bd905403b |  |
```diff
@@ -2,6 +2,7 @@
 
 UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), https://cosmos.azure.com/, and the [Cosmos DB Emulator](https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator)
 
 
+
 
 ## Getting Started
```
```diff
@@ -61,8 +61,6 @@
 
 @GalleryBackgroundColor: #fdfdfd;
 
-@LinkColor: #2d6da4;
-
 //Icons
 @InfoIconColor: #0072c6;
 @WarningIconColor: #db7500;
```
```diff
@@ -3,12 +3,12 @@ import { getAuthorizationTokenUsingResourceTokens } from "Common/getAuthorizatio
 import { AuthorizationToken } from "Contracts/FabricMessageTypes";
 import { checkDatabaseResourceTokensValidity } from "Platform/Fabric/FabricUtil";
 import { LocalStorageUtility, StorageKey } from "Shared/StorageUtility";
+import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
 import { AuthType } from "../AuthType";
-import { PriorityLevel } from "../Common/Constants";
+import { BackendApi, PriorityLevel } from "../Common/Constants";
 import * as Logger from "../Common/Logger";
 import { Platform, configContext } from "../ConfigContext";
 import { updateUserContext, userContext } from "../UserContext";
-import { isDataplaneRbacSupported } from "../Utils/APITypeUtils";
 import { logConsoleError } from "../Utils/NotificationConsoleUtils";
 import * as PriorityBasedExecutionUtils from "../Utils/PriorityBasedExecutionUtils";
 import { EmulatorMasterKey, HttpHeaders } from "./Constants";
@@ -19,7 +19,7 @@ const _global = typeof self === "undefined" ? window : self;
 export const tokenProvider = async (requestInfo: Cosmos.RequestInfo) => {
   const { verb, resourceId, resourceType, headers } = requestInfo;
 
-  const dataPlaneRBACOptionEnabled = userContext.dataPlaneRbacEnabled && isDataplaneRbacSupported(userContext.apiType);
+  const dataPlaneRBACOptionEnabled = userContext.dataPlaneRbacEnabled && userContext.apiType === "SQL";
   if (userContext.features.enableAadDataPlane || dataPlaneRBACOptionEnabled) {
     Logger.logInfo(
       `AAD Data Plane Feature flag set to ${userContext.features.enableAadDataPlane} for account with disable local auth ${userContext.databaseAccount.properties.disableLocalAuth} `,
@@ -125,6 +125,10 @@ export async function getTokenFromAuthService(
   resourceType: string,
   resourceId?: string,
 ): Promise<AuthorizationToken> {
+  if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
+    return getTokenFromAuthService_ToBeDeprecated(verb, resourceType, resourceId);
+  }
+
   try {
     const host: string = configContext.PORTAL_BACKEND_ENDPOINT;
     const response: Response = await _global.fetch(host + "/api/connectionstring/runtimeproxy/authorizationtokens", {
@@ -147,6 +151,34 @@ export async function getTokenFromAuthService(
   }
 }
 
+export async function getTokenFromAuthService_ToBeDeprecated(
+  verb: string,
+  resourceType: string,
+  resourceId?: string,
+): Promise<AuthorizationToken> {
+  try {
+    const host = configContext.BACKEND_ENDPOINT;
+    const response = await _global.fetch(host + "/api/guest/runtimeproxy/authorizationTokens", {
+      method: "POST",
+      headers: {
+        "content-type": "application/json",
+        "x-ms-encrypted-auth-token": userContext.accessToken,
+      },
+      body: JSON.stringify({
+        verb,
+        resourceType,
+        resourceId,
+      }),
+    });
+    //TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
+    const result = JSON.parse(await response.json());
+    return result;
+  } catch (error) {
+    logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
+    return Promise.reject(error);
+  }
+}
+
 // The Capability is a bitmap, which cosmosdb backend decodes as per the below enum
 enum SDKSupportedCapabilities {
   None = 0,
@@ -171,10 +203,8 @@ export function client(): Cosmos.CosmosClient {
   }
 
   let _defaultHeaders: Cosmos.CosmosHeaders = {};
 
   _defaultHeaders["x-ms-cosmos-sdk-supportedcapabilities"] =
     SDKSupportedCapabilities.None | SDKSupportedCapabilities.PartitionMerge;
-  _defaultHeaders["x-ms-cosmos-throughput-bucket"] = 1;
-
   if (
     userContext.authType === AuthType.ConnectionString ||
```
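The token-service change above introduces a per-API migration switch: `getTokenFromAuthService` targets the new portal backend only when `useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)` allows it, and otherwise falls back to the legacy guest endpoint via `getTokenFromAuthService_ToBeDeprecated`. A minimal sketch of that routing, assuming `useNewPortalBackendEndpoint` is simply a membership check on `configContext.NEW_BACKEND_APIS` (the real helper lives in `Utils/EndpointUtils` and is not part of this diff):

```typescript
// Illustrative sketch only; not the repository's EndpointUtils implementation.
type BackendApi = "RuntimeProxy";

interface SketchConfig {
  PORTAL_BACKEND_ENDPOINT: string;
  BACKEND_ENDPOINT: string;
  NEW_BACKEND_APIS?: BackendApi[];
}

// Assumption: the helper is a simple membership check on NEW_BACKEND_APIS.
function useNewPortalBackendEndpoint(config: SketchConfig, api: BackendApi): boolean {
  return config.NEW_BACKEND_APIS?.includes(api) ?? false;
}

// Mirrors the two URLs used by getTokenFromAuthService and its _ToBeDeprecated fallback.
function authTokenUrl(config: SketchConfig): string {
  return useNewPortalBackendEndpoint(config, "RuntimeProxy")
    ? config.PORTAL_BACKEND_ENDPOINT + "/api/connectionstring/runtimeproxy/authorizationtokens"
    : config.BACKEND_ENDPOINT + "/api/guest/runtimeproxy/authorizationTokens";
}

// With NEW_BACKEND_APIS unset, the legacy guest endpoint is chosen.
console.log(
  authTokenUrl({
    PORTAL_BACKEND_ENDPOINT: "https://portal-backend.example.com", // placeholder value
    BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
  }),
);
```

Under that assumption, leaving `NEW_BACKEND_APIS` unset keeps every runtime-proxy token request on the legacy `BACKEND_ENDPOINT` path.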
```diff
@@ -4,8 +4,16 @@ import { configContext, resetConfigContext, updateConfigContext } from "../Confi
 import { DatabaseAccount } from "../Contracts/DataModels";
 import { Collection } from "../Contracts/ViewModels";
 import DocumentId from "../Explorer/Tree/DocumentId";
+import { extractFeatures } from "../Platform/Hosted/extractFeatures";
 import { updateUserContext } from "../UserContext";
-import { deleteDocuments, getEndpoint, queryDocuments, readDocument, updateDocument } from "./MongoProxyClient";
+import {
+  deleteDocument,
+  getEndpoint,
+  getFeatureEndpointOrDefault,
+  queryDocuments,
+  readDocument,
+  updateDocument,
+} from "./MongoProxyClient";
 
 const databaseId = "testDB";
 
@@ -188,8 +196,20 @@ describe("MongoProxyClient", () => {
         expect.any(Object),
       );
     });
+
+    it("builds the correct proxy URL in development", () => {
+      updateConfigContext({
+        MONGO_BACKEND_ENDPOINT: "https://localhost:1234",
+        globallyEnabledMongoAPIs: [],
+      });
+      updateDocument(databaseId, collection, documentId, "{}");
+      expect(window.fetch).toHaveBeenCalledWith(
+        `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
+        expect.any(Object),
+      );
+    });
   });
-  describe("deleteDocuments", () => {
+  describe("deleteDocument", () => {
     beforeEach(() => {
       resetConfigContext();
       updateUserContext({
@@ -206,9 +226,9 @@ describe("MongoProxyClient", () => {
     });
 
     it("builds the correct URL", () => {
-      deleteDocuments(databaseId, collection, [documentId]);
+      deleteDocument(databaseId, collection, documentId);
       expect(window.fetch).toHaveBeenCalledWith(
-        `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer/bulkdelete`,
+        `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
         expect.any(Object),
       );
     });
@@ -218,9 +238,9 @@ describe("MongoProxyClient", () => {
         MONGO_PROXY_ENDPOINT: "https://localhost:1234",
         globallyEnabledMongoAPIs: [],
       });
-      deleteDocuments(databaseId, collection, [documentId]);
+      deleteDocument(databaseId, collection, documentId);
       expect(window.fetch).toHaveBeenCalledWith(
-        `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer/bulkdelete`,
+        `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
         expect.any(Object),
       );
     });
@@ -255,4 +275,33 @@ describe("MongoProxyClient", () => {
       expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/connectionstring/mongo/explorer`);
     });
   });
+
+  describe("getFeatureEndpointOrDefault", () => {
+    beforeEach(() => {
+      resetConfigContext();
+      updateConfigContext({
+        MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
+        globallyEnabledMongoAPIs: [],
+      });
+      const params = new URLSearchParams({
+        "feature.mongoProxyEndpoint": MongoProxyEndpoints.Prod,
+        "feature.mongoProxyAPIs": "readDocument|createDocument",
+      });
+      const features = extractFeatures(params);
+      updateUserContext({
+        authType: AuthType.AAD,
+        features: features,
+      });
+    });
+
+    it("returns a local endpoint", () => {
+      const endpoint = getFeatureEndpointOrDefault("readDocument");
+      expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`);
+    });
+
+    it("returns a production endpoint", () => {
+      const endpoint = getFeatureEndpointOrDefault("DeleteDocument");
+      expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`);
+    });
+  });
 });
```
```diff
@@ -1,13 +1,20 @@
 import { Constants as CosmosSDKConstants } from "@azure/cosmos";
+import {
+  allowedMongoProxyEndpoints_ToBeDeprecated,
+  defaultAllowedMongoProxyEndpoints,
+  validateEndpoint,
+} from "Utils/EndpointUtils";
+import queryString from "querystring";
 import { AuthType } from "../AuthType";
 import { configContext } from "../ConfigContext";
 import * as DataModels from "../Contracts/DataModels";
 import { MessageTypes } from "../Contracts/ExplorerContracts";
 import { Collection } from "../Contracts/ViewModels";
 import DocumentId from "../Explorer/Tree/DocumentId";
+import { hasFlag } from "../Platform/Hosted/extractFeatures";
 import { userContext } from "../UserContext";
 import { logConsoleError } from "../Utils/NotificationConsoleUtils";
-import { ApiType, ContentType, HttpHeaders, HttpStatusCodes } from "./Constants";
+import { ApiType, ContentType, HttpHeaders, HttpStatusCodes, MongoProxyApi, MongoProxyEndpoints } from "./Constants";
 import { MinimalQueryIterator } from "./IteratorUtilities";
 import { sendMessage } from "./MessageHandler";
 
@@ -60,6 +67,10 @@ export function queryDocuments(
   query: string,
   continuationToken?: string,
 ): Promise<QueryResponse> {
+  if (!useMongoProxyEndpoint(MongoProxyApi.ResourceList) || !useMongoProxyEndpoint(MongoProxyApi.QueryDocuments)) {
+    return queryDocuments_ToBeDeprecated(databaseId, collection, isResourceList, query, continuationToken);
+  }
+
   const { databaseAccount } = userContext;
   const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
   const params = {
@@ -78,7 +89,7 @@ export function queryDocuments(
     query,
   };
 
-  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT) || "";
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.ResourceList) || "";
 
   const headers = {
     ...defaultHeaders,
@@ -116,11 +127,76 @@ export function queryDocuments(
     });
 }
 
+function queryDocuments_ToBeDeprecated(
+  databaseId: string,
+  collection: Collection,
+  isResourceList: boolean,
+  query: string,
+  continuationToken?: string,
+): Promise<QueryResponse> {
+  const { databaseAccount } = userContext;
+  const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
+  const params = {
+    db: databaseId,
+    coll: collection.id(),
+    resourceUrl: `${resourceEndpoint}dbs/${databaseId}/colls/${collection.id()}/docs/`,
+    rid: collection.rid,
+    rtype: "docs",
+    sid: userContext.subscriptionId,
+    rg: userContext.resourceGroup,
+    dba: databaseAccount.name,
+    pk:
+      collection && collection.partitionKey && !collection.partitionKey.systemKey
+        ? collection.partitionKeyProperties?.[0]
+        : "",
+  };
+
+  const endpoint = getFeatureEndpointOrDefault("resourcelist") || "";
+
+  const headers = {
+    ...defaultHeaders,
+    ...authHeaders(),
+    [CosmosSDKConstants.HttpHeaders.IsQuery]: "true",
+    [CosmosSDKConstants.HttpHeaders.PopulateQueryMetrics]: "true",
+    [CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
+    [CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
+    [CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
+    [HttpHeaders.contentType]: "application/query+json",
+  };
+
+  if (continuationToken) {
+    headers[CosmosSDKConstants.HttpHeaders.Continuation] = continuationToken;
+  }
+
+  const path = isResourceList ? "/resourcelist" : "";
+
+  return window
+    .fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
+      method: "POST",
+      body: JSON.stringify({ query }),
+      headers,
+    })
+    .then(async (response) => {
+      if (response.ok) {
+        return {
+          continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
+          documents: (await response.json()).Documents as DataModels.DocumentId[],
+          headers: response.headers,
+        };
+      }
+      await errorHandling(response, "querying documents", params);
+      return undefined;
+    });
+}
+
 export function readDocument(
   databaseId: string,
   collection: Collection,
   documentId: DocumentId,
 ): Promise<DataModels.DocumentId> {
+  if (!useMongoProxyEndpoint(MongoProxyApi.ReadDocument)) {
+    return readDocument_ToBeDeprecated(databaseId, collection, documentId);
+  }
   const { databaseAccount } = userContext;
   const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
   const idComponents = documentId.self.split("/");
@@ -141,7 +217,7 @@ export function readDocument(
         : "",
   };
 
-  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.ReadDocument);
 
   return window
     .fetch(endpoint, {
@@ -161,12 +237,61 @@ export function readDocument(
     });
 }
 
+export function readDocument_ToBeDeprecated(
+  databaseId: string,
+  collection: Collection,
+  documentId: DocumentId,
+): Promise<DataModels.DocumentId> {
+  const { databaseAccount } = userContext;
+  const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
+  const idComponents = documentId.self.split("/");
+  const path = idComponents.slice(0, 4).join("/");
+  const rid = encodeURIComponent(idComponents[5]);
+  const params = {
+    db: databaseId,
+    coll: collection.id(),
+    resourceUrl: `${resourceEndpoint}${path}/${rid}`,
+    rid,
+    rtype: "docs",
+    sid: userContext.subscriptionId,
+    rg: userContext.resourceGroup,
+    dba: databaseAccount.name,
+    pk:
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+        ? documentId.partitionKeyProperties?.[0]
+        : "",
+  };
+
+  const endpoint = getFeatureEndpointOrDefault("readDocument");
+
+  return window
+    .fetch(`${endpoint}?${queryString.stringify(params)}`, {
+      method: "GET",
+      headers: {
+        ...defaultHeaders,
+        ...authHeaders(),
+        [CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
+          JSON.stringify(documentId.partitionKeyHeader()),
+        ),
+      },
+    })
+    .then(async (response) => {
+      if (response.ok) {
+        return response.json();
+      }
+      return await errorHandling(response, "reading document", params);
+    });
+}
+
 export function createDocument(
   databaseId: string,
   collection: Collection,
   partitionKeyProperty: string,
   documentContent: unknown,
 ): Promise<DataModels.DocumentId> {
+  if (!useMongoProxyEndpoint(MongoProxyApi.CreateDocument)) {
+    return createDocument_ToBeDeprecated(databaseId, collection, partitionKeyProperty, documentContent);
+  }
   const { databaseAccount } = userContext;
   const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
   const params = {
@@ -183,7 +308,7 @@ export function createDocument(
     documentContent: JSON.stringify(documentContent),
   };
 
-  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.CreateDocument);
 
   return window
     .fetch(`${endpoint}/createDocument`, {
@@ -203,12 +328,54 @@ export function createDocument(
     });
 }
 
+export function createDocument_ToBeDeprecated(
+  databaseId: string,
+  collection: Collection,
+  partitionKeyProperty: string,
+  documentContent: unknown,
+): Promise<DataModels.DocumentId> {
+  const { databaseAccount } = userContext;
+  const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
+  const params = {
+    db: databaseId,
+    coll: collection.id(),
+    resourceUrl: `${resourceEndpoint}dbs/${databaseId}/colls/${collection.id()}/docs/`,
+    rid: collection.rid,
+    rtype: "docs",
+    sid: userContext.subscriptionId,
+    rg: userContext.resourceGroup,
+    dba: databaseAccount.name,
+    pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
+  };
+
+  const endpoint = getFeatureEndpointOrDefault("createDocument");
+
+  return window
+    .fetch(`${endpoint}/resourcelist?${queryString.stringify(params)}`, {
+      method: "POST",
+      body: JSON.stringify(documentContent),
+      headers: {
+        ...defaultHeaders,
+        ...authHeaders(),
+      },
+    })
+    .then(async (response) => {
+      if (response.ok) {
+        return response.json();
+      }
+      return await errorHandling(response, "creating document", params);
+    });
+}
+
 export function updateDocument(
   databaseId: string,
   collection: Collection,
   documentId: DocumentId,
   documentContent: string,
 ): Promise<DataModels.DocumentId> {
+  if (!useMongoProxyEndpoint(MongoProxyApi.UpdateDocument)) {
+    return updateDocument_ToBeDeprecated(databaseId, collection, documentId, documentContent);
+  }
   const { databaseAccount } = userContext;
   const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
   const idComponents = documentId.self.split("/");
@@ -229,7 +396,7 @@ export function updateDocument(
         : "",
     documentContent,
   };
-  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.UpdateDocument);
 
   return window
     .fetch(endpoint, {
@@ -250,6 +417,139 @@ export function updateDocument(
     });
 }
 
+export function updateDocument_ToBeDeprecated(
+  databaseId: string,
+  collection: Collection,
+  documentId: DocumentId,
+  documentContent: string,
+): Promise<DataModels.DocumentId> {
+  const { databaseAccount } = userContext;
+  const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
+  const idComponents = documentId.self.split("/");
+  const path = idComponents.slice(0, 5).join("/");
+  const rid = encodeURIComponent(idComponents[5]);
+  const params = {
+    db: databaseId,
+    coll: collection.id(),
+    resourceUrl: `${resourceEndpoint}${path}/${rid}`,
+    rid,
+    rtype: "docs",
+    sid: userContext.subscriptionId,
+    rg: userContext.resourceGroup,
+    dba: databaseAccount.name,
+    pk:
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+        ? documentId.partitionKeyProperties?.[0]
+        : "",
+  };
+  const endpoint = getFeatureEndpointOrDefault("updateDocument");
+
+  return window
+    .fetch(`${endpoint}?${queryString.stringify(params)}`, {
+      method: "PUT",
+      body: documentContent,
+      headers: {
+        ...defaultHeaders,
+        ...authHeaders(),
+        [HttpHeaders.contentType]: ContentType.applicationJson,
+        [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
+      },
+    })
+    .then(async (response) => {
+      if (response.ok) {
+        return response.json();
+      }
+      return await errorHandling(response, "updating document", params);
+    });
+}
+
+export function deleteDocument(databaseId: string, collection: Collection, documentId: DocumentId): Promise<void> {
+  if (!useMongoProxyEndpoint(MongoProxyApi.DeleteDocument)) {
+    return deleteDocument_ToBeDeprecated(databaseId, collection, documentId);
+  }
+  const { databaseAccount } = userContext;
+  const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
+  const idComponents = documentId.self.split("/");
+  const path = idComponents.slice(0, 5).join("/");
+  const rid = encodeURIComponent(idComponents[5]);
+  const params = {
+    databaseID: databaseId,
+    collectionID: collection.id(),
+    resourceUrl: `${resourceEndpoint}${path}/${rid}`,
+    resourceID: rid,
+    resourceType: "docs",
+    subscriptionID: userContext.subscriptionId,
+    resourceGroup: userContext.resourceGroup,
+    databaseAccountName: databaseAccount.name,
+    partitionKey:
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+        ? documentId.partitionKeyProperties?.[0]
+        : "",
+  };
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.DeleteDocument);
+
+  return window
    .fetch(endpoint, {
+      method: "DELETE",
+      body: JSON.stringify(params),
+      headers: {
+        ...defaultHeaders,
+        ...authHeaders(),
+        [HttpHeaders.contentType]: ContentType.applicationJson,
+      },
+    })
+    .then(async (response) => {
+      if (response.ok) {
+        return undefined;
+      }
+      return await errorHandling(response, "deleting document", params);
+    });
+}
+
+export function deleteDocument_ToBeDeprecated(
+  databaseId: string,
+  collection: Collection,
+  documentId: DocumentId,
+): Promise<void> {
+  const { databaseAccount } = userContext;
+  const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
+  const idComponents = documentId.self.split("/");
+  const path = idComponents.slice(0, 5).join("/");
+  const rid = encodeURIComponent(idComponents[5]);
+  const params = {
+    db: databaseId,
+    coll: collection.id(),
+    resourceUrl: `${resourceEndpoint}${path}/${rid}`,
+    rid,
+    rtype: "docs",
+    sid: userContext.subscriptionId,
+    rg: userContext.resourceGroup,
+    dba: databaseAccount.name,
+    pk:
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+        ? documentId.partitionKeyProperties?.[0]
+        : "",
+  };
+  const endpoint = getFeatureEndpointOrDefault("deleteDocument");
+
+  return window
+    .fetch(`${endpoint}?${queryString.stringify(params)}`, {
+      method: "DELETE",
+      headers: {
+        ...defaultHeaders,
+        ...authHeaders(),
+        [HttpHeaders.contentType]: ContentType.applicationJson,
+        [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
+      },
+    })
+    .then(async (response) => {
+      if (response.ok) {
+        return undefined;
+      }
+      return await errorHandling(response, "deleting document", params);
+    });
+}
+
 export function deleteDocuments(
   databaseId: string,
   collection: Collection,
@@ -275,7 +575,7 @@ export function deleteDocuments(
     resourceGroup: userContext.resourceGroup,
     databaseAccountName: databaseAccount.name,
   };
-  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.BulkDelete);
 
   return window
     .fetch(`${endpoint}/bulkdelete`, {
@@ -299,6 +599,9 @@ export function deleteDocuments(
 export function createMongoCollectionWithProxy(
   params: DataModels.CreateCollectionParams,
 ): Promise<DataModels.Collection> {
+  if (!useMongoProxyEndpoint(MongoProxyApi.CreateCollectionWithProxy)) {
+    return createMongoCollectionWithProxy_ToBeDeprecated(params);
+  }
   const { databaseAccount } = userContext;
   const shardKey: string = params.partitionKey?.paths[0];
 
@@ -319,7 +622,7 @@ export function createMongoCollectionWithProxy(
     isSharded: !!shardKey,
   };
 
-  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
+  const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.CreateCollectionWithProxy);
 
   return window
     .fetch(`${endpoint}/createCollection`, {
@@ -339,6 +642,70 @@ export function createMongoCollectionWithProxy(
     });
 }
 
+export function createMongoCollectionWithProxy_ToBeDeprecated(
+  params: DataModels.CreateCollectionParams,
+): Promise<DataModels.Collection> {
+  const { databaseAccount } = userContext;
+  const shardKey: string = params.partitionKey?.paths[0];
+  const mongoParams: DataModels.MongoParameters = {
+    resourceUrl: databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint,
+    db: params.databaseId,
+    coll: params.collectionId,
+    pk: shardKey,
+    offerThroughput: params.autoPilotMaxThroughput || params.offerThroughput,
+    cd: params.createNewDatabase,
+    st: params.databaseLevelThroughput,
+    is: !!shardKey,
+    rid: "",
+    rtype: "colls",
+    sid: userContext.subscriptionId,
+    rg: userContext.resourceGroup,
+    dba: databaseAccount.name,
+    isAutoPilot: !!params.autoPilotMaxThroughput,
+  };
+
+  const endpoint = getFeatureEndpointOrDefault("createCollectionWithProxy");
+
+  return window
+    .fetch(
+      `${endpoint}/createCollection?${queryString.stringify(
+        mongoParams as unknown as queryString.ParsedUrlQueryInput,
+      )}`,
+      {
+        method: "POST",
+        headers: {
+          ...defaultHeaders,
+          ...authHeaders(),
+          [HttpHeaders.contentType]: "application/json",
+        },
+      },
+    )
+    .then(async (response) => {
+      if (response.ok) {
+        return response.json();
+      }
+      return await errorHandling(response, "creating collection", mongoParams);
+    });
+}
+export function getFeatureEndpointOrDefault(feature: string): string {
+  let endpoint;
+  if (useMongoProxyEndpoint(feature)) {
+    endpoint = configContext.MONGO_PROXY_ENDPOINT;
+  } else {
+    const allowedMongoProxyEndpoints = configContext.allowedMongoProxyEndpoints || [
+      ...defaultAllowedMongoProxyEndpoints,
+      ...allowedMongoProxyEndpoints_ToBeDeprecated,
+    ];
+    endpoint =
+      hasFlag(userContext.features.mongoProxyAPIs, feature) &&
+      validateEndpoint(userContext.features.mongoProxyEndpoint, allowedMongoProxyEndpoints)
+        ? userContext.features.mongoProxyEndpoint
+        : configContext.MONGO_BACKEND_ENDPOINT || configContext.BACKEND_ENDPOINT;
+  }
+
+  return getEndpoint(endpoint);
+}
+
 export function getEndpoint(endpoint: string): string {
   let url = endpoint + "/api/mongo/explorer";
 
@@ -352,6 +719,84 @@ export function getEndpoint(endpoint: string): string {
   return url;
 }
 
+export function useMongoProxyEndpoint(mongoProxyApi: string): boolean {
+  const mongoProxyEnvironmentMap: { [key: string]: string[] } = {
+    [MongoProxyApi.ResourceList]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.QueryDocuments]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.CreateDocument]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.ReadDocument]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.UpdateDocument]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.DeleteDocument]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.CreateCollectionWithProxy]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.LegacyMongoShell]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+    [MongoProxyApi.BulkDelete]: [
+      MongoProxyEndpoints.Development,
+      MongoProxyEndpoints.Mpac,
+      MongoProxyEndpoints.Prod,
+      MongoProxyEndpoints.Fairfax,
+      MongoProxyEndpoints.Mooncake,
+    ],
+  };
+
+  if (!mongoProxyEnvironmentMap[mongoProxyApi] || !configContext.MONGO_PROXY_ENDPOINT) {
+    return false;
+  }
+
+  if (configContext.globallyEnabledMongoAPIs.includes(mongoProxyApi)) {
+    return true;
+  }
+
+  return mongoProxyEnvironmentMap[mongoProxyApi].includes(configContext.MONGO_PROXY_ENDPOINT);
+}
+
 export class ThrottlingError extends Error {
   constructor(message: string) {
     super(message);
```
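The pattern running through these MongoProxyClient changes is a per-API rollout gate: every operation first asks `useMongoProxyEndpoint`, which checks a per-API list of proxy environments plus the `globallyEnabledMongoAPIs` override, and `getFeatureEndpointOrDefault` then resolves either the new proxy, a feature-flag override endpoint, or the legacy Mongo backend. A simplified, self-contained sketch of that selection (endpoint values below are placeholders, and the feature-flag branch handled by `hasFlag`/`validateEndpoint` is omitted):

```typescript
// Sketch of the gating + fallback pattern; the constants below are placeholders,
// not the real MongoProxyEndpoints values from ./Constants.
type MongoProxyApi = "ReadDocument" | "CreateDocument" | "BulkDelete";

interface SketchConfig {
  MONGO_PROXY_ENDPOINT: string;
  MONGO_BACKEND_ENDPOINT?: string;
  BACKEND_ENDPOINT: string;
  globallyEnabledMongoAPIs: string[];
}

// Per-API rollout map: an API uses the proxy only in environments where it has shipped
// (mirrors mongoProxyEnvironmentMap in the diff, with invented environment URLs).
const rollout: Record<MongoProxyApi, string[]> = {
  ReadDocument: ["https://localhost:1234", "https://mongo-proxy.example.com"],
  CreateDocument: ["https://localhost:1234", "https://mongo-proxy.example.com"],
  BulkDelete: ["https://localhost:1234"],
};

function useMongoProxyEndpoint(config: SketchConfig, api: MongoProxyApi): boolean {
  if (!rollout[api] || !config.MONGO_PROXY_ENDPOINT) {
    return false;
  }
  if (config.globallyEnabledMongoAPIs.includes(api)) {
    return true;
  }
  return rollout[api].includes(config.MONGO_PROXY_ENDPOINT);
}

// Callers resolve one base URL and append "/api/mongo/explorer", as getEndpoint does.
function resolveBase(config: SketchConfig, api: MongoProxyApi): string {
  return useMongoProxyEndpoint(config, api)
    ? config.MONGO_PROXY_ENDPOINT
    : config.MONGO_BACKEND_ENDPOINT || config.BACKEND_ENDPOINT;
}

const config: SketchConfig = {
  MONGO_PROXY_ENDPOINT: "https://mongo-proxy.example.com",
  BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
  globallyEnabledMongoAPIs: [],
};
// BulkDelete has not shipped to this environment, so it falls back to the legacy backend.
console.log(`${resolveBase(config, "BulkDelete")}/api/mongo/explorer`);
```

The per-API map lets each Mongo operation move to the proxy independently, which is why the diff keeps a `_ToBeDeprecated` twin next to each migrated function.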
```diff
@@ -105,8 +105,6 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
       ? parseInt(resource.softAllowedMaximumThroughput)
       : resource.softAllowedMaximumThroughput;
 
-  const throughputBuckets = resource?.throughputBuckets;
-
   if (autoscaleSettings) {
     return {
       id: offerId,
@@ -116,7 +114,6 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
       offerReplacePending: resource.offerReplacePending === "true",
       instantMaximumThroughput,
       softAllowedMaximumThroughput,
-      throughputBuckets,
     };
   }
 
@@ -128,7 +125,6 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
     offerReplacePending: resource.offerReplacePending === "true",
     instantMaximumThroughput,
     softAllowedMaximumThroughput,
-    throughputBuckets,
   };
 }
 
```
```diff
@@ -1,6 +1,6 @@
 import { OfferDefinition, RequestOptions } from "@azure/cosmos";
 import { AuthType } from "../../AuthType";
-import { Offer, SDKOfferDefinition, ThroughputBucket, UpdateOfferParams } from "../../Contracts/DataModels";
+import { Offer, SDKOfferDefinition, UpdateOfferParams } from "../../Contracts/DataModels";
 import { userContext } from "../../UserContext";
 import {
   migrateCassandraKeyspaceToAutoscale,
@@ -359,13 +359,6 @@ const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpd
     body.properties.resource.throughput = params.manualThroughput;
   }
 
-  if (params.throughputBuckets) {
-    const throughputBuckets = params.throughputBuckets.filter(
-      (bucket: ThroughputBucket) => bucket.maxThroughputPercentage !== 100,
-    );
-    body.properties.resource.throughputBuckets = throughputBuckets;
-  }
-
   return body;
 };
 
```
```diff
@@ -12,6 +12,7 @@ import {
   allowedGraphEndpoints,
   allowedHostedExplorerEndpoints,
   allowedJunoOrigins,
+  allowedMongoBackendEndpoints,
   allowedMsalRedirectEndpoints,
   defaultAllowedArmEndpoints,
   defaultAllowedBackendEndpoints,
@@ -49,8 +50,10 @@ export interface ConfigContext {
   CATALOG_API_KEY: string;
   ARCADIA_ENDPOINT: string;
   ARCADIA_LIVY_ENDPOINT_DNS_ZONE: string;
+  BACKEND_ENDPOINT?: string;
   PORTAL_BACKEND_ENDPOINT: string;
   NEW_BACKEND_APIS?: BackendApi[];
+  MONGO_BACKEND_ENDPOINT?: string;
   MONGO_PROXY_ENDPOINT: string;
   CASSANDRA_PROXY_ENDPOINT: string;
   NEW_CASSANDRA_APIS?: string[];
@@ -106,6 +109,7 @@ let configContext: Readonly<ConfigContext> = {
   GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/organizations/AzureCosmosDBNotebooks/settings/applications/1189306
   GITHUB_TEST_ENV_CLIENT_ID: "b63fc8cbf87fd3c6e2eb", // Registered OAuth app: https://github.com/organizations/AzureCosmosDBNotebooks/settings/applications/1777772
   JUNO_ENDPOINT: JunoEndpoints.Prod,
+  BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
   PORTAL_BACKEND_ENDPOINT: PortalBackendEndpoints.Prod,
   MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
   CASSANDRA_PROXY_ENDPOINT: CassandraProxyEndpoints.Prod,
@@ -148,6 +152,15 @@ export function updateConfigContext(newContext: Partial<ConfigContext>): void {
     delete newContext.ARCADIA_ENDPOINT;
   }
 
+  if (
+    !validateEndpoint(
+      newContext.BACKEND_ENDPOINT,
+      configContext.allowedBackendEndpoints || defaultAllowedBackendEndpoints,
+    )
+  ) {
+    delete newContext.BACKEND_ENDPOINT;
+  }
+
   if (
     !validateEndpoint(
       newContext.MONGO_PROXY_ENDPOINT,
@@ -157,6 +170,10 @@ export function updateConfigContext(newContext: Partial<ConfigContext>): void {
     delete newContext.MONGO_PROXY_ENDPOINT;
   }
 
+  if (!validateEndpoint(newContext.MONGO_BACKEND_ENDPOINT, allowedMongoBackendEndpoints)) {
+    delete newContext.MONGO_BACKEND_ENDPOINT;
+  }
+
   if (
     !validateEndpoint(
       newContext.CASSANDRA_PROXY_ENDPOINT,
```
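On the configuration side, the legacy `BACKEND_ENDPOINT` and the new optional `MONGO_BACKEND_ENDPOINT` are reintroduced and sanitized like the other endpoints: `updateConfigContext` drops any override that does not pass `validateEndpoint` against an allow-list. A minimal sketch of that pattern, assuming `validateEndpoint` is essentially an allow-list membership check that lets an unset value through (the real helper sits in `Utils/EndpointUtils`):

```typescript
// Sketch of the allow-list sanitization used by updateConfigContext above.
// validateEndpoint here is a simplified stand-in for the helper in Utils/EndpointUtils.
interface SketchContext {
  BACKEND_ENDPOINT?: string;
  MONGO_BACKEND_ENDPOINT?: string;
}

function validateEndpoint(endpoint: string | undefined, allowed: string[]): boolean {
  // Assumption: an unset endpoint counts as valid so that "no override" passes through
  // (deleting an absent key would be a no-op anyway).
  return endpoint === undefined || allowed.includes(endpoint);
}

function sanitize(newContext: Partial<SketchContext>, allowedBackends: string[]): Partial<SketchContext> {
  if (!validateEndpoint(newContext.BACKEND_ENDPOINT, allowedBackends)) {
    delete newContext.BACKEND_ENDPOINT; // silently drop overrides that are not on the allow-list
  }
  return newContext;
}

// An unknown host is discarded; the existing configContext value stays in effect.
console.log(sanitize({ BACKEND_ENDPOINT: "https://evil.example.com" }, ["https://main.documentdb.ext.azure.com"]));
```

Dropping the key, rather than throwing, means a bad override silently falls back to whatever the existing configContext already holds.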
```diff
@@ -275,12 +275,6 @@ export interface Offer {
   offerReplacePending: boolean;
   instantMaximumThroughput?: number;
   softAllowedMaximumThroughput?: number;
-  throughputBuckets?: ThroughputBucket[];
-}
-
-export interface ThroughputBucket {
-  id: number;
-  maxThroughputPercentage: number;
 }
 
 export interface SDKOfferDefinition extends Resource {
@@ -403,7 +397,6 @@ export interface UpdateOfferParams {
   collectionId?: string;
   migrateToAutoPilot?: boolean;
   migrateToManual?: boolean;
-  throughputBuckets?: ThroughputBucket[];
 }
 
 export interface Notification {
```
```diff
@@ -406,6 +406,7 @@ export interface DataExplorerInputsFrame {
   csmEndpoint?: string;
   dnsSuffix?: string;
   serverId?: string;
+  extensionEndpoint?: string;
   portalBackendEndpoint?: string;
   mongoProxyEndpoint?: string;
   cassandraProxyEndpoint?: string;
```
```diff
@@ -1,7 +1,5 @@
-import { AuthType } from "AuthType";
 import { shallow } from "enzyme";
 import ko from "knockout";
-import { Features } from "Platform/Hosted/extractFeatures";
 import React from "react";
 import { updateCollection } from "../../../Common/dataAccess/updateCollection";
 import { updateOffer } from "../../../Common/dataAccess/updateOffer";
@@ -249,42 +247,4 @@ describe("SettingsComponent", () => {
     expect(conflictResolutionPolicy.mode).toEqual(DataModels.ConflictResolutionMode.Custom);
     expect(conflictResolutionPolicy.conflictResolutionProcedure).toEqual(expectSprocPath);
   });
-
-  it("should save throughput bucket changes when Save button is clicked", async () => {
-    updateUserContext({
-      apiType: "SQL",
-      features: { enableThroughputBuckets: true } as Features,
-      authType: AuthType.AAD,
-    });
-
-    const wrapper = shallow(<SettingsComponent {...baseProps} />);
-
-    const settingsComponentInstance = wrapper.instance() as SettingsComponent;
-    const isEnabled = settingsComponentInstance["throughputBucketsEnabled"];
-    expect(isEnabled).toBe(true);
-
-    wrapper.setState({
-      isThroughputBucketsSaveable: true,
-      throughputBuckets: [
-        { id: 1, maxThroughputPercentage: 70 },
-        { id: 2, maxThroughputPercentage: 60 },
-      ],
-    });
-
-    await settingsComponentInstance.onSaveClick();
-
-    expect(updateOffer).toHaveBeenCalledWith({
-      databaseId: collection.databaseId,
-      collectionId: collection.id(),
-      currentOffer: expect.any(Object),
-      autopilotThroughput: collection.offer().autoscaleMaxThroughput,
-      manualThroughput: collection.offer().manualThroughput,
-      throughputBuckets: [
-        { id: 1, maxThroughputPercentage: 70 },
-        { id: 2, maxThroughputPercentage: 60 },
-      ],
-    });
-
-    expect(wrapper.state("isThroughputBucketsSaveable")).toBe(false);
-  });
 });
```
```diff
@@ -7,10 +7,6 @@ import {
   ContainerPolicyComponent,
   ContainerPolicyComponentProps,
 } from "Explorer/Controls/Settings/SettingsSubComponents/ContainerPolicyComponent";
-import {
-  ThroughputBucketsComponent,
-  ThroughputBucketsComponentProps,
-} from "Explorer/Controls/Settings/SettingsSubComponents/ThroughputInputComponents/ThroughputBucketsComponent";
 import { useDatabases } from "Explorer/useDatabases";
 import { isFullTextSearchEnabled, isVectorSearchEnabled } from "Utils/CapabilityUtils";
 import { isRunningOnPublicCloud } from "Utils/CloudUtils";
@@ -90,8 +86,6 @@ export interface SettingsComponentState {
   wasAutopilotOriginallySet: boolean;
   isScaleSaveable: boolean;
   isScaleDiscardable: boolean;
-  throughputBuckets: DataModels.ThroughputBucket[];
-  throughputBucketsBaseline: DataModels.ThroughputBucket[];
   throughputError: string;
 
   timeToLive: TtlType;
@@ -110,7 +104,6 @@ export interface SettingsComponentState {
   changeFeedPolicyBaseline: ChangeFeedPolicyState;
   isSubSettingsSaveable: boolean;
   isSubSettingsDiscardable: boolean;
-  isThroughputBucketsSaveable: boolean;
 
   vectorEmbeddingPolicy: DataModels.VectorEmbeddingPolicy;
   vectorEmbeddingPolicyBaseline: DataModels.VectorEmbeddingPolicy;
@@ -165,7 +158,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
   private isVectorSearchEnabled: boolean;
   private isFullTextSearchEnabled: boolean;
   private totalThroughputUsed: number;
-  private throughputBucketsEnabled: boolean;
   public mongoDBCollectionResource: MongoDBCollectionResource;
 
   constructor(props: SettingsComponentProps) {
@@ -183,10 +175,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
     this.isFullTextSearchEnabled = isFullTextSearchEnabled() && !hasDatabaseSharedThroughput(this.collection);
 
     this.changeFeedPolicyVisible = userContext.features.enableChangeFeedPolicy;
-    this.throughputBucketsEnabled =
-      userContext.apiType === "SQL" &&
-      userContext.features.enableThroughputBuckets &&
-      userContext.authType === AuthType.AAD;
 
     // Mongo container with system partition key still treat as "Fixed"
     this.isFixedContainer =
@@ -205,8 +193,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
       wasAutopilotOriginallySet: false,
       isScaleSaveable: false,
       isScaleDiscardable: false,
-      throughputBuckets: undefined,
-      throughputBucketsBaseline: undefined,
       throughputError: undefined,
 
       timeToLive: undefined,
@@ -225,7 +211,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
       changeFeedPolicyBaseline: undefined,
       isSubSettingsSaveable: false,
       isSubSettingsDiscardable: false,
-      isThroughputBucketsSaveable: false,
 
       vectorEmbeddingPolicy: undefined,
       vectorEmbeddingPolicyBaseline: undefined,
@@ -342,8 +327,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
       this.state.isIndexingPolicyDirty ||
       this.state.isConflictResolutionDirty ||
      this.state.isComputedPropertiesDirty ||
-      (!!this.state.currentMongoIndexes && this.state.isMongoIndexingPolicySaveable) ||
-      this.state.isThroughputBucketsSaveable
+      (!!this.state.currentMongoIndexes && this.state.isMongoIndexingPolicySaveable)
     );
   };
 
@@ -355,8 +339,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
       this.state.isIndexingPolicyDirty ||
       this.state.isConflictResolutionDirty ||
      this.state.isComputedPropertiesDirty ||
-      (!!this.state.currentMongoIndexes && this.state.isMongoIndexingPolicyDiscardable) ||
-      this.state.isThroughputBucketsSaveable
+      (!!this.state.currentMongoIndexes && this.state.isMongoIndexingPolicyDiscardable)
     );
   };
 
@@ -436,8 +419,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
 
     this.setState({
       throughput: this.state.throughputBaseline,
-      throughputBuckets: this.state.throughputBucketsBaseline,
-      throughputBucketsBaseline: this.state.throughputBucketsBaseline,
       timeToLive: this.state.timeToLiveBaseline,
       timeToLiveSeconds: this.state.timeToLiveSecondsBaseline,
       displayedTtlSeconds: this.state.displayedTtlSecondsBaseline,
@@ -460,7 +441,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
       isScaleSaveable: false,
      isScaleDiscardable: false,
       isSubSettingsSaveable: false,
-      isThroughputBucketsSaveable: false,
       isSubSettingsDiscardable: false,
       isContainerPolicyDirty: false,
       isIndexingPolicyDirty: false,
@@ -499,10 +479,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
   private onIndexingPolicyContentChange = (newIndexingPolicy: DataModels.IndexingPolicy): void =>
     this.setState({ indexingPolicyContent: newIndexingPolicy });
 
-  private onThroughputBucketsSaveableChange = (isSaveable: boolean): void => {
-    this.setState({ isThroughputBucketsSaveable: isSaveable });
-  };
-
   private resetShouldDiscardContainerPolicies = (): void => this.setState({ shouldDiscardContainerPolicies: false });
 
   private resetShouldDiscardIndexingPolicy = (): void => this.setState({ shouldDiscardIndexingPolicy: false });
@@ -773,13 +749,9 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
       ] as DataModels.ComputedProperties;
     }
 
-    const throughputBuckets = this.offer?.throughputBuckets;
-
     return {
       throughput: offerThroughput,
       throughputBaseline: offerThroughput,
-      throughputBuckets,
-      throughputBucketsBaseline: throughputBuckets,
       changeFeedPolicy: changeFeedPolicy,
       changeFeedPolicyBaseline: changeFeedPolicy,
       timeToLive: timeToLive,
@@ -867,10 +839,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
     this.setState({ throughput: newThroughput, throughputError });
   };
 
-  private onThroughputBucketChange = (throughputBuckets: DataModels.ThroughputBucket[]): void => {
-    this.setState({ throughputBuckets });
-  };
-
   private onAutoPilotSelected = (isAutoPilotSelected: boolean): void =>
     this.setState({ isAutoPilotSelected: isAutoPilotSelected });
 
@@ -1061,24 +1029,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
     }
   }
```
|
||||||
if (this.throughputBucketsEnabled && this.state.isThroughputBucketsSaveable) {
|
|
||||||
const updatedOffer: DataModels.Offer = await updateOffer({
|
|
||||||
databaseId: this.collection.databaseId,
|
|
||||||
collectionId: this.collection.id(),
|
|
||||||
currentOffer: this.collection.offer(),
|
|
||||||
autopilotThroughput: this.collection.offer().autoscaleMaxThroughput
|
|
||||||
? this.collection.offer().autoscaleMaxThroughput
|
|
||||||
: undefined,
|
|
||||||
manualThroughput: this.collection.offer().manualThroughput
|
|
||||||
? this.collection.offer().manualThroughput
|
|
||||||
: undefined,
|
|
||||||
throughputBuckets: this.state.throughputBuckets,
|
|
||||||
});
|
|
||||||
this.collection.offer(updatedOffer);
|
|
||||||
this.offer = updatedOffer;
|
|
||||||
this.setState({ isThroughputBucketsSaveable: false });
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.state.isScaleSaveable) {
|
if (this.state.isScaleSaveable) {
|
||||||
const updateOfferParams: DataModels.UpdateOfferParams = {
|
const updateOfferParams: DataModels.UpdateOfferParams = {
|
||||||
databaseId: this.collection.databaseId,
|
databaseId: this.collection.databaseId,
|
||||||
@@ -1259,13 +1209,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
|||||||
onConflictResolutionDirtyChange: this.onConflictResolutionDirtyChange,
|
onConflictResolutionDirtyChange: this.onConflictResolutionDirtyChange,
|
||||||
};
|
};
|
||||||
|
|
||||||
const throughputBucketsComponentProps: ThroughputBucketsComponentProps = {
|
|
||||||
currentBuckets: this.state.throughputBuckets,
|
|
||||||
throughputBucketsBaseline: this.state.throughputBucketsBaseline,
|
|
||||||
onBucketsChange: this.onThroughputBucketChange,
|
|
||||||
onSaveableChange: this.onThroughputBucketsSaveableChange,
|
|
||||||
};
|
|
||||||
|
|
||||||
const partitionKeyComponentProps: PartitionKeyComponentProps = {
|
const partitionKeyComponentProps: PartitionKeyComponentProps = {
|
||||||
database: useDatabases.getState().findDatabaseWithId(this.collection.databaseId),
|
database: useDatabases.getState().findDatabaseWithId(this.collection.databaseId),
|
||||||
collection: this.collection,
|
collection: this.collection,
|
||||||
@@ -1328,13 +1271,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.throughputBucketsEnabled) {
|
|
||||||
tabs.push({
|
|
||||||
tab: SettingsV2TabTypes.ThroughputBucketsTab,
|
|
||||||
content: <ThroughputBucketsComponent {...throughputBucketsComponentProps} />,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const pivotProps: IPivotProps = {
|
const pivotProps: IPivotProps = {
|
||||||
onLinkClick: this.onPivotChange,
|
onLinkClick: this.onPivotChange,
|
||||||
selectedKey: SettingsV2TabTypes[this.state.selectedTab],
|
selectedKey: SettingsV2TabTypes[this.state.selectedTab],
|
||||||
|
|||||||
@@ -1,177 +0,0 @@
- import "@testing-library/jest-dom";
- import { fireEvent, render, screen } from "@testing-library/react";
- import React from "react";
- import { ThroughputBucketsComponent } from "./ThroughputBucketsComponent";
-
- describe("ThroughputBucketsComponent", () => {
- const mockOnBucketsChange = jest.fn();
- const mockOnSaveableChange = jest.fn();
-
- const defaultProps = {
- currentBuckets: [
- { id: 1, maxThroughputPercentage: 50 },
- { id: 2, maxThroughputPercentage: 60 },
- ],
- throughputBucketsBaseline: [
- { id: 1, maxThroughputPercentage: 40 },
- { id: 2, maxThroughputPercentage: 50 },
- ],
- onBucketsChange: mockOnBucketsChange,
- onSaveableChange: mockOnSaveableChange,
- };
-
- beforeEach(() => {
- jest.clearAllMocks();
- });
-
- it("renders the correct number of buckets", () => {
- render(<ThroughputBucketsComponent {...defaultProps} />);
- expect(screen.getAllByText(/Group \d+/)).toHaveLength(5);
- });
-
- it("renders buckets in the correct order even if input is unordered", () => {
- const unorderedBuckets = [
- { id: 2, maxThroughputPercentage: 60 },
- { id: 1, maxThroughputPercentage: 50 },
- ];
- render(<ThroughputBucketsComponent {...defaultProps} currentBuckets={unorderedBuckets} />);
-
- const bucketLabels = screen.getAllByText(/Group \d+/).map((el) => el.textContent);
- expect(bucketLabels).toEqual(["Group 1 (Data Explorer Query Bucket)", "Group 2", "Group 3", "Group 4", "Group 5"]);
- });
-
- it("renders all provided buckets even if they exceed the max default bucket count", () => {
- const oversizedBuckets = [
- { id: 1, maxThroughputPercentage: 50 },
- { id: 2, maxThroughputPercentage: 60 },
- { id: 3, maxThroughputPercentage: 70 },
- { id: 4, maxThroughputPercentage: 80 },
- { id: 5, maxThroughputPercentage: 90 },
- { id: 6, maxThroughputPercentage: 100 },
- { id: 7, maxThroughputPercentage: 40 },
- ];
-
- render(<ThroughputBucketsComponent {...defaultProps} currentBuckets={oversizedBuckets} />);
-
- expect(screen.getAllByText(/Group \d+/)).toHaveLength(7);
-
- expect(screen.getByDisplayValue("50")).toBeInTheDocument();
- expect(screen.getByDisplayValue("60")).toBeInTheDocument();
- expect(screen.getByDisplayValue("70")).toBeInTheDocument();
- expect(screen.getByDisplayValue("80")).toBeInTheDocument();
- expect(screen.getByDisplayValue("90")).toBeInTheDocument();
- expect(screen.getByDisplayValue("100")).toBeInTheDocument();
- expect(screen.getByDisplayValue("40")).toBeInTheDocument();
- });
-
- it("calls onBucketsChange when a bucket value changes", () => {
- render(<ThroughputBucketsComponent {...defaultProps} />);
- const input = screen.getByDisplayValue("50");
- fireEvent.change(input, { target: { value: "70" } });
-
- expect(mockOnBucketsChange).toHaveBeenCalledWith([
- { id: 1, maxThroughputPercentage: 70 },
- { id: 2, maxThroughputPercentage: 60 },
- { id: 3, maxThroughputPercentage: 100 },
- { id: 4, maxThroughputPercentage: 100 },
- { id: 5, maxThroughputPercentage: 100 },
- ]);
- });
-
- it("triggers onSaveableChange when values change", () => {
- render(<ThroughputBucketsComponent {...defaultProps} />);
- const input = screen.getByDisplayValue("50");
- fireEvent.change(input, { target: { value: "80" } });
-
- expect(mockOnSaveableChange).toHaveBeenCalledWith(true);
- });
-
- it("updates state consistently after multiple changes to different buckets", () => {
- render(<ThroughputBucketsComponent {...defaultProps} />);
-
- const input1 = screen.getByDisplayValue("50");
- fireEvent.change(input1, { target: { value: "70" } });
-
- const input2 = screen.getByDisplayValue("60");
- fireEvent.change(input2, { target: { value: "80" } });
-
- expect(mockOnBucketsChange).toHaveBeenCalledWith([
- { id: 1, maxThroughputPercentage: 70 },
- { id: 2, maxThroughputPercentage: 80 },
- { id: 3, maxThroughputPercentage: 100 },
- { id: 4, maxThroughputPercentage: 100 },
- { id: 5, maxThroughputPercentage: 100 },
- ]);
- });
-
- it("resets to baseline when currentBuckets are reset", () => {
- const { rerender } = render(<ThroughputBucketsComponent {...defaultProps} />);
- const input1 = screen.getByDisplayValue("50");
- fireEvent.change(input1, { target: { value: "70" } });
-
- rerender(<ThroughputBucketsComponent {...defaultProps} currentBuckets={defaultProps.throughputBucketsBaseline} />);
-
- expect(screen.getByDisplayValue("40")).toBeInTheDocument();
- expect(screen.getByDisplayValue("50")).toBeInTheDocument();
- });
-
- it("does not call onBucketsChange when value remains unchanged", () => {
- render(<ThroughputBucketsComponent {...defaultProps} />);
- const input = screen.getByDisplayValue("50");
- fireEvent.change(input, { target: { value: "50" } });
-
- expect(mockOnBucketsChange).not.toHaveBeenCalled();
- });
-
- it("disables input and slider when maxThroughputPercentage is 100", () => {
- render(
- <ThroughputBucketsComponent
- {...defaultProps}
- currentBuckets={[
- { id: 1, maxThroughputPercentage: 100 },
- { id: 2, maxThroughputPercentage: 50 },
- ]}
- />,
- );
-
- const disabledInputs = screen.getAllByDisplayValue("100");
- expect(disabledInputs.length).toBeGreaterThan(0);
- expect(disabledInputs[0]).toBeDisabled();
-
- const sliders = screen.getAllByRole("slider");
- expect(sliders.length).toBeGreaterThan(0);
- expect(sliders[0]).toHaveAttribute("aria-disabled", "true");
- expect(sliders[1]).toHaveAttribute("aria-disabled", "false");
- });
-
- it("toggles bucket value between 50 and 100 with switch", () => {
- render(<ThroughputBucketsComponent {...defaultProps} />);
- const toggles = screen.getAllByRole("switch");
-
- fireEvent.click(toggles[0]);
-
- expect(mockOnBucketsChange).toHaveBeenCalledWith([
- { id: 1, maxThroughputPercentage: 100 },
- { id: 2, maxThroughputPercentage: 60 },
- { id: 3, maxThroughputPercentage: 100 },
- { id: 4, maxThroughputPercentage: 100 },
- { id: 5, maxThroughputPercentage: 100 },
- ]);
-
- fireEvent.click(toggles[0]);
-
- expect(mockOnBucketsChange).toHaveBeenCalledWith([
- { id: 1, maxThroughputPercentage: 50 },
- { id: 2, maxThroughputPercentage: 60 },
- { id: 3, maxThroughputPercentage: 100 },
- { id: 4, maxThroughputPercentage: 100 },
- { id: 5, maxThroughputPercentage: 100 },
- ]);
- });
-
- it("ensures default buckets are used when no buckets are provided", () => {
- render(<ThroughputBucketsComponent {...defaultProps} currentBuckets={[]} />);
- expect(screen.getAllByText(/Group \d+/)).toHaveLength(5);
- expect(screen.getAllByDisplayValue("100")).toHaveLength(5);
- });
- });
@@ -1,105 +0,0 @@
- import { Label, Slider, Stack, TextField, Toggle } from "@fluentui/react";
- import { ThroughputBucket } from "Contracts/DataModels";
- import React, { FC, useEffect, useState } from "react";
- import { isDirty } from "../../SettingsUtils";
-
- const MAX_BUCKET_SIZES = 5;
-
- const DEFAULT_BUCKETS = Array.from({ length: MAX_BUCKET_SIZES }, (_, i) => ({
- id: i + 1,
- maxThroughputPercentage: 100,
- }));
-
- export interface ThroughputBucketsComponentProps {
- currentBuckets: ThroughputBucket[];
- throughputBucketsBaseline: ThroughputBucket[];
- onBucketsChange: (updatedBuckets: ThroughputBucket[]) => void;
- onSaveableChange: (isSaveable: boolean) => void;
- }
-
- export const ThroughputBucketsComponent: FC<ThroughputBucketsComponentProps> = ({
- currentBuckets,
- throughputBucketsBaseline,
- onBucketsChange,
- onSaveableChange,
- }) => {
- const getThroughputBuckets = (buckets: ThroughputBucket[]): ThroughputBucket[] => {
- if (!buckets || buckets.length === 0) {
- return DEFAULT_BUCKETS;
- }
- const maxBuckets = Math.max(DEFAULT_BUCKETS.length, buckets.length);
- const adjustedDefaultBuckets = Array.from({ length: maxBuckets }, (_, i) => ({
- id: i + 1,
- maxThroughputPercentage: 100,
- }));
-
- return adjustedDefaultBuckets.map(
- (defaultBucket) => buckets?.find((bucket) => bucket.id === defaultBucket.id) || defaultBucket,
- );
- };
-
- const [throughputBuckets, setThroughputBuckets] = useState<ThroughputBucket[]>(getThroughputBuckets(currentBuckets));
-
- useEffect(() => {
- setThroughputBuckets(getThroughputBuckets(currentBuckets));
- onSaveableChange(false);
- }, [currentBuckets]);
-
- useEffect(() => {
- const isChanged = isDirty(throughputBuckets, getThroughputBuckets(throughputBucketsBaseline));
- onSaveableChange(isChanged);
- }, [throughputBuckets]);
-
- const handleBucketChange = (id: number, newValue: number) => {
- const updatedBuckets = throughputBuckets.map((bucket) =>
- bucket.id === id ? { ...bucket, maxThroughputPercentage: newValue } : bucket,
- );
- setThroughputBuckets(updatedBuckets);
- const settingsChanged = isDirty(updatedBuckets, throughputBuckets);
- settingsChanged && onBucketsChange(updatedBuckets);
- };
-
- const onToggle = (id: number, checked: boolean) => {
- handleBucketChange(id, checked ? 50 : 100);
- };
-
- return (
- <Stack tokens={{ childrenGap: "m" }} styles={{ root: { width: "70%", maxWidth: 700 } }}>
- <Label>Throughput Buckets</Label>
- <Stack>
- {throughputBuckets?.map((bucket) => (
- <Stack key={bucket.id} horizontal tokens={{ childrenGap: 8 }} verticalAlign="center">
- <Slider
- min={1}
- max={100}
- step={1}
- value={bucket.maxThroughputPercentage}
- onChange={(newValue) => handleBucketChange(bucket.id, newValue)}
- showValue={false}
- label={`Group ${bucket.id}${bucket.id === 1 ? " (Data Explorer Query Bucket)" : ""}`}
- styles={{ root: { flex: 2, maxWidth: 400 } }}
- disabled={bucket.maxThroughputPercentage === 100}
- />
- <TextField
- value={bucket.maxThroughputPercentage.toString()}
- onChange={(event, newValue) => handleBucketChange(bucket.id, parseInt(newValue || "0", 10))}
- type="number"
- suffix="%"
- styles={{
- fieldGroup: { width: 80 },
- }}
- disabled={bucket.maxThroughputPercentage === 100}
- />
- <Toggle
- onText="Active"
- offText="Inactive"
- checked={bucket.maxThroughputPercentage !== 100}
- onChange={(event, checked) => onToggle(bucket.id, checked)}
- styles={{ root: { marginBottom: 0 }, text: { fontSize: 12 } }}
- ></Toggle>
- </Stack>
- ))}
- </Stack>
- </Stack>
- );
- };
@@ -11,8 +11,7 @@ export type isDirtyTypes =
  | DataModels.IndexingPolicy
  | DataModels.ComputedProperties
  | DataModels.VectorEmbedding[]
- | DataModels.FullTextPolicy
- | DataModels.ThroughputBucket[];
+ | DataModels.FullTextPolicy;
  export const TtlOff = "off";
  export const TtlOn = "on";
  export const TtlOnNoDefault = "on-nodefault";

@@ -56,7 +55,6 @@ export enum SettingsV2TabTypes {
  PartitionKeyTab,
  ComputedPropertiesTab,
  ContainerVectorPolicyTab,
- ThroughputBucketsTab,
  }

  export enum ContainerPolicyTabTypes {

@@ -169,8 +167,6 @@ export const getTabTitle = (tab: SettingsV2TabTypes): string => {
  return "Computed Properties";
  case SettingsV2TabTypes.ContainerVectorPolicyTab:
  return "Container Policies";
- case SettingsV2TabTypes.ThroughputBucketsTab:
- return "Throughput Buckets";
  default:
  throw new Error(`Unknown tab ${tab}`);
  }
@@ -35,20 +35,12 @@ export const ThroughputInput: FunctionComponent<ThroughputInputProps> = ({
  setIsThroughputCapExceeded,
  onCostAcknowledgeChange,
  }: ThroughputInputProps) => {
- let defaultThroughput: number;
+ const defaultThroughput: number =
- const workloadType: Constants.WorkloadType = getWorkloadType();
-
- if (
  isFreeTier ||
  isQuickstart ||
- [Constants.WorkloadType.Learning, Constants.WorkloadType.DevelopmentTesting].includes(workloadType)
+ [Constants.WorkloadType.Learning, Constants.WorkloadType.DevelopmentTesting].includes(getWorkloadType())
- ) {
+ ? AutoPilotUtils.autoPilotThroughput1K
- defaultThroughput = AutoPilotUtils.autoPilotThroughput1K;
+ : AutoPilotUtils.autoPilotThroughput4K;
- } else if (workloadType === Constants.WorkloadType.Production) {
- defaultThroughput = AutoPilotUtils.autoPilotThroughput10K;
- } else {
- defaultThroughput = AutoPilotUtils.autoPilotThroughput4K;
- }

  const [isAutoscaleSelected, setIsAutoScaleSelected] = useState<boolean>(true);
  const [throughput, setThroughput] = useState<number>(defaultThroughput);
@@ -1127,7 +1127,7 @@ export default class Explorer {
  await this.initNotebooks(userContext.databaseAccount);
  }

- this.refreshSampleData();
+ await this.refreshSampleData();
  }

  public async configureCopilot(): Promise<void> {

@@ -1152,27 +1152,26 @@ export default class Explorer {
  .setCopilotSampleDBEnabled(copilotEnabled && copilotUserDBEnabled && copilotSampleDBEnabled);
  }

- public refreshSampleData(): void {
+ public async refreshSampleData(): Promise<void> {
- if (!userContext.sampleDataConnectionInfo) {
+ try {
+ if (!userContext.sampleDataConnectionInfo) {
+ return;
+ }
+ const collection: DataModels.Collection = await readSampleCollection();
+ if (!collection) {
+ return;
+ }
+
+ const databaseId = userContext.sampleDataConnectionInfo?.databaseId;
+ if (!databaseId) {
+ return;
+ }
+
+ const sampleDataResourceTokenCollection = new ResourceTokenCollection(this, databaseId, collection, true);
+ useDatabases.setState({ sampleDataResourceTokenCollection });
+ } catch (error) {
+ Logger.logError(getErrorMessage(error), "Explorer");
  return;
  }
-
- const databaseId = userContext.sampleDataConnectionInfo?.databaseId;
- if (!databaseId) {
- return;
- }
-
- readSampleCollection()
- .then((collection: DataModels.Collection) => {
- if (!collection) {
- return;
- }
-
- const sampleDataResourceTokenCollection = new ResourceTokenCollection(this, databaseId, collection, true);
- useDatabases.setState({ sampleDataResourceTokenCollection });
- })
- .catch((error) => {
- Logger.logError(getErrorMessage(error), "Explorer/refreshSampleData");
- });
  }
  }
@@ -14,6 +14,10 @@
  .flex-direction(@direction: row);
  padding: 4px 5px;

+ label {
+ padding: 0px;
+ }
+
  .valueCol {
  flex-grow: 1;
  padding-right: 5px;

@@ -59,10 +63,6 @@
  height: 100%;
  }

- .customTrashIcon {
- padding-top: 33px;
- }
-
  .rightPaneTrashIconImg {
  vertical-align: top;
  }
@@ -142,11 +142,10 @@ export const NewVertexComponent: FunctionComponent<INewVertexComponentProps> = (
  <div className="labelCol">
  <TextField
  className="edgeInput"
- label={index === 0 && "Key"}
  type="text"
  id="propertyKeyNewVertexPane"
  componentRef={input}
- required
+ aria-required="true"
  placeholder="Key"
  autoComplete="off"
  aria-label={`Enter value for propery ${index + 1}`}

@@ -154,11 +153,11 @@ export const NewVertexComponent: FunctionComponent<INewVertexComponentProps> = (
  onChange={(event: React.ChangeEvent<HTMLInputElement>) => onKeyChange(event, index)}
  />
  </div>
+ <span className="mandatoryStar">* </span>

  <div className="valueCol">
  <TextField
  className="edgeInput"
- label={index === 0 && "Value"}
  type="text"
  placeholder="Value"
  autoComplete="off"

@@ -170,8 +169,6 @@ export const NewVertexComponent: FunctionComponent<INewVertexComponentProps> = (
  <div>
  <Dropdown
  role="combobox"
- label={index === 0 && "Type"}
- ariaLabel="Type"
  placeholder="Select an option"
  defaultSelectedKey={data.values[0].type}
  style={{ width: 100 }}

@@ -184,7 +181,7 @@ export const NewVertexComponent: FunctionComponent<INewVertexComponentProps> = (
  </div>
  <div className="actionCol">
  <div
- className={`rightPaneTrashIcon rightPaneBtns ${index === 0 && "customTrashIcon"}`}
+ className="rightPaneTrashIcon rightPaneBtns"
  tabIndex={0}
  role="button"
  aria-label={`Delete ${data.key}`}
@@ -37,25 +37,21 @@ describe("CommandBarComponentButtonFactory tests", () => {
  expect(enableAzureSynapseLinkBtn).toBeDefined();
  });

- // TODO: Now that Tables API supports dataplane RBAC, calling createStaticCommandBarButtons will enable the
- // Entra ID Login button, which causes this test to fail due to "Invalid hook call.". This seems to be
- // unsupported in jest and needs to be tested with react-hooks-testing-library.
- //
- // it("Button should not be visible for Tables API", () => {
- // updateUserContext({
- // databaseAccount: {
- // properties: {
- // capabilities: [{ name: "EnableTable" }],
- // },
- // } as DatabaseAccount,
- // });
- //
- // const buttons = CommandBarComponentButtonFactory.createStaticCommandBarButtons(mockExplorer, selectedNodeState);
- // const enableAzureSynapseLinkBtn = buttons.find(
- // (button) => button.commandButtonLabel === enableAzureSynapseLinkBtnLabel,
- // );
- // expect(enableAzureSynapseLinkBtn).toBeUndefined();
- //});
+ it("Button should not be visible for Tables API", () => {
+ updateUserContext({
+ databaseAccount: {
+ properties: {
+ capabilities: [{ name: "EnableTable" }],
+ },
+ } as DatabaseAccount,
+ });
+
+ const buttons = CommandBarComponentButtonFactory.createStaticCommandBarButtons(mockExplorer, selectedNodeState);
+ const enableAzureSynapseLinkBtn = buttons.find(
+ (button) => button.commandButtonLabel === enableAzureSynapseLinkBtnLabel,
+ );
+ expect(enableAzureSynapseLinkBtn).toBeUndefined();
+ });

  it("Button should not be visible for Cassandra API", () => {
  updateUserContext({
@@ -1,5 +1,4 @@
  import { KeyboardAction } from "KeyboardShortcuts";
- import { isDataplaneRbacSupported } from "Utils/APITypeUtils";
  import * as React from "react";
  import { useEffect, useState } from "react";
  import AddSqlQueryIcon from "../../../../images/AddSqlQuery_16x16.svg";

@@ -62,7 +61,7 @@ export function createStaticCommandBarButtons(
  }
  }

- if (isDataplaneRbacSupported(userContext.apiType)) {
+ if (userContext.apiType === "SQL") {
  const [loginButtonProps, setLoginButtonProps] = useState<CommandButtonComponentProps | undefined>(undefined);
  const dataPlaneRbacEnabled = useDataPlaneRbac((state) => state.dataPlaneRbacEnabled);
  const aadTokenUpdated = useDataPlaneRbac((state) => state.aadTokenUpdated);
@@ -109,15 +109,15 @@ export class NotificationConsoleComponent extends React.Component<
  <div className="statusBar">
  <span className="dataTypeIcons">
  <span className="notificationConsoleHeaderIconWithData">
- <img src={LoadingIcon} alt="In progress items" />
+ <img src={LoadingIcon} alt="in progress items" />
  <span className="numInProgress">{numInProgress}</span>
  </span>
  <span className="notificationConsoleHeaderIconWithData">
- <img src={ErrorBlackIcon} alt="Error items" />
+ <img src={ErrorBlackIcon} alt="error items" />
  <span className="numErroredItems">{numErroredItems}</span>
  </span>
  <span className="notificationConsoleHeaderIconWithData">
- <img src={infoBubbleIcon} alt="Info items" />
+ <img src={infoBubbleIcon} alt="info items" />
  <span className="numInfoItems">{numInfoItems}</span>
  </span>
  </span>

@@ -134,12 +134,12 @@ export class NotificationConsoleComponent extends React.Component<
  data-test="NotificationConsole/ExpandCollapseButton"
  role="button"
  tabIndex={0}
- aria-label="Console"
+ aria-label={"console button" + (this.props.isConsoleExpanded ? " expanded" : " collapsed")}
- aria-expanded={this.props.isConsoleExpanded}
+ aria-expanded={!this.props.isConsoleExpanded}
  >
  <img
  src={this.props.isConsoleExpanded ? ChevronDownIcon : ChevronUpIcon}
- alt={this.props.isConsoleExpanded ? "Collapse icon" : "Expand icon"}
+ alt={this.props.isConsoleExpanded ? "ChevronDownIcon" : "ChevronUpIcon"}
  />
  </div>
  </div>
@@ -21,7 +21,7 @@ exports[`NotificationConsoleComponent renders the console 1`] = `
  className="notificationConsoleHeaderIconWithData"
  >
  <img
- alt="In progress items"
+ alt="in progress items"
  src={{}}
  />
  <span

@@ -34,7 +34,7 @@ exports[`NotificationConsoleComponent renders the console 1`] = `
  className="notificationConsoleHeaderIconWithData"
  >
  <img
- alt="Error items"
+ alt="error items"
  src={{}}
  />
  <span

@@ -47,7 +47,7 @@ exports[`NotificationConsoleComponent renders the console 1`] = `
  className="notificationConsoleHeaderIconWithData"
  >
  <img
- alt="Info items"
+ alt="info items"
  src={{}}
  />
  <span

@@ -71,15 +71,15 @@ exports[`NotificationConsoleComponent renders the console 1`] = `
  </span>
  </div>
  <div
- aria-expanded={false}
+ aria-expanded={true}
- aria-label="Console"
+ aria-label="console button collapsed"
  className="expandCollapseButton"
  data-test="NotificationConsole/ExpandCollapseButton"
  role="button"
  tabIndex={0}
  >
  <img
- alt="Expand icon"
+ alt="ChevronUpIcon"
  src=""
  />
  </div>

@@ -192,7 +192,7 @@ exports[`NotificationConsoleComponent renders the console 2`] = `
  className="notificationConsoleHeaderIconWithData"
  >
  <img
- alt="In progress items"
+ alt="in progress items"
  src={{}}
  />
  <span

@@ -205,7 +205,7 @@ exports[`NotificationConsoleComponent renders the console 2`] = `
  className="notificationConsoleHeaderIconWithData"
  >
  <img
- alt="Error items"
+ alt="error items"
  src={{}}
  />
  <span

@@ -218,7 +218,7 @@ exports[`NotificationConsoleComponent renders the console 2`] = `
  className="notificationConsoleHeaderIconWithData"
  >
  <img
- alt="Info items"
+ alt="info items"
  src={{}}
  />
  <span

@@ -244,15 +244,15 @@ exports[`NotificationConsoleComponent renders the console 2`] = `
  </span>
  </div>
  <div
- aria-expanded={false}
+ aria-expanded={true}
- aria-label="Console"
+ aria-label="console button collapsed"
  className="expandCollapseButton"
  data-test="NotificationConsole/ExpandCollapseButton"
  role="button"
  tabIndex={0}
  >
  <img
- alt="Expand icon"
+ alt="ChevronUpIcon"
  src=""
  />
  </div>
@@ -1,5 +1,6 @@
  import { isPublicInternetAccessAllowed } from "Common/DatabaseAccountUtility";
  import { PhoenixClient } from "Phoenix/PhoenixClient";
+ import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
  import { cloneDeep } from "lodash";
  import create, { UseStore } from "zustand";
  import { AuthType } from "../../AuthType";

@@ -127,7 +128,9 @@ export const useNotebook: UseStore<NotebookState> = create((set, get) => ({
  userContext.apiType === "Postgres" || userContext.apiType === "VCoreMongo"
  ? databaseAccount?.location
  : databaseAccount?.properties?.writeLocations?.[0]?.locationName.toLowerCase();
- const disallowedLocationsUri: string = `${configContext.PORTAL_BACKEND_ENDPOINT}/api/disallowedlocations`;
+ const disallowedLocationsUri: string = useNewPortalBackendEndpoint(Constants.BackendApi.DisallowedLocations)
+ ? `${configContext.PORTAL_BACKEND_ENDPOINT}/api/disallowedlocations`
+ : `${configContext.BACKEND_ENDPOINT}/api/disallowedLocations`;
  const authorizationHeader = getAuthorizationHeader();
  try {
  const response = await fetch(disallowedLocationsUri, {
@@ -94,7 +94,6 @@
  padding-left: @MediumSpace;

  .paneErrorLink {
- color: @LinkColor;
  cursor: pointer;
  font-size: @mediumFontSize;
  }
@@ -32,7 +32,6 @@ import {
  } from "Shared/StorageUtility";
  import * as StringUtility from "Shared/StringUtility";
  import { updateUserContext, userContext } from "UserContext";
- import { isDataplaneRbacSupported } from "Utils/APITypeUtils";
  import { acquireMsalTokenForAccount } from "Utils/AuthorizationUtils";
  import { logConsoleError, logConsoleInfo } from "Utils/NotificationConsoleUtils";
  import * as PriorityBasedExecutionUtils from "Utils/PriorityBasedExecutionUtils";

@@ -184,7 +183,7 @@ export const SettingsPane: FunctionComponent<{ explorer: Explorer }> = ({
  const shouldShowCrossPartitionOption = userContext.apiType !== "Gremlin" && !isEmulator;
  const shouldShowParallelismOption = userContext.apiType !== "Gremlin" && !isEmulator;
  const showEnableEntraIdRbac =
- isDataplaneRbacSupported(userContext.apiType) &&
+ userContext.apiType === "SQL" &&
  userContext.authType === AuthType.AAD &&
  configContext.platform !== Platform.Fabric &&
  !isEmulator;
@@ -393,7 +393,8 @@ export const QueryCopilotPromptbar: React.FC<QueryCopilotPromptProps> = ({
  },
  }}
  disabled={isGeneratingQuery}
- autoComplete="off"
+ autoComplete="list"
+ aria-expanded={showSamplePrompts}
  placeholder="Ask a question in natural language and we’ll generate the query for you."
  aria-labelledby="copilot-textfield-label"
  onRenderSuffix={() => {
@@ -1,6 +1,7 @@
  import { FeedOptions } from "@azure/cosmos";
  import {
  Areas,
+ BackendApi,
  ConnectionStatusType,
  ContainerStatusType,
  HttpStatusCodes,

@@ -31,6 +32,7 @@ import { Action } from "Shared/Telemetry/TelemetryConstants";
  import { traceFailure, traceStart, traceSuccess } from "Shared/Telemetry/TelemetryProcessor";
  import { userContext } from "UserContext";
  import { getAuthorizationHeader } from "Utils/AuthorizationUtils";
+ import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
  import { queryPagesUntilContentPresent } from "Utils/QueryUtils";
  import { QueryCopilotState, useQueryCopilot } from "hooks/useQueryCopilot";
  import { useTabs } from "hooks/useTabs";

@@ -80,7 +82,9 @@ export const isCopilotFeatureRegistered = async (subscriptionId: string): Promis
  };

  export const getCopilotEnabled = async (): Promise<boolean> => {
- const backendEndpoint: string = configContext.PORTAL_BACKEND_ENDPOINT;
+ const backendEndpoint: string = useNewPortalBackendEndpoint(BackendApi.PortalSettings)
+ ? configContext.PORTAL_BACKEND_ENDPOINT
+ : configContext.BACKEND_ENDPOINT;

  const url = `${backendEndpoint}/api/portalsettings/querycopilot`;
  const authorizationHeader: AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
@@ -3,7 +3,7 @@ import * as ko from "knockout";
  import Q from "q";
  import { AuthType } from "../../AuthType";
  import * as Constants from "../../Common/Constants";
- import { CassandraProxyAPIs } from "../../Common/Constants";
+ import { CassandraProxyAPIs, CassandraProxyEndpoints } from "../../Common/Constants";
  import { handleError } from "../../Common/ErrorHandlingUtils";
  import * as HeadersUtility from "../../Common/HeadersUtility";
  import { createDocument } from "../../Common/dataAccess/createDocument";

@@ -264,6 +264,9 @@ export class CassandraAPIDataClient extends TableDataClient {
  shouldNotify?: boolean,
  paginationToken?: string,
  ): Promise<Entities.IListTableEntitiesResult> {
+ if (!this.useCassandraProxyEndpoint("postQuery")) {
+ return this.queryDocuments_ToBeDeprecated(collection, query, shouldNotify, paginationToken);
+ }
  const clearMessage =
  shouldNotify && NotificationConsoleUtils.logConsoleProgress(`Querying rows for table ${collection.id()}`);
  try {

@@ -306,6 +309,55 @@ export class CassandraAPIDataClient extends TableDataClient {
  }
  }

+ public async queryDocuments_ToBeDeprecated(
+ collection: ViewModels.Collection,
+ query: string,
+ shouldNotify?: boolean,
+ paginationToken?: string,
+ ): Promise<Entities.IListTableEntitiesResult> {
+ const clearMessage =
+ shouldNotify && NotificationConsoleUtils.logConsoleProgress(`Querying rows for table ${collection.id()}`);
+ try {
+ const { authType, databaseAccount } = userContext;
+ const apiEndpoint: string =
+ authType === AuthType.EncryptedToken
+ ? Constants.CassandraBackend.guestQueryApi
+ : Constants.CassandraBackend.queryApi;
+ const data: any = await $.ajax(`${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`, {
+ type: "POST",
+ data: {
+ accountName: databaseAccount?.name,
+ cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
+ resourceId: databaseAccount?.id,
+ keyspaceId: collection.databaseId,
+ tableId: collection.id(),
+ query,
+ paginationToken,
+ },
+ beforeSend: this.setAuthorizationHeader as any,
+ cache: false,
+ });
+ shouldNotify &&
+ NotificationConsoleUtils.logConsoleInfo(
+ `Successfully fetched ${data.result.length} rows for table ${collection.id()}`,
+ );
+ return {
+ Results: data.result,
+ ContinuationToken: data.paginationToken,
+ };
+ } catch (error) {
+ shouldNotify &&
+ handleError(
+ error,
+ "QueryDocuments_ToBeDeprecated_Cassandra",
+ `Failed to query rows for table ${collection.id()}`,
+ );
+ throw error;
+ } finally {
+ clearMessage?.();
+ }
+ }
+
  public async deleteDocuments(
  collection: ViewModels.Collection,
  entitiesToDelete: Entities.ITableEntity[],

@@ -419,6 +471,10 @@ export class CassandraAPIDataClient extends TableDataClient {
  }

  public getTableKeys(collection: ViewModels.Collection): Q.Promise<CassandraTableKeys> {
+ if (!this.useCassandraProxyEndpoint("getKeys")) {
+ return this.getTableKeys_ToBeDeprecated(collection);
+ }
+
  if (!!collection.cassandraKeys) {
  return Q.resolve(collection.cassandraKeys);
  }

@@ -459,7 +515,52 @@ export class CassandraAPIDataClient extends TableDataClient {
  return deferred.promise;
  }

+ public getTableKeys_ToBeDeprecated(collection: ViewModels.Collection): Q.Promise<CassandraTableKeys> {
+ if (!!collection.cassandraKeys) {
+ return Q.resolve(collection.cassandraKeys);
+ }
+ const clearInProgressMessage = logConsoleProgress(`Fetching keys for table ${collection.id()}`);
+ const { authType, databaseAccount } = userContext;
+ const apiEndpoint: string =
+ authType === AuthType.EncryptedToken
+ ? Constants.CassandraBackend.guestKeysApi
+ : Constants.CassandraBackend.keysApi;
+ let endpoint = `${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`;
+ const deferred = Q.defer<CassandraTableKeys>();
+
+ $.ajax(endpoint, {
+ type: "POST",
+ data: {
+ accountName: databaseAccount?.name,
+ cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
+ resourceId: databaseAccount?.id,
+ keyspaceId: collection.databaseId,
+ tableId: collection.id(),
+ },
+ beforeSend: this.setAuthorizationHeader as any,
+ cache: false,
+ })
+ .then(
+ (data: CassandraTableKeys) => {
+ collection.cassandraKeys = data;
+ logConsoleInfo(`Successfully fetched keys for table ${collection.id()}`);
+ deferred.resolve(data);
+ },
+ (error: any) => {
+ const errorText = error.responseJSON?.message ?? JSON.stringify(error);
+ handleError(errorText, "FetchKeysCassandra", `Error fetching keys for table ${collection.id()}`);
+ deferred.reject(errorText);
+ },
+ )
+ .done(clearInProgressMessage);
+ return deferred.promise;
+ }
+
  public getTableSchema(collection: ViewModels.Collection): Q.Promise<CassandraTableKey[]> {
+ if (!this.useCassandraProxyEndpoint("getSchema")) {
+ return this.getTableSchema_ToBeDeprecated(collection);
+ }
+
  if (!!collection.cassandraSchema) {
  return Q.resolve(collection.cassandraSchema);
  }

@@ -501,7 +602,52 @@ export class CassandraAPIDataClient extends TableDataClient {
  return deferred.promise;
  }

+ public getTableSchema_ToBeDeprecated(collection: ViewModels.Collection): Q.Promise<CassandraTableKey[]> {
+ if (!!collection.cassandraSchema) {
+ return Q.resolve(collection.cassandraSchema);
+ }
+ const clearInProgressMessage = logConsoleProgress(`Fetching schema for table ${collection.id()}`);
+ const { databaseAccount, authType } = userContext;
+ const apiEndpoint: string =
+ authType === AuthType.EncryptedToken
+ ? Constants.CassandraBackend.guestSchemaApi
+ : Constants.CassandraBackend.schemaApi;
+ let endpoint = `${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`;
+ const deferred = Q.defer<CassandraTableKey[]>();
+
+ $.ajax(endpoint, {
+ type: "POST",
+ data: {
+ accountName: databaseAccount?.name,
+ cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
+ resourceId: databaseAccount?.id,
+ keyspaceId: collection.databaseId,
+ tableId: collection.id(),
+ },
+ beforeSend: this.setAuthorizationHeader as any,
+ cache: false,
+ })
+ .then(
+ (data: any) => {
+ collection.cassandraSchema = data.columns;
+ logConsoleInfo(`Successfully fetched schema for table ${collection.id()}`);
+ deferred.resolve(data.columns);
+ },
+ (error: any) => {
+ const errorText = error.responseJSON?.message ?? JSON.stringify(error);
+ handleError(errorText, "FetchSchemaCassandra", `Error fetching schema for table ${collection.id()}`);
+ deferred.reject(errorText);
+ },
+ )
+ .done(clearInProgressMessage);
+ return deferred.promise;
+ }
+
  private createOrDeleteQuery(cassandraEndpoint: string, resourceId: string, query: string): Q.Promise<any> {
+ if (!this.useCassandraProxyEndpoint("createOrDelete")) {
+ return this.createOrDeleteQuery_ToBeDeprecated(cassandraEndpoint, resourceId, query);
+ }
+
  const deferred = Q.defer();
  const { authType, databaseAccount } = userContext;
  const apiEndpoint: string =

@@ -531,6 +677,38 @@ export class CassandraAPIDataClient extends TableDataClient {
  return deferred.promise;
  }

+ private createOrDeleteQuery_ToBeDeprecated(
+ cassandraEndpoint: string,
+ resourceId: string,
+ query: string,
+ ): Q.Promise<any> {
+ const deferred = Q.defer();
+ const { authType, databaseAccount } = userContext;
+ const apiEndpoint: string =
+ authType === AuthType.EncryptedToken
+ ? Constants.CassandraBackend.guestCreateOrDeleteApi
+ : Constants.CassandraBackend.createOrDeleteApi;
+ $.ajax(`${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`, {
+ type: "POST",
+ data: {
+ accountName: databaseAccount?.name,
+ cassandraEndpoint: this.trimCassandraEndpoint(cassandraEndpoint),
+ resourceId: resourceId,
+ query: query,
+ },
+ beforeSend: this.setAuthorizationHeader as any,
+ cache: false,
+ }).then(
+ (data: any) => {
+ deferred.resolve();
+ },
+ (reason) => {
+ deferred.reject(reason);
+ },
+ );
+ return deferred.promise;
+ }
+
  private trimCassandraEndpoint(cassandraEndpoint: string): string {
  if (!cassandraEndpoint) {
  return cassandraEndpoint;

@@ -569,4 +747,23 @@ export class CassandraAPIDataClient extends TableDataClient {
  private getCassandraPartitionKeyProperty(collection: ViewModels.Collection): string {
  return collection.cassandraKeys.partitionKeys[0].property;
  }
+
+ private useCassandraProxyEndpoint(api: string): boolean {
+ const activeCassandraProxyEndpoints: string[] = [
+ CassandraProxyEndpoints.Development,
+ CassandraProxyEndpoints.Mpac,
+ CassandraProxyEndpoints.Prod,
+ CassandraProxyEndpoints.Fairfax,
+ CassandraProxyEndpoints.Mooncake,
+ ];
+
+ if (configContext.globallyEnabledCassandraAPIs.includes(api)) {
+ return true;
+ }
+
+ return (
+ configContext.NEW_CASSANDRA_APIS?.includes(api) &&
+ activeCassandraProxyEndpoints.includes(configContext.CASSANDRA_PROXY_ENDPOINT)
+ );
+ }
  }
@@ -1150,16 +1150,27 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
         deletePromise = _bulkDeleteNoSqlDocuments(_collection, toDeleteDocumentIds);
       }
     } else {
-      deletePromise = MongoProxyClient.deleteDocuments(
-        _collection.databaseId,
-        _collection as ViewModels.Collection,
-        toDeleteDocumentIds,
-      ).then(({ deletedCount, isAcknowledged }) => {
-        if (deletedCount === toDeleteDocumentIds.length && isAcknowledged) {
-          return toDeleteDocumentIds;
-        }
-        throw new Error(`Delete failed with deletedCount: ${deletedCount} and isAcknowledged: ${isAcknowledged}`);
-      });
+      if (isMongoBulkDeleteDisabled) {
+        // TODO: Once new mongo proxy is available for all users, remove the call for MongoProxyClient.deleteDocument().
+        // MongoProxyClient.deleteDocuments() should be called for all users.
+        deletePromise = MongoProxyClient.deleteDocument(
+          _collection.databaseId,
+          _collection as ViewModels.Collection,
+          toDeleteDocumentIds[0],
+        ).then(() => [toDeleteDocumentIds[0]]);
+        // ----------------------------------------------------------------------------------------------------
+      } else {
+        deletePromise = MongoProxyClient.deleteDocuments(
+          _collection.databaseId,
+          _collection as ViewModels.Collection,
+          toDeleteDocumentIds,
+        ).then(({ deletedCount, isAcknowledged }) => {
+          if (deletedCount === toDeleteDocumentIds.length && isAcknowledged) {
+            return toDeleteDocumentIds;
+          }
+          throw new Error(`Delete failed with deletedCount: ${deletedCount} and isAcknowledged: ${isAcknowledged}`);
+        });
+      }
     }
 
     return deletePromise

@@ -2043,8 +2054,11 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
     }
   }, [prevSelectedColumnIds, refreshDocumentsGrid, selectedColumnIds]);
 
+  // TODO: remove isMongoBulkDeleteDisabled when new mongo proxy is enabled for all users
   // TODO: remove partitionKey.systemKey when JS SDK bug is fixed
-  const isBulkDeleteDisabled = partitionKey.systemKey && !isPreferredApiMongoDB;
+  const isMongoBulkDeleteDisabled = !MongoProxyClient.useMongoProxyEndpoint(Constants.MongoProxyApi.BulkDelete);
+  const isBulkDeleteDisabled =
+    (partitionKey.systemKey && !isPreferredApiMongoDB) || (isPreferredApiMongoDB && isMongoBulkDeleteDisabled);
   // -------------------------------------------------------
 
   const getFilterChoices = (): InputDatalistDropdownOptionSection[] => {
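Note (illustrative, not part of the diff): with this change, Mongo deletes fall back to deleting a single document whenever the BulkDelete proxy API is not yet available to the user, while the NoSQL path is unchanged. A rough sketch of that decision, with hypothetical names, is:

// Hypothetical sketch; the real code consults MongoProxyClient.useMongoProxyEndpoint(Constants.MongoProxyApi.BulkDelete).
function pickDeleteStrategy(isMongo: boolean, bulkDeleteProxyAvailable: boolean): "bulk" | "single" {
  if (!isMongo) {
    return "bulk"; // NoSQL documents keep the bulk path
  }
  return bulkDeleteProxyAvailable ? "bulk" : "single";
}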
@@ -1,4 +1,6 @@
+import { useMongoProxyEndpoint } from "Common/MongoProxyClient";
 import React, { Component } from "react";
+import * as Constants from "../../../Common/Constants";
 import { configContext } from "../../../ConfigContext";
 import * as ViewModels from "../../../Contracts/ViewModels";
 import { Action, ActionModifiers } from "../../../Shared/Telemetry/TelemetryConstants";

@@ -48,13 +50,15 @@ export default class MongoShellTabComponent extends Component<
   IMongoShellTabComponentStates
 > {
   private _logTraces: Map<string, number>;
+  private _useMongoProxyEndpoint: boolean;
 
   constructor(props: IMongoShellTabComponentProps) {
     super(props);
     this._logTraces = new Map();
+    this._useMongoProxyEndpoint = useMongoProxyEndpoint(Constants.MongoProxyApi.LegacyMongoShell);
 
     this.state = {
-      url: getMongoShellUrl(),
+      url: getMongoShellUrl(this._useMongoProxyEndpoint),
     };
 
     props.onMongoShellTabAccessor({

@@ -109,9 +113,17 @@ export default class MongoShellTabComponent extends Component<
     const resourceId = databaseAccount?.id;
     const accountName = databaseAccount?.name;
     const documentEndpoint = databaseAccount?.properties.mongoEndpoint || databaseAccount?.properties.documentEndpoint;
+    const mongoEndpoint =
+      documentEndpoint.substr(
+        Constants.MongoDBAccounts.protocol.length + 3,
+        documentEndpoint.length -
+          (Constants.MongoDBAccounts.protocol.length + 2 + Constants.MongoDBAccounts.defaultPort.length),
+      ) + Constants.MongoDBAccounts.defaultPort.toString();
     const databaseId = this.props.collection.databaseId;
     const collectionId = this.props.collection.id();
-    const apiEndpoint = configContext.MONGO_PROXY_ENDPOINT;
+    const apiEndpoint = this._useMongoProxyEndpoint
+      ? configContext.MONGO_PROXY_ENDPOINT
+      : configContext.BACKEND_ENDPOINT;
     const encryptedAuthToken: string = userContext.accessToken;
 
     shellIframe.contentWindow.postMessage(

@@ -120,7 +132,7 @@ export default class MongoShellTabComponent extends Component<
         data: {
           resourceId: resourceId,
           accountName: accountName,
-          mongoEndpoint: documentEndpoint,
+          mongoEndpoint: this._useMongoProxyEndpoint ? documentEndpoint : mongoEndpoint,
           authorization: authorization,
           databaseId: databaseId,
           collectionId: collectionId,
@@ -2,6 +2,8 @@ import { Platform, resetConfigContext, updateConfigContext } from "../../../Conf
 import { updateUserContext, userContext } from "../../../UserContext";
 import { getMongoShellUrl } from "./getMongoShellUrl";
 
+const mongoBackendEndpoint = "https://localhost:1234";
+
 describe("getMongoShellUrl", () => {
   let queryString = "";
 

@@ -9,6 +11,7 @@ describe("getMongoShellUrl", () => {
     resetConfigContext();
 
     updateConfigContext({
+      BACKEND_ENDPOINT: mongoBackendEndpoint,
       platform: Platform.Hosted,
     });
 

@@ -34,7 +37,12 @@ describe("getMongoShellUrl", () => {
     queryString = `resourceId=${userContext.databaseAccount.id}&accountName=${userContext.databaseAccount.name}&mongoEndpoint=${userContext.databaseAccount.properties.documentEndpoint}`;
   });
 
-  it("should return /index.html by default", () => {
-    expect(getMongoShellUrl().toString()).toContain(`/index.html?${queryString}`);
+  it("should return /indexv2.html by default", () => {
+    expect(getMongoShellUrl().toString()).toContain(`/indexv2.html?${queryString}`);
+  });
+
+  it("should return /index.html when useMongoProxyEndpoint is true", () => {
+    const useMongoProxyEndpoint: boolean = true;
+    expect(getMongoShellUrl(useMongoProxyEndpoint).toString()).toContain(`/index.html?${queryString}`);
   });
 });
@@ -1,11 +1,11 @@
 import { userContext } from "../../../UserContext";
 
-export function getMongoShellUrl(): string {
+export function getMongoShellUrl(useMongoProxyEndpoint?: boolean): string {
   const { databaseAccount: account } = userContext;
   const resourceId = account?.id;
   const accountName = account?.name;
   const mongoEndpoint = account?.properties?.mongoEndpoint || account?.properties?.documentEndpoint;
   const queryString = `resourceId=${resourceId}&accountName=${accountName}&mongoEndpoint=${mongoEndpoint}`;
 
-  return `/mongoshell/index.html?${queryString}`;
+  return useMongoProxyEndpoint ? `/mongoshell/index.html?${queryString}` : `/mongoshell/indexv2.html?${queryString}`;
 }
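Note (illustrative, not part of the diff): callers now select the shell page through the optional flag; omitting it (or passing false) yields the indexv2.html page, passing true keeps the legacy index.html page. Assuming a populated userContext, usage looks like:

// Values in the query string come from userContext.databaseAccount at runtime.
const defaultShellUrl = getMongoShellUrl();    // "/mongoshell/indexv2.html?resourceId=..."
const proxyShellUrl = getMongoShellUrl(true);  // "/mongoshell/index.html?resourceId=..."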
@@ -375,7 +375,6 @@ class QueryTabComponentImpl extends React.Component<QueryTabComponentImplProps,
         ruCapPerOperation: ruThreshold,
       } as QueryOperationOptions;
     }
-
     const queryDocuments = async (firstItemIndex: number) =>
       await queryDocumentsPage(
         this.props.collection && this.props.collection.id(),
@@ -1,3 +1,7 @@
+import { IMessageBarStyles, MessageBar, MessageBarType } from "@fluentui/react";
+import { CassandraProxyEndpoints, MongoProxyEndpoints } from "Common/Constants";
+import { configContext } from "ConfigContext";
+import { IpRule } from "Contracts/DataModels";
 import { CollectionTabKind } from "Contracts/ViewModels";
 import Explorer from "Explorer/Explorer";
 import { useCommandBar } from "Explorer/Menus/CommandBar/CommandBarComponentAdapter";

@@ -8,8 +12,10 @@ import { PostgresConnectTab } from "Explorer/Tabs/PostgresConnectTab";
 import { QuickstartTab } from "Explorer/Tabs/QuickstartTab";
 import { VcoreMongoConnectTab } from "Explorer/Tabs/VCoreMongoConnectTab";
 import { VcoreMongoQuickstartTab } from "Explorer/Tabs/VCoreMongoQuickstartTab";
+import { LayoutConstants } from "Explorer/Theme/ThemeUtil";
 import { KeyboardAction, KeyboardActionGroup, useKeyboardActionGroup } from "KeyboardShortcuts";
 import { userContext } from "UserContext";
+import { CassandraProxyOutboundIPs, MongoProxyOutboundIPs, PortalBackendIPs } from "Utils/EndpointUtils";
 import { useTeachingBubble } from "hooks/useTeachingBubble";
 import ko from "knockout";
 import React, { MutableRefObject, useEffect, useRef, useState } from "react";

@@ -28,6 +34,10 @@ interface TabsProps {
 
 export const Tabs = ({ explorer }: TabsProps): JSX.Element => {
   const { openedTabs, openedReactTabs, activeTab, activeReactTab } = useTabs();
+  const [
+    showMongoAndCassandraProxiesNetworkSettingsWarningState,
+    setShowMongoAndCassandraProxiesNetworkSettingsWarningState,
+  ] = useState<boolean>(showMongoAndCassandraProxiesNetworkSettingsWarning());
 
   const setKeyboardHandlers = useKeyboardActionGroup(KeyboardActionGroup.TABS);
   useEffect(() => {

@@ -38,8 +48,28 @@ export const Tabs = ({ explorer }: TabsProps): JSX.Element => {
     });
   }, [setKeyboardHandlers]);
 
+  const defaultMessageBarStyles: IMessageBarStyles = {
+    root: {
+      height: `${LayoutConstants.rowHeight}px`,
+      overflow: "hidden",
+      flexDirection: "row",
+    },
+  };
+
   return (
     <div className="tabsManagerContainer">
+      {showMongoAndCassandraProxiesNetworkSettingsWarningState && (
+        <MessageBar
+          messageBarType={MessageBarType.warning}
+          styles={defaultMessageBarStyles}
+          onDismiss={() => {
+            setShowMongoAndCassandraProxiesNetworkSettingsWarningState(false);
+          }}
+        >
+          {`We have migrated our middleware to new infrastructure. To avoid issues with Data Explorer access, please
+          re-enable "Allow access from Azure Portal" on the Networking blade for your account.`}
+        </MessageBar>
+      )}
       <div className="nav-tabs-margin">
         <ul className="nav nav-tabs level navTabHeight" id="navTabs" role="tablist">
           {openedReactTabs.map((tab) => (

@@ -284,3 +314,57 @@ const getReactTabContent = (activeReactTab: ReactTabKind, explorer: Explorer): J
       throw Error(`Unsupported tab kind ${ReactTabKind[activeReactTab]}`);
   }
 };
+
+const showMongoAndCassandraProxiesNetworkSettingsWarning = (): boolean => {
+  const ipRules: IpRule[] = userContext.databaseAccount?.properties?.ipRules;
+  if (
+    ((userContext.apiType === "Mongo" && configContext.MONGO_PROXY_ENDPOINT !== MongoProxyEndpoints.Development) ||
+      (userContext.apiType === "Cassandra" &&
+        configContext.CASSANDRA_PROXY_ENDPOINT !== CassandraProxyEndpoints.Development)) &&
+    ipRules?.length
+  ) {
+    const legacyPortalBackendIPs: string[] = PortalBackendIPs[configContext.BACKEND_ENDPOINT];
+    const ipAddressesFromIPRules: string[] = ipRules.map((ipRule) => ipRule.ipAddressOrRange);
+    const ipRulesIncludeLegacyPortalBackend: boolean = legacyPortalBackendIPs.every((legacyPortalBackendIP: string) =>
+      ipAddressesFromIPRules.includes(legacyPortalBackendIP),
+    );
+    if (!ipRulesIncludeLegacyPortalBackend) {
+      return false;
+    }
+
+    if (userContext.apiType === "Mongo") {
+      const isProdOrMpacMongoProxyEndpoint: boolean = [MongoProxyEndpoints.Mpac, MongoProxyEndpoints.Prod].includes(
+        configContext.MONGO_PROXY_ENDPOINT,
+      );
+
+      const mongoProxyOutboundIPs: string[] = isProdOrMpacMongoProxyEndpoint
+        ? [...MongoProxyOutboundIPs[MongoProxyEndpoints.Mpac], ...MongoProxyOutboundIPs[MongoProxyEndpoints.Prod]]
+        : MongoProxyOutboundIPs[configContext.MONGO_PROXY_ENDPOINT];
+
+      const ipRulesIncludeMongoProxy: boolean = mongoProxyOutboundIPs.every((mongoProxyOutboundIP: string) =>
+        ipAddressesFromIPRules.includes(mongoProxyOutboundIP),
+      );
+
+      return !ipRulesIncludeMongoProxy;
+    } else if (userContext.apiType === "Cassandra") {
+      const isProdOrMpacCassandraProxyEndpoint: boolean = [
+        CassandraProxyEndpoints.Mpac,
+        CassandraProxyEndpoints.Prod,
+      ].includes(configContext.CASSANDRA_PROXY_ENDPOINT);
+
+      const cassandraProxyOutboundIPs: string[] = isProdOrMpacCassandraProxyEndpoint
+        ? [
+            ...CassandraProxyOutboundIPs[CassandraProxyEndpoints.Mpac],
+            ...CassandraProxyOutboundIPs[CassandraProxyEndpoints.Prod],
+          ]
+        : CassandraProxyOutboundIPs[configContext.CASSANDRA_PROXY_ENDPOINT];
+
+      const ipRulesIncludeCassandraProxy: boolean = cassandraProxyOutboundIPs.every(
+        (cassandraProxyOutboundIP: string) => ipAddressesFromIPRules.includes(cassandraProxyOutboundIP),
+      );
+
+      return !ipRulesIncludeCassandraProxy;
+    }
+  }
+  return false;
+};
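Note (illustrative, not part of the diff): the warning added above only clears once every required outbound IP appears in the account's ipRules; as in the diff, rule strings are compared exactly rather than interpreted as ranges. The core check is an every/includes pass over the two lists, roughly:

// Returns true when every required address is present in the configured rules (exact string match).
function allIpsAllowed(requiredIps: string[], ipRules: { ipAddressOrRange: string }[]): boolean {
  const allowed = ipRules.map((rule) => rule.ipAddressOrRange);
  return requiredIps.every((ip) => allowed.includes(ip));
}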
@@ -1,11 +1,13 @@
 import { useBoolean } from "@fluentui/react-hooks";
 import { userContext } from "UserContext";
+import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
 import * as React from "react";
 import ConnectImage from "../../../../images/HdeConnectCosmosDB.svg";
 import ErrorImage from "../../../../images/error.svg";
 import { AuthType } from "../../../AuthType";
-import { HttpHeaders } from "../../../Common/Constants";
+import { BackendApi, HttpHeaders } from "../../../Common/Constants";
 import { configContext } from "../../../ConfigContext";
+import { GenerateTokenResponse } from "../../../Contracts/DataModels";
 import { isResourceTokenConnectionString } from "../Helpers/ResourceTokenUtils";
 
 interface Props {

@@ -17,6 +19,10 @@ interface Props {
 }
 
 export const fetchEncryptedToken = async (connectionString: string): Promise<string> => {
+  if (!useNewPortalBackendEndpoint(BackendApi.GenerateToken)) {
+    return await fetchEncryptedToken_ToBeDeprecated(connectionString);
+  }
+
   const headers = new Headers();
   headers.append(HttpHeaders.connectionString, connectionString);
   const url = configContext.PORTAL_BACKEND_ENDPOINT + "/api/connectionstring/token/generatetoken";

@@ -29,10 +35,28 @@ export const fetchEncryptedToken = async (connectionString: string): Promise<str
   return decodeURIComponent(encryptedTokenResponse);
 };
 
+export const fetchEncryptedToken_ToBeDeprecated = async (connectionString: string): Promise<string> => {
+  const headers = new Headers();
+  headers.append(HttpHeaders.connectionString, connectionString);
+  const url = configContext.BACKEND_ENDPOINT + "/api/guest/tokens/generateToken";
+  const response = await fetch(url, { headers, method: "POST" });
+  if (!response.ok) {
+    throw response;
+  }
+  // This API has a quirk where it must be parsed twice
+  const result: GenerateTokenResponse = JSON.parse(await response.json());
+  return decodeURIComponent(result.readWrite || result.read);
+};
+
 export const isAccountRestrictedForConnectionStringLogin = async (connectionString: string): Promise<boolean> => {
   const headers = new Headers();
   headers.append(HttpHeaders.connectionString, connectionString);
-  const url = configContext.PORTAL_BACKEND_ENDPOINT + "/api/guest/accountrestrictions/checkconnectionstringlogin";
+
+  const backendEndpoint: string = useNewPortalBackendEndpoint(BackendApi.AccountRestrictions)
+    ? configContext.PORTAL_BACKEND_ENDPOINT
+    : configContext.BACKEND_ENDPOINT;
+
+  const url = backendEndpoint + "/api/guest/accountrestrictions/checkconnectionstringlogin";
   const response = await fetch(url, { headers, method: "POST" });
   if (!response.ok) {
     throw response;
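Note (illustrative, not part of the diff): the pattern used above, call the new portal backend when the API is enabled for the current environment and otherwise fall back to the legacy route, generalizes to the other calls being migrated. A self-contained sketch with hypothetical names:

// Hypothetical sketch of the migration pattern; the diff's actual gate is useNewPortalBackendEndpoint(BackendApi.GenerateToken).
type TokenFetcher = (connectionString: string) => Promise<string>;

async function fetchTokenWithFallback(
  connectionString: string,
  newBackendEnabled: boolean,
  fetchFromNewBackend: TokenFetcher,
  fetchFromLegacyBackend: TokenFetcher,
): Promise<string> {
  // Prefer the migrated portal backend; keep the legacy call until every environment is migrated.
  return newBackendEnabled ? fetchFromNewBackend(connectionString) : fetchFromLegacyBackend(connectionString);
}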
@@ -16,7 +16,6 @@ export type Features = {
   readonly enableAadDataPlane: boolean;
   readonly enableResourceGraph: boolean;
   readonly enableKoResourceTree: boolean;
-  readonly enableThroughputBuckets: boolean;
   readonly hostedDataExplorer: boolean;
   readonly junoEndpoint?: string;
   readonly phoenixEndpoint?: string;

@@ -82,7 +81,6 @@ export function extractFeatures(given = new URLSearchParams(window.location.sear
     enableSpark: "true" === get("enablespark"),
     enableTtl: "true" === get("enablettl"),
     enableKoResourceTree: "true" === get("enablekoresourcetree"),
-    enableThroughputBuckets: "true" === get("enablethroughputbuckets"),
     executeSproc: "true" === get("dataexplorerexecutesproc"),
     hostedDataExplorer: "true" === get("hosteddataexplorerenabled"),
     mongoProxyEndpoint: get("mongoproxyendpoint"),
@@ -41,13 +41,13 @@ const getDescriptor = async (selfServeType: SelfServeType): Promise<SelfServeDes
     case SelfServeType.example: {
       const SelfServeExample = await import(/* webpackChunkName: "SelfServeExample" */ "./Example/SelfServeExample");
       const selfServeExample = new SelfServeExample.default();
-      await loadTranslations(selfServeType);
+      await loadTranslations(selfServeExample.constructor.name);
       return selfServeExample.toSelfServeDescriptor();
     }
     case SelfServeType.sqlx: {
       const SqlX = await import(/* webpackChunkName: "SqlX" */ "./SqlX/SqlX");
       const sqlX = new SqlX.default();
-      await loadTranslations(selfServeType);
+      await loadTranslations(sqlX.constructor.name);
       return sqlX.toSelfServeDescriptor();
     }
     case SelfServeType.graphapicompute: {

@@ -55,7 +55,7 @@ const getDescriptor = async (selfServeType: SelfServeType): Promise<SelfServeDes
         /* webpackChunkName: "GraphAPICompute" */ "./GraphAPICompute/GraphAPICompute"
       );
       const graphAPICompute = new GraphAPICompute.default();
-      await loadTranslations(selfServeType);
+      await loadTranslations(graphAPICompute.constructor.name);
       return graphAPICompute.toSelfServeDescriptor();
     }
     case SelfServeType.materializedviewsbuilder: {

@@ -63,7 +63,7 @@ const getDescriptor = async (selfServeType: SelfServeType): Promise<SelfServeDes
         /* webpackChunkName: "MaterializedViewsBuilder" */ "./MaterializedViewsBuilder/MaterializedViewsBuilder"
       );
       const materializedViewsBuilder = new MaterializedViewsBuilder.default();
-      await loadTranslations(selfServeType);
+      await loadTranslations(materializedViewsBuilder.constructor.name);
       return materializedViewsBuilder.toSelfServeDescriptor();
     }
     default:

@@ -103,7 +103,7 @@ const handleMessage = async (event: MessageEvent): Promise<void> => {
 
   const urlSearchParams = new URLSearchParams(window.location.search);
   const selfServeTypeText = urlSearchParams.get("selfServeType") || inputs.selfServeType;
-  const selfServeType = SelfServeType[selfServeTypeText.toLocaleLowerCase() as keyof typeof SelfServeType];
+  const selfServeType = SelfServeType[selfServeTypeText?.toLowerCase() as keyof typeof SelfServeType];
   if (
     !inputs.subscriptionId ||
     !inputs.resourceGroup ||

@@ -29,11 +29,10 @@ export enum SelfServeType {
   // Unsupported self serve type passed as feature flag
   invalid = "invalid",
   // Add your self serve types here
-  // NOTE: text and casing of the enum's value must match the corresponding file in Localization\en\
-  example = "SelfServeExample",
-  sqlx = "SqlX",
-  graphapicompute = "GraphAPICompute",
-  materializedviewsbuilder = "MaterializedViewsBuilder",
+  example = "example",
+  sqlx = "sqlx",
+  graphapicompute = "graphapicompute",
+  materializedviewsbuilder = "materializedviewsbuilder",
 }
 
 /**

@@ -89,7 +89,3 @@ export const getItemName = (): string => {
     return "Items";
   }
 };
-
-export const isDataplaneRbacSupported = (apiType: string): boolean => {
-  return apiType === "SQL" || apiType === "Tables";
-};
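Note (illustrative, not part of the diff): passing the instance's constructor.name to loadTranslations keys the translation bundle off the class name (for example "SelfServeExample"), which is why the enum values can change casing and the old casing note can be dropped. In plain TypeScript:

class SelfServeExample {}

const descriptor = new SelfServeExample();
// constructor.name is the class name, independent of whatever enum value is used for routing.
console.log(descriptor.constructor.name); // "SelfServeExample"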
@@ -1,7 +1,6 @@
 export const autoPilotThroughput1K = 1000;
 export const autoPilotIncrementStep = 1000;
 export const autoPilotThroughput4K = 4000;
-export const autoPilotThroughput10K = 10000;
 
 export function isValidAutoPilotThroughput(maxThroughput: number): boolean {
   if (!maxThroughput) {
@@ -1,4 +1,11 @@
-import { CassandraProxyEndpoints, JunoEndpoints, MongoProxyEndpoints, PortalBackendEndpoints } from "Common/Constants";
+import {
+  BackendApi,
+  CassandraProxyEndpoints,
+  JunoEndpoints,
+  MongoProxyEndpoints,
+  PortalBackendEndpoints,
+} from "Common/Constants";
+import { configContext } from "ConfigContext";
 import * as Logger from "../Common/Logger";
 
 export function validateEndpoint(

@@ -66,6 +73,9 @@ export const PortalBackendIPs: { [key: string]: string[] } = {
   //"https://main2.documentdb.ext.azure.com": ["104.42.196.69"],
   "https://main.documentdb.ext.azure.cn": ["139.217.8.252"],
   "https://main.documentdb.ext.azure.us": ["52.244.48.71"],
+  // Add ussec and usnat when endpoint address is known:
+  //ussec: ["29.26.26.67", "29.26.26.66"],
+  //usnat: ["7.28.202.68"],
 };
 
 export const PortalBackendOutboundIPs: { [key: string]: string[] } = {

@@ -90,6 +100,14 @@ export const defaultAllowedMongoProxyEndpoints: ReadonlyArray<string> = [
   MongoProxyEndpoints.Mooncake,
 ];
 
+export const allowedMongoProxyEndpoints_ToBeDeprecated: ReadonlyArray<string> = [
+  "https://main.documentdb.ext.azure.com",
+  "https://main.documentdb.ext.azure.cn",
+  "https://main.documentdb.ext.azure.us",
+  "https://main.cosmos.ext.azure",
+  "https://localhost:12901",
+];
+
 export const defaultAllowedCassandraProxyEndpoints: ReadonlyArray<string> = [
   CassandraProxyEndpoints.Development,
   CassandraProxyEndpoints.Mpac,

@@ -135,3 +153,53 @@ export const allowedJunoOrigins: ReadonlyArray<string> = [
 ];
 
 export const allowedNotebookServerUrls: ReadonlyArray<string> = [];
+
+//
+// Temporary function to determine if a portal backend API is supported by the
+// new backend in this environment.
+//
+// TODO: Remove this function once new backend migration is completed for all environments.
+//
+export function useNewPortalBackendEndpoint(backendApi: string): boolean {
+  // This maps backend APIs to the environments supported by the new backend.
+  const newBackendApiEnvironmentMap: { [key: string]: string[] } = {
+    [BackendApi.GenerateToken]: [
+      PortalBackendEndpoints.Development,
+      PortalBackendEndpoints.Mpac,
+      PortalBackendEndpoints.Prod,
+    ],
+    [BackendApi.PortalSettings]: [
+      PortalBackendEndpoints.Development,
+      PortalBackendEndpoints.Mpac,
+      PortalBackendEndpoints.Prod,
+    ],
+    [BackendApi.AccountRestrictions]: [
+      PortalBackendEndpoints.Development,
+      PortalBackendEndpoints.Mpac,
+      PortalBackendEndpoints.Prod,
+    ],
+    [BackendApi.RuntimeProxy]: [
+      PortalBackendEndpoints.Development,
+      PortalBackendEndpoints.Mpac,
+      PortalBackendEndpoints.Prod,
+    ],
+    [BackendApi.DisallowedLocations]: [
+      PortalBackendEndpoints.Development,
+      PortalBackendEndpoints.Mpac,
+      PortalBackendEndpoints.Prod,
+      PortalBackendEndpoints.Fairfax,
+      PortalBackendEndpoints.Mooncake,
+    ],
+    [BackendApi.SampleData]: [
+      PortalBackendEndpoints.Development,
+      PortalBackendEndpoints.Mpac,
+      PortalBackendEndpoints.Prod,
+    ],
+  };
+
+  if (!newBackendApiEnvironmentMap[backendApi] || !configContext.PORTAL_BACKEND_ENDPOINT) {
+    return false;
+  }
+
+  return newBackendApiEnvironmentMap[backendApi].includes(configContext.PORTAL_BACKEND_ENDPOINT);
+}
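Note (illustrative, not part of the diff): useNewPortalBackendEndpoint is a plain lookup; an API is considered migrated only if the current PORTAL_BACKEND_ENDPOINT appears in that API's environment list, and it returns false for unknown APIs or an unset endpoint. Typical call-site usage, with the enablement ultimately driven by configContext:

const canUseNewGenerateToken = useNewPortalBackendEndpoint(BackendApi.GenerateToken);
if (canUseNewGenerateToken) {
  // route the request to configContext.PORTAL_BACKEND_ENDPOINT; otherwise keep the legacy BACKEND_ENDPOINT path
}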
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Lists the Cassandra keyspaces under an existing Azure Cosmos DB database account. */
|
/* Lists the Cassandra keyspaces under an existing Azure Cosmos DB database account. */
|
||||||
export async function listCassandraKeyspaces(
|
export async function listCassandraKeyspaces(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the metrics determined by the given filter for the given database account and collection. */
|
/* Retrieves the metrics determined by the given filter for the given database account and collection. */
|
||||||
export async function listMetrics(
|
export async function listMetrics(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the metrics determined by the given filter for the given collection, split by partition. */
|
/* Retrieves the metrics determined by the given filter for the given collection, split by partition. */
|
||||||
export async function listMetrics(
|
export async function listMetrics(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the metrics determined by the given filter for the given collection and region, split by partition. */
|
/* Retrieves the metrics determined by the given filter for the given collection and region, split by partition. */
|
||||||
export async function listMetrics(
|
export async function listMetrics(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the metrics determined by the given filter for the given database account, collection and region. */
|
/* Retrieves the metrics determined by the given filter for the given database account, collection and region. */
|
||||||
export async function listMetrics(
|
export async function listMetrics(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the metrics determined by the given filter for the given database account and database. */
|
/* Retrieves the metrics determined by the given filter for the given database account and database. */
|
||||||
export async function listMetrics(
|
export async function listMetrics(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the metrics determined by the given filter for the given database account and region. */
|
/* Retrieves the metrics determined by the given filter for the given database account and region. */
|
||||||
export async function listMetrics(
|
export async function listMetrics(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Retrieves the properties of an existing Azure Cosmos DB database account. */
|
/* Retrieves the properties of an existing Azure Cosmos DB database account. */
|
||||||
export async function get(
|
export async function get(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Lists the graphs under an existing Azure Cosmos DB database account. */
|
/* Lists the graphs under an existing Azure Cosmos DB database account. */
|
||||||
export async function listGraphs(
|
export async function listGraphs(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Lists the Gremlin databases under an existing Azure Cosmos DB database account. */
|
/* Lists the Gremlin databases under an existing Azure Cosmos DB database account. */
|
||||||
export async function listGremlinDatabases(
|
export async function listGremlinDatabases(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* List Cosmos DB locations and their properties */
|
/* List Cosmos DB locations and their properties */
|
||||||
export async function list(subscriptionId: string): Promise<Types.LocationListResult | Types.CloudError> {
|
export async function list(subscriptionId: string): Promise<Types.LocationListResult | Types.CloudError> {
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Lists the MongoDB databases under an existing Azure Cosmos DB database account. */
|
/* Lists the MongoDB databases under an existing Azure Cosmos DB database account. */
|
||||||
export async function listMongoDBDatabases(
|
export async function listMongoDBDatabases(
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { configContext } from "../../../../ConfigContext";
|
import { configContext } from "../../../../ConfigContext";
|
||||||
import { armRequest } from "../../request";
|
import { armRequest } from "../../request";
|
||||||
import * as Types from "./types";
|
import * as Types from "./types";
|
||||||
const apiVersion = "2024-12-01-preview";
|
const apiVersion = "2024-02-15-preview";
|
||||||
|
|
||||||
/* Lists all of the available Cosmos DB Resource Provider operations. */
|
/* Lists all of the available Cosmos DB Resource Provider operations. */
|
||||||
export async function list(): Promise<Types.OperationListResult> {
|
export async function list(): Promise<Types.OperationListResult> {
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
Run "npm run generateARMClients" to regenerate
|
Run "npm run generateARMClients" to regenerate
|
||||||
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
|
||||||
|
|
||||||
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
|
Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
|
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-02-15-preview";
 
 /* Retrieves the metrics determined by the given filter for the given partition key range id. */
 export async function listMetrics(

@@ -3,13 +3,13 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-02-15-preview";
 
 /* Retrieves the metrics determined by the given filter for the given partition key range id and region. */
 export async function listMetrics(

@@ -3,13 +3,13 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-02-15-preview";
 
 /* Retrieves the metrics determined by the given filter for the given database account. This url is only for PBS and Replication Latency data */
 export async function listMetrics(

@@ -3,13 +3,13 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-02-15-preview";
 
 /* Retrieves the metrics determined by the given filter for the given account, source and target region. This url is only for PBS and Replication Latency data */
 export async function listMetrics(

@@ -3,13 +3,13 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-02-15-preview";
 
 /* Retrieves the metrics determined by the given filter for the given account target region. This url is only for PBS and Replication Latency data */
 export async function listMetrics(
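The five hunks above all touch generated ARM metrics clients in the same way: they re-pin the apiVersion constant and the "Generated from" header. For orientation, a minimal sketch of the shape such a generated client takes is below. The parameter list, request path, return type, and the exact options object accepted by armRequest are assumptions for illustration only; the diff itself shows only the imports, the apiVersion constant, and the function name.

import { configContext } from "../../../../ConfigContext";
import { armRequest } from "../../request";
import * as Types from "./types";

const apiVersion = "2024-02-15-preview";

/* Hypothetical sketch of a generated metrics client; the path and parameters are illustrative,
   not copied from the generated file. */
export async function listMetrics(
  subscriptionId: string,
  resourceGroupName: string,
  accountName: string,
): Promise<Types.MetricListResult> {
  const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/metrics`;
  return armRequest({ host: configContext.ARM_ENDPOINT, path, method: "GET", apiVersion });
}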
@@ -3,13 +3,13 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-05-15-preview";
 
 /* Lists the SQL databases under an existing Azure Cosmos DB database account. */
 export async function listSqlDatabases(

@@ -3,13 +3,13 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 import { configContext } from "../../../../ConfigContext";
 import { armRequest } from "../../request";
 import * as Types from "./types";
-const apiVersion = "2024-12-01-preview";
+const apiVersion = "2024-02-15-preview";
 
 /* Lists the Tables under an existing Azure Cosmos DB database account. */
 export async function listTables(
@@ -3,7 +3,7 @@
 Run "npm run generateARMClients" to regenerate
 Edting this file directly should be done with extreme caution as not to diverge from ARM REST specs
 
-Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-12-01-preview/cosmos-db.json
+Generated from: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json
 */
 
 /* The List operation response, that contains the client encryption keys and their properties. */
@@ -553,12 +553,6 @@ export interface DatabaseAccountGetProperties {
 /* The object that represents all properties related to capacity enforcement on an account. */
 capacity?: Capacity;
 
-/* Indicates the capacityMode of the Cosmos DB account. */
-capacityMode?: CapacityMode;
-
-/* The object that represents the migration state for the CapacityMode of the Cosmos DB account. */
-capacityModeChangeTransitionState?: CapacityModeChangeTransitionState;
-
 /* Flag to indicate whether to enable MaterializedViews on the Cosmos DB account */
 enableMaterializedViews?: boolean;
 /* The object that represents the metadata for the Account Keys of the Cosmos DB account. */

@@ -658,9 +652,6 @@ export interface DatabaseAccountCreateUpdateProperties {
 /* The object that represents all properties related to capacity enforcement on an account. */
 capacity?: Capacity;
 
-/* Indicates the capacityMode of the Cosmos DB account. */
-capacityMode?: CapacityMode;
-
 /* Flag to indicate whether to enable MaterializedViews on the Cosmos DB account */
 enableMaterializedViews?: boolean;
 /* This property is ignored during the update/create operation, as the metadata is read-only. The object represents the metadata for the Account Keys of the Cosmos DB account. */

@@ -763,9 +754,6 @@ export interface DatabaseAccountUpdateProperties {
 /* The object that represents all properties related to capacity enforcement on an account. */
 capacity?: Capacity;
 
-/* Indicates the capacityMode of the Cosmos DB account. */
-capacityMode?: CapacityMode;
-
 /* Flag to indicate whether to enable MaterializedViews on the Cosmos DB account */
 enableMaterializedViews?: boolean;
 /* This property is ignored during the update operation, as the metadata is read-only. The object represents the metadata for the Account Keys of the Cosmos DB account. */
@@ -1099,8 +1087,6 @@ export interface ThroughputSettingsResource {
 readonly instantMaximumThroughput?: string;
 /* The maximum throughput value or the maximum maxThroughput value (for autoscale) that can be specified */
 readonly softAllowedMaximumThroughput?: string;
-/* Array of Throughput Bucket limits to be applied to the Cosmos DB container */
-throughputBuckets?: ThroughputBucketResource[];
 }
 
 /* Cosmos DB provisioned throughput settings object */

@@ -1128,14 +1114,6 @@ export interface ThroughputPolicyResource {
 incrementPercent?: number;
 }
 
-/* Cosmos DB throughput bucket object */
-export interface ThroughputBucketResource {
-/* Represents the throughput bucket id */
-id: number;
-/* Represents maximum percentage throughput that can be used by the bucket */
-maxThroughputPercentage: number;
-}
-
 /* Cosmos DB options resource object */
 export interface OptionsResource {
 /* Value of the Cosmos DB resource throughput or autoscaleSettings. Use the ThroughputSetting resource when retrieving offer details. */
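To make the removed throughput-bucket shape concrete, here is a short hedged example built only from the '-' side definitions above (ThroughputBucketResource with id and maxThroughputPercentage, referenced from ThroughputSettingsResource.throughputBuckets); the values are invented.

// Illustrative only: field names come from the '-' side ThroughputBucketResource /
// ThroughputSettingsResource definitions above; the values are made up.
const buckets: ThroughputBucketResource[] = [
  { id: 1, maxThroughputPercentage: 50 }, // bucket 1 may consume at most 50% of provisioned RU/s
  { id: 2, maxThroughputPercentage: 20 },
];

// Partial, because the remaining ThroughputSettingsResource fields are not shown in this hunk.
const settings: Partial<ThroughputSettingsResource> = {
  throughputBuckets: buckets,
};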
@@ -1257,6 +1235,10 @@ export interface SqlDatabaseResource {
 export interface SqlContainerResource {
 /* Name of the Cosmos DB SQL container */
 id: string;
 
+vectorEmbeddingPolicy?: VectorEmbeddingPolicy;
+
+fullTextPolicy?: FullTextPolicy;
+
 /* The configuration of the indexing policy. By default, the indexing is automatic for all document paths within the container */
 indexingPolicy?: IndexingPolicy;

@@ -1287,11 +1269,39 @@ export interface SqlContainerResource {
 
 /* List of computed properties */
 computedProperties?: ComputedProperty[];
+}
 
-/* The vector embedding policy for the container. */
-vectorEmbeddingPolicy?: VectorEmbeddingPolicy;
+export interface VectorEmbeddingPolicy {
+vectorEmbeddings: VectorEmbedding[];
+}
 
-fullTextPolicy?: FullTextPolicy;
+export interface VectorEmbedding {
+path?: string;
+dataType?: string;
+dimensions?: number;
+distanceFunction?: string;
+}
+
+export interface FullTextPolicy {
+/**
+* The default language for the full text .
+*/
+defaultLanguage: string;
+/**
+* The paths to be indexed for full text search.
+*/
+fullTextPaths: FullTextPath[];
+}
+
+export interface FullTextPath {
+/**
+* The path to be indexed for full text search.
+*/
+path: string;
+/**
+* The language for the full text path.
+*/
+language: string;
 }
 
 /* Cosmos DB indexing policy */
@@ -1313,14 +1323,19 @@ export interface IndexingPolicy {
 /* List of spatial specifics */
 spatialIndexes?: SpatialSpec[];
 
-/* List of paths to include in the vector indexing */
 vectorIndexes?: VectorIndex[];
+
+fullTextIndexes?: FullTextIndex[];
 }
 
-/* Cosmos DB Vector Embedding Policy */
-export interface VectorEmbeddingPolicy {
-/* List of vector embeddings */
-vectorEmbeddings?: VectorEmbedding[];
+export interface VectorIndex {
+path?: string;
+type?: string;
+}
+
+export interface FullTextIndex {
+/** The path in the JSON document to index. */
+path: string;
 }
 
 /* undocumented */
@@ -1348,50 +1363,6 @@ export interface Indexes {
 kind?: "Hash" | "Range" | "Spatial";
 }
 
-/* undocumented */
-export interface VectorIndex {
-/* The path to the vector field in the document. */
-path: string;
-/* The index type of the vector. Currently, flat, diskANN, and quantizedFlat are supported. */
-type: "flat" | "diskANN" | "quantizedFlat";
-}
-
-/* Represents a vector embedding. A vector embedding is used to define a vector field in the documents. */
-export interface VectorEmbedding {
-/* The path to the vector field in the document. */
-path: string;
-/* Indicates the data type of vector. */
-dataType: "float16" | "float32" | "uint8" | "int8";
-
-/* The distance function to use for distance calculation in between vectors. */
-distanceFunction: "euclidean" | "cosine" | "dotproduct";
-
-/* The number of dimensions in the vector. */
-dimensions: number;
-}
-
-export interface FullTextPolicy {
-/**
-* The default language for the full text .
-*/
-defaultLanguage: string;
-/**
-* The paths to be indexed for full text search.
-*/
-fullTextPaths: FullTextPath[];
-}
-
-export interface FullTextPath {
-/**
-* The path to be indexed for full text search.
-*/
-path: string;
-/**
-* The language for the full text path.
-*/
-language: string;
-}
-
 /* List of composite path */
 export type CompositePathList = CompositePath[];
 
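Pulling the '-' side vector and full-text definitions together, here is a hedged sketch of a container resource that uses them. The paths and values are illustrative; only the field names and literal unions come from the interfaces shown in the hunks above.

// Illustrative only: shapes follow the '-' side SqlContainerResource, VectorEmbeddingPolicy,
// VectorEmbedding, FullTextPolicy, FullTextPath, IndexingPolicy and VectorIndex definitions above.
const containerResource: Partial<SqlContainerResource> = {
  id: "products",
  vectorEmbeddingPolicy: {
    vectorEmbeddings: [
      { path: "/embedding", dataType: "float32", distanceFunction: "cosine", dimensions: 1536 },
    ],
  },
  fullTextPolicy: {
    defaultLanguage: "en-US",
    fullTextPaths: [{ path: "/description", language: "en-US" }],
  },
  indexingPolicy: {
    // "diskANN" is one of the index types listed in the '-' side VectorIndex union.
    vectorIndexes: [{ path: "/embedding", type: "diskANN" }],
  },
};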
@@ -1717,28 +1688,6 @@ export interface Capacity {
 totalThroughputLimit?: number;
 }
 
-/* Indicates the capacity mode of the account. */
-export type CapacityMode = "None" | "Provisioned" | "Serverless";
-
-/* The transition state information related capacity mode change with update request. */
-export interface CapacityModeChangeTransitionState {
-/* The transition status of capacity mode. */
-capacityModeTransitionStatus?: "Invalid" | "Initialized" | "InProgress" | "Completed" | "Failed";
-
-/* Indicates the current capacity mode of the account. */
-currentCapacityMode?: "None" | "Provisioned" | "Serverless";
-
-/* Indicates the previous capacity mode of the account before successful transition. */
-previousCapacityMode?: "None" | "Provisioned" | "Serverless";
-
-/* Begin time in UTC of the capacity mode change. */
-readonly capacityModeTransitionBeginTimestamp?: string;
-/* End time in UTC of the capacity mode change. */
-readonly capacityModeTransitionEndTimestamp?: string;
-/* End time in UTC of the last successful capacity mode change. */
-readonly capacityModeLastSuccessfulTransitionEndTimestamp?: string;
-}
-
 /* Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB". */
 export type Tags = { [key: string]: string };
 
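For reference, a hedged sketch of how the removed capacity-mode types fit together. Field names and literal unions come from the '-' side definitions above; the values are invented.

// Illustrative only: CapacityMode and CapacityModeChangeTransitionState as defined on the '-' side.
const capacityMode: CapacityMode = "Serverless";

const transition: CapacityModeChangeTransitionState = {
  capacityModeTransitionStatus: "InProgress",
  currentCapacityMode: "Provisioned",
  previousCapacityMode: "Serverless",
  capacityModeTransitionBeginTimestamp: "2025-01-01T00:00:00Z",
};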
@@ -2004,8 +1953,8 @@ export type PublicNetworkAccess = "Enabled" | "Disabled" | "SecuredByPerimeter";
 
 /* undocumented */
 export interface ApiProperties {
-/* Describes the version of the MongoDB account. */
-serverVersion?: "3.2" | "3.6" | "4.0" | "4.2" | "5.0" | "6.0" | "7.0";
+/* Describes the ServerVersion of an a MongoDB account. */
+serverVersion?: "3.2" | "3.6" | "4.0" | "4.2";
 }
 
 /* Analytical storage specific properties. */
@@ -13,7 +13,7 @@ import {
 readSubComponentState,
 } from "Shared/AppStatePersistenceUtility";
 import { LocalStorageUtility, StorageKey } from "Shared/StorageUtility";
-import { isDataplaneRbacSupported } from "Utils/APITypeUtils";
+import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
 import { logConsoleError } from "Utils/NotificationConsoleUtils";
 import { useQueryCopilot } from "hooks/useQueryCopilot";
 import { ReactTabKind, useTabs } from "hooks/useTabs";

@@ -300,7 +300,7 @@ async function configureHostedWithAAD(config: AAD): Promise<Explorer> {
 );
 if (!userContext.features.enableAadDataPlane) {
 Logger.logInfo(`AAD Feature flag is not enabled for account ${account.name}`, "Explorer/configureHostedWithAAD");
-if (isDataplaneRbacSupported(userContext.apiType)) {
+if (userContext.apiType === "SQL") {
 if (LocalStorageUtility.hasItem(StorageKey.DataPlaneRbacEnabled)) {
 const isDataPlaneRbacSetting = LocalStorageUtility.getEntryString(StorageKey.DataPlaneRbacEnabled);
 Logger.logInfo(

@@ -548,12 +548,20 @@ async function configurePortal(): Promise<Explorer> {
 const inputs = message?.inputs;
 const openAction = message?.openAction;
 if (inputs) {
+if (
+configContext.BACKEND_ENDPOINT &&
+configContext.platform === Platform.Portal &&
+process.env.NODE_ENV === "development"
+) {
+inputs.extensionEndpoint = configContext.PROXY_PATH;
+}
+
 updateContextsFromPortalMessage(inputs);
 
 const { databaseAccount: account, subscriptionId, resourceGroup } = userContext;
 
 let dataPlaneRbacEnabled;
-if (isDataplaneRbacSupported(userContext.apiType)) {
+if (userContext.apiType === "SQL") {
 if (LocalStorageUtility.hasItem(StorageKey.DataPlaneRbacEnabled)) {
 const isDataPlaneRbacSetting = LocalStorageUtility.getEntryString(StorageKey.DataPlaneRbacEnabled);
 Logger.logInfo(

@@ -672,17 +680,18 @@ function updateAADEndpoints(portalEnv: PortalEnv) {
 
 function updateContextsFromPortalMessage(inputs: DataExplorerInputsFrame) {
 if (
-configContext.PORTAL_BACKEND_ENDPOINT &&
+configContext.BACKEND_ENDPOINT &&
 configContext.platform === Platform.Portal &&
 process.env.NODE_ENV === "development"
 ) {
-inputs.portalBackendEndpoint = configContext.PROXY_PATH;
+inputs.extensionEndpoint = configContext.PROXY_PATH;
 }
 
 const authorizationToken = inputs.authorizationToken || "";
 const databaseAccount = inputs.databaseAccount;
 
 updateConfigContext({
+BACKEND_ENDPOINT: inputs.extensionEndpoint || configContext.BACKEND_ENDPOINT,
 ARM_ENDPOINT: normalizeArmEndpoint(inputs.csmEndpoint || configContext.ARM_ENDPOINT),
 MONGO_PROXY_ENDPOINT: inputs.mongoProxyEndpoint,
 CASSANDRA_PROXY_ENDPOINT: inputs.cassandraProxyEndpoint,
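The '-' side swaps the hard-coded userContext.apiType === "SQL" checks for isDataplaneRbacSupported(userContext.apiType), imported from Utils/APITypeUtils in the first hunk of this file. The helper itself is not part of this diff; a plausible sketch is below, with the exact set of API types an assumption rather than the real implementation.

// Hypothetical sketch of Utils/APITypeUtils.isDataplaneRbacSupported; the real helper is not
// shown in this diff and may cover a different set of API types.
export const isDataplaneRbacSupported = (apiType: string): boolean =>
  apiType === "SQL" || apiType === "Tables" || apiType === "Gremlin";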
@@ -785,7 +794,17 @@ async function updateContextForSampleData(explorer: Explorer): Promise<void> {
 return;
 }
 
-const url: string = createUri(configContext.PORTAL_BACKEND_ENDPOINT, "/api/sampledata");
+let url: string;
+if (useNewPortalBackendEndpoint(Constants.BackendApi.SampleData)) {
+url = createUri(configContext.PORTAL_BACKEND_ENDPOINT, "/api/sampledata");
+} else {
+const sampleDatabaseEndpoint = useQueryCopilot.getState().copilotUserDBEnabled
+? `/api/tokens/sampledataconnection/v2`
+: `/api/tokens/sampledataconnection`;
+
+url = createUri(`${configContext.BACKEND_ENDPOINT}`, sampleDatabaseEndpoint);
+}
+
 const authorizationHeader = getAuthorizationHeader();
 const headers = { [authorizationHeader.header]: authorizationHeader.token };
 
@@ -801,7 +820,7 @@ async function updateContextForSampleData(explorer: Explorer): Promise<void> {
 const sampleDataConnectionInfo = parseResourceTokenConnectionString(data.connectionString);
 updateUserContext({ sampleDataConnectionInfo });
 
-explorer.refreshSampleData();
+await explorer.refreshSampleData();
 }
 
 interface SampledataconnectionResponse {
@@ -1,9 +1,16 @@
 import { useEffect, useState } from "react";
-import { HttpHeaders } from "../Common/Constants";
+import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
+import { ApiEndpoints, BackendApi, HttpHeaders } from "../Common/Constants";
 import { configContext } from "../ConfigContext";
 import { AccessInputMetadata } from "../Contracts/DataModels";
 
+const url = `${configContext.BACKEND_ENDPOINT}${ApiEndpoints.guestRuntimeProxy}/accessinputmetadata?_=1609359229955`;
+
 export async function fetchAccessData(portalToken: string): Promise<AccessInputMetadata> {
+if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
+return fetchAccessData_ToBeDeprecated(portalToken);
+}
+
 const headers = new Headers();
 // Portal encrypted token API quirk: The token header must be URL encoded
 headers.append(HttpHeaders.guestAccessToken, encodeURIComponent(portalToken));

@@ -18,6 +25,25 @@ export async function fetchAccessData(portalToken: string): Promise<AccessInputMetadata> {
 .catch((error) => console.error(error));
 }
 
+export async function fetchAccessData_ToBeDeprecated(portalToken: string): Promise<AccessInputMetadata> {
+const headers = new Headers();
+// Portal encrypted token API quirk: The token header must be URL encoded
+headers.append(HttpHeaders.guestAccessToken, encodeURIComponent(portalToken));
+
+const options = {
+method: "GET",
+headers: headers,
+};
+
+return (
+fetch(url, options)
+.then((response) => response.json())
+// Portal encrypted token API quirk: The response is double JSON encoded
+.then((json) => JSON.parse(json))
+.catch((error) => console.error(error))
+);
+}
+
 export function useTokenMetadata(token: string): AccessInputMetadata | undefined {
 const [state, setState] = useState<AccessInputMetadata | undefined>();
 
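A hedged usage sketch for the hook above; the component and its prop are hypothetical, and only the hook's signature ((token: string) => AccessInputMetadata | undefined) is taken from the file being diffed.

// Hypothetical consumer of useTokenMetadata; rendering the raw metadata avoids guessing
// at AccessInputMetadata's individual fields.
import React from "react";

export const TokenMetadataDebug: React.FC<{ encryptedToken: string }> = ({ encryptedToken }) => {
  const metadata = useTokenMetadata(encryptedToken);
  if (!metadata) {
    return <p>Resolving access metadata…</p>;
  }
  return <pre>{JSON.stringify(metadata, null, 2)}</pre>;
};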
@@ -39,6 +39,7 @@ const initTestExplorer = async (): Promise<void> => {
 csmEndpoint: "https://management.azure.com",
 dnsSuffix: "documents.azure.com",
 serverId: "prod1",
+extensionEndpoint: "/proxy",
 portalBackendEndpoint: "https://cdb-ms-mpac-pbe.cosmos.azure.com",
 mongoProxyEndpoint: "https://cdb-ms-mpac-mp.cosmos.azure.com",
 cassandraProxyEndpoint: "https://cdb-ms-mpac-cp.cosmos.azure.com",
@@ -16,13 +16,13 @@ Results of this file should be checked into the repo.
 */
 
 // CHANGE THESE VALUES TO GENERATE NEW CLIENTS
-const version = "2024-12-01-preview";
-/* The following are legal options for resourceName but you generally will only use cosmos:
-"cosmos" | "managedCassandra" | "mongorbac" | "notebook" | "privateEndpointConnection" | "privateLinkResources" |
+const version = "2024-02-15-preview";
+/* The following are legal options for resourceName but you generally will only use cosmos-db:
+"cosmos-db" | "managedCassandra" | "mongorbac" | "notebook" | "privateEndpointConnection" | "privateLinkResources" |
 "rbac" | "restorable" | "services" | "dataTransferService"
 */
 const githubResourceName = "cosmos-db";
-const deResourceName = "cosmos";
+const deResourceName = "cosmos-db";
 const schemaURL = `https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/${version}/${githubResourceName}.json`;
 const outputDir = path.join(__dirname, `../../src/Utils/arm/generatedClients/${deResourceName}`);
 
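With the '+' side constants above, the two derived values in the generator resolve as follows (expanded here for clarity; the outputDir expansion assumes the script sits two directory levels below the repo root, as implied by the ../../ prefix):

// Derived from the '+' side constants in the hunk above.
const schemaURL =
  "https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cosmos-db/resource-manager/Microsoft.DocumentDB/preview/2024-02-15-preview/cosmos-db.json";
// outputDir resolves, relative to the repo root, to roughly:
//   src/Utils/arm/generatedClients/cosmos-db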
@@ -248,8 +248,7 @@ module.exports = function (_env = {}, argv = {}) {
 new TerserPlugin({
 terserOptions: {
 // These options increase our initial bundle size by ~5% but the builds are significantly faster and won't run out of memory
-// Update 2/11/2025: we are removing this flag as our bundles sizes grew so that it can remove dead and unreachable code with compromise of build time
-// compress: false,
+compress: false,
 mangle: {
 keep_fnames: true,
 keep_classnames: true,
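Read together, the '+' side of this hunk re-enables compress: false. A consolidated sketch of the resulting minimizer entry is below; everything outside the options shown in the hunk is omitted.

// Consolidated sketch of the '+' side minimizer settings; other webpack.config.js options
// are intentionally omitted.
const TerserPlugin = require("terser-webpack-plugin");

const minimizer = [
  new TerserPlugin({
    terserOptions: {
      // These options increase our initial bundle size by ~5% but the builds are
      // significantly faster and won't run out of memory
      compress: false,
      mangle: {
        keep_fnames: true,
        keep_classnames: true,
      },
    },
  }),
];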