Backend Migration - Remove Use of Legacy Backend from DE (#2043)

* Default to the new backend endpoint when the endpoint in the current context does not match the set defined in constants.

* Remove some env references.

* Add comments explaining why the new backend is selected by default.

* Update comment.

* Remove all references to useNewPortalBackendEndpoint now that the old backend is disabled in all environments (the removed gating pattern is sketched below the change summary).

* Resolve lint issues.

* Remove references to the old backend from the Cassandra and Mongo APIs.

* Fix unit tests.

---------

Co-authored-by: Asier Isayas <aisayas@microsoft.com>
bogercraig 2025-02-12 18:12:59 -08:00 committed by GitHub
parent de2449ee25
commit 2730da7ab6
18 changed files with 42 additions and 1048 deletions
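
The change repeated across the files below is the removal of this temporary gate. A minimal sketch of the before/after pattern, with simplified signatures; fetchExample and its legacy fallback are hypothetical stand-ins, and import paths follow the forms used in the diffs:

  import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils"; // deleted by this commit
  import { BackendApi } from "Common/Constants";
  import { configContext } from "ConfigContext";

  // Hypothetical legacy helper; the real code had per-API *_ToBeDeprecated functions targeting BACKEND_ENDPOINT.
  declare function fetchExample_ToBeDeprecated(): Promise<Response>;

  // Before: every call site branched on the per-API gate and kept a legacy fallback.
  async function fetchExampleBefore(): Promise<Response> {
    if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
      return fetchExample_ToBeDeprecated();
    }
    return fetch(`${configContext.PORTAL_BACKEND_ENDPOINT}/api/example`);
  }

  // After: the gate, the *_ToBeDeprecated fallbacks, and BACKEND_ENDPOINT itself are all gone.
  async function fetchExampleAfter(): Promise<Response> {
    return fetch(`${configContext.PORTAL_BACKEND_ENDPOINT}/api/example`);
  }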

View File

@ -3,9 +3,8 @@ import { getAuthorizationTokenUsingResourceTokens } from "Common/getAuthorizatio
import { AuthorizationToken } from "Contracts/FabricMessageTypes";
import { checkDatabaseResourceTokensValidity } from "Platform/Fabric/FabricUtil";
import { LocalStorageUtility, StorageKey } from "Shared/StorageUtility";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { AuthType } from "../AuthType";
import { BackendApi, PriorityLevel } from "../Common/Constants";
import { PriorityLevel } from "../Common/Constants";
import * as Logger from "../Common/Logger";
import { Platform, configContext } from "../ConfigContext";
import { updateUserContext, userContext } from "../UserContext";
@ -125,10 +124,6 @@ export async function getTokenFromAuthService(
resourceType: string,
resourceId?: string,
): Promise<AuthorizationToken> {
if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
return getTokenFromAuthService_ToBeDeprecated(verb, resourceType, resourceId);
}
try {
const host: string = configContext.PORTAL_BACKEND_ENDPOINT;
const response: Response = await _global.fetch(host + "/api/connectionstring/runtimeproxy/authorizationtokens", {
@ -151,34 +146,6 @@ export async function getTokenFromAuthService(
}
}
export async function getTokenFromAuthService_ToBeDeprecated(
verb: string,
resourceType: string,
resourceId?: string,
): Promise<AuthorizationToken> {
try {
const host = configContext.BACKEND_ENDPOINT;
const response = await _global.fetch(host + "/api/guest/runtimeproxy/authorizationTokens", {
method: "POST",
headers: {
"content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken,
},
body: JSON.stringify({
verb,
resourceType,
resourceId,
}),
});
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json());
return result;
} catch (error) {
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
return Promise.reject(error);
}
}
// The Capability is a bitmap, which cosmosdb backend decodes as per the below enum
enum SDKSupportedCapabilities {
None = 0,
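
With the legacy fallback deleted, getTokenFromAuthService has a single code path against the portal backend's runtime proxy. A usage sketch, assuming the kept body matches the fragment above; the verb and resourceType values are illustrative:

  // Inside an async function; the request goes to
  //   `${configContext.PORTAL_BACKEND_ENDPOINT}/api/connectionstring/runtimeproxy/authorizationtokens`
  const token: AuthorizationToken = await getTokenFromAuthService("GET", "dbs");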

View File

@ -4,16 +4,8 @@ import { configContext, resetConfigContext, updateConfigContext } from "../Confi
import { DatabaseAccount } from "../Contracts/DataModels";
import { Collection } from "../Contracts/ViewModels";
import DocumentId from "../Explorer/Tree/DocumentId";
import { extractFeatures } from "../Platform/Hosted/extractFeatures";
import { updateUserContext } from "../UserContext";
import {
deleteDocument,
getEndpoint,
getFeatureEndpointOrDefault,
queryDocuments,
readDocument,
updateDocument,
} from "./MongoProxyClient";
import { deleteDocuments, getEndpoint, queryDocuments, readDocument, updateDocument } from "./MongoProxyClient";
const databaseId = "testDB";
@ -196,20 +188,8 @@ describe("MongoProxyClient", () => {
expect.any(Object),
);
});
it("builds the correct proxy URL in development", () => {
updateConfigContext({
MONGO_BACKEND_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
updateDocument(databaseId, collection, documentId, "{}");
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
expect.any(Object),
);
});
});
describe("deleteDocument", () => {
describe("deleteDocuments", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
@ -226,9 +206,9 @@ describe("MongoProxyClient", () => {
});
it("builds the correct URL", () => {
deleteDocument(databaseId, collection, documentId);
deleteDocuments(databaseId, collection, [documentId]);
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer/bulkdelete`,
expect.any(Object),
);
});
@ -238,9 +218,9 @@ describe("MongoProxyClient", () => {
MONGO_PROXY_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
deleteDocument(databaseId, collection, documentId);
deleteDocuments(databaseId, collection, [documentId]);
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer/bulkdelete`,
expect.any(Object),
);
});
@ -275,33 +255,4 @@ describe("MongoProxyClient", () => {
expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/connectionstring/mongo/explorer`);
});
});
describe("getFeatureEndpointOrDefault", () => {
beforeEach(() => {
resetConfigContext();
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
const params = new URLSearchParams({
"feature.mongoProxyEndpoint": MongoProxyEndpoints.Prod,
"feature.mongoProxyAPIs": "readDocument|createDocument",
});
const features = extractFeatures(params);
updateUserContext({
authType: AuthType.AAD,
features: features,
});
});
it("returns a local endpoint", () => {
const endpoint = getFeatureEndpointOrDefault("readDocument");
expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`);
});
it("returns a production endpoint", () => {
const endpoint = getFeatureEndpointOrDefault("DeleteDocument");
expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`);
});
});
});
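
deleteDocument is folded into deleteDocuments, which takes an array of DocumentIds and posts to the bulk-delete route. The call shape exercised by the updated tests, in sketch:

  // databaseId, collection, and documentId as prepared in the test setup above.
  await deleteDocuments(databaseId, collection, [documentId]);
  // Expected request target:
  //   `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer/bulkdelete`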

View File

@ -1,20 +1,13 @@
import { Constants as CosmosSDKConstants } from "@azure/cosmos";
import {
allowedMongoProxyEndpoints_ToBeDeprecated,
defaultAllowedMongoProxyEndpoints,
validateEndpoint,
} from "Utils/EndpointUtils";
import queryString from "querystring";
import { AuthType } from "../AuthType";
import { configContext } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { Collection } from "../Contracts/ViewModels";
import DocumentId from "../Explorer/Tree/DocumentId";
import { hasFlag } from "../Platform/Hosted/extractFeatures";
import { userContext } from "../UserContext";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { ApiType, ContentType, HttpHeaders, HttpStatusCodes, MongoProxyApi, MongoProxyEndpoints } from "./Constants";
import { ApiType, ContentType, HttpHeaders, HttpStatusCodes } from "./Constants";
import { MinimalQueryIterator } from "./IteratorUtilities";
import { sendMessage } from "./MessageHandler";
@ -67,10 +60,6 @@ export function queryDocuments(
query: string,
continuationToken?: string,
): Promise<QueryResponse> {
if (!useMongoProxyEndpoint(MongoProxyApi.ResourceList) || !useMongoProxyEndpoint(MongoProxyApi.QueryDocuments)) {
return queryDocuments_ToBeDeprecated(databaseId, collection, isResourceList, query, continuationToken);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
@ -89,7 +78,7 @@ export function queryDocuments(
query,
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.ResourceList) || "";
const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT) || "";
const headers = {
...defaultHeaders,
@ -127,76 +116,11 @@ export function queryDocuments(
});
}
function queryDocuments_ToBeDeprecated(
databaseId: string,
collection: Collection,
isResourceList: boolean,
query: string,
continuationToken?: string,
): Promise<QueryResponse> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}dbs/${databaseId}/colls/${collection.id()}/docs/`,
rid: collection.rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("resourcelist") || "";
const headers = {
...defaultHeaders,
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.IsQuery]: "true",
[CosmosSDKConstants.HttpHeaders.PopulateQueryMetrics]: "true",
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json",
};
if (continuationToken) {
headers[CosmosSDKConstants.HttpHeaders.Continuation] = continuationToken;
}
const path = isResourceList ? "/resourcelist" : "";
return window
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify({ query }),
headers,
})
.then(async (response) => {
if (response.ok) {
return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers,
};
}
await errorHandling(response, "querying documents", params);
return undefined;
});
}
export function readDocument(
databaseId: string,
collection: Collection,
documentId: DocumentId,
): Promise<DataModels.DocumentId> {
if (!useMongoProxyEndpoint(MongoProxyApi.ReadDocument)) {
return readDocument_ToBeDeprecated(databaseId, collection, documentId);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
@ -217,7 +141,7 @@ export function readDocument(
: "",
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.ReadDocument);
const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
return window
.fetch(endpoint, {
@ -237,61 +161,12 @@ export function readDocument(
});
}
export function readDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
documentId: DocumentId,
): Promise<DataModels.DocumentId> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 4).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("readDocument");
return window
.fetch(`${endpoint}?${queryString.stringify(params)}`, {
method: "GET",
headers: {
...defaultHeaders,
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader()),
),
},
})
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "reading document", params);
});
}
export function createDocument(
databaseId: string,
collection: Collection,
partitionKeyProperty: string,
documentContent: unknown,
): Promise<DataModels.DocumentId> {
if (!useMongoProxyEndpoint(MongoProxyApi.CreateDocument)) {
return createDocument_ToBeDeprecated(databaseId, collection, partitionKeyProperty, documentContent);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
@ -308,7 +183,7 @@ export function createDocument(
documentContent: JSON.stringify(documentContent),
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.CreateDocument);
const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
return window
.fetch(`${endpoint}/createDocument`, {
@ -328,54 +203,12 @@ export function createDocument(
});
}
export function createDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
partitionKeyProperty: string,
documentContent: unknown,
): Promise<DataModels.DocumentId> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}dbs/${databaseId}/colls/${collection.id()}/docs/`,
rid: collection.rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
};
const endpoint = getFeatureEndpointOrDefault("createDocument");
return window
.fetch(`${endpoint}/resourcelist?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify(documentContent),
headers: {
...defaultHeaders,
...authHeaders(),
},
})
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "creating document", params);
});
}
export function updateDocument(
databaseId: string,
collection: Collection,
documentId: DocumentId,
documentContent: string,
): Promise<DataModels.DocumentId> {
if (!useMongoProxyEndpoint(MongoProxyApi.UpdateDocument)) {
return updateDocument_ToBeDeprecated(databaseId, collection, documentId, documentContent);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
@ -396,7 +229,7 @@ export function updateDocument(
: "",
documentContent,
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.UpdateDocument);
const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
return window
.fetch(endpoint, {
@ -417,139 +250,6 @@ export function updateDocument(
});
}
export function updateDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
documentId: DocumentId,
documentContent: string,
): Promise<DataModels.DocumentId> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 5).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("updateDocument");
return window
.fetch(`${endpoint}?${queryString.stringify(params)}`, {
method: "PUT",
body: documentContent,
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: ContentType.applicationJson,
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
})
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "updating document", params);
});
}
export function deleteDocument(databaseId: string, collection: Collection, documentId: DocumentId): Promise<void> {
if (!useMongoProxyEndpoint(MongoProxyApi.DeleteDocument)) {
return deleteDocument_ToBeDeprecated(databaseId, collection, documentId);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 5).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
databaseID: databaseId,
collectionID: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
resourceID: rid,
resourceType: "docs",
subscriptionID: userContext.subscriptionId,
resourceGroup: userContext.resourceGroup,
databaseAccountName: databaseAccount.name,
partitionKey:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.DeleteDocument);
return window
.fetch(endpoint, {
method: "DELETE",
body: JSON.stringify(params),
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: ContentType.applicationJson,
},
})
.then(async (response) => {
if (response.ok) {
return undefined;
}
return await errorHandling(response, "deleting document", params);
});
}
export function deleteDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
documentId: DocumentId,
): Promise<void> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 5).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("deleteDocument");
return window
.fetch(`${endpoint}?${queryString.stringify(params)}`, {
method: "DELETE",
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: ContentType.applicationJson,
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
})
.then(async (response) => {
if (response.ok) {
return undefined;
}
return await errorHandling(response, "deleting document", params);
});
}
export function deleteDocuments(
databaseId: string,
collection: Collection,
@ -575,7 +275,7 @@ export function deleteDocuments(
resourceGroup: userContext.resourceGroup,
databaseAccountName: databaseAccount.name,
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.BulkDelete);
const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
return window
.fetch(`${endpoint}/bulkdelete`, {
@ -599,9 +299,6 @@ export function deleteDocuments(
export function createMongoCollectionWithProxy(
params: DataModels.CreateCollectionParams,
): Promise<DataModels.Collection> {
if (!useMongoProxyEndpoint(MongoProxyApi.CreateCollectionWithProxy)) {
return createMongoCollectionWithProxy_ToBeDeprecated(params);
}
const { databaseAccount } = userContext;
const shardKey: string = params.partitionKey?.paths[0];
@ -622,7 +319,7 @@ export function createMongoCollectionWithProxy(
isSharded: !!shardKey,
};
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.CreateCollectionWithProxy);
const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
return window
.fetch(`${endpoint}/createCollection`, {
@ -642,70 +339,6 @@ export function createMongoCollectionWithProxy(
});
}
export function createMongoCollectionWithProxy_ToBeDeprecated(
params: DataModels.CreateCollectionParams,
): Promise<DataModels.Collection> {
const { databaseAccount } = userContext;
const shardKey: string = params.partitionKey?.paths[0];
const mongoParams: DataModels.MongoParameters = {
resourceUrl: databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint,
db: params.databaseId,
coll: params.collectionId,
pk: shardKey,
offerThroughput: params.autoPilotMaxThroughput || params.offerThroughput,
cd: params.createNewDatabase,
st: params.databaseLevelThroughput,
is: !!shardKey,
rid: "",
rtype: "colls",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput,
};
const endpoint = getFeatureEndpointOrDefault("createCollectionWithProxy");
return window
.fetch(
`${endpoint}/createCollection?${queryString.stringify(
mongoParams as unknown as queryString.ParsedUrlQueryInput,
)}`,
{
method: "POST",
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
},
},
)
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "creating collection", mongoParams);
});
}
export function getFeatureEndpointOrDefault(feature: string): string {
let endpoint;
if (useMongoProxyEndpoint(feature)) {
endpoint = configContext.MONGO_PROXY_ENDPOINT;
} else {
const allowedMongoProxyEndpoints = configContext.allowedMongoProxyEndpoints || [
...defaultAllowedMongoProxyEndpoints,
...allowedMongoProxyEndpoints_ToBeDeprecated,
];
endpoint =
hasFlag(userContext.features.mongoProxyAPIs, feature) &&
validateEndpoint(userContext.features.mongoProxyEndpoint, allowedMongoProxyEndpoints)
? userContext.features.mongoProxyEndpoint
: configContext.MONGO_BACKEND_ENDPOINT || configContext.BACKEND_ENDPOINT;
}
return getEndpoint(endpoint);
}
export function getEndpoint(endpoint: string): string {
let url = endpoint + "/api/mongo/explorer";
@ -719,84 +352,6 @@ export function getEndpoint(endpoint: string): string {
return url;
}
export function useMongoProxyEndpoint(mongoProxyApi: string): boolean {
const mongoProxyEnvironmentMap: { [key: string]: string[] } = {
[MongoProxyApi.ResourceList]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.QueryDocuments]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.CreateDocument]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.ReadDocument]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.UpdateDocument]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.DeleteDocument]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.CreateCollectionWithProxy]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.LegacyMongoShell]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.BulkDelete]: [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
};
if (!mongoProxyEnvironmentMap[mongoProxyApi] || !configContext.MONGO_PROXY_ENDPOINT) {
return false;
}
if (configContext.globallyEnabledMongoAPIs.includes(mongoProxyApi)) {
return true;
}
return mongoProxyEnvironmentMap[mongoProxyApi].includes(configContext.MONGO_PROXY_ENDPOINT);
}
export class ThrottlingError extends Error {
constructor(message: string) {
super(message);
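
Every Mongo proxy request now resolves its URL through getEndpoint(configContext.MONGO_PROXY_ENDPOINT) rather than getFeatureEndpointOrDefault. A sketch of the resulting URLs, based on the getEndpoint fragment above and on the expectations in the test file; the auth-specific branch is not visible in this hunk and is assumed:

  const endpoint = getEndpoint(configContext.MONGO_PROXY_ENDPOINT);
  // Default form (from the fragment above):
  //   `${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`
  // Connection-string sessions (per the test expectations; branch assumed):
  //   `${configContext.MONGO_PROXY_ENDPOINT}/api/connectionstring/mongo/explorer`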

View File

@ -12,7 +12,6 @@ import {
allowedGraphEndpoints,
allowedHostedExplorerEndpoints,
allowedJunoOrigins,
allowedMongoBackendEndpoints,
allowedMsalRedirectEndpoints,
defaultAllowedArmEndpoints,
defaultAllowedBackendEndpoints,
@ -50,10 +49,8 @@ export interface ConfigContext {
CATALOG_API_KEY: string;
ARCADIA_ENDPOINT: string;
ARCADIA_LIVY_ENDPOINT_DNS_ZONE: string;
BACKEND_ENDPOINT?: string;
PORTAL_BACKEND_ENDPOINT: string;
NEW_BACKEND_APIS?: BackendApi[];
MONGO_BACKEND_ENDPOINT?: string;
MONGO_PROXY_ENDPOINT: string;
CASSANDRA_PROXY_ENDPOINT: string;
NEW_CASSANDRA_APIS?: string[];
@ -109,7 +106,6 @@ let configContext: Readonly<ConfigContext> = {
GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/organizations/AzureCosmosDBNotebooks/settings/applications/1189306
GITHUB_TEST_ENV_CLIENT_ID: "b63fc8cbf87fd3c6e2eb", // Registered OAuth app: https://github.com/organizations/AzureCosmosDBNotebooks/settings/applications/1777772
JUNO_ENDPOINT: JunoEndpoints.Prod,
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
PORTAL_BACKEND_ENDPOINT: PortalBackendEndpoints.Prod,
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
CASSANDRA_PROXY_ENDPOINT: CassandraProxyEndpoints.Prod,
@ -152,15 +148,6 @@ export function updateConfigContext(newContext: Partial<ConfigContext>): void {
delete newContext.ARCADIA_ENDPOINT;
}
if (
!validateEndpoint(
newContext.BACKEND_ENDPOINT,
configContext.allowedBackendEndpoints || defaultAllowedBackendEndpoints,
)
) {
delete newContext.BACKEND_ENDPOINT;
}
if (
!validateEndpoint(
newContext.MONGO_PROXY_ENDPOINT,
@ -170,10 +157,6 @@ export function updateConfigContext(newContext: Partial<ConfigContext>): void {
delete newContext.MONGO_PROXY_ENDPOINT;
}
if (!validateEndpoint(newContext.MONGO_BACKEND_ENDPOINT, allowedMongoBackendEndpoints)) {
delete newContext.MONGO_BACKEND_ENDPOINT;
}
if (
!validateEndpoint(
newContext.CASSANDRA_PROXY_ENDPOINT,

View File

@ -406,7 +406,6 @@ export interface DataExplorerInputsFrame {
csmEndpoint?: string;
dnsSuffix?: string;
serverId?: string;
extensionEndpoint?: string;
portalBackendEndpoint?: string;
mongoProxyEndpoint?: string;
cassandraProxyEndpoint?: string;

View File

@ -1,6 +1,5 @@
import { isPublicInternetAccessAllowed } from "Common/DatabaseAccountUtility";
import { PhoenixClient } from "Phoenix/PhoenixClient";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { cloneDeep } from "lodash";
import create, { UseStore } from "zustand";
import { AuthType } from "../../AuthType";
@ -128,9 +127,7 @@ export const useNotebook: UseStore<NotebookState> = create((set, get) => ({
userContext.apiType === "Postgres" || userContext.apiType === "VCoreMongo"
? databaseAccount?.location
: databaseAccount?.properties?.writeLocations?.[0]?.locationName.toLowerCase();
const disallowedLocationsUri: string = useNewPortalBackendEndpoint(Constants.BackendApi.DisallowedLocations)
? `${configContext.PORTAL_BACKEND_ENDPOINT}/api/disallowedlocations`
: `${configContext.BACKEND_ENDPOINT}/api/disallowedLocations`;
const disallowedLocationsUri: string = `${configContext.PORTAL_BACKEND_ENDPOINT}/api/disallowedlocations`;
const authorizationHeader = getAuthorizationHeader();
try {
const response = await fetch(disallowedLocationsUri, {

View File

@ -1,7 +1,6 @@
import { FeedOptions } from "@azure/cosmos";
import {
Areas,
BackendApi,
ConnectionStatusType,
ContainerStatusType,
HttpStatusCodes,
@ -32,7 +31,6 @@ import { Action } from "Shared/Telemetry/TelemetryConstants";
import { traceFailure, traceStart, traceSuccess } from "Shared/Telemetry/TelemetryProcessor";
import { userContext } from "UserContext";
import { getAuthorizationHeader } from "Utils/AuthorizationUtils";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { queryPagesUntilContentPresent } from "Utils/QueryUtils";
import { QueryCopilotState, useQueryCopilot } from "hooks/useQueryCopilot";
import { useTabs } from "hooks/useTabs";
@ -82,9 +80,7 @@ export const isCopilotFeatureRegistered = async (subscriptionId: string): Promis
};
export const getCopilotEnabled = async (): Promise<boolean> => {
const backendEndpoint: string = useNewPortalBackendEndpoint(BackendApi.PortalSettings)
? configContext.PORTAL_BACKEND_ENDPOINT
: configContext.BACKEND_ENDPOINT;
const backendEndpoint: string = configContext.PORTAL_BACKEND_ENDPOINT;
const url = `${backendEndpoint}/api/portalsettings/querycopilot`;
const authorizationHeader: AuthorizationTokenHeaderMetadata = getAuthorizationHeader();

View File

@ -3,7 +3,7 @@ import * as ko from "knockout";
import Q from "q";
import { AuthType } from "../../AuthType";
import * as Constants from "../../Common/Constants";
import { CassandraProxyAPIs, CassandraProxyEndpoints } from "../../Common/Constants";
import { CassandraProxyAPIs } from "../../Common/Constants";
import { handleError } from "../../Common/ErrorHandlingUtils";
import * as HeadersUtility from "../../Common/HeadersUtility";
import { createDocument } from "../../Common/dataAccess/createDocument";
@ -264,9 +264,6 @@ export class CassandraAPIDataClient extends TableDataClient {
shouldNotify?: boolean,
paginationToken?: string,
): Promise<Entities.IListTableEntitiesResult> {
if (!this.useCassandraProxyEndpoint("postQuery")) {
return this.queryDocuments_ToBeDeprecated(collection, query, shouldNotify, paginationToken);
}
const clearMessage =
shouldNotify && NotificationConsoleUtils.logConsoleProgress(`Querying rows for table ${collection.id()}`);
try {
@ -309,55 +306,6 @@ export class CassandraAPIDataClient extends TableDataClient {
}
}
public async queryDocuments_ToBeDeprecated(
collection: ViewModels.Collection,
query: string,
shouldNotify?: boolean,
paginationToken?: string,
): Promise<Entities.IListTableEntitiesResult> {
const clearMessage =
shouldNotify && NotificationConsoleUtils.logConsoleProgress(`Querying rows for table ${collection.id()}`);
try {
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestQueryApi
: Constants.CassandraBackend.queryApi;
const data: any = await $.ajax(`${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
resourceId: databaseAccount?.id,
keyspaceId: collection.databaseId,
tableId: collection.id(),
query,
paginationToken,
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
});
shouldNotify &&
NotificationConsoleUtils.logConsoleInfo(
`Successfully fetched ${data.result.length} rows for table ${collection.id()}`,
);
return {
Results: data.result,
ContinuationToken: data.paginationToken,
};
} catch (error) {
shouldNotify &&
handleError(
error,
"QueryDocuments_ToBeDeprecated_Cassandra",
`Failed to query rows for table ${collection.id()}`,
);
throw error;
} finally {
clearMessage?.();
}
}
public async deleteDocuments(
collection: ViewModels.Collection,
entitiesToDelete: Entities.ITableEntity[],
@ -471,10 +419,6 @@ export class CassandraAPIDataClient extends TableDataClient {
}
public getTableKeys(collection: ViewModels.Collection): Q.Promise<CassandraTableKeys> {
if (!this.useCassandraProxyEndpoint("getKeys")) {
return this.getTableKeys_ToBeDeprecated(collection);
}
if (!!collection.cassandraKeys) {
return Q.resolve(collection.cassandraKeys);
}
@ -515,52 +459,7 @@ export class CassandraAPIDataClient extends TableDataClient {
return deferred.promise;
}
public getTableKeys_ToBeDeprecated(collection: ViewModels.Collection): Q.Promise<CassandraTableKeys> {
if (!!collection.cassandraKeys) {
return Q.resolve(collection.cassandraKeys);
}
const clearInProgressMessage = logConsoleProgress(`Fetching keys for table ${collection.id()}`);
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestKeysApi
: Constants.CassandraBackend.keysApi;
let endpoint = `${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`;
const deferred = Q.defer<CassandraTableKeys>();
$.ajax(endpoint, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
resourceId: databaseAccount?.id,
keyspaceId: collection.databaseId,
tableId: collection.id(),
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
})
.then(
(data: CassandraTableKeys) => {
collection.cassandraKeys = data;
logConsoleInfo(`Successfully fetched keys for table ${collection.id()}`);
deferred.resolve(data);
},
(error: any) => {
const errorText = error.responseJSON?.message ?? JSON.stringify(error);
handleError(errorText, "FetchKeysCassandra", `Error fetching keys for table ${collection.id()}`);
deferred.reject(errorText);
},
)
.done(clearInProgressMessage);
return deferred.promise;
}
public getTableSchema(collection: ViewModels.Collection): Q.Promise<CassandraTableKey[]> {
if (!this.useCassandraProxyEndpoint("getSchema")) {
return this.getTableSchema_ToBeDeprecated(collection);
}
if (!!collection.cassandraSchema) {
return Q.resolve(collection.cassandraSchema);
}
@ -602,52 +501,7 @@ export class CassandraAPIDataClient extends TableDataClient {
return deferred.promise;
}
public getTableSchema_ToBeDeprecated(collection: ViewModels.Collection): Q.Promise<CassandraTableKey[]> {
if (!!collection.cassandraSchema) {
return Q.resolve(collection.cassandraSchema);
}
const clearInProgressMessage = logConsoleProgress(`Fetching schema for table ${collection.id()}`);
const { databaseAccount, authType } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestSchemaApi
: Constants.CassandraBackend.schemaApi;
let endpoint = `${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`;
const deferred = Q.defer<CassandraTableKey[]>();
$.ajax(endpoint, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
resourceId: databaseAccount?.id,
keyspaceId: collection.databaseId,
tableId: collection.id(),
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
})
.then(
(data: any) => {
collection.cassandraSchema = data.columns;
logConsoleInfo(`Successfully fetched schema for table ${collection.id()}`);
deferred.resolve(data.columns);
},
(error: any) => {
const errorText = error.responseJSON?.message ?? JSON.stringify(error);
handleError(errorText, "FetchSchemaCassandra", `Error fetching schema for table ${collection.id()}`);
deferred.reject(errorText);
},
)
.done(clearInProgressMessage);
return deferred.promise;
}
private createOrDeleteQuery(cassandraEndpoint: string, resourceId: string, query: string): Q.Promise<any> {
if (!this.useCassandraProxyEndpoint("createOrDelete")) {
return this.createOrDeleteQuery_ToBeDeprecated(cassandraEndpoint, resourceId, query);
}
const deferred = Q.defer();
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
@ -677,38 +531,6 @@ export class CassandraAPIDataClient extends TableDataClient {
return deferred.promise;
}
private createOrDeleteQuery_ToBeDeprecated(
cassandraEndpoint: string,
resourceId: string,
query: string,
): Q.Promise<any> {
const deferred = Q.defer();
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestCreateOrDeleteApi
: Constants.CassandraBackend.createOrDeleteApi;
$.ajax(`${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(cassandraEndpoint),
resourceId: resourceId,
query: query,
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
}).then(
(data: any) => {
deferred.resolve();
},
(reason) => {
deferred.reject(reason);
},
);
return deferred.promise;
}
private trimCassandraEndpoint(cassandraEndpoint: string): string {
if (!cassandraEndpoint) {
return cassandraEndpoint;
@ -747,23 +569,4 @@ export class CassandraAPIDataClient extends TableDataClient {
private getCassandraPartitionKeyProperty(collection: ViewModels.Collection): string {
return collection.cassandraKeys.partitionKeys[0].property;
}
private useCassandraProxyEndpoint(api: string): boolean {
const activeCassandraProxyEndpoints: string[] = [
CassandraProxyEndpoints.Development,
CassandraProxyEndpoints.Mpac,
CassandraProxyEndpoints.Prod,
CassandraProxyEndpoints.Fairfax,
CassandraProxyEndpoints.Mooncake,
];
if (configContext.globallyEnabledCassandraAPIs.includes(api)) {
return true;
}
return (
configContext.NEW_CASSANDRA_APIS?.includes(api) &&
activeCassandraProxyEndpoints.includes(configContext.CASSANDRA_PROXY_ENDPOINT)
);
}
}
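
The Cassandra client drops both its useCassandraProxyEndpoint checks and the *_ToBeDeprecated jQuery calls against BACKEND_ENDPOINT. A sketch of the surviving query path, assuming the kept branch mirrors the removed one with the Cassandra proxy substituted; the CassandraProxyAPIs member names are assumptions, since the kept body is not shown in this hunk:

  // Assumed shape of the kept code path; member names on CassandraProxyAPIs are illustrative.
  const apiEndpoint: string =
    userContext.authType === AuthType.EncryptedToken
      ? CassandraProxyAPIs.connectionStringQueryApi
      : CassandraProxyAPIs.queryApi;
  const uri = `${configContext.CASSANDRA_PROXY_ENDPOINT}/${apiEndpoint}`;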

View File

@ -1149,16 +1149,6 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
} else {
deletePromise = _bulkDeleteNoSqlDocuments(_collection, toDeleteDocumentIds);
}
} else {
if (isMongoBulkDeleteDisabled) {
// TODO: Once new mongo proxy is available for all users, remove the call for MongoProxyClient.deleteDocument().
// MongoProxyClient.deleteDocuments() should be called for all users.
deletePromise = MongoProxyClient.deleteDocument(
_collection.databaseId,
_collection as ViewModels.Collection,
toDeleteDocumentIds[0],
).then(() => [toDeleteDocumentIds[0]]);
// ----------------------------------------------------------------------------------------------------
} else {
deletePromise = MongoProxyClient.deleteDocuments(
_collection.databaseId,
@ -1171,7 +1161,6 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
throw new Error(`Delete failed with deletedCount: ${deletedCount} and isAcknowledged: ${isAcknowledged}`);
});
}
}
return deletePromise
.then(
@ -2054,11 +2043,8 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
}
}, [prevSelectedColumnIds, refreshDocumentsGrid, selectedColumnIds]);
// TODO: remove isMongoBulkDeleteDisabled when new mongo proxy is enabled for all users
// TODO: remove partitionKey.systemKey when JS SDK bug is fixed
const isMongoBulkDeleteDisabled = !MongoProxyClient.useMongoProxyEndpoint(Constants.MongoProxyApi.BulkDelete);
const isBulkDeleteDisabled =
(partitionKey.systemKey && !isPreferredApiMongoDB) || (isPreferredApiMongoDB && isMongoBulkDeleteDisabled);
const isBulkDeleteDisabled = partitionKey.systemKey && !isPreferredApiMongoDB;
// -------------------------------------------------------
const getFilterChoices = (): InputDatalistDropdownOptionSection[] => {

View File

@ -1,6 +1,4 @@
import { useMongoProxyEndpoint } from "Common/MongoProxyClient";
import React, { Component } from "react";
import * as Constants from "../../../Common/Constants";
import { configContext } from "../../../ConfigContext";
import * as ViewModels from "../../../Contracts/ViewModels";
import { Action, ActionModifiers } from "../../../Shared/Telemetry/TelemetryConstants";
@ -50,15 +48,13 @@ export default class MongoShellTabComponent extends Component<
IMongoShellTabComponentStates
> {
private _logTraces: Map<string, number>;
private _useMongoProxyEndpoint: boolean;
constructor(props: IMongoShellTabComponentProps) {
super(props);
this._logTraces = new Map();
this._useMongoProxyEndpoint = useMongoProxyEndpoint(Constants.MongoProxyApi.LegacyMongoShell);
this.state = {
url: getMongoShellUrl(this._useMongoProxyEndpoint),
url: getMongoShellUrl(),
};
props.onMongoShellTabAccessor({
@ -113,17 +109,9 @@ export default class MongoShellTabComponent extends Component<
const resourceId = databaseAccount?.id;
const accountName = databaseAccount?.name;
const documentEndpoint = databaseAccount?.properties.mongoEndpoint || databaseAccount?.properties.documentEndpoint;
const mongoEndpoint =
documentEndpoint.substr(
Constants.MongoDBAccounts.protocol.length + 3,
documentEndpoint.length -
(Constants.MongoDBAccounts.protocol.length + 2 + Constants.MongoDBAccounts.defaultPort.length),
) + Constants.MongoDBAccounts.defaultPort.toString();
const databaseId = this.props.collection.databaseId;
const collectionId = this.props.collection.id();
const apiEndpoint = this._useMongoProxyEndpoint
? configContext.MONGO_PROXY_ENDPOINT
: configContext.BACKEND_ENDPOINT;
const apiEndpoint = configContext.MONGO_PROXY_ENDPOINT;
const encryptedAuthToken: string = userContext.accessToken;
shellIframe.contentWindow.postMessage(
@ -132,7 +120,7 @@ export default class MongoShellTabComponent extends Component<
data: {
resourceId: resourceId,
accountName: accountName,
mongoEndpoint: this._useMongoProxyEndpoint ? documentEndpoint : mongoEndpoint,
mongoEndpoint: documentEndpoint,
authorization: authorization,
databaseId: databaseId,
collectionId: collectionId,

View File

@ -2,8 +2,6 @@ import { Platform, resetConfigContext, updateConfigContext } from "../../../Conf
import { updateUserContext, userContext } from "../../../UserContext";
import { getMongoShellUrl } from "./getMongoShellUrl";
const mongoBackendEndpoint = "https://localhost:1234";
describe("getMongoShellUrl", () => {
let queryString = "";
@ -11,7 +9,6 @@ describe("getMongoShellUrl", () => {
resetConfigContext();
updateConfigContext({
BACKEND_ENDPOINT: mongoBackendEndpoint,
platform: Platform.Hosted,
});
@ -37,12 +34,7 @@ describe("getMongoShellUrl", () => {
queryString = `resourceId=${userContext.databaseAccount.id}&accountName=${userContext.databaseAccount.name}&mongoEndpoint=${userContext.databaseAccount.properties.documentEndpoint}`;
});
it("should return /indexv2.html by default", () => {
expect(getMongoShellUrl().toString()).toContain(`/indexv2.html?${queryString}`);
});
it("should return /index.html when useMongoProxyEndpoint is true", () => {
const useMongoProxyEndpoint: boolean = true;
expect(getMongoShellUrl(useMongoProxyEndpoint).toString()).toContain(`/index.html?${queryString}`);
it("should return /index.html by default", () => {
expect(getMongoShellUrl().toString()).toContain(`/index.html?${queryString}`);
});
});

View File

@ -1,11 +1,11 @@
import { userContext } from "../../../UserContext";
export function getMongoShellUrl(useMongoProxyEndpoint?: boolean): string {
export function getMongoShellUrl(): string {
const { databaseAccount: account } = userContext;
const resourceId = account?.id;
const accountName = account?.name;
const mongoEndpoint = account?.properties?.mongoEndpoint || account?.properties?.documentEndpoint;
const queryString = `resourceId=${resourceId}&accountName=${accountName}&mongoEndpoint=${mongoEndpoint}`;
return useMongoProxyEndpoint ? `/mongoshell/index.html?${queryString}` : `/mongoshell/indexv2.html?${queryString}`;
return `/mongoshell/index.html?${queryString}`;
}
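
getMongoShellUrl no longer takes a flag and always returns the index.html shell route. Usage, per the function above:

  // e.g. "/mongoshell/index.html?resourceId=<id>&accountName=<name>&mongoEndpoint=<endpoint>"
  const shellUrl: string = getMongoShellUrl();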

View File

@ -1,7 +1,3 @@
import { IMessageBarStyles, MessageBar, MessageBarType } from "@fluentui/react";
import { CassandraProxyEndpoints, MongoProxyEndpoints } from "Common/Constants";
import { configContext } from "ConfigContext";
import { IpRule } from "Contracts/DataModels";
import { CollectionTabKind } from "Contracts/ViewModels";
import Explorer from "Explorer/Explorer";
import { useCommandBar } from "Explorer/Menus/CommandBar/CommandBarComponentAdapter";
@ -12,10 +8,8 @@ import { PostgresConnectTab } from "Explorer/Tabs/PostgresConnectTab";
import { QuickstartTab } from "Explorer/Tabs/QuickstartTab";
import { VcoreMongoConnectTab } from "Explorer/Tabs/VCoreMongoConnectTab";
import { VcoreMongoQuickstartTab } from "Explorer/Tabs/VCoreMongoQuickstartTab";
import { LayoutConstants } from "Explorer/Theme/ThemeUtil";
import { KeyboardAction, KeyboardActionGroup, useKeyboardActionGroup } from "KeyboardShortcuts";
import { userContext } from "UserContext";
import { CassandraProxyOutboundIPs, MongoProxyOutboundIPs, PortalBackendIPs } from "Utils/EndpointUtils";
import { useTeachingBubble } from "hooks/useTeachingBubble";
import ko from "knockout";
import React, { MutableRefObject, useEffect, useRef, useState } from "react";
@ -34,10 +28,6 @@ interface TabsProps {
export const Tabs = ({ explorer }: TabsProps): JSX.Element => {
const { openedTabs, openedReactTabs, activeTab, activeReactTab } = useTabs();
const [
showMongoAndCassandraProxiesNetworkSettingsWarningState,
setShowMongoAndCassandraProxiesNetworkSettingsWarningState,
] = useState<boolean>(showMongoAndCassandraProxiesNetworkSettingsWarning());
const setKeyboardHandlers = useKeyboardActionGroup(KeyboardActionGroup.TABS);
useEffect(() => {
@ -48,28 +38,8 @@ export const Tabs = ({ explorer }: TabsProps): JSX.Element => {
});
}, [setKeyboardHandlers]);
const defaultMessageBarStyles: IMessageBarStyles = {
root: {
height: `${LayoutConstants.rowHeight}px`,
overflow: "hidden",
flexDirection: "row",
},
};
return (
<div className="tabsManagerContainer">
{showMongoAndCassandraProxiesNetworkSettingsWarningState && (
<MessageBar
messageBarType={MessageBarType.warning}
styles={defaultMessageBarStyles}
onDismiss={() => {
setShowMongoAndCassandraProxiesNetworkSettingsWarningState(false);
}}
>
{`We have migrated our middleware to new infrastructure. To avoid issues with Data Explorer access, please
re-enable "Allow access from Azure Portal" on the Networking blade for your account.`}
</MessageBar>
)}
<div className="nav-tabs-margin">
<ul className="nav nav-tabs level navTabHeight" id="navTabs" role="tablist">
{openedReactTabs.map((tab) => (
@ -314,57 +284,3 @@ const getReactTabContent = (activeReactTab: ReactTabKind, explorer: Explorer): J
throw Error(`Unsupported tab kind ${ReactTabKind[activeReactTab]}`);
}
};
const showMongoAndCassandraProxiesNetworkSettingsWarning = (): boolean => {
const ipRules: IpRule[] = userContext.databaseAccount?.properties?.ipRules;
if (
((userContext.apiType === "Mongo" && configContext.MONGO_PROXY_ENDPOINT !== MongoProxyEndpoints.Development) ||
(userContext.apiType === "Cassandra" &&
configContext.CASSANDRA_PROXY_ENDPOINT !== CassandraProxyEndpoints.Development)) &&
ipRules?.length
) {
const legacyPortalBackendIPs: string[] = PortalBackendIPs[configContext.BACKEND_ENDPOINT];
const ipAddressesFromIPRules: string[] = ipRules.map((ipRule) => ipRule.ipAddressOrRange);
const ipRulesIncludeLegacyPortalBackend: boolean = legacyPortalBackendIPs.every((legacyPortalBackendIP: string) =>
ipAddressesFromIPRules.includes(legacyPortalBackendIP),
);
if (!ipRulesIncludeLegacyPortalBackend) {
return false;
}
if (userContext.apiType === "Mongo") {
const isProdOrMpacMongoProxyEndpoint: boolean = [MongoProxyEndpoints.Mpac, MongoProxyEndpoints.Prod].includes(
configContext.MONGO_PROXY_ENDPOINT,
);
const mongoProxyOutboundIPs: string[] = isProdOrMpacMongoProxyEndpoint
? [...MongoProxyOutboundIPs[MongoProxyEndpoints.Mpac], ...MongoProxyOutboundIPs[MongoProxyEndpoints.Prod]]
: MongoProxyOutboundIPs[configContext.MONGO_PROXY_ENDPOINT];
const ipRulesIncludeMongoProxy: boolean = mongoProxyOutboundIPs.every((mongoProxyOutboundIP: string) =>
ipAddressesFromIPRules.includes(mongoProxyOutboundIP),
);
return !ipRulesIncludeMongoProxy;
} else if (userContext.apiType === "Cassandra") {
const isProdOrMpacCassandraProxyEndpoint: boolean = [
CassandraProxyEndpoints.Mpac,
CassandraProxyEndpoints.Prod,
].includes(configContext.CASSANDRA_PROXY_ENDPOINT);
const cassandraProxyOutboundIPs: string[] = isProdOrMpacCassandraProxyEndpoint
? [
...CassandraProxyOutboundIPs[CassandraProxyEndpoints.Mpac],
...CassandraProxyOutboundIPs[CassandraProxyEndpoints.Prod],
]
: CassandraProxyOutboundIPs[configContext.CASSANDRA_PROXY_ENDPOINT];
const ipRulesIncludeCassandraProxy: boolean = cassandraProxyOutboundIPs.every(
(cassandraProxyOutboundIP: string) => ipAddressesFromIPRules.includes(cassandraProxyOutboundIP),
);
return !ipRulesIncludeCassandraProxy;
}
}
return false;
};

View File

@ -1,13 +1,11 @@
import { useBoolean } from "@fluentui/react-hooks";
import { userContext } from "UserContext";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import * as React from "react";
import ConnectImage from "../../../../images/HdeConnectCosmosDB.svg";
import ErrorImage from "../../../../images/error.svg";
import { AuthType } from "../../../AuthType";
import { BackendApi, HttpHeaders } from "../../../Common/Constants";
import { HttpHeaders } from "../../../Common/Constants";
import { configContext } from "../../../ConfigContext";
import { GenerateTokenResponse } from "../../../Contracts/DataModels";
import { isResourceTokenConnectionString } from "../Helpers/ResourceTokenUtils";
interface Props {
@ -19,10 +17,6 @@ interface Props {
}
export const fetchEncryptedToken = async (connectionString: string): Promise<string> => {
if (!useNewPortalBackendEndpoint(BackendApi.GenerateToken)) {
return await fetchEncryptedToken_ToBeDeprecated(connectionString);
}
const headers = new Headers();
headers.append(HttpHeaders.connectionString, connectionString);
const url = configContext.PORTAL_BACKEND_ENDPOINT + "/api/connectionstring/token/generatetoken";
@ -35,28 +29,10 @@ export const fetchEncryptedToken = async (connectionString: string): Promise<str
return decodeURIComponent(encryptedTokenResponse);
};
export const fetchEncryptedToken_ToBeDeprecated = async (connectionString: string): Promise<string> => {
const headers = new Headers();
headers.append(HttpHeaders.connectionString, connectionString);
const url = configContext.BACKEND_ENDPOINT + "/api/guest/tokens/generateToken";
const response = await fetch(url, { headers, method: "POST" });
if (!response.ok) {
throw response;
}
// This API has a quirk where it must be parsed twice
const result: GenerateTokenResponse = JSON.parse(await response.json());
return decodeURIComponent(result.readWrite || result.read);
};
export const isAccountRestrictedForConnectionStringLogin = async (connectionString: string): Promise<boolean> => {
const headers = new Headers();
headers.append(HttpHeaders.connectionString, connectionString);
const backendEndpoint: string = useNewPortalBackendEndpoint(BackendApi.AccountRestrictions)
? configContext.PORTAL_BACKEND_ENDPOINT
: configContext.BACKEND_ENDPOINT;
const url = backendEndpoint + "/api/guest/accountrestrictions/checkconnectionstringlogin";
const url = configContext.PORTAL_BACKEND_ENDPOINT + "/api/guest/accountrestrictions/checkconnectionstringlogin";
const response = await fetch(url, { headers, method: "POST" });
if (!response.ok) {
throw response;

View File

@ -1,11 +1,4 @@
import {
BackendApi,
CassandraProxyEndpoints,
JunoEndpoints,
MongoProxyEndpoints,
PortalBackendEndpoints,
} from "Common/Constants";
import { configContext } from "ConfigContext";
import { CassandraProxyEndpoints, JunoEndpoints, MongoProxyEndpoints, PortalBackendEndpoints } from "Common/Constants";
import * as Logger from "../Common/Logger";
export function validateEndpoint(
@ -73,9 +66,6 @@ export const PortalBackendIPs: { [key: string]: string[] } = {
//"https://main2.documentdb.ext.azure.com": ["104.42.196.69"],
"https://main.documentdb.ext.azure.cn": ["139.217.8.252"],
"https://main.documentdb.ext.azure.us": ["52.244.48.71"],
// Add ussec and usnat when endpoint address is known:
//ussec: ["29.26.26.67", "29.26.26.66"],
//usnat: ["7.28.202.68"],
};
export const PortalBackendOutboundIPs: { [key: string]: string[] } = {
@ -100,14 +90,6 @@ export const defaultAllowedMongoProxyEndpoints: ReadonlyArray<string> = [
MongoProxyEndpoints.Mooncake,
];
export const allowedMongoProxyEndpoints_ToBeDeprecated: ReadonlyArray<string> = [
"https://main.documentdb.ext.azure.com",
"https://main.documentdb.ext.azure.cn",
"https://main.documentdb.ext.azure.us",
"https://main.cosmos.ext.azure",
"https://localhost:12901",
];
export const defaultAllowedCassandraProxyEndpoints: ReadonlyArray<string> = [
CassandraProxyEndpoints.Development,
CassandraProxyEndpoints.Mpac,
@ -153,53 +135,3 @@ export const allowedJunoOrigins: ReadonlyArray<string> = [
];
export const allowedNotebookServerUrls: ReadonlyArray<string> = [];
//
// Temporary function to determine if a portal backend API is supported by the
// new backend in this environment.
//
// TODO: Remove this function once new backend migration is completed for all environments.
//
export function useNewPortalBackendEndpoint(backendApi: string): boolean {
// This maps backend APIs to the environments supported by the new backend.
const newBackendApiEnvironmentMap: { [key: string]: string[] } = {
[BackendApi.GenerateToken]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.PortalSettings]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.AccountRestrictions]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.RuntimeProxy]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.DisallowedLocations]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
PortalBackendEndpoints.Fairfax,
PortalBackendEndpoints.Mooncake,
],
[BackendApi.SampleData]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
};
if (!newBackendApiEnvironmentMap[backendApi] || !configContext.PORTAL_BACKEND_ENDPOINT) {
return false;
}
return newBackendApiEnvironmentMap[backendApi].includes(configContext.PORTAL_BACKEND_ENDPOINT);
}
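
With useNewPortalBackendEndpoint and the *_ToBeDeprecated allow-list gone, endpoint checks in this module reduce to validateEndpoint against the default allow-lists. A minimal usage sketch, matching the call pattern seen in updateConfigContext earlier in this commit:

  import { validateEndpoint, defaultAllowedMongoProxyEndpoints } from "Utils/EndpointUtils";
  import { configContext } from "ConfigContext";

  // true only when the configured endpoint appears in the allow-list.
  const isAllowed: boolean = validateEndpoint(
    configContext.MONGO_PROXY_ENDPOINT,
    configContext.allowedMongoProxyEndpoints || defaultAllowedMongoProxyEndpoints,
  );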

View File

@ -13,7 +13,6 @@ import {
readSubComponentState,
} from "Shared/AppStatePersistenceUtility";
import { LocalStorageUtility, StorageKey } from "Shared/StorageUtility";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { logConsoleError } from "Utils/NotificationConsoleUtils";
import { useQueryCopilot } from "hooks/useQueryCopilot";
import { ReactTabKind, useTabs } from "hooks/useTabs";
@ -548,14 +547,6 @@ async function configurePortal(): Promise<Explorer> {
const inputs = message?.inputs;
const openAction = message?.openAction;
if (inputs) {
if (
configContext.BACKEND_ENDPOINT &&
configContext.platform === Platform.Portal &&
process.env.NODE_ENV === "development"
) {
inputs.extensionEndpoint = configContext.PROXY_PATH;
}
updateContextsFromPortalMessage(inputs);
const { databaseAccount: account, subscriptionId, resourceGroup } = userContext;
@ -680,18 +671,17 @@ function updateAADEndpoints(portalEnv: PortalEnv) {
function updateContextsFromPortalMessage(inputs: DataExplorerInputsFrame) {
if (
configContext.BACKEND_ENDPOINT &&
configContext.PORTAL_BACKEND_ENDPOINT &&
configContext.platform === Platform.Portal &&
process.env.NODE_ENV === "development"
) {
inputs.extensionEndpoint = configContext.PROXY_PATH;
inputs.portalBackendEndpoint = configContext.PROXY_PATH;
}
const authorizationToken = inputs.authorizationToken || "";
const databaseAccount = inputs.databaseAccount;
updateConfigContext({
BACKEND_ENDPOINT: inputs.extensionEndpoint || configContext.BACKEND_ENDPOINT,
ARM_ENDPOINT: normalizeArmEndpoint(inputs.csmEndpoint || configContext.ARM_ENDPOINT),
MONGO_PROXY_ENDPOINT: inputs.mongoProxyEndpoint,
CASSANDRA_PROXY_ENDPOINT: inputs.cassandraProxyEndpoint,
@ -794,17 +784,7 @@ async function updateContextForSampleData(explorer: Explorer): Promise<void> {
return;
}
let url: string;
if (useNewPortalBackendEndpoint(Constants.BackendApi.SampleData)) {
url = createUri(configContext.PORTAL_BACKEND_ENDPOINT, "/api/sampledata");
} else {
const sampleDatabaseEndpoint = useQueryCopilot.getState().copilotUserDBEnabled
? `/api/tokens/sampledataconnection/v2`
: `/api/tokens/sampledataconnection`;
url = createUri(`${configContext.BACKEND_ENDPOINT}`, sampleDatabaseEndpoint);
}
const url: string = createUri(configContext.PORTAL_BACKEND_ENDPOINT, "/api/sampledata");
const authorizationHeader = getAuthorizationHeader();
const headers = { [authorizationHeader.header]: authorizationHeader.token };

View File

@ -1,16 +1,9 @@
import { useEffect, useState } from "react";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { ApiEndpoints, BackendApi, HttpHeaders } from "../Common/Constants";
import { HttpHeaders } from "../Common/Constants";
import { configContext } from "../ConfigContext";
import { AccessInputMetadata } from "../Contracts/DataModels";
const url = `${configContext.BACKEND_ENDPOINT}${ApiEndpoints.guestRuntimeProxy}/accessinputmetadata?_=1609359229955`;
export async function fetchAccessData(portalToken: string): Promise<AccessInputMetadata> {
if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
return fetchAccessData_ToBeDeprecated(portalToken);
}
const headers = new Headers();
// Portal encrypted token API quirk: The token header must be URL encoded
headers.append(HttpHeaders.guestAccessToken, encodeURIComponent(portalToken));
@ -25,25 +18,6 @@ export async function fetchAccessData(portalToken: string): Promise<AccessInputM
.catch((error) => console.error(error));
}
export async function fetchAccessData_ToBeDeprecated(portalToken: string): Promise<AccessInputMetadata> {
const headers = new Headers();
// Portal encrypted token API quirk: The token header must be URL encoded
headers.append(HttpHeaders.guestAccessToken, encodeURIComponent(portalToken));
const options = {
method: "GET",
headers: headers,
};
return (
fetch(url, options)
.then((response) => response.json())
// Portal encrypted token API quirk: The response is double JSON encoded
.then((json) => JSON.parse(json))
.catch((error) => console.error(error))
);
}
export function useTokenMetadata(token: string): AccessInputMetadata | undefined {
const [state, setState] = useState<AccessInputMetadata | undefined>();

View File

@ -39,7 +39,6 @@ const initTestExplorer = async (): Promise<void> => {
csmEndpoint: "https://management.azure.com",
dnsSuffix: "documents.azure.com",
serverId: "prod1",
extensionEndpoint: "/proxy",
portalBackendEndpoint: "https://cdb-ms-mpac-pbe.cosmos.azure.com",
mongoProxyEndpoint: "https://cdb-ms-mpac-mp.cosmos.azure.com",
cassandraProxyEndpoint: "https://cdb-ms-mpac-cp.cosmos.azure.com",