Compare commits


54 Commits

Author | SHA1 | Message | Date
Senthamil Sindhu | a6473adf40 | Add recent changes | 2024-11-19 20:56:54 -08:00
Senthamil Sindhu | 451316cad4 | Merge branch 'users/sindhuba/listKeys' into users/sindhuba/refresh-token | 2024-10-27 20:59:04 -07:00
Senthamil Sindhu | b456e53b2f | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-10-10 00:16:31 -07:00
Senthamil Sindhu | de5ba041e9 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-10-08 14:40:04 -07:00
Senthamil Sindhu | ae7184f7ea | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-10-04 15:50:19 -07:00
Senthamil Sindhu | 3a6769280b | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-10-02 19:56:51 -07:00
Senthamil Sindhu | 4768ba3642 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-10-01 19:55:09 -07:00
Senthamil Sindhu | bd564c665b | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-09-17 16:12:01 -07:00
Senthamil Sindhu | 7e5c6420ad | Run npm format | 2024-08-21 13:36:46 -07:00
Senthamil Sindhu | 89a3a040d8 | Add AAD endpoints | 2024-08-21 13:31:02 -07:00
Senthamil Sindhu | ec3afa0526 | Add AAD endpoints for all environments | 2024-08-21 10:11:35 -07:00
Senthamil Sindhu | 4176a8a9a9 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-08-21 10:11:04 -07:00
Senthamil Sindhu | 311cf9aa5a | Add AAD endpoints for all environments | 2024-08-21 10:08:55 -07:00
Senthamil Sindhu | bc8094f44f | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-08-05 10:02:29 -07:00
Senthamil Sindhu | 9203276a24 | Add common code for ARM token refresh | 2024-08-05 09:57:54 -07:00
Senthamil Sindhu | d7825f4f78 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-30 08:23:50 -07:00
Senthamil Sindhu | e51c28c634 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-24 12:32:54 -07:00
Senthamil Sindhu | 3b86a9477f | Add code for arm token refresh | 2024-07-23 11:14:19 -07:00
Senthamil Sindhu | 521ff39eb0 | Resolve conflicts | 2024-07-19 07:48:12 -07:00
Senthamil Sindhu | 2e2db3c2a9 | Merge branch 'users/sindhuba/fix-listKeys' into users/sindhuba/listKeys | 2024-07-19 07:37:14 -07:00
Senthamil Sindhu | 2b84af60f4 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-19 07:31:11 -07:00
Senthamil Sindhu | 40283ff7f1 | Add readOnlyKeys call for accounts with Reader role | 2024-07-19 07:28:39 -07:00
Senthamil Sindhu | 29a1a819c3 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-16 14:28:50 -07:00
Senthamil Sindhu | 5a16eec29d | Add more logs for RBAC features | 2024-07-10 07:38:37 -07:00
Senthamil Sindhu | 2b11e0e52b | Add more logs for RBAC feature | 2024-07-10 07:30:34 -07:00
Senthamil Sindhu | 89374a16ba | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-09 21:13:41 -07:00
Senthamil Sindhu | dc289ece75 | Address feedback | 2024-07-09 07:31:09 -07:00
Senthamil Sindhu | 1ddd372c6d | Add code to fix VCoreMongo/PG bug | 2024-07-08 14:46:11 -07:00
Senthamil Sindhu | 2740657b4a | Remove unnecessary code | 2024-07-08 14:39:49 -07:00
Senthamil Sindhu | 8c888a751c | Remove unnecessary code | 2024-07-08 14:26:04 -07:00
Senthamil Sindhu | 8140f0edb1 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-08 13:45:54 -07:00
Senthamil Sindhu | ab5239df09 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-08 13:27:24 -07:00
Senthamil Sindhu | 3e48393fbb | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-07-03 00:07:17 -07:00
Senthamil Sindhu | 0079a9147f | Resolved merge conflict | 2024-07-01 16:22:04 -07:00
Senthamil Sindhu | 912688dc14 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-06-27 11:00:31 -07:00
Senthamil Sindhu | 8849526fab | Merge branch 'add-dp-rbac' of https://github.com/Azure/cosmos-explorer | 2024-06-19 15:20:20 -07:00
Senthamil Sindhu | 24af64a66d | Add additional changes for Portal RBAC functionality | 2024-06-19 15:05:14 -07:00
Senthamil Sindhu | be871737ad | Support data plane RBAC | 2024-06-14 12:45:21 -07:00
Senthamil Sindhu | 4d8bb5c3ea | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-06-14 12:18:14 -07:00
Senthamil Sindhu | 10a8505b9a | Support data plane RBAC | 2024-06-14 12:12:30 -07:00
Senthamil Sindhu | ef7c2fe2f7 | Remove dev endpoint | 2024-04-10 11:59:57 -07:00
Senthamil Sindhu | 4c7aca95e1 | Merge branch 'users/aisayas/mp-cp-activate-prod' of https://github.com/Azure/cosmos-explorer into users/sindhuba/activate-prod | 2024-04-09 12:27:51 -07:00
Senthamil Sindhu | 2243ad895a | Remove prod endpoint | 2024-04-09 12:16:13 -07:00
Senthamil Sindhu | b2d5f91fe1 | Remove prod | 2024-04-09 11:22:17 -07:00
Asier Isayas | a712193477 | fix pr check tests | 2024-04-09 11:43:24 -04:00
Senthamil Sindhu | 5ee411693c | Add prod endpoint | 2024-04-09 08:41:47 -07:00
Asier Isayas | 16c7b2567b | fix bug that blocked local mongo proxy and cassandra proxy development | 2024-04-09 11:39:11 -04:00
Senthamil Sindhu | 78d9a0cd8d | Revert code | 2024-04-08 16:20:40 -07:00
Senthamil Sindhu | c6ad538559 | Run npm format and tests | 2024-04-08 15:58:10 -07:00
Senthamil Sindhu | 2bc09a6efe | Add CP Prod endpoint | 2024-04-08 15:37:19 -07:00
Senthamil Sindhu | d3a3033b25 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-04-08 15:32:50 -07:00
Asier Isayas | 6bdc714e11 | activate Mongo Proxy and Cassandra Proxy in Prod | 2024-04-08 16:52:09 -04:00
Senthamil Sindhu | 5042f28229 | Merge branch 'master' of https://github.com/Azure/cosmos-explorer | 2024-03-25 15:11:53 -07:00
Senthamil Sindhu | e1430fd06f | Fix API endpoint for CassandraProxy query API | 2024-03-18 10:25:17 -07:00
30 changed files with 1338 additions and 235 deletions

View File

@@ -3117,7 +3117,3 @@ a:link {
background: white;
height: 100%;
}
.sidebarContainer {
height: 100%;
}

View File

@@ -20,18 +20,14 @@ a:focus {
text-decoration: underline;
}
.splashLoaderContainer {
background-color: #f5f5f5;
}
#divExplorer {
background-color: #f5f5f5;
padding: @FabricBoxMargin;
}
.resourceTreeAndTabs {
border-radius: 0px;
box-shadow: @FabricBoxBorderShadow;
margin: @FabricBoxMargin;
margin-top: 0px;
margin-bottom: 0px;
background-color: #ffffff;
@@ -50,6 +46,7 @@ a:focus {
background-color: #ffffff;
border-radius: @FabricBoxBorderRadius @FabricBoxBorderRadius 0px 0px;
box-shadow: @FabricBoxBorderShadow;
margin: @FabricBoxMargin;
margin-top: 0px;
margin-bottom: 0px;
padding-top: 2px;
@@ -170,6 +167,7 @@ a:focus {
.dataExplorerErrorConsoleContainer {
border-radius: 0px 0px @FabricBoxBorderRadius @FabricBoxBorderRadius;
box-shadow: @FabricBoxBorderShadow;
margin: @FabricBoxMargin;
margin-top: 0px;
width: auto;
align-self: auto;

View File

@@ -130,6 +130,15 @@ export enum MongoBackendEndpointType {
remote,
}
export class BackendApi {
public static readonly GenerateToken: string = "GenerateToken";
public static readonly PortalSettings: string = "PortalSettings";
public static readonly AccountRestrictions: string = "AccountRestrictions";
public static readonly RuntimeProxy: string = "RuntimeProxy";
public static readonly DisallowedLocations: string = "DisallowedLocations";
public static readonly SampleData: string = "SampleData";
}
export class PortalBackendEndpoints {
public static readonly Development: string = "https://localhost:7235";
public static readonly Mpac: string = "https://cdb-ms-mpac-pbe.cosmos.azure.com";
@@ -139,13 +148,25 @@ export class PortalBackendEndpoints {
}
export class MongoProxyEndpoints {
public static readonly Development: string = "https://localhost:7238";
public static readonly Local: string = "https://localhost:7238";
public static readonly Mpac: string = "https://cdb-ms-mpac-mp.cosmos.azure.com";
public static readonly Prod: string = "https://cdb-ms-prod-mp.cosmos.azure.com";
public static readonly Fairfax: string = "https://cdb-ff-prod-mp.cosmos.azure.us";
public static readonly Mooncake: string = "https://cdb-mc-prod-mp.cosmos.azure.cn";
}
export class MongoProxyApi {
public static readonly ResourceList: string = "ResourceList";
public static readonly QueryDocuments: string = "QueryDocuments";
public static readonly CreateDocument: string = "CreateDocument";
public static readonly ReadDocument: string = "ReadDocument";
public static readonly UpdateDocument: string = "UpdateDocument";
public static readonly DeleteDocument: string = "DeleteDocument";
public static readonly CreateCollectionWithProxy: string = "CreateCollectionWithProxy";
public static readonly LegacyMongoShell: string = "LegacyMongoShell";
public static readonly BulkDelete: string = "BulkDelete";
}
export class CassandraProxyEndpoints {
public static readonly Development: string = "https://localhost:7240";
public static readonly Mpac: string = "https://cdb-ms-mpac-cp.cosmos.azure.com";
@@ -154,6 +175,18 @@ export class CassandraProxyEndpoints {
public static readonly Mooncake: string = "https://cdb-mc-prod-cp.cosmos.azure.cn";
}
//TODO: Remove this when new backend is migrated over
export class CassandraBackend {
public static readonly createOrDeleteApi: string = "api/cassandra/createordelete";
public static readonly guestCreateOrDeleteApi: string = "api/guest/cassandra/createordelete";
public static readonly queryApi: string = "api/cassandra";
public static readonly guestQueryApi: string = "api/guest/cassandra";
public static readonly keysApi: string = "api/cassandra/keys";
public static readonly guestKeysApi: string = "api/guest/cassandra/keys";
public static readonly schemaApi: string = "api/cassandra/schema";
public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
}
export class CassandraProxyAPIs {
public static readonly createOrDeleteApi: string = "api/cassandra/createordelete";
public static readonly connectionStringCreateOrDeleteApi: string = "api/connectionstring/cassandra/createordelete";
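A minimal, self-contained sketch (not part of the diff) of the pattern these constant classes support: an API is routed to the new portal backend only when it is allow-listed and the configured endpoint is a known environment. The helper name mirrors useNewPortalBackendEndpoint, which is imported elsewhere in this compare but whose body is not shown, so the logic here is an assumption.

interface MiniConfigContext {
  PORTAL_BACKEND_ENDPOINT: string;
  NEW_BACKEND_APIS?: string[];
}

// Endpoint values copied from the PortalBackendEndpoints constants above.
const knownPortalBackendEndpoints: string[] = [
  "https://localhost:7235", // PortalBackendEndpoints.Development
  "https://cdb-ms-mpac-pbe.cosmos.azure.com", // PortalBackendEndpoints.Mpac
];

// Assumed logic: an API uses the new portal backend only when it is listed in
// NEW_BACKEND_APIS and PORTAL_BACKEND_ENDPOINT is one of the known environments.
function useNewPortalBackendEndpointSketch(config: MiniConfigContext, backendApi: string): boolean {
  return (
    (config.NEW_BACKEND_APIS?.includes(backendApi) ?? false) &&
    knownPortalBackendEndpoints.includes(config.PORTAL_BACKEND_ENDPOINT)
  );
}

// Example: only RuntimeProxy has been migrated in this hypothetical configuration.
const sketchConfig: MiniConfigContext = {
  PORTAL_BACKEND_ENDPOINT: "https://cdb-ms-mpac-pbe.cosmos.azure.com",
  NEW_BACKEND_APIS: ["RuntimeProxy"],
};
console.log(useNewPortalBackendEndpointSketch(sketchConfig, "RuntimeProxy")); // true
console.log(useNewPortalBackendEndpointSketch(sketchConfig, "SampleData")); // false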

View File

@@ -3,15 +3,18 @@ import { getAuthorizationTokenUsingResourceTokens } from "Common/getAuthorizatio
import { AuthorizationToken } from "Contracts/FabricMessageTypes";
import { checkDatabaseResourceTokensValidity } from "Platform/Fabric/FabricUtil";
import { LocalStorageUtility, StorageKey } from "Shared/StorageUtility";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { AuthType } from "../AuthType";
import { PriorityLevel } from "../Common/Constants";
import { BackendApi, PriorityLevel } from "../Common/Constants";
import * as Logger from "../Common/Logger";
import { Platform, configContext } from "../ConfigContext";
import { userContext } from "../UserContext";
import { updateUserContext, userContext } from "../UserContext";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import * as PriorityBasedExecutionUtils from "../Utils/PriorityBasedExecutionUtils";
import { EmulatorMasterKey, HttpHeaders } from "./Constants";
import { getErrorMessage } from "./ErrorHandlingUtils";
import { runCommand } from "hooks/useDatabaseAccounts";
import { acquireTokenWithMsal, getMsalInstance } from "Utils/AuthorizationUtils";
const _global = typeof self === "undefined" ? window : self;
@@ -31,7 +34,42 @@ export const tokenProvider = async (requestInfo: Cosmos.RequestInfo) => {
return null;
}
const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
const authorizationToken = `${AUTH_PREFIX}${userContext.aadToken}`;
let authorizationToken;
try {
authorizationToken = `${AUTH_PREFIX}${userContext.aadToken}`;
} catch (error) {
if (error.code === "ExpiredAuthenticationToken") {
// Renew the AAD token using runCommand
const newToken = await runCommand(async () => {
// Implement the logic to acquire a new AAD token
const msalInstance = await getMsalInstance();
const cachedAccount = msalInstance.getAllAccounts()?.[0];
const cachedTenantId = localStorage.getItem("cachedTenantId");
msalInstance.setActiveAccount(cachedAccount);
const newAccessToken = await acquireTokenWithMsal(msalInstance, {
authority: `${configContext.AAD_ENDPOINT}${cachedTenantId}`,
scopes: [`${configContext.ARM_ENDPOINT}/.default`],
});
// Update user context with the new token
updateUserContext({ aadToken: newAccessToken });
authorizationToken = `${AUTH_PREFIX}${userContext.aadToken}`;
return newAccessToken;
});
// Retry getting the token after renewing
const retryResult = await getTokenFromAuthService(verb, resourceType, resourceId);
headers[HttpHeaders.msDate] = retryResult.XDate;
return decodeURIComponent(retryResult.PrimaryReadWriteToken);
} else {
console.error('An error occurred:', error.message);
throw error;
}
}
return authorizationToken;
}
@@ -124,6 +162,10 @@ export async function getTokenFromAuthService(
resourceType: string,
resourceId?: string,
): Promise<AuthorizationToken> {
if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
return getTokenFromAuthService_ToBeDeprecated(verb, resourceType, resourceId);
}
try {
const host: string = configContext.PORTAL_BACKEND_ENDPOINT;
const response: Response = await _global.fetch(host + "/api/connectionstring/runtimeproxy/authorizationtokens", {
@@ -146,6 +188,34 @@ export async function getTokenFromAuthService(
}
}
export async function getTokenFromAuthService_ToBeDeprecated(
verb: string,
resourceType: string,
resourceId?: string,
): Promise<AuthorizationToken> {
try {
const host = configContext.BACKEND_ENDPOINT;
const response = await _global.fetch(host + "/api/guest/runtimeproxy/authorizationTokens", {
method: "POST",
headers: {
"content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken,
},
body: JSON.stringify({
verb,
resourceType,
resourceId,
}),
});
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json());
return result;
} catch (error) {
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
return Promise.reject(error);
}
}
// The Capability is a bitmap, which cosmosdb backend decodes as per the below enum
enum SDKSupportedCapabilities {
None = 0,
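For context, a minimal usage sketch (not part of the diff) of how the tokenProvider above plugs into the @azure/cosmos client, assuming the standard CosmosClientOptions.tokenProvider hook: the SDK invokes it before each request that needs an authorization header. The endpoint value is a placeholder, and the simplified provider stands in for the real one, which also handles MSAL refresh as shown in the diff.

import * as Cosmos from "@azure/cosmos";

// Stand-in for the tokenProvider exported by the file changed above; the real implementation
// builds an AAD/resource-token/master-key header and refreshes the AAD token via MSAL when it
// has expired.
const tokenProvider = async (requestInfo: Cosmos.RequestInfo): Promise<string> => {
  void requestInfo; // unused in this sketch
  return `type=aad&ver=1.0&sig=${"<aad-token>"}`;
};

const client = new Cosmos.CosmosClient({
  endpoint: "https://<account>.documents.azure.com:443/", // placeholder account endpoint
  tokenProvider, // called by the SDK whenever a request needs an authorization header
});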

View File

@@ -73,7 +73,6 @@ describe("MongoProxyClient", () => {
});
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -90,10 +89,7 @@ describe("MongoProxyClient", () => {
});
it("builds the correct proxy URL in development", () => {
updateConfigContext({
MONGO_PROXY_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
updateConfigContext({ MONGO_PROXY_ENDPOINT: "https://localhost:1234" });
queryDocuments(databaseId, collection, true, "{}");
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer/resourcelist`,
@@ -109,7 +105,6 @@ describe("MongoProxyClient", () => {
});
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -126,10 +121,7 @@ describe("MongoProxyClient", () => {
});
it("builds the correct proxy URL in development", () => {
updateConfigContext({
MONGO_PROXY_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
updateConfigContext({ MONGO_PROXY_ENDPOINT: "https://localhost:1234" });
readDocument(databaseId, collection, documentId);
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
@@ -145,7 +137,6 @@ describe("MongoProxyClient", () => {
});
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -162,10 +153,7 @@ describe("MongoProxyClient", () => {
});
it("builds the correct proxy URL in development", () => {
updateConfigContext({
MONGO_PROXY_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
updateConfigContext({ MONGO_PROXY_ENDPOINT: "https://localhost:1234" });
readDocument(databaseId, collection, documentId);
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
@@ -181,7 +169,6 @@ describe("MongoProxyClient", () => {
});
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -198,10 +185,7 @@ describe("MongoProxyClient", () => {
});
it("builds the correct proxy URL in development", () => {
updateConfigContext({
MONGO_BACKEND_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
updateConfigContext({ MONGO_BACKEND_ENDPOINT: "https://localhost:1234" });
updateDocument(databaseId, collection, documentId, "{}");
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
@@ -217,7 +201,6 @@ describe("MongoProxyClient", () => {
});
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -234,10 +217,7 @@ describe("MongoProxyClient", () => {
});
it("builds the correct proxy URL in development", () => {
updateConfigContext({
MONGO_PROXY_ENDPOINT: "https://localhost:1234",
globallyEnabledMongoAPIs: [],
});
updateConfigContext({ MONGO_PROXY_ENDPOINT: "https://localhost:1234" });
deleteDocument(databaseId, collection, documentId);
expect(window.fetch).toHaveBeenCalledWith(
`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`,
@@ -253,7 +233,6 @@ describe("MongoProxyClient", () => {
});
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
});
@@ -281,7 +260,6 @@ describe("MongoProxyClient", () => {
resetConfigContext();
updateConfigContext({
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
globallyEnabledMongoAPIs: [],
});
const params = new URLSearchParams({
"feature.mongoProxyEndpoint": MongoProxyEndpoints.Prod,
@@ -295,12 +273,12 @@ describe("MongoProxyClient", () => {
});
it("returns a local endpoint", () => {
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault("readDocument");
expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`);
});
it("returns a production endpoint", () => {
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault("DeleteDocument");
expect(endpoint).toEqual(`${configContext.MONGO_PROXY_ENDPOINT}/api/mongo/explorer`);
});
});

View File

@@ -1,13 +1,20 @@
import { Constants as CosmosSDKConstants } from "@azure/cosmos";
import {
allowedMongoProxyEndpoints_ToBeDeprecated,
defaultAllowedMongoProxyEndpoints,
validateEndpoint,
} from "Utils/EndpointUtils";
import queryString from "querystring";
import { AuthType } from "../AuthType";
import { configContext } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { Collection } from "../Contracts/ViewModels";
import DocumentId from "../Explorer/Tree/DocumentId";
import { hasFlag } from "../Platform/Hosted/extractFeatures";
import { userContext } from "../UserContext";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { ApiType, ContentType, HttpHeaders, HttpStatusCodes } from "./Constants";
import { ApiType, ContentType, HttpHeaders, HttpStatusCodes, MongoProxyApi, MongoProxyEndpoints } from "./Constants";
import { MinimalQueryIterator } from "./IteratorUtilities";
import { sendMessage } from "./MessageHandler";
@@ -60,6 +67,10 @@ export function queryDocuments(
query: string,
continuationToken?: string,
): Promise<QueryResponse> {
if (!useMongoProxyEndpoint(MongoProxyApi.ResourceList) || !useMongoProxyEndpoint(MongoProxyApi.QueryDocuments)) {
return queryDocuments_ToBeDeprecated(databaseId, collection, isResourceList, query, continuationToken);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
@@ -78,7 +89,7 @@ export function queryDocuments(
query,
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.ResourceList) || "";
const headers = {
...defaultHeaders,
@@ -116,11 +127,76 @@ export function queryDocuments(
});
}
function queryDocuments_ToBeDeprecated(
databaseId: string,
collection: Collection,
isResourceList: boolean,
query: string,
continuationToken?: string,
): Promise<QueryResponse> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}dbs/${databaseId}/colls/${collection.id()}/docs/`,
rid: collection.rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("resourcelist") || "";
const headers = {
...defaultHeaders,
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.IsQuery]: "true",
[CosmosSDKConstants.HttpHeaders.PopulateQueryMetrics]: "true",
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json",
};
if (continuationToken) {
headers[CosmosSDKConstants.HttpHeaders.Continuation] = continuationToken;
}
const path = isResourceList ? "/resourcelist" : "";
return window
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify({ query }),
headers,
})
.then(async (response) => {
if (response.ok) {
return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers,
};
}
await errorHandling(response, "querying documents", params);
return undefined;
});
}
export function readDocument(
databaseId: string,
collection: Collection,
documentId: DocumentId,
): Promise<DataModels.DocumentId> {
if (!useMongoProxyEndpoint(MongoProxyApi.ReadDocument)) {
return readDocument_ToBeDeprecated(databaseId, collection, documentId);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
@@ -141,7 +217,7 @@ export function readDocument(
: "",
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.ReadDocument);
return window
.fetch(endpoint, {
@@ -161,12 +237,61 @@ export function readDocument(
});
}
export function readDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
documentId: DocumentId,
): Promise<DataModels.DocumentId> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 4).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("readDocument");
return window
.fetch(`${endpoint}?${queryString.stringify(params)}`, {
method: "GET",
headers: {
...defaultHeaders,
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader()),
),
},
})
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "reading document", params);
});
}
export function createDocument(
databaseId: string,
collection: Collection,
partitionKeyProperty: string,
documentContent: unknown,
): Promise<DataModels.DocumentId> {
if (!useMongoProxyEndpoint(MongoProxyApi.CreateDocument)) {
return createDocument_ToBeDeprecated(databaseId, collection, partitionKeyProperty, documentContent);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
@@ -183,7 +308,7 @@ export function createDocument(
documentContent: JSON.stringify(documentContent),
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.CreateDocument);
return window
.fetch(`${endpoint}/createDocument`, {
@@ -203,12 +328,54 @@ export function createDocument(
});
}
export function createDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
partitionKeyProperty: string,
documentContent: unknown,
): Promise<DataModels.DocumentId> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}dbs/${databaseId}/colls/${collection.id()}/docs/`,
rid: collection.rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
};
const endpoint = getFeatureEndpointOrDefault("createDocument");
return window
.fetch(`${endpoint}/resourcelist?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify(documentContent),
headers: {
...defaultHeaders,
...authHeaders(),
},
})
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "creating document", params);
});
}
export function updateDocument(
databaseId: string,
collection: Collection,
documentId: DocumentId,
documentContent: string,
): Promise<DataModels.DocumentId> {
if (!useMongoProxyEndpoint(MongoProxyApi.UpdateDocument)) {
return updateDocument_ToBeDeprecated(databaseId, collection, documentId, documentContent);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
@@ -229,7 +396,7 @@ export function updateDocument(
: "",
documentContent,
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.UpdateDocument);
return window
.fetch(endpoint, {
@@ -250,7 +417,56 @@ export function updateDocument(
});
}
export function updateDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
documentId: DocumentId,
documentContent: string,
): Promise<DataModels.DocumentId> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 5).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("updateDocument");
return window
.fetch(`${endpoint}?${queryString.stringify(params)}`, {
method: "PUT",
body: documentContent,
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: ContentType.applicationJson,
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
})
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "updating document", params);
});
}
export function deleteDocument(databaseId: string, collection: Collection, documentId: DocumentId): Promise<void> {
if (!useMongoProxyEndpoint(MongoProxyApi.DeleteDocument)) {
return deleteDocument_ToBeDeprecated(databaseId, collection, documentId);
}
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
@@ -270,7 +486,7 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.DeleteDocument);
return window
.fetch(endpoint, {
@@ -290,6 +506,50 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
});
}
export function deleteDocument_ToBeDeprecated(
databaseId: string,
collection: Collection,
documentId: DocumentId,
): Promise<void> {
const { databaseAccount } = userContext;
const resourceEndpoint = databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint;
const idComponents = documentId.self.split("/");
const path = idComponents.slice(0, 5).join("/");
const rid = encodeURIComponent(idComponents[5]);
const params = {
db: databaseId,
coll: collection.id(),
resourceUrl: `${resourceEndpoint}${path}/${rid}`,
rid,
rtype: "docs",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperties?.[0]
: "",
};
const endpoint = getFeatureEndpointOrDefault("deleteDocument");
return window
.fetch(`${endpoint}?${queryString.stringify(params)}`, {
method: "DELETE",
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: ContentType.applicationJson,
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
})
.then(async (response) => {
if (response.ok) {
return undefined;
}
return await errorHandling(response, "deleting document", params);
});
}
export function deleteDocuments(
databaseId: string,
collection: Collection,
@@ -315,7 +575,7 @@ export function deleteDocuments(
resourceGroup: userContext.resourceGroup,
databaseAccountName: databaseAccount.name,
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.BulkDelete);
return window
.fetch(`${endpoint}/bulkdelete`, {
@@ -339,6 +599,9 @@ export function deleteDocuments(
export function createMongoCollectionWithProxy(
params: DataModels.CreateCollectionParams,
): Promise<DataModels.Collection> {
if (!useMongoProxyEndpoint(MongoProxyApi.CreateCollectionWithProxy)) {
return createMongoCollectionWithProxy_ToBeDeprecated(params);
}
const { databaseAccount } = userContext;
const shardKey: string = params.partitionKey?.paths[0];
@@ -359,7 +622,7 @@ export function createMongoCollectionWithProxy(
isSharded: !!shardKey,
};
const endpoint = getFeatureEndpointOrDefault();
const endpoint = getFeatureEndpointOrDefault(MongoProxyApi.CreateCollectionWithProxy);
return window
.fetch(`${endpoint}/createCollection`, {
@@ -379,8 +642,67 @@ export function createMongoCollectionWithProxy(
});
}
export function getFeatureEndpointOrDefault(): string {
const endpoint: string = configContext.MONGO_PROXY_ENDPOINT;
export function createMongoCollectionWithProxy_ToBeDeprecated(
params: DataModels.CreateCollectionParams,
): Promise<DataModels.Collection> {
const { databaseAccount } = userContext;
const shardKey: string = params.partitionKey?.paths[0];
const mongoParams: DataModels.MongoParameters = {
resourceUrl: databaseAccount.properties.mongoEndpoint || databaseAccount.properties.documentEndpoint,
db: params.databaseId,
coll: params.collectionId,
pk: shardKey,
offerThroughput: params.autoPilotMaxThroughput || params.offerThroughput,
cd: params.createNewDatabase,
st: params.databaseLevelThroughput,
is: !!shardKey,
rid: "",
rtype: "colls",
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput,
};
const endpoint = getFeatureEndpointOrDefault("createCollectionWithProxy");
return window
.fetch(
`${endpoint}/createCollection?${queryString.stringify(
mongoParams as unknown as queryString.ParsedUrlQueryInput,
)}`,
{
method: "POST",
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
},
},
)
.then(async (response) => {
if (response.ok) {
return response.json();
}
return await errorHandling(response, "creating collection", mongoParams);
});
}
export function getFeatureEndpointOrDefault(feature: string): string {
let endpoint;
const allowedMongoProxyEndpoints = configContext.allowedMongoProxyEndpoints || [
...defaultAllowedMongoProxyEndpoints,
...allowedMongoProxyEndpoints_ToBeDeprecated,
];
if (useMongoProxyEndpoint(feature)) {
endpoint = configContext.MONGO_PROXY_ENDPOINT;
} else {
endpoint =
hasFlag(userContext.features.mongoProxyAPIs, feature) &&
validateEndpoint(userContext.features.mongoProxyEndpoint, allowedMongoProxyEndpoints)
? userContext.features.mongoProxyEndpoint
: configContext.MONGO_BACKEND_ENDPOINT || configContext.BACKEND_ENDPOINT;
}
return getEndpoint(endpoint);
}
@@ -397,6 +719,80 @@ export function getEndpoint(endpoint: string): string {
return url;
}
export function useMongoProxyEndpoint(mongoProxyApi: string): boolean {
const mongoProxyEnvironmentMap: { [key: string]: string[] } = {
[MongoProxyApi.ResourceList]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.QueryDocuments]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.CreateDocument]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.ReadDocument]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.UpdateDocument]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.DeleteDocument]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.CreateCollectionWithProxy]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.LegacyMongoShell]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
[MongoProxyApi.BulkDelete]: [
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
],
};
if (!mongoProxyEnvironmentMap[mongoProxyApi] || !configContext.MONGO_PROXY_ENDPOINT) {
return false;
}
return mongoProxyEnvironmentMap[mongoProxyApi].includes(configContext.MONGO_PROXY_ENDPOINT);
}
export class ThrottlingError extends Error {
constructor(message: string) {
super(message);
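A self-contained sketch (not from the diff) of the routing pattern this file now follows: every public operation checks useMongoProxyEndpoint for its MongoProxyApi name and either calls the new Mongo proxy or falls back to the *_ToBeDeprecated implementation that still targets the legacy BACKEND_ENDPOINT. URLs and the set of migrated APIs below are placeholders.

type MongoDoc = { id: string };

// Which APIs have been migrated in this hypothetical configuration.
const migratedApis = new Set<string>(["ReadDocument", "DeleteDocument"]);
const isApiOnNewMongoProxy = (mongoProxyApi: string): boolean => migratedApis.has(mongoProxyApi);

async function readViaNewProxy(id: string): Promise<MongoDoc> {
  // placeholder for fetch(`${MONGO_PROXY_ENDPOINT}/api/mongo/explorer?...`)
  return { id };
}

async function readViaLegacyBackend(id: string): Promise<MongoDoc> {
  // placeholder for fetch(`${BACKEND_ENDPOINT}/api/mongo/explorer?...`)
  return { id };
}

// Mirrors the readDocument / readDocument_ToBeDeprecated pair above.
export async function readDocumentSketch(id: string): Promise<MongoDoc> {
  if (!isApiOnNewMongoProxy("ReadDocument")) {
    return readViaLegacyBackend(id);
  }
  return readViaNewProxy(id);
}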

View File

@@ -1,4 +1,11 @@
import { CassandraProxyEndpoints, JunoEndpoints, MongoProxyEndpoints, PortalBackendEndpoints } from "Common/Constants";
import {
BackendApi,
CassandraProxyEndpoints,
JunoEndpoints,
MongoProxyEndpoints,
PortalBackendEndpoints,
} from "Common/Constants";
import { userContext } from "UserContext";
import {
allowedAadEndpoints,
allowedArcadiaEndpoints,
@@ -9,9 +16,9 @@ import {
allowedMongoBackendEndpoints,
allowedMsalRedirectEndpoints,
defaultAllowedArmEndpoints,
defaultAllowedBackendEndpoints,
defaultAllowedCassandraProxyEndpoints,
defaultAllowedMongoProxyEndpoints,
defaultAllowedPortalBackendEndpoints,
validateEndpoint,
} from "Utils/EndpointUtils";
@@ -25,13 +32,14 @@ export enum Platform {
export interface ConfigContext {
platform: Platform;
allowedArmEndpoints: ReadonlyArray<string>;
allowedPortalBackendEndpoints: ReadonlyArray<string>;
allowedBackendEndpoints: ReadonlyArray<string>;
allowedCassandraProxyEndpoints: ReadonlyArray<string>;
allowedMongoProxyEndpoints: ReadonlyArray<string>;
allowedParentFrameOrigins: ReadonlyArray<string>;
gitSha?: string;
proxyPath?: string;
AAD_ENDPOINT: string;
ENVIRONMENT: string;
ARM_AUTH_AREA: string;
ARM_ENDPOINT: string;
EMULATOR_ENDPOINT?: string;
@@ -44,10 +52,13 @@ export interface ConfigContext {
CATALOG_API_KEY: string;
ARCADIA_ENDPOINT: string;
ARCADIA_LIVY_ENDPOINT_DNS_ZONE: string;
BACKEND_ENDPOINT?: string;
PORTAL_BACKEND_ENDPOINT: string;
NEW_BACKEND_APIS?: BackendApi[];
MONGO_BACKEND_ENDPOINT?: string;
MONGO_PROXY_ENDPOINT: string;
CASSANDRA_PROXY_ENDPOINT: string;
NEW_CASSANDRA_APIS?: string[];
PROXY_PATH?: string;
JUNO_ENDPOINT: string;
GITHUB_CLIENT_ID: string;
@@ -58,15 +69,13 @@ export interface ConfigContext {
hostedExplorerURL: string;
armAPIVersion?: string;
msalRedirectURI?: string;
globallyEnabledCassandraAPIs?: string[];
globallyEnabledMongoAPIs?: string[];
}
// Default configuration
let configContext: Readonly<ConfigContext> = {
platform: Platform.Portal,
allowedArmEndpoints: defaultAllowedArmEndpoints,
allowedPortalBackendEndpoints: defaultAllowedPortalBackendEndpoints,
allowedBackendEndpoints: defaultAllowedBackendEndpoints,
allowedCassandraProxyEndpoints: defaultAllowedCassandraProxyEndpoints,
allowedMongoProxyEndpoints: defaultAllowedMongoProxyEndpoints,
allowedParentFrameOrigins: [
@@ -86,7 +95,7 @@ let configContext: Readonly<ConfigContext> = {
], // Webpack injects this at build time
gitSha: process.env.GIT_SHA,
hostedExplorerURL: "https://cosmos.azure.com/",
AAD_ENDPOINT: "https://login.microsoftonline.com/",
AAD_ENDPOINT: "",
ARM_AUTH_AREA: "https://management.azure.com/",
ARM_ENDPOINT: "https://management.azure.com/",
ARM_API_VERSION: "2016-06-01",
@@ -100,13 +109,13 @@ let configContext: Readonly<ConfigContext> = {
GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/organizations/AzureCosmosDBNotebooks/settings/applications/1189306
GITHUB_TEST_ENV_CLIENT_ID: "b63fc8cbf87fd3c6e2eb", // Registered OAuth app: https://github.com/organizations/AzureCosmosDBNotebooks/settings/applications/1777772
JUNO_ENDPOINT: JunoEndpoints.Prod,
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
PORTAL_BACKEND_ENDPOINT: PortalBackendEndpoints.Prod,
MONGO_PROXY_ENDPOINT: MongoProxyEndpoints.Prod,
CASSANDRA_PROXY_ENDPOINT: CassandraProxyEndpoints.Prod,
NEW_CASSANDRA_APIS: ["postQuery", "createOrDelete", "getKeys", "getSchema"],
isTerminalEnabled: false,
isPhoenixEnabled: false,
globallyEnabledCassandraAPIs: [],
globallyEnabledMongoAPIs: [],
};
export function resetConfigContext(): void {
@@ -143,11 +152,11 @@ export function updateConfigContext(newContext: Partial<ConfigContext>): void {
if (
!validateEndpoint(
newContext.PORTAL_BACKEND_ENDPOINT,
configContext.allowedPortalBackendEndpoints || defaultAllowedPortalBackendEndpoints,
newContext.BACKEND_ENDPOINT,
configContext.allowedBackendEndpoints || defaultAllowedBackendEndpoints,
)
) {
delete newContext.PORTAL_BACKEND_ENDPOINT;
delete newContext.BACKEND_ENDPOINT;
}
if (
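A minimal sketch (an assumption, not the repo's validateEndpoint) of the validation step shown above: a value passed into updateConfigContext is kept only when its origin matches one of the allow-listed endpoints; otherwise the field is deleted and the default remains in effect.

function isAllowedEndpoint(endpoint: string | undefined, allowedEndpoints: ReadonlyArray<string>): boolean {
  if (!endpoint) {
    return false;
  }
  try {
    const origin = new URL(endpoint).origin;
    return allowedEndpoints.some((allowed) => new URL(allowed).origin === origin);
  } catch {
    return false; // not a parsable URL
  }
}

// Example: an unknown backend endpoint would be dropped by updateConfigContext.
const allowedBackendEndpoints = ["https://main.documentdb.ext.azure.com", "https://localhost:1234"];
console.log(isAllowedEndpoint("https://main.documentdb.ext.azure.com", allowedBackendEndpoints)); // true
console.log(isAllowedEndpoint("https://attacker.example.com", allowedBackendEndpoints)); // false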

View File

@@ -381,15 +381,15 @@ export enum TerminalKind {
export interface DataExplorerInputsFrame {
databaseAccount: any;
subscriptionId?: string;
resourceGroup?: string;
tenantId?: string;
userName?: string;
resourceGroup?: string;
masterKey?: string;
hasWriteAccess?: boolean;
authorizationToken?: string;
csmEndpoint?: string;
dnsSuffix?: string;
serverId?: string;
extensionEndpoint?: string;
portalBackendEndpoint?: string;
mongoProxyEndpoint?: string;
cassandraProxyEndpoint?: string;

View File

@@ -1,5 +1,6 @@
import { isPublicInternetAccessAllowed } from "Common/DatabaseAccountUtility";
import { PhoenixClient } from "Phoenix/PhoenixClient";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { cloneDeep } from "lodash";
import create, { UseStore } from "zustand";
import { AuthType } from "../../AuthType";
@@ -127,7 +128,9 @@ export const useNotebook: UseStore<NotebookState> = create((set, get) => ({
userContext.apiType === "Postgres" || userContext.apiType === "VCoreMongo"
? databaseAccount?.location
: databaseAccount?.properties?.writeLocations?.[0]?.locationName.toLowerCase();
const disallowedLocationsUri: string = `${configContext.PORTAL_BACKEND_ENDPOINT}/api/disallowedlocations`;
const disallowedLocationsUri: string = useNewPortalBackendEndpoint(Constants.BackendApi.DisallowedLocations)
? `${configContext.PORTAL_BACKEND_ENDPOINT}/api/disallowedlocations`
: `${configContext.BACKEND_ENDPOINT}/api/disallowedLocations`;
const authorizationHeader = getAuthorizationHeader();
try {
const response = await fetch(disallowedLocationsUri, {

View File

@@ -1,6 +1,7 @@
import { FeedOptions } from "@azure/cosmos";
import {
Areas,
BackendApi,
ConnectionStatusType,
ContainerStatusType,
HttpStatusCodes,
@@ -31,6 +32,7 @@ import { Action } from "Shared/Telemetry/TelemetryConstants";
import { traceFailure, traceStart, traceSuccess } from "Shared/Telemetry/TelemetryProcessor";
import { userContext } from "UserContext";
import { getAuthorizationHeader } from "Utils/AuthorizationUtils";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { queryPagesUntilContentPresent } from "Utils/QueryUtils";
import { QueryCopilotState, useQueryCopilot } from "hooks/useQueryCopilot";
import { useTabs } from "hooks/useTabs";
@@ -80,7 +82,11 @@ export const isCopilotFeatureRegistered = async (subscriptionId: string): Promis
};
export const getCopilotEnabled = async (): Promise<boolean> => {
const url = `${configContext.PORTAL_BACKEND_ENDPOINT}/api/portalsettings/querycopilot`;
const backendEndpoint: string = useNewPortalBackendEndpoint(BackendApi.PortalSettings)
? configContext.PORTAL_BACKEND_ENDPOINT
: configContext.BACKEND_ENDPOINT;
const url = `${backendEndpoint}/api/portalsettings/querycopilot`;
const authorizationHeader: AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
const headers = { [authorizationHeader.header]: authorizationHeader.token };
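A self-contained sketch (not from the diff) of the request pattern getCopilotEnabled now uses: choose the host based on whether the PortalSettings API has moved to the new portal backend, attach the authorization header, and read the boolean setting. The response shape ({ enable: boolean }) and the header format are assumptions.

async function fetchCopilotEnabled(
  portalBackendEndpoint: string,
  legacyBackendEndpoint: string,
  portalSettingsOnNewBackend: boolean,
  authorizationToken: string,
): Promise<boolean> {
  const host = portalSettingsOnNewBackend ? portalBackendEndpoint : legacyBackendEndpoint;
  const response = await fetch(`${host}/api/portalsettings/querycopilot`, {
    headers: { Authorization: authorizationToken }, // e.g. "Bearer <aad token>" (assumed format)
  });
  if (!response.ok) {
    return false; // treat failures as "copilot disabled" in this sketch
  }
  const data = (await response.json()) as { enable?: boolean };
  return data.enable === true;
}

// Example call using the endpoints that appear in this compare.
fetchCopilotEnabled(
  "https://cdb-ms-mpac-pbe.cosmos.azure.com",
  "https://main.documentdb.ext.azure.com",
  true,
  "Bearer <token>",
).then((enabled) => console.log(enabled));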

View File

@@ -282,69 +282,67 @@ export const SidebarContainer: React.FC<SidebarProps> = ({ explorer }) => {
);
return (
<div className="sidebarContainer">
<Allotment ref={allotment} onChange={onChange} onDragEnd={onDragEnd} className="resourceTreeAndTabs">
{/* Collections Tree - Start */}
{hasSidebar && (
// When collapsed, we force the pane to 24 pixels wide and make it non-resizable.
<Allotment.Pane minSize={24} preferredSize={250}>
<CosmosFluentProvider className={mergeClasses(styles.sidebar)}>
<div className={styles.sidebarContainer}>
{loading && (
// The Fluent UI progress bar has some issues in reduced-motion environments so we use a simple CSS animation here.
// https://github.com/microsoft/fluentui/issues/29076
<div className={styles.loadingProgressBar} title="Refreshing tree..." />
)}
{expanded ? (
<>
<div className={styles.floatingControlsContainer}>
<div className={styles.floatingControls}>
<button
type="button"
data-test="Sidebar/RefreshButton"
className={styles.floatingControlButton}
disabled={loading}
title="Refresh"
onClick={onRefreshClick}
>
<ArrowSync12Regular />
</button>
<button
type="button"
className={styles.floatingControlButton}
title="Collapse sidebar"
onClick={() => collapse()}
>
<ChevronLeft12Regular />
</button>
</div>
<Allotment ref={allotment} onChange={onChange} onDragEnd={onDragEnd} className="resourceTreeAndTabs">
{/* Collections Tree - Start */}
{hasSidebar && (
// When collapsed, we force the pane to 24 pixels wide and make it non-resizable.
<Allotment.Pane minSize={24} preferredSize={250}>
<CosmosFluentProvider className={mergeClasses(styles.sidebar)}>
<div className={styles.sidebarContainer}>
{loading && (
// The Fluent UI progress bar has some issues in reduced-motion environments so we use a simple CSS animation here.
// https://github.com/microsoft/fluentui/issues/29076
<div className={styles.loadingProgressBar} title="Refreshing tree..." />
)}
{expanded ? (
<>
<div className={styles.floatingControlsContainer}>
<div className={styles.floatingControls}>
<button
type="button"
data-test="Sidebar/RefreshButton"
className={styles.floatingControlButton}
disabled={loading}
title="Refresh"
onClick={onRefreshClick}
>
<ArrowSync12Regular />
</button>
<button
type="button"
className={styles.floatingControlButton}
title="Collapse sidebar"
onClick={() => collapse()}
>
<ChevronLeft12Regular />
</button>
</div>
<div
className={styles.expandedContent}
style={!hasGlobalCommands ? { gridTemplateRows: "1fr" } : undefined}
>
{hasGlobalCommands && <GlobalCommands explorer={explorer} />}
<ResourceTree explorer={explorer} />
</div>
</>
) : (
<button
type="button"
className={styles.floatingControlButton}
title="Expand sidebar"
onClick={() => expand()}
</div>
<div
className={styles.expandedContent}
style={!hasGlobalCommands ? { gridTemplateRows: "1fr" } : undefined}
>
<ChevronRight12Regular />
</button>
)}
</div>
</CosmosFluentProvider>
</Allotment.Pane>
)}
<Allotment.Pane minSize={200}>
<Tabs explorer={explorer} />
{hasGlobalCommands && <GlobalCommands explorer={explorer} />}
<ResourceTree explorer={explorer} />
</div>
</>
) : (
<button
type="button"
className={styles.floatingControlButton}
title="Expand sidebar"
onClick={() => expand()}
>
<ChevronRight12Regular />
</button>
)}
</div>
</CosmosFluentProvider>
</Allotment.Pane>
</Allotment>
</div>
)}
<Allotment.Pane minSize={200}>
<Tabs explorer={explorer} />
</Allotment.Pane>
</Allotment>
);
};

View File

@@ -3,7 +3,7 @@ import * as ko from "knockout";
import Q from "q";
import { AuthType } from "../../AuthType";
import * as Constants from "../../Common/Constants";
import { CassandraProxyAPIs } from "../../Common/Constants";
import { CassandraProxyAPIs, CassandraProxyEndpoints } from "../../Common/Constants";
import { handleError } from "../../Common/ErrorHandlingUtils";
import * as HeadersUtility from "../../Common/HeadersUtility";
import { createDocument } from "../../Common/dataAccess/createDocument";
@@ -264,6 +264,9 @@ export class CassandraAPIDataClient extends TableDataClient {
shouldNotify?: boolean,
paginationToken?: string,
): Promise<Entities.IListTableEntitiesResult> {
if (!this.useCassandraProxyEndpoint("postQuery")) {
return this.queryDocuments_ToBeDeprecated(collection, query, shouldNotify, paginationToken);
}
const clearMessage =
shouldNotify && NotificationConsoleUtils.logConsoleProgress(`Querying rows for table ${collection.id()}`);
try {
@@ -306,6 +309,55 @@ export class CassandraAPIDataClient extends TableDataClient {
}
}
public async queryDocuments_ToBeDeprecated(
collection: ViewModels.Collection,
query: string,
shouldNotify?: boolean,
paginationToken?: string,
): Promise<Entities.IListTableEntitiesResult> {
const clearMessage =
shouldNotify && NotificationConsoleUtils.logConsoleProgress(`Querying rows for table ${collection.id()}`);
try {
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestQueryApi
: Constants.CassandraBackend.queryApi;
const data: any = await $.ajax(`${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
resourceId: databaseAccount?.id,
keyspaceId: collection.databaseId,
tableId: collection.id(),
query,
paginationToken,
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
});
shouldNotify &&
NotificationConsoleUtils.logConsoleInfo(
`Successfully fetched ${data.result.length} rows for table ${collection.id()}`,
);
return {
Results: data.result,
ContinuationToken: data.paginationToken,
};
} catch (error) {
shouldNotify &&
handleError(
error,
"QueryDocuments_ToBeDeprecated_Cassandra",
`Failed to query rows for table ${collection.id()}`,
);
throw error;
} finally {
clearMessage?.();
}
}
public async deleteDocuments(
collection: ViewModels.Collection,
entitiesToDelete: Entities.ITableEntity[],
@@ -419,6 +471,10 @@ export class CassandraAPIDataClient extends TableDataClient {
}
public getTableKeys(collection: ViewModels.Collection): Q.Promise<CassandraTableKeys> {
if (!this.useCassandraProxyEndpoint("getKeys")) {
return this.getTableKeys_ToBeDeprecated(collection);
}
if (!!collection.cassandraKeys) {
return Q.resolve(collection.cassandraKeys);
}
@@ -459,7 +515,52 @@ export class CassandraAPIDataClient extends TableDataClient {
return deferred.promise;
}
public getTableKeys_ToBeDeprecated(collection: ViewModels.Collection): Q.Promise<CassandraTableKeys> {
if (!!collection.cassandraKeys) {
return Q.resolve(collection.cassandraKeys);
}
const clearInProgressMessage = logConsoleProgress(`Fetching keys for table ${collection.id()}`);
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestKeysApi
: Constants.CassandraBackend.keysApi;
let endpoint = `${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`;
const deferred = Q.defer<CassandraTableKeys>();
$.ajax(endpoint, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
resourceId: databaseAccount?.id,
keyspaceId: collection.databaseId,
tableId: collection.id(),
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
})
.then(
(data: CassandraTableKeys) => {
collection.cassandraKeys = data;
logConsoleInfo(`Successfully fetched keys for table ${collection.id()}`);
deferred.resolve(data);
},
(error: any) => {
const errorText = error.responseJSON?.message ?? JSON.stringify(error);
handleError(errorText, "FetchKeysCassandra", `Error fetching keys for table ${collection.id()}`);
deferred.reject(errorText);
},
)
.done(clearInProgressMessage);
return deferred.promise;
}
public getTableSchema(collection: ViewModels.Collection): Q.Promise<CassandraTableKey[]> {
if (!this.useCassandraProxyEndpoint("getSchema")) {
return this.getTableSchema_ToBeDeprecated(collection);
}
if (!!collection.cassandraSchema) {
return Q.resolve(collection.cassandraSchema);
}
@@ -501,7 +602,52 @@ export class CassandraAPIDataClient extends TableDataClient {
return deferred.promise;
}
public getTableSchema_ToBeDeprecated(collection: ViewModels.Collection): Q.Promise<CassandraTableKey[]> {
if (!!collection.cassandraSchema) {
return Q.resolve(collection.cassandraSchema);
}
const clearInProgressMessage = logConsoleProgress(`Fetching schema for table ${collection.id()}`);
const { databaseAccount, authType } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestSchemaApi
: Constants.CassandraBackend.schemaApi;
let endpoint = `${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`;
const deferred = Q.defer<CassandraTableKey[]>();
$.ajax(endpoint, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(databaseAccount?.properties.cassandraEndpoint),
resourceId: databaseAccount?.id,
keyspaceId: collection.databaseId,
tableId: collection.id(),
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
})
.then(
(data: any) => {
collection.cassandraSchema = data.columns;
logConsoleInfo(`Successfully fetched schema for table ${collection.id()}`);
deferred.resolve(data.columns);
},
(error: any) => {
const errorText = error.responseJSON?.message ?? JSON.stringify(error);
handleError(errorText, "FetchSchemaCassandra", `Error fetching schema for table ${collection.id()}`);
deferred.reject(errorText);
},
)
.done(clearInProgressMessage);
return deferred.promise;
}
private createOrDeleteQuery(cassandraEndpoint: string, resourceId: string, query: string): Q.Promise<any> {
if (!this.useCassandraProxyEndpoint("createOrDelete")) {
return this.createOrDeleteQuery_ToBeDeprecated(cassandraEndpoint, resourceId, query);
}
const deferred = Q.defer();
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
@@ -531,6 +677,38 @@ export class CassandraAPIDataClient extends TableDataClient {
return deferred.promise;
}
private createOrDeleteQuery_ToBeDeprecated(
cassandraEndpoint: string,
resourceId: string,
query: string,
): Q.Promise<any> {
const deferred = Q.defer();
const { authType, databaseAccount } = userContext;
const apiEndpoint: string =
authType === AuthType.EncryptedToken
? Constants.CassandraBackend.guestCreateOrDeleteApi
: Constants.CassandraBackend.createOrDeleteApi;
$.ajax(`${configContext.BACKEND_ENDPOINT}/${apiEndpoint}`, {
type: "POST",
data: {
accountName: databaseAccount?.name,
cassandraEndpoint: this.trimCassandraEndpoint(cassandraEndpoint),
resourceId: resourceId,
query: query,
},
beforeSend: this.setAuthorizationHeader as any,
cache: false,
}).then(
(data: any) => {
deferred.resolve();
},
(reason) => {
deferred.reject(reason);
},
);
return deferred.promise;
}
private trimCassandraEndpoint(cassandraEndpoint: string): string {
if (!cassandraEndpoint) {
return cassandraEndpoint;
@@ -569,4 +747,19 @@ export class CassandraAPIDataClient extends TableDataClient {
private getCassandraPartitionKeyProperty(collection: ViewModels.Collection): string {
return collection.cassandraKeys.partitionKeys[0].property;
}
private useCassandraProxyEndpoint(api: string): boolean {
const activeCassandraProxyEndpoints: string[] = [
CassandraProxyEndpoints.Development,
CassandraProxyEndpoints.Mpac,
CassandraProxyEndpoints.Prod,
CassandraProxyEndpoints.Fairfax,
CassandraProxyEndpoints.Mooncake,
];
return (
configContext.NEW_CASSANDRA_APIS?.includes(api) &&
activeCassandraProxyEndpoints.includes(configContext.CASSANDRA_PROXY_ENDPOINT)
);
}
}
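A small configuration sketch (illustrative, not from the diff): the new Cassandra proxy is used for an API only when that API name appears in NEW_CASSANDRA_APIS and CASSANDRA_PROXY_ENDPOINT is one of the known environments, which is exactly what useCassandraProxyEndpoint checks above.

// Values below come from the ConfigContext defaults and constants shown earlier in this compare.
const cassandraConfig = {
  CASSANDRA_PROXY_ENDPOINT: "https://cdb-ms-mpac-cp.cosmos.azure.com", // CassandraProxyEndpoints.Mpac
  NEW_CASSANDRA_APIS: ["postQuery", "createOrDelete", "getKeys", "getSchema"],
};

function cassandraApiUsesNewProxy(api: string): boolean {
  const knownProxyEndpoints = [
    "https://localhost:7240", // CassandraProxyEndpoints.Development
    "https://cdb-ms-mpac-cp.cosmos.azure.com", // CassandraProxyEndpoints.Mpac
  ];
  return (
    cassandraConfig.NEW_CASSANDRA_APIS.includes(api) &&
    knownProxyEndpoints.includes(cassandraConfig.CASSANDRA_PROXY_ENDPOINT)
  );
}

// getTableSchema would take the new proxy path here; removing "getSchema" from NEW_CASSANDRA_APIS
// would send it back through getTableSchema_ToBeDeprecated against BACKEND_ENDPOINT.
console.log(cassandraApiUsesNewProxy("getSchema")); // true
console.log(cassandraApiUsesNewProxy("dropKeyspace")); // false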

View File

@@ -610,7 +610,7 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
// Table user clicked on this row
const [clickedRowIndex, setClickedRowIndex] = useState<number>(RESET_INDEX);
// Table multiple selection
const [selectedRows, setSelectedRows] = React.useState<Set<TableRowId>>(() => new Set<TableRowId>());
const [selectedRows, setSelectedRows] = React.useState<Set<TableRowId>>(() => new Set<TableRowId>([0]));
// Command buttons
const [editorState, setEditorState] = useState<ViewModels.DocumentExplorerState>(
@@ -663,6 +663,23 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
}
}, [isFilterFocused]);
// Clicked row must be defined
useEffect(() => {
if (documentIds.length > 0) {
let currentClickedRowIndex = clickedRowIndex;
if (
(currentClickedRowIndex === RESET_INDEX &&
editorState === ViewModels.DocumentExplorerState.noDocumentSelected) ||
currentClickedRowIndex > documentIds.length - 1
) {
// reset clicked row or the current clicked row is out of bounds
currentClickedRowIndex = INITIAL_SELECTED_ROW_INDEX;
setSelectedRows(new Set([INITIAL_SELECTED_ROW_INDEX]));
onDocumentClicked(currentClickedRowIndex, documentIds);
}
}
}, [documentIds, clickedRowIndex, editorState]);
/**
* Recursively delete all documents by retrying throttled requests (429).
* This only works for NoSQL, because the bulk response includes status for each delete document request.
@@ -1164,16 +1181,27 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
deletePromise = _bulkDeleteNoSqlDocuments(_collection, toDeleteDocumentIds);
}
} else {
deletePromise = MongoProxyClient.deleteDocuments(
_collection.databaseId,
_collection as ViewModels.Collection,
toDeleteDocumentIds,
).then(({ deletedCount, isAcknowledged }) => {
if (deletedCount === toDeleteDocumentIds.length && isAcknowledged) {
return toDeleteDocumentIds;
}
throw new Error(`Delete failed with deletedCount: ${deletedCount} and isAcknowledged: ${isAcknowledged}`);
});
if (isMongoBulkDeleteDisabled) {
// TODO: Once new mongo proxy is available for all users, remove the call for MongoProxyClient.deleteDocument().
// MongoProxyClient.deleteDocuments() should be called for all users.
deletePromise = MongoProxyClient.deleteDocument(
_collection.databaseId,
_collection as ViewModels.Collection,
toDeleteDocumentIds[0],
).then(() => [toDeleteDocumentIds[0]]);
// ----------------------------------------------------------------------------------------------------
} else {
deletePromise = MongoProxyClient.deleteDocuments(
_collection.databaseId,
_collection as ViewModels.Collection,
toDeleteDocumentIds,
).then(({ deletedCount, isAcknowledged }) => {
if (deletedCount === toDeleteDocumentIds.length && isAcknowledged) {
return toDeleteDocumentIds;
}
throw new Error(`Delete failed with deletedCount: ${deletedCount} and isAcknowledged: ${isAcknowledged}`);
});
}
}
return deletePromise
@@ -2085,8 +2113,11 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
}
}, [prevSelectedColumnIds, refreshDocumentsGrid, selectedColumnIds]);
// TODO: remove isMongoBulkDeleteDisabled when new mongo proxy is enabled for all users
// TODO: remove partitionKey.systemKey when JS SDK bug is fixed
const isBulkDeleteDisabled = partitionKey.systemKey && !isPreferredApiMongoDB;
const isMongoBulkDeleteDisabled = !MongoProxyClient.useMongoProxyEndpoint(Constants.MongoProxyApi.BulkDelete);
const isBulkDeleteDisabled =
(partitionKey.systemKey && !isPreferredApiMongoDB) || (isPreferredApiMongoDB && isMongoBulkDeleteDisabled);
// -------------------------------------------------------
return (
@@ -2201,6 +2232,7 @@ export const DocumentsTabComponent: React.FunctionComponent<IDocumentsTabCompone
<DocumentsTableComponent
onRefreshTable={() => refreshDocumentsGrid(false)}
items={tableItems}
onItemClicked={(index) => onDocumentClicked(index, documentIds)}
onSelectedRowsChange={onSelectedRowsChange}
selectedRows={selectedRows}
size={tableContainerSizePx}

View File

@@ -14,6 +14,7 @@ describe("DocumentsTableComponent", () => {
{ [ID_HEADER]: "2", [PARTITION_KEY_HEADER]: "pk2" },
{ [ID_HEADER]: "3", [PARTITION_KEY_HEADER]: "pk3" },
],
onItemClicked: (): void => {},
onSelectedRowsChange: (): void => {},
selectedRows: new Set<TableRowId>(),
size: {

View File

@@ -68,6 +68,7 @@ export type ColumnDefinition = {
export interface IDocumentsTableComponentProps {
onRefreshTable: () => void;
items: DocumentsTableComponentItem[];
onItemClicked: (index: number) => void;
onSelectedRowsChange: (selectedItemsIndices: Set<TableRowId>) => void;
selectedRows: Set<TableRowId>;
size: { height: number; width: number };
@@ -97,7 +98,6 @@ const defaultSize = {
idealWidth: 200,
minWidth: 50,
};
export const DocumentsTableComponent: React.FC<IDocumentsTableComponentProps> = ({
onRefreshTable,
items,
@@ -115,8 +115,6 @@ export const DocumentsTableComponent: React.FC<IDocumentsTableComponentProps> =
}: IDocumentsTableComponentProps) => {
const styles = useDocumentsTabStyles();
const sortedRowsRef = React.useRef(null);
const [columnSizingOptions, setColumnSizingOptions] = React.useState<TableColumnSizingOptions>(() => {
const columnSizesMap: ColumnSizesMap = readSubComponentState(SubComponentName.ColumnSizes, collection, {});
const columnSizesPx: TableColumnSizingOptions = {};
@@ -293,42 +291,22 @@ export const DocumentsTableComponent: React.FC<IDocumentsTableComponentProps> =
const [selectionStartIndex, setSelectionStartIndex] = React.useState<number>(INITIAL_SELECTED_ROW_INDEX);
const onTableCellClicked = useCallback(
(e: React.MouseEvent | undefined, index: number, rowId: TableRowId) => {
(e: React.MouseEvent, index: number) => {
if (isSelectionDisabled) {
// Only allow click
onSelectedRowsChange(new Set<TableRowId>([rowId]));
onSelectedRowsChange(new Set<TableRowId>([index]));
setSelectionStartIndex(index);
return;
}
// The selection helper computes in the index space (what's visible to the user in the table, ie the sorted array).
// selectedRows is in the rowId space (the index of the original unsorted array), so it must be converted to the index space.
const selectedRowsIndex = new Set<number>();
selectedRows.forEach((rowId) => {
const index = sortedRowsRef.current.findIndex((row: TableRowData) => row.rowId === rowId);
if (index !== -1) {
selectedRowsIndex.add(index);
} else {
// This should never happen
console.error(`Row with rowId ${rowId} not found in sorted rows`);
}
});
const result = selectionHelper(
selectedRowsIndex,
selectedRows as Set<number>,
index,
e && isEnvironmentShiftPressed(e),
e && isEnvironmentCtrlPressed(e),
isEnvironmentShiftPressed(e),
isEnvironmentCtrlPressed(e),
selectionStartIndex,
);
// Convert selectionHelper result from index space back to rowId space
const selectedRowIds = new Set<TableRowId>();
result.selection.forEach((index) => {
selectedRowIds.add(sortedRowsRef.current[index].rowId);
});
onSelectedRowsChange(selectedRowIds);
onSelectedRowsChange(result.selection);
if (result.selectionStartIndex !== undefined) {
setSelectionStartIndex(result.selectionStartIndex);
}
@@ -342,20 +320,16 @@ export const DocumentsTableComponent: React.FC<IDocumentsTableComponentProps> =
* - a key is down and the cell is clicked by the mouse
*/
const onIdClicked = useCallback(
(e: React.KeyboardEvent<Element>, rowId: TableRowId) => {
(e: React.KeyboardEvent<Element>, index: number) => {
if (e.key === NormalizedEventKey.Enter || e.key === NormalizedEventKey.Space) {
onSelectedRowsChange(new Set<TableRowId>([rowId]));
onSelectedRowsChange(new Set<TableRowId>([index]));
}
},
[onSelectedRowsChange],
);
const RenderRow = ({ index, style, data }: ReactWindowRenderFnProps) => {
// WARNING: because the table sorts the data, 'index' is not the same as 'rowId'
// The rowId is the index of the item in the original array,
// while the index is the index of the item in the sorted array
const { item, selected, appearance, onClick, onKeyDown, rowId } = data[index];
const { item, selected, appearance, onClick, onKeyDown } = data[index];
return (
<TableRow
aria-rowindex={index + 2}
@@ -385,8 +359,8 @@ export const DocumentsTableComponent: React.FC<IDocumentsTableComponentProps> =
key={column.columnId}
className={styles.tableCell}
// When clicking on a cell with shift/ctrl key, onKeyDown is called instead of onClick.
onClick={(e: React.MouseEvent<Element, MouseEvent>) => onTableCellClicked(e, index, rowId)}
onKeyPress={(e: React.KeyboardEvent<Element>) => onIdClicked(e, rowId)}
onClick={(e: React.MouseEvent<Element, MouseEvent>) => onTableCellClicked(e, index)}
onKeyPress={(e: React.KeyboardEvent<Element>) => onIdClicked(e, index)}
{...columnSizing.getTableCellProps(column.columnId)}
tabIndex={column.columnId === "id" ? 0 : -1}
>
@@ -446,19 +420,6 @@ export const DocumentsTableComponent: React.FC<IDocumentsTableComponentProps> =
}),
);
// Store the sorted rows in a ref which won't trigger a re-render (as opposed to a state)
sortedRowsRef.current = rows;
// If there are no selected rows, auto select the first row
const [autoSelectFirstDoc, setAutoSelectFirstDoc] = React.useState<boolean>(true);
React.useEffect(() => {
if (autoSelectFirstDoc && sortedRowsRef.current?.length > 0 && selectedRows.size === 0) {
setAutoSelectFirstDoc(false);
const DOC_INDEX_TO_SELECT = 0;
onTableCellClicked(undefined, DOC_INDEX_TO_SELECT, sortedRowsRef.current[DOC_INDEX_TO_SELECT].rowId);
}
}, [selectedRows, onTableCellClicked, autoSelectFirstDoc]);
const toggleAllKeydown = React.useCallback(
(e: React.KeyboardEvent<HTMLDivElement>) => {
if (e.key === " ") {

View File

@@ -90,6 +90,7 @@ exports[`Documents tab (noSql API) when rendered should render the page 1`] = `
isRowSelectionDisabled={true}
items={[]}
onColumnSelectionChange={[Function]}
onItemClicked={[Function]}
onRefreshTable={[Function]}
onSelectedRowsChange={[Function]}
selectedColumnIds={
@@ -97,7 +98,11 @@ exports[`Documents tab (noSql API) when rendered should render the page 1`] = `
"id",
]
}
selectedRows={Set {}}
selectedRows={
Set {
0,
}
}
/>
</div>
</div>

View File

@@ -39,6 +39,7 @@ exports[`DocumentsTableComponent should not render selection column when isSelec
},
]
}
onItemClicked={[Function]}
onRefreshTable={[Function]}
onSelectedRowsChange={[Function]}
selectedColumnIds={[]}
@@ -503,6 +504,7 @@ exports[`DocumentsTableComponent should render documents and partition keys in h
},
]
}
onItemClicked={[Function]}
onRefreshTable={[Function]}
onSelectedRowsChange={[Function]}
selectedColumnIds={[]}

View File

@@ -1,5 +1,7 @@
import { configContext } from "ConfigContext";
import { useMongoProxyEndpoint } from "Common/MongoProxyClient";
import React, { Component } from "react";
import * as Constants from "../../../Common/Constants";
import { configContext } from "../../../ConfigContext";
import * as ViewModels from "../../../Contracts/ViewModels";
import { Action, ActionModifiers } from "../../../Shared/Telemetry/TelemetryConstants";
import * as TelemetryProcessor from "../../../Shared/Telemetry/TelemetryProcessor";
@@ -48,13 +50,15 @@ export default class MongoShellTabComponent extends Component<
IMongoShellTabComponentStates
> {
private _logTraces: Map<string, number>;
private _useMongoProxyEndpoint: boolean;
constructor(props: IMongoShellTabComponentProps) {
super(props);
this._logTraces = new Map();
this._useMongoProxyEndpoint = useMongoProxyEndpoint(Constants.MongoProxyApi.LegacyMongoShell);
this.state = {
url: getMongoShellUrl(),
url: getMongoShellUrl(this._useMongoProxyEndpoint),
};
props.onMongoShellTabAccessor({
@@ -109,8 +113,17 @@ export default class MongoShellTabComponent extends Component<
const resourceId = databaseAccount?.id;
const accountName = databaseAccount?.name;
const documentEndpoint = databaseAccount?.properties.mongoEndpoint || databaseAccount?.properties.documentEndpoint;
const mongoEndpoint =
documentEndpoint.substr(
Constants.MongoDBAccounts.protocol.length + 3,
documentEndpoint.length -
(Constants.MongoDBAccounts.protocol.length + 2 + Constants.MongoDBAccounts.defaultPort.length),
) + Constants.MongoDBAccounts.defaultPort.toString();
const databaseId = this.props.collection.databaseId;
const collectionId = this.props.collection.id();
const apiEndpoint = this._useMongoProxyEndpoint
? configContext.MONGO_PROXY_ENDPOINT
: configContext.BACKEND_ENDPOINT;
const encryptedAuthToken: string = userContext.accessToken;
shellIframe.contentWindow.postMessage(
@@ -119,12 +132,12 @@ export default class MongoShellTabComponent extends Component<
data: {
resourceId: resourceId,
accountName: accountName,
mongoEndpoint: documentEndpoint,
mongoEndpoint: this._useMongoProxyEndpoint ? documentEndpoint : mongoEndpoint,
authorization: authorization,
databaseId: databaseId,
collectionId: collectionId,
encryptedAuthToken: encryptedAuthToken,
apiEndpoint: configContext.MONGO_PROXY_ENDPOINT,
apiEndpoint: apiEndpoint,
},
},
window.origin,
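For readability, a hedged worked example of the substring arithmetic used to build mongoEndpoint above, assuming Constants.MongoDBAccounts.protocol is "https" and defaultPort is "10255" (both values are assumptions used only for illustration):

const exampleDocumentEndpoint = "https://myaccount.documents.azure.com:443/";
// substr(5 + 3, length - (5 + 2 + 5)) drops the "https://" prefix and the trailing "443/"
// while keeping the colon, leaving "myaccount.documents.azure.com:".
const exampleMongoEndpoint = exampleDocumentEndpoint.substr(8, exampleDocumentEndpoint.length - 12) + "10255";
// exampleMongoEndpoint === "myaccount.documents.azure.com:10255"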

View File

@@ -2,6 +2,8 @@ import { Platform, resetConfigContext, updateConfigContext } from "../../../Conf
import { updateUserContext, userContext } from "../../../UserContext";
import { getMongoShellUrl } from "./getMongoShellUrl";
const mongoBackendEndpoint = "https://localhost:1234";
describe("getMongoShellUrl", () => {
let queryString = "";
@@ -9,6 +11,7 @@ describe("getMongoShellUrl", () => {
resetConfigContext();
updateConfigContext({
BACKEND_ENDPOINT: mongoBackendEndpoint,
platform: Platform.Hosted,
});
@@ -34,7 +37,12 @@ describe("getMongoShellUrl", () => {
queryString = `resourceId=${userContext.databaseAccount.id}&accountName=${userContext.databaseAccount.name}&mongoEndpoint=${userContext.databaseAccount.properties.documentEndpoint}`;
});
it("should return /index.html by default", () => {
expect(getMongoShellUrl().toString()).toContain(`/index.html?${queryString}`);
it("should return /indexv2.html by default", () => {
expect(getMongoShellUrl().toString()).toContain(`/indexv2.html?${queryString}`);
});
it("should return /index.html when useMongoProxyEndpoint is true", () => {
const useMongoProxyEndpoint: boolean = true;
expect(getMongoShellUrl(useMongoProxyEndpoint).toString()).toContain(`/index.html?${queryString}`);
});
});

View File

@@ -1,11 +1,11 @@
import { userContext } from "../../../UserContext";
export function getMongoShellUrl(): string {
export function getMongoShellUrl(useMongoProxyEndpoint?: boolean): string {
const { databaseAccount: account } = userContext;
const resourceId = account?.id;
const accountName = account?.name;
const mongoEndpoint = account?.properties?.mongoEndpoint || account?.properties?.documentEndpoint;
const queryString = `resourceId=${resourceId}&accountName=${accountName}&mongoEndpoint=${mongoEndpoint}`;
return `/mongoshell/index.html?${queryString}`;
return useMongoProxyEndpoint ? `/mongoshell/index.html?${queryString}` : `/mongoshell/indexv2.html?${queryString}`;
}

View File

@@ -1,5 +1,8 @@
import { IMessageBarStyles, MessageBar, MessageBarButton, MessageBarType } from "@fluentui/react";
import { CassandraProxyEndpoints, MongoProxyEndpoints } from "Common/Constants";
import { sendMessage } from "Common/MessageHandler";
import { configContext } from "ConfigContext";
import { IpRule } from "Contracts/DataModels";
import { MessageTypes } from "Contracts/ExplorerContracts";
import { CollectionTabKind } from "Contracts/ViewModels";
import Explorer from "Explorer/Explorer";
@@ -14,6 +17,7 @@ import { VcoreMongoQuickstartTab } from "Explorer/Tabs/VCoreMongoQuickstartTab";
import { LayoutConstants } from "Explorer/Theme/ThemeUtil";
import { KeyboardAction, KeyboardActionGroup, useKeyboardActionGroup } from "KeyboardShortcuts";
import { userContext } from "UserContext";
import { CassandraProxyOutboundIPs, MongoProxyOutboundIPs, PortalBackendIPs } from "Utils/EndpointUtils";
import { useTeachingBubble } from "hooks/useTeachingBubble";
import ko from "knockout";
import React, { MutableRefObject, useEffect, useRef, useState } from "react";
@@ -32,6 +36,10 @@ interface TabsProps {
export const Tabs = ({ explorer }: TabsProps): JSX.Element => {
const { openedTabs, openedReactTabs, activeTab, activeReactTab, networkSettingsWarning } = useTabs();
const [
showMongoAndCassandraProxiesNetworkSettingsWarningState,
setShowMongoAndCassandraProxiesNetworkSettingsWarningState,
] = useState<boolean>(showMongoAndCassandraProxiesNetworkSettingsWarning());
const setKeyboardHandlers = useKeyboardActionGroup(KeyboardActionGroup.TABS);
useEffect(() => {
@@ -75,6 +83,18 @@ export const Tabs = ({ explorer }: TabsProps): JSX.Element => {
{networkSettingsWarning}
</MessageBar>
)}
{showMongoAndCassandraProxiesNetworkSettingsWarningState && (
<MessageBar
messageBarType={MessageBarType.warning}
styles={defaultMessageBarStyles}
onDismiss={() => {
setShowMongoAndCassandraProxiesNetworkSettingsWarningState(false);
}}
>
{`We have migrated our middleware to new infrastructure. To avoid issues with Data Explorer access, please
re-enable "Allow access from Azure Portal" on the Networking blade for your account.`}
</MessageBar>
)}
<div className="nav-tabs-margin">
<ul className="nav nav-tabs level navTabHeight" id="navTabs" role="tablist">
{openedReactTabs.map((tab) => (
@@ -319,3 +339,57 @@ const getReactTabContent = (activeReactTab: ReactTabKind, explorer: Explorer): J
throw Error(`Unsupported tab kind ${ReactTabKind[activeReactTab]}`);
}
};
const showMongoAndCassandraProxiesNetworkSettingsWarning = (): boolean => {
const ipRules: IpRule[] = userContext.databaseAccount?.properties?.ipRules;
if (
((userContext.apiType === "Mongo" && configContext.MONGO_PROXY_ENDPOINT !== MongoProxyEndpoints.Local) ||
(userContext.apiType === "Cassandra" &&
configContext.CASSANDRA_PROXY_ENDPOINT !== CassandraProxyEndpoints.Development)) &&
ipRules?.length
) {
const legacyPortalBackendIPs: string[] = PortalBackendIPs[configContext.BACKEND_ENDPOINT];
const ipAddressesFromIPRules: string[] = ipRules.map((ipRule) => ipRule.ipAddressOrRange);
const ipRulesIncludeLegacyPortalBackend: boolean = legacyPortalBackendIPs.every((legacyPortalBackendIP: string) =>
ipAddressesFromIPRules.includes(legacyPortalBackendIP),
);
if (!ipRulesIncludeLegacyPortalBackend) {
return false;
}
if (userContext.apiType === "Mongo") {
const isProdOrMpacMongoProxyEndpoint: boolean = [MongoProxyEndpoints.Mpac, MongoProxyEndpoints.Prod].includes(
configContext.MONGO_PROXY_ENDPOINT,
);
const mongoProxyOutboundIPs: string[] = isProdOrMpacMongoProxyEndpoint
? [...MongoProxyOutboundIPs[MongoProxyEndpoints.Mpac], ...MongoProxyOutboundIPs[MongoProxyEndpoints.Prod]]
: MongoProxyOutboundIPs[configContext.MONGO_PROXY_ENDPOINT];
const ipRulesIncludeMongoProxy: boolean = mongoProxyOutboundIPs.every((mongoProxyOutboundIP: string) =>
ipAddressesFromIPRules.includes(mongoProxyOutboundIP),
);
return !ipRulesIncludeMongoProxy;
} else if (userContext.apiType === "Cassandra") {
const isProdOrMpacCassandraProxyEndpoint: boolean = [
CassandraProxyEndpoints.Mpac,
CassandraProxyEndpoints.Prod,
].includes(configContext.CASSANDRA_PROXY_ENDPOINT);
const cassandraProxyOutboundIPs: string[] = isProdOrMpacCassandraProxyEndpoint
? [
...CassandraProxyOutboundIPs[CassandraProxyEndpoints.Mpac],
...CassandraProxyOutboundIPs[CassandraProxyEndpoints.Prod],
]
: CassandraProxyOutboundIPs[configContext.CASSANDRA_PROXY_ENDPOINT];
const ipRulesIncludeCassandraProxy: boolean = cassandraProxyOutboundIPs.every(
(cassandraProxyOutboundIP: string) => ipAddressesFromIPRules.includes(cassandraProxyOutboundIP),
);
return !ipRulesIncludeCassandraProxy;
}
}
return false;
};
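To make the check above concrete, an illustrative example only (the IPs assume BACKEND_ENDPOINT is the public-cloud legacy backend and a Mongo account pointed at the Mpac/Prod proxy):

const exampleIpRules: IpRule[] = [
  { ipAddressOrRange: "104.42.195.92" }, // legacy portal backend outbound IP
  { ipAddressOrRange: "40.76.54.131" }, // legacy portal backend outbound IP
];
// Both legacy backend IPs are allow-listed but none of the Mongo proxy outbound IPs are,
// so the helper returns true and the migration warning bar is rendered.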

View File

@@ -1,11 +1,13 @@
import { useBoolean } from "@fluentui/react-hooks";
import { userContext } from "UserContext";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import * as React from "react";
import ConnectImage from "../../../../images/HdeConnectCosmosDB.svg";
import ErrorImage from "../../../../images/error.svg";
import { AuthType } from "../../../AuthType";
import { HttpHeaders } from "../../../Common/Constants";
import { BackendApi, HttpHeaders } from "../../../Common/Constants";
import { configContext } from "../../../ConfigContext";
import { GenerateTokenResponse } from "../../../Contracts/DataModels";
import { isResourceTokenConnectionString } from "../Helpers/ResourceTokenUtils";
interface Props {
@@ -17,6 +19,10 @@ interface Props {
}
export const fetchEncryptedToken = async (connectionString: string): Promise<string> => {
if (!useNewPortalBackendEndpoint(BackendApi.GenerateToken)) {
return await fetchEncryptedToken_ToBeDeprecated(connectionString);
}
const headers = new Headers();
headers.append(HttpHeaders.connectionString, connectionString);
const url = configContext.PORTAL_BACKEND_ENDPOINT + "/api/connectionstring/token/generatetoken";
@@ -29,11 +35,28 @@ export const fetchEncryptedToken = async (connectionString: string): Promise<str
return decodeURIComponent(encryptedTokenResponse);
};
export const fetchEncryptedToken_ToBeDeprecated = async (connectionString: string): Promise<string> => {
const headers = new Headers();
headers.append(HttpHeaders.connectionString, connectionString);
const url = configContext.BACKEND_ENDPOINT + "/api/guest/tokens/generateToken";
const response = await fetch(url, { headers, method: "POST" });
if (!response.ok) {
throw response;
}
// This API has a quirk where it must be parsed twice
const result: GenerateTokenResponse = JSON.parse(await response.json());
return decodeURIComponent(result.readWrite || result.read);
};
export const isAccountRestrictedForConnectionStringLogin = async (connectionString: string): Promise<boolean> => {
const headers = new Headers();
headers.append(HttpHeaders.connectionString, connectionString);
const url: string = `${configContext.PORTAL_BACKEND_ENDPOINT}/api/guest/accountrestrictions/checkconnectionstringlogin`;
const backendEndpoint: string = useNewPortalBackendEndpoint(BackendApi.AccountRestrictions)
? configContext.PORTAL_BACKEND_ENDPOINT
: configContext.BACKEND_ENDPOINT;
const url = backendEndpoint + "/api/guest/accountrestrictions/checkconnectionstringlogin";
const response = await fetch(url, { headers, method: "POST" });
if (!response.ok) {
throw response;

View File

@@ -75,12 +75,12 @@ export interface UserContext {
readonly masterKey?: string;
readonly subscriptionId?: string;
readonly tenantId?: string;
readonly userName?: string;
readonly resourceGroup?: string;
readonly databaseAccount?: DatabaseAccount;
readonly endpoint?: string;
readonly aadToken?: string;
readonly accessToken?: string;
readonly armToken?: string;
readonly authorizationToken?: string;
readonly resourceToken?: string;
readonly subscriptionType?: SubscriptionType;

View File

@@ -91,8 +91,7 @@ export async function acquireMsalTokenForAccount(
// This will eventually throw InteractionRequiredAuthError if silent is true, we won't handle it here.
const loginRequest = {
scopes: [hrefEndpoint],
loginHint: user_hint ?? userContext.userName,
authority: userContext.tenantId ? `${configContext.AAD_ENDPOINT}${userContext.tenantId}` : undefined,
loginHint: user_hint,
};
try {
if (silent) {
@@ -133,8 +132,7 @@ export async function acquireMsalTokenForAccount(
account: msalAccount || null,
forceRefresh: true,
scopes: [hrefEndpoint],
loginHint: user_hint ?? userContext.userName,
authority: `${configContext.AAD_ENDPOINT}${userContext.tenantId ?? msalAccount.tenantId}`,
authority: `${configContext.AAD_ENDPOINT}${msalAccount.tenantId}`,
};
return acquireTokenWithMsal(msalInstance, tokenRequest, silent);
}

View File

@@ -1,4 +1,11 @@
import { CassandraProxyEndpoints, JunoEndpoints, MongoProxyEndpoints, PortalBackendEndpoints } from "Common/Constants";
import {
BackendApi,
CassandraProxyEndpoints,
JunoEndpoints,
MongoProxyEndpoints,
PortalBackendEndpoints,
} from "Common/Constants";
import { configContext } from "ConfigContext";
import * as Logger from "../Common/Logger";
export function validateEndpoint(
@@ -51,6 +58,26 @@ export const allowedAadEndpoints: ReadonlyArray<string> = [
"https://login.partner.microsoftonline.cn/",
];
export const defaultAllowedBackendEndpoints: ReadonlyArray<string> = [
"https://main.documentdb.ext.azure.com",
"https://main.documentdb.ext.azure.cn",
"https://main.documentdb.ext.azure.us",
"https://main.cosmos.ext.azure",
"https://localhost:12901",
"https://localhost:1234",
];
export const PortalBackendIPs: { [key: string]: string[] } = {
"https://main.documentdb.ext.azure.com": ["104.42.195.92", "40.76.54.131"],
// DE doesn't talk to prod2 (main2) but it might be added
//"https://main2.documentdb.ext.azure.com": ["104.42.196.69"],
"https://main.documentdb.ext.azure.cn": ["139.217.8.252"],
"https://main.documentdb.ext.azure.us": ["52.244.48.71"],
// Add ussec and usnat when endpoint address is known:
//ussec: ["29.26.26.67", "29.26.26.66"],
//usnat: ["7.28.202.68"],
};
export const PortalBackendOutboundIPs: { [key: string]: string[] } = {
[PortalBackendEndpoints.Mpac]: ["13.91.105.215", "4.210.172.107"],
[PortalBackendEndpoints.Prod]: ["13.88.56.148", "40.91.218.243"],
@@ -65,22 +92,22 @@ export const MongoProxyOutboundIPs: { [key: string]: string[] } = {
[MongoProxyEndpoints.Mooncake]: ["52.131.240.99", "143.64.61.130"],
};
export const defaultAllowedPortalBackendEndpoints: ReadonlyArray<string> = [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
PortalBackendEndpoints.Fairfax,
PortalBackendEndpoints.Mooncake,
];
export const defaultAllowedMongoProxyEndpoints: ReadonlyArray<string> = [
MongoProxyEndpoints.Development,
MongoProxyEndpoints.Local,
MongoProxyEndpoints.Mpac,
MongoProxyEndpoints.Prod,
MongoProxyEndpoints.Fairfax,
MongoProxyEndpoints.Mooncake,
];
export const allowedMongoProxyEndpoints_ToBeDeprecated: ReadonlyArray<string> = [
"https://main.documentdb.ext.azure.com",
"https://main.documentdb.ext.azure.cn",
"https://main.documentdb.ext.azure.us",
"https://main.cosmos.ext.azure",
"https://localhost:12901",
];
export const defaultAllowedCassandraProxyEndpoints: ReadonlyArray<string> = [
CassandraProxyEndpoints.Development,
CassandraProxyEndpoints.Mpac,
@@ -89,6 +116,14 @@ export const defaultAllowedCassandraProxyEndpoints: ReadonlyArray<string> = [
CassandraProxyEndpoints.Mooncake,
];
export const allowedCassandraProxyEndpoints_ToBeDeprecated: ReadonlyArray<string> = [
"https://main.documentdb.ext.azure.com",
"https://main.documentdb.ext.azure.cn",
"https://main.documentdb.ext.azure.us",
"https://main.cosmos.ext.azure",
"https://localhost:12901",
];
export const CassandraProxyOutboundIPs: { [key: string]: string[] } = {
[CassandraProxyEndpoints.Mpac]: ["40.113.96.14", "104.42.11.145"],
[CassandraProxyEndpoints.Prod]: ["137.117.230.240", "168.61.72.237"],
@@ -120,3 +155,53 @@ export const allowedJunoOrigins: ReadonlyArray<string> = [
];
export const allowedNotebookServerUrls: ReadonlyArray<string> = [];
//
// Temporary function to determine if a portal backend API is supported by the
// new backend in this environment.
//
// TODO: Remove this function once new backend migration is completed for all environments.
//
export function useNewPortalBackendEndpoint(backendApi: string): boolean {
// This maps backend APIs to the environments supported by the new backend.
const newBackendApiEnvironmentMap: { [key: string]: string[] } = {
[BackendApi.GenerateToken]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.PortalSettings]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.AccountRestrictions]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.RuntimeProxy]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
[BackendApi.DisallowedLocations]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
PortalBackendEndpoints.Fairfax,
PortalBackendEndpoints.Mooncake,
],
[BackendApi.SampleData]: [
PortalBackendEndpoints.Development,
PortalBackendEndpoints.Mpac,
PortalBackendEndpoints.Prod,
],
};
if (!newBackendApiEnvironmentMap[backendApi] || !configContext.PORTAL_BACKEND_ENDPOINT) {
return false;
}
return newBackendApiEnvironmentMap[backendApi].includes(configContext.PORTAL_BACKEND_ENDPOINT);
}
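A minimal sketch of the call pattern the fetch helpers in this change follow (resolveBackendEndpoint is an illustrative name, not an existing export): prefer the new portal backend when the API is supported in the current environment, otherwise keep targeting the legacy BACKEND_ENDPOINT.

export const resolveBackendEndpoint = (backendApi: string): string =>
  useNewPortalBackendEndpoint(backendApi) ? configContext.PORTAL_BACKEND_ENDPOINT : configContext.BACKEND_ENDPOINT;

// e.g. const url = resolveBackendEndpoint(BackendApi.AccountRestrictions) + "/api/guest/accountrestrictions/checkconnectionstringlogin";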

View File

@@ -3,6 +3,7 @@ import { useBoolean } from "@fluentui/react-hooks";
import * as React from "react";
import { configContext } from "../ConfigContext";
import { acquireTokenWithMsal, getMsalInstance } from "../Utils/AuthorizationUtils";
import { updateUserContext } from "UserContext";
const msalInstance = await getMsalInstance();
@@ -79,7 +80,7 @@ export function useAADAuth(): ReturnType {
authority: `${configContext.AAD_ENDPOINT}${tenantId}`,
scopes: [`${configContext.ARM_ENDPOINT}/.default`],
});
updateUserContext({ armToken: armToken });
setArmToken(armToken);
setAuthFailure(null);
} catch (error) {

View File

@@ -1,8 +1,8 @@
import { HttpHeaders } from "Common/Constants";
import { QueryRequestOptions, QueryResponse } from "Contracts/AzureResourceGraph";
import useSWR from "swr";
import { configContext } from "../ConfigContext";
import { DatabaseAccount } from "../Contracts/DataModels";
import { acquireTokenWithMsal, getMsalInstance } from "Utils/AuthorizationUtils";
import React from "react";
import { updateUserContext, userContext } from "UserContext";
/* eslint-disable @typescript-eslint/no-explicit-any */
interface AccountListResult {
@@ -34,11 +34,10 @@ export async function fetchDatabaseAccounts(subscriptionId: string, accessToken:
}
export async function fetchDatabaseAccountsFromGraph(
subscriptionId: string,
accessToken: string,
subscriptionId: string
): Promise<DatabaseAccount[]> {
const headers = new Headers();
const bearer = `Bearer ${accessToken}`;
const bearer = `Bearer ${userContext.armToken}`;
headers.append("Authorization", bearer);
headers.append(HttpHeaders.contentType, "application/json");
@@ -46,8 +45,9 @@ export async function fetchDatabaseAccountsFromGraph(
const apiVersion = "2021-03-01";
const managementResourceGraphAPIURL = `${configContext.ARM_ENDPOINT}providers/Microsoft.ResourceGraph/resources?api-version=${apiVersion}`;
const databaseAccounts: DatabaseAccount[] = [];
let databaseAccounts: DatabaseAccount[] = [];
let skipToken: string;
console.log("Old ARM Token", userContext.armToken);
do {
const body = {
query: databaseAccountsQuery,
@@ -74,21 +74,166 @@ export async function fetchDatabaseAccountsFromGraph(
if (!response.ok) {
throw new Error(await response.text());
}
const queryResponse: QueryResponse = (await response.json()) as QueryResponse;
skipToken = queryResponse.$skipToken;
queryResponse.data?.map((databaseAccount: any) => {
databaseAccounts.push(databaseAccount as DatabaseAccount);
});
// else {
// try{
// console.log("Token expired");
// databaseAccounts = await acquireNewTokenAndRetry(body);
// }
// catch (error) {
// throw new Error(error);
// }
//}
} while (skipToken);
return databaseAccounts.sort((a, b) => a.name.localeCompare(b.name));
}
export function useDatabaseAccounts(subscriptionId: string, armToken: string): DatabaseAccount[] | undefined {
export function useDatabaseAccounts(subscriptionId: string): DatabaseAccount[] | undefined {
const { data } = useSWR(
() => (armToken && subscriptionId ? ["databaseAccounts", subscriptionId, armToken] : undefined),
(_, subscriptionId, armToken) => fetchDatabaseAccountsFromGraph(subscriptionId, armToken),
() => (subscriptionId ? ["databaseAccounts", subscriptionId] : undefined),
(_, subscriptionId) => runCommand(fetchDatabaseAccountsFromGraph, subscriptionId),
);
return data;
}
// Define the types for your responses
interface DatabaseAccount {
name: string;
id: string;
// Add other relevant fields as per your use case
}
interface Subscription {
displayName: string;
subscriptionId: string;
state: string;
}
interface QueryRequestOptions {
$top?: number;
$skipToken?: string;
$allowPartialScopes?: boolean;
}
// Define the configuration context and headers if not already defined
const configContext = {
ARM_ENDPOINT: 'https://management.azure.com/',
AAD_ENDPOINT: 'https://login.microsoftonline.com/'
};
interface QueryResponse {
data?: any[];
$skipToken?: string;
}
export async function runCommand<T>(
fn: (...args: any[]) => Promise<T>,
...args: any[]
): Promise<T> {
try {
// Attempt to execute the function passed as an argument
const result = await fn(...args);
console.log('Successfully executed function:', result);
return result;
} catch (error) {
// Handle any error that is thrown during the execution of the function
//(error.code === "ExpiredAuthenticationToken")
if (error) {
console.log('Creating new token');
const msalInstance = await getMsalInstance();
const cachedAccount = msalInstance.getAllAccounts()?.[0];
const cachedTenantId = localStorage.getItem("cachedTenantId");
msalInstance.setActiveAccount(cachedAccount);
const newAccessToken = await acquireTokenWithMsal(msalInstance, {
authority: `${configContext.AAD_ENDPOINT}${cachedTenantId}`,
scopes: [`${configContext.ARM_ENDPOINT}/.default`],
});
console.log("Latest ARM Token", userContext.armToken);
updateUserContext({armToken: newAccessToken});
const result = await fn(...args);
return result;
}
else {
console.error('An error occurred:', error.message);
throw error;
}
}
}
// Example usage of runCommand (illustrative placeholders only, kept commented out):
// const accessToken = "your-access-token";
// const subscriptionId = "your-subscription-id";
// runCommand(fetchDatabaseAccountsFromGraph, subscriptionId, accessToken);
// runCommand(fetchSubscriptionsFromGraph, accessToken);
async function acquireNewTokenAndRetry(body: any) : Promise<DatabaseAccount[]> {
try {
const msalInstance = await getMsalInstance();
const cachedAccount = msalInstance.getAllAccounts()?.[0];
const cachedTenantId = localStorage.getItem("cachedTenantId");
// const [tenantId, setTenantId] = React.useState<string>(cachedTenantId);
msalInstance.setActiveAccount(cachedAccount);
const newAccessToken = await acquireTokenWithMsal(msalInstance, {
authority: `${configContext.AAD_ENDPOINT}${cachedTenantId}`,
scopes: [`${configContext.ARM_ENDPOINT}/.default`],
});
console.log("New ARM Token", newAccessToken);
const newBearer = `Bearer ${newAccessToken}`;
const newHeaders = new Headers();
newHeaders.append("Authorization", newBearer);
newHeaders.append(HttpHeaders.contentType, "application/json");
const apiVersion = "2021-03-01";
const managementResourceGraphAPIURL = `${configContext.ARM_ENDPOINT}providers/Microsoft.ResourceGraph/resources?api-version=${apiVersion}`;
const databaseAccounts: DatabaseAccount[] = [];
let skipToken: string;
// Retry the request with the new token
const response = await fetch(managementResourceGraphAPIURL, {
method: "POST",
headers: newHeaders,
body: JSON.stringify(body),
});
if (response.ok) {
// Handle successful response with new token
const queryResponse: QueryResponse = await response.json();
skipToken = queryResponse.$skipToken;
queryResponse.data?.forEach((databaseAccount: any) => {
databaseAccounts.push(databaseAccount as DatabaseAccount);
});
return databaseAccounts;
} else {
throw new Error(`Failed to fetch data after acquiring new token. Status: ${response.status}, ${await response.text()}`);
}
} catch (error) {
console.error("Error acquiring new token and retrying:", error);
throw error;
}
}

View File

@@ -5,9 +5,11 @@ import { FabricMessageTypes } from "Contracts/FabricMessageTypes";
import { FABRIC_RPC_VERSION, FabricMessageV2 } from "Contracts/FabricMessagesContract";
import Explorer from "Explorer/Explorer";
import { useDataPlaneRbac } from "Explorer/Panes/SettingsPane/SettingsPane";
import { useSelectedNode } from "Explorer/useSelectedNode";
import { scheduleRefreshDatabaseResourceToken } from "Platform/Fabric/FabricUtil";
import { LocalStorageUtility, StorageKey } from "Shared/StorageUtility";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { getNetworkSettingsWarningMessage } from "Utils/NetworkUtility";
import { logConsoleError } from "Utils/NotificationConsoleUtils";
import { useQueryCopilot } from "hooks/useQueryCopilot";
@@ -17,6 +19,7 @@ import { AuthType } from "../AuthType";
import { AccountKind, Flights } from "../Common/Constants";
import { normalizeArmEndpoint } from "../Common/EnvironmentUtility";
import * as Logger from "../Common/Logger";
import { handleCachedDataMessage, sendMessage, sendReadyMessage } from "../Common/MessageHandler";
import { Platform, configContext, updateConfigContext } from "../ConfigContext";
import { ActionType, DataExplorerAction, TabKind } from "../Contracts/ActionContracts";
@@ -463,6 +466,7 @@ export async function fetchAndUpdateKeys(subscriptionId: string, resourceGroup:
Logger.logInfo(`Fetching keys for ${userContext.apiType} account ${account}`, "Explorer/fetchAndUpdateKeys");
let keys;
try {
keys = await listKeys(subscriptionId, resourceGroup, account);
Logger.logInfo(`Keys fetched for ${userContext.apiType} account ${account}`, "Explorer/fetchAndUpdateKeys");
updateUserContext({
@@ -486,6 +490,23 @@ export async function fetchAndUpdateKeys(subscriptionId: string, resourceGroup:
);
throw error;
}
if (error.code === "AuthorizationFailed") {
keys = await getReadOnlyKeys(subscriptionId, resourceGroup, account);
Logger.logInfo(
`Read only Keys fetched for ${userContext.apiType} account ${account}`,
"Explorer/fetchAndUpdateKeys",
);
updateUserContext({
masterKey: keys.primaryReadonlyMasterKey,
});
} else {
logConsoleError(`Error occurred fetching keys for the account: ${error.message}`);
Logger.logError(
`Error during fetching keys or updating user context: ${error} for ${userContext.apiType} account ${account}`,
"Explorer/fetchAndUpdateKeys",
);
throw error;
}
}
}
@@ -535,6 +556,14 @@ async function configurePortal(): Promise<Explorer> {
const inputs = message?.inputs;
const openAction = message?.openAction;
if (inputs) {
if (
configContext.BACKEND_ENDPOINT &&
configContext.platform === Platform.Portal &&
process.env.NODE_ENV === "development"
) {
inputs.extensionEndpoint = configContext.PROXY_PATH;
}
updateContextsFromPortalMessage(inputs);
const { databaseAccount: account, subscriptionId, resourceGroup } = userContext;
@@ -658,10 +687,19 @@ function updateAADEndpoints(portalEnv: PortalEnv) {
}
function updateContextsFromPortalMessage(inputs: DataExplorerInputsFrame) {
if (
configContext.BACKEND_ENDPOINT &&
configContext.platform === Platform.Portal &&
process.env.NODE_ENV === "development"
) {
inputs.extensionEndpoint = configContext.PROXY_PATH;
}
const authorizationToken = inputs.authorizationToken || "";
const databaseAccount = inputs.databaseAccount;
updateConfigContext({
BACKEND_ENDPOINT: inputs.extensionEndpoint || configContext.BACKEND_ENDPOINT,
ARM_ENDPOINT: normalizeArmEndpoint(inputs.csmEndpoint || configContext.ARM_ENDPOINT),
MONGO_PROXY_ENDPOINT: inputs.mongoProxyEndpoint,
CASSANDRA_PROXY_ENDPOINT: inputs.cassandraProxyEndpoint,
@@ -677,7 +715,6 @@ function updateContextsFromPortalMessage(inputs: DataExplorerInputsFrame) {
subscriptionId: inputs.subscriptionId,
tenantId: inputs.tenantId,
subscriptionType: inputs.subscriptionType,
userName: inputs.userName,
quotaId: inputs.quotaId,
portalEnv: inputs.serverId as PortalEnv,
hasWriteAccess: inputs.hasWriteAccess ?? true,
@@ -766,7 +803,16 @@ async function updateContextForSampleData(explorer: Explorer): Promise<void> {
return;
}
const url: string = createUri(configContext.PORTAL_BACKEND_ENDPOINT, "/api/sampledata");
let url: string;
if (useNewPortalBackendEndpoint(Constants.BackendApi.SampleData)) {
url = createUri(configContext.PORTAL_BACKEND_ENDPOINT, "/api/sampledata");
} else {
const sampleDatabaseEndpoint = useQueryCopilot.getState().copilotUserDBEnabled
? `/api/tokens/sampledataconnection/v2`
: `/api/tokens/sampledataconnection`;
url = createUri(`${configContext.BACKEND_ENDPOINT}`, sampleDatabaseEndpoint);
}
const authorizationHeader = getAuthorizationHeader();
const headers = { [authorizationHeader.header]: authorizationHeader.token };
@@ -788,4 +834,4 @@ async function updateContextForSampleData(explorer: Explorer): Promise<void> {
interface SampledataconnectionResponse {
connectionString: string;
}
}

View File

@@ -1,9 +1,16 @@
import { useEffect, useState } from "react";
import { HttpHeaders } from "../Common/Constants";
import { useNewPortalBackendEndpoint } from "Utils/EndpointUtils";
import { ApiEndpoints, BackendApi, HttpHeaders } from "../Common/Constants";
import { configContext } from "../ConfigContext";
import { AccessInputMetadata } from "../Contracts/DataModels";
const url = `${configContext.BACKEND_ENDPOINT}${ApiEndpoints.guestRuntimeProxy}/accessinputmetadata?_=1609359229955`;
export async function fetchAccessData(portalToken: string): Promise<AccessInputMetadata> {
if (!useNewPortalBackendEndpoint(BackendApi.RuntimeProxy)) {
return fetchAccessData_ToBeDeprecated(portalToken);
}
const headers = new Headers();
// Portal encrypted token API quirk: The token header must be URL encoded
headers.append(HttpHeaders.guestAccessToken, encodeURIComponent(portalToken));
@@ -18,6 +25,25 @@ export async function fetchAccessData(portalToken: string): Promise<AccessInputM
.catch((error) => console.error(error));
}
export async function fetchAccessData_ToBeDeprecated(portalToken: string): Promise<AccessInputMetadata> {
const headers = new Headers();
// Portal encrypted token API quirk: The token header must be URL encoded
headers.append(HttpHeaders.guestAccessToken, encodeURIComponent(portalToken));
const options = {
method: "GET",
headers: headers,
};
return (
fetch(url, options)
.then((response) => response.json())
// Portal encrypted token API quirk: The response is double JSON encoded
.then((json) => JSON.parse(json))
.catch((error) => console.error(error))
);
}
export function useTokenMetadata(token: string): AccessInputMetadata | undefined {
const [state, setState] = useState<AccessInputMetadata | undefined>();

View File

@@ -3,6 +3,7 @@ import { QueryRequestOptions, QueryResponse } from "Contracts/AzureResourceGraph
import useSWR from "swr";
import { configContext } from "../ConfigContext";
import { Subscription } from "../Contracts/DataModels";
import { acquireTokenWithMsal, getMsalInstance } from "Utils/AuthorizationUtils";
/* eslint-disable @typescript-eslint/no-explicit-any */
interface SubscriptionListResult {
@@ -92,3 +93,5 @@ export function useSubscriptions(armToken: string): Subscription[] | undefined {
);
return data;
}