Merge branch 'master' into users/dshilov/rid-of-force-query-plan

This commit is contained in:
Dmitrii Shilov 2025-08-06 16:20:55 +02:00
commit 7df143d928
9 changed files with 166 additions and 34 deletions

View File

@@ -177,9 +177,27 @@ jobs:
- name: "Az CLI login"
uses: Azure/login@v2
with:
client-id: ${{ secrets.AZURE_CLIENT_ID }}
client-id: ${{ secrets.E2E_TESTS_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
# We can't use MSAL within playwright so we acquire tokens prior to running the tests
- name: "Acquire RBAC tokens for test accounts"
uses: azure/cli@v2
with:
azcliversion: latest
inlineScript: |
NOSQL_TESTACCOUNT_TOKEN=$(az account get-access-token --scope "https://github-e2etests-sql.documents.azure.com/.default" -o tsv --query accessToken)
echo "::add-mask::$NOSQL_TESTACCOUNT_TOKEN"
echo NOSQL_TESTACCOUNT_TOKEN=$NOSQL_TESTACCOUNT_TOKEN >> $GITHUB_ENV
NOSQL_READONLY_TESTACCOUNT_TOKEN=$(az account get-access-token --scope "https://github-e2etests-sql-readonly.documents.azure.com/.default" -o tsv --query accessToken)
echo "::add-mask::$NOSQL_READONLY_TESTACCOUNT_TOKEN"
echo NOSQL_READONLY_TESTACCOUNT_TOKEN=$NOSQL_READONLY_TESTACCOUNT_TOKEN >> $GITHUB_ENV
TABLE_TESTACCOUNT_TOKEN=$(az account get-access-token --scope "https://github-e2etests-tables.documents.azure.com/.default" -o tsv --query accessToken)
echo "::add-mask::$TABLE_TESTACCOUNT_TOKEN"
echo TABLE_TESTACCOUNT_TOKEN=$TABLE_TESTACCOUNT_TOKEN >> $GITHUB_ENV
GREMLIN_TESTACCOUNT_TOKEN=$(az account get-access-token --scope "https://github-e2etests-gremlin.documents.azure.com/.default" -o tsv --query accessToken)
echo "::add-mask::$GREMLIN_TESTACCOUNT_TOKEN"
echo GREMLIN_TESTACCOUNT_TOKEN=$GREMLIN_TESTACCOUNT_TOKEN >> $GITHUB_ENV
- name: Run test shard ${{ matrix['shardIndex'] }} of ${{ matrix['shardTotal']}}
run: npx playwright test --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }} --workers=3
- name: Upload blob report to GitHub Actions Artifacts

View File

@@ -27,7 +27,7 @@ jobs:
- name: "Az CLI login"
uses: azure/login@v1
with:
client-id: ${{ secrets.AZURE_CLIENT_ID }}
client-id: ${{ secrets.E2E_TESTS_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}

View File

@@ -443,6 +443,7 @@ export interface DataExplorerInputsFrame {
[key: string]: string;
};
feedbackPolicies?: any;
aadToken?: string;
}
export interface SelfServeFrameInputs {

View File

@@ -894,6 +894,7 @@ function updateContextsFromPortalMessage(inputs: DataExplorerInputsFrame) {
const authorizationToken = inputs.authorizationToken || "";
const databaseAccount = inputs.databaseAccount;
const aadToken = inputs.aadToken || "";
updateConfigContext({
ARM_ENDPOINT: normalizeArmEndpoint(inputs.csmEndpoint || configContext.ARM_ENDPOINT),
@@ -906,6 +907,7 @@ function updateContextsFromPortalMessage(inputs: DataExplorerInputsFrame) {
updateUserContext({
authorizationToken,
aadToken,
databaseAccount,
resourceGroup: inputs.resourceGroup,
subscriptionId: inputs.subscriptionId,

View File

@@ -8,12 +8,12 @@ The tests run in [Playwright](https://playwright.dev/), using the official Playw
To run all the tests, you need:
* A CosmosDB Account using the Cassandra API
* A CosmosDB Account using the Gremlin API
* A CosmosDB Account using the MongoDB API, API version 6.0
* A CosmosDB Account using the MongoDB API, API version 3.2
* A CosmosDB Account using the NoSQL API
* A CosmosDB Account using the Tables API
- A CosmosDB Account using the Cassandra API
- A CosmosDB Account using the Gremlin API
- A CosmosDB Account using the MongoDB API, API version 6.0
- A CosmosDB Account using the MongoDB API, API version 3.2
- A CosmosDB Account using the NoSQL API
- A CosmosDB Account using the Tables API
Each Account must have at least 1000 RU/s of throughput available for new databases/collections/etc.
The tests create new databases/keyspaces/etc. for each test, and delete them when the test is done.
@@ -62,10 +62,10 @@ Do you want to continue? (y/n):
This prompt shows:
* The resources that will be deployed, in this case, all of them. You can filter to deploy only a subset by specifying the `-ResourceTypes` parameter. For example `-ResourceTypes @("cassandra", "sql")`.
* The location the resources will be deployed to, `West US 3` in this case.
* The resource group that will be used, `ashleyst-e2e-testing` in this case.
* The subscription that will be used.
- The resources that will be deployed, in this case, all of them. You can filter to deploy only a subset by specifying the `-ResourceTypes` parameter. For example `-ResourceTypes @("cassandra", "sql")`.
- The location the resources will be deployed to, `West US 3` in this case.
- The resource group that will be used, `ashleyst-e2e-testing` in this case.
- The subscription that will be used.
Once you confirm, the resources will be deployed using Azure PowerShell and the Bicep templates in the `resources` directory. The script will wait for all the deployments to complete before exiting.
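For example, a minimal sketch of a filtered run (only `-ResourceTypes` and `-ResourcePrefix` are parameters documented here; any other options and the exact prompt output may differ):

```powershell
# Deploy just the Cassandra and NoSQL accounts, prefixing resource names with your username.
.\deploy.ps1 -ResourceTypes @("cassandra", "sql") -ResourcePrefix $env:USERNAME
```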
@@ -76,18 +76,18 @@ You can re-run this script at any time to update the resources, if the Bicep tem
Before running the tests, you need to configure your environment to specify the accounts to use for testing.
The following environment variables are used:
* `DE_TEST_RESOURCE_GROUP` - The resource group to use for testing. This should be the same resource group that the resources were deployed to.
* `DE_TEST_SUBSCRIPTION_ID` - The subscription ID to use for testing. This should be the same subscription that the resources were deployed to.
* `DE_TEST_ACCOUNT_PREFIX` - If you used the default naming scheme provided by the `deploy.ps1` script, this should be your Windows username (or whatever value you passed in for the `-ResourcePrefix` argument when deploying). This is used to find the accounts that were deployed.
- `DE_TEST_RESOURCE_GROUP` - The resource group to use for testing. This should be the same resource group that the resources were deployed to.
- `DE_TEST_SUBSCRIPTION_ID` - The subscription ID to use for testing. This should be the same subscription that the resources were deployed to.
- `DE_TEST_ACCOUNT_PREFIX` - If you used the default naming scheme provided by the `deploy.ps1` script, this should be your Windows username (or whatever value you passed in for the `-ResourcePrefix` argument when deploying). This is used to find the accounts that were deployed.
In the event you didn't use the `deploy.ps1` script, you can specify the accounts directly using the following environment variables:
* `DE_TEST_ACCOUNT_NAME_CASSANDRA` - The name of the CosmosDB Account using the Cassandra API.
* `DE_TEST_ACCOUNT_NAME_GREMLIN` - The name of the CosmosDB Account using the Gremlin API.
* `DE_TEST_ACCOUNT_NAME_MONGO` - The name of the CosmosDB Account using the MongoDB API, API version 6.0.
* `DE_TEST_ACCOUNT_NAME_MONGO32` - The name of the CosmosDB Account using the MongoDB API, API version 3.2.
* `DE_TEST_ACCOUNT_NAME_SQL` - The name of the CosmosDB Account using the NoSQL API.
* `DE_TEST_ACCOUNT_NAME_TABLES` - The name of the CosmosDB Account using the Tables API.
- `DE_TEST_ACCOUNT_NAME_CASSANDRA` - The name of the CosmosDB Account using the Cassandra API.
- `DE_TEST_ACCOUNT_NAME_GREMLIN` - The name of the CosmosDB Account using the Gremlin API.
- `DE_TEST_ACCOUNT_NAME_MONGO` - The name of the CosmosDB Account using the MongoDB API, API version 6.0.
- `DE_TEST_ACCOUNT_NAME_MONGO32` - The name of the CosmosDB Account using the MongoDB API, API version 3.2.
- `DE_TEST_ACCOUNT_NAME_SQL` - The name of the CosmosDB Account using the NoSQL API.
- `DE_TEST_ACCOUNT_NAME_TABLES` - The name of the CosmosDB Account using the Tables API.
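If you prefer, a minimal sketch of setting these variables manually in PowerShell (all values below are placeholders, not real resource names):

```powershell
# Point the tests at an existing deployment.
$env:DE_TEST_RESOURCE_GROUP = "my-e2e-testing-rg"
$env:DE_TEST_SUBSCRIPTION_ID = "00000000-0000-0000-0000-000000000000"
# Either the prefix used when deploying via deploy.ps1...
$env:DE_TEST_ACCOUNT_PREFIX = $env:USERNAME
# ...or explicit account names if you created the accounts yourself.
$env:DE_TEST_ACCOUNT_NAME_SQL = "my-e2e-sql-account"
$env:DE_TEST_ACCOUNT_NAME_TABLES = "my-e2e-tables-account"
```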
If you used all the standard deployment scripts and naming scheme, you can set these environment variables using the following command:
@@ -152,6 +152,46 @@ The UI allows you to select a specific test to run and to see the results of the
See the [Playwright docs](https://playwright.dev/docs/running-tests) for more information on running tests.
### Testing with Data Plane RBAC Authentication
By default, the tests use key-based authentication to access the database accounts. For APIs that support data plane RBAC, the
tests can be configured to use it instead by acquiring access tokens and setting them in environment variables:
```powershell
# NoSQL API
$ENV:NOSQL_TESTACCOUNT_TOKEN=az account get-access-token --scope "https://<account name>.documents.azure.com/.default" -o tsv --query accessToken
# NoSQL API (Readonly)
$ENV:NOSQL_READONLY_TESTACCOUNT_TOKEN=az account get-access-token --scope "https://<account name>.documents.azure.com/.default" -o tsv --query accessToken
# Tables API
$ENV:TABLE_TESTACCOUNT_TOKEN=az account get-access-token --scope "https://<account name>.documents.azure.com/.default" -o tsv --query accessToken
# Gremlin API
$ENV:GREMLIN_TESTACCOUNT_TOKEN=az account get-access-token --scope "https://<account name>.documents.azure.com/.default" -o tsv --query accessToken
```
When setting up test accounts to use data plane RBAC, you will need to create custom role definitions that grant the following data actions:
```txt
# NoSQL API roles
Microsoft.DocumentDB/databaseAccounts/readMetadata
Microsoft.DocumentDB/databaseAccounts/sqlDatabases/*
Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/*
Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/items/*
Microsoft.DocumentDB/databaseAccounts/throughputSettings/*
# Tables API roles
Microsoft.DocumentDB/databaseAccounts/readMetadata
Microsoft.DocumentDB/databaseAccounts/tables/*
Microsoft.DocumentDB/databaseAccounts/throughputSettings/*
# Gremlin API roles
Microsoft.DocumentDB/databaseAccounts/readMetadata
Microsoft.DocumentDB/databaseAccounts/gremlin/*
Microsoft.DocumentDB/databaseAccounts/throughputSettings/*
```
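As a sketch, a custom NoSQL role definition could be created with the Azure CLI roughly as follows (the `role.json` file name is a placeholder; its `DataActions` would be the NoSQL entries listed above):

```powershell
# role.json (placeholder) contains RoleName, Type "CustomRole", AssignableScopes,
# and a Permissions array whose DataActions are the NoSQL entries listed above.
az cosmosdb sql role definition create `
  --account-name "<account name>" `
  --resource-group "<resource group>" `
  --body "@role.json"
```

The role then needs to be assigned to the identity the tests run as, for example with `az cosmosdb sql role assignment create`.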
## Clean-up
Tests should clean up after themselves if they succeed (and sometimes even when they fail).

View File

@@ -86,6 +86,30 @@ export async function getTestExplorerUrl(accountType: TestAccount, iframeSrc?: s
// For now, since we don't test copilot, we can disable the copilot APIs by setting the feature flag to false.
params.set("feature.enableCopilot", "false");
const nosqlRbacToken = process.env.NOSQL_TESTACCOUNT_TOKEN;
if (nosqlRbacToken) {
params.set("nosqlRbacToken", nosqlRbacToken);
params.set("enableaaddataplane", "true");
}
const nosqlReadOnlyRbacToken = process.env.NOSQL_READONLY_TESTACCOUNT_TOKEN;
if (nosqlReadOnlyRbacToken) {
params.set("nosqlReadOnlyRbacToken", nosqlReadOnlyRbacToken);
params.set("enableaaddataplane", "true");
}
const tableRbacToken = process.env.TABLE_TESTACCOUNT_TOKEN;
if (tableRbacToken) {
params.set("tableRbacToken", tableRbacToken);
params.set("enableaaddataplane", "true");
}
const gremlinRbacToken = process.env.GREMLIN_TESTACCOUNT_TOKEN;
if (gremlinRbacToken) {
params.set("gremlinRbacToken", gremlinRbacToken);
params.set("enableaaddataplane", "true");
}
if (iframeSrc) {
params.set("iframeSrc", iframeSrc);
}

View File

@@ -2,6 +2,7 @@ import { expect, test } from "@playwright/test";
import { CosmosDBManagementClient } from "@azure/arm-cosmosdb";
import { CosmosClient, PermissionMode } from "@azure/cosmos";
import { AzureIdentityCredentialAdapter } from "@azure/ms-rest-js";
import {
DataExplorer,
TestAccount,
@@ -13,8 +14,12 @@ import {
} from "../fx";
test("SQL account using Resource token", async ({ page }) => {
const nosqlAccountRbacToken = process.env.NOSQL_TESTACCOUNT_TOKEN || "";
test.skip(nosqlAccountRbacToken.length > 0, "Resource tokens not supported when using data plane RBAC.");
const credentials = getAzureCLICredentials();
const armClient = new CosmosDBManagementClient(credentials, subscriptionId);
const adaptedCredentials = new AzureIdentityCredentialAdapter(credentials);
const armClient = new CosmosDBManagementClient(adaptedCredentials, subscriptionId);
const accountName = getAccountName(TestAccount.SQL);
const account = await armClient.databaseAccounts.get(resourceGroupName, accountName);
const keys = await armClient.databaseAccounts.listKeys(resourceGroupName, accountName);

View File

@@ -1,7 +1,7 @@
import crypto from "crypto";
import { CosmosDBManagementClient } from "@azure/arm-cosmosdb";
import { BulkOperationType, Container, CosmosClient, Database, JSONObject } from "@azure/cosmos";
import { BulkOperationType, Container, CosmosClient, CosmosClientOptions, Database, JSONObject } from "@azure/cosmos";
import { AzureIdentityCredentialAdapter } from "@azure/ms-rest-js";
import {
@@ -82,11 +82,24 @@ export async function createTestSQLContainer(includeTestData?: boolean) {
const armClient = new CosmosDBManagementClient(adaptedCredentials, subscriptionId);
const accountName = getAccountName(TestAccount.SQL);
const account = await armClient.databaseAccounts.get(resourceGroupName, accountName);
const keys = await armClient.databaseAccounts.listKeys(resourceGroupName, accountName);
const client = new CosmosClient({
const clientOptions: CosmosClientOptions = {
endpoint: account.documentEndpoint!,
key: keys.primaryMasterKey,
});
};
const nosqlAccountRbacToken = process.env.NOSQL_TESTACCOUNT_TOKEN;
if (nosqlAccountRbacToken) {
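    // Authenticate with the AAD token instead of the account key; Cosmos DB expects the token wrapped in the type/ver/sig prefix below.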
clientOptions.tokenProvider = async (): Promise<string> => {
const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
const authorizationToken = `${AUTH_PREFIX}${nosqlAccountRbacToken}`;
return authorizationToken;
};
} else {
const keys = await armClient.databaseAccounts.listKeys(resourceGroupName, accountName);
clientOptions.key = keys.primaryMasterKey;
}
const client = new CosmosClient(clientOptions);
const { database } = await client.databases.createIfNotExists({ id: databaseId });
try {
const { container } = await database.containers.createIfNotExists({

View File

@@ -10,17 +10,45 @@ const subscriptionId = urlSearchParams.get("subscriptionId") || process.env.SUBS
const accountName = urlSearchParams.get("accountName") || "portal-sql-runner-west-us";
const selfServeType = urlSearchParams.get("selfServeType") || "example";
const iframeSrc = urlSearchParams.get("iframeSrc") || "explorer.html?platform=Portal&disablePortalInitCache";
const token = urlSearchParams.get("token");
const authToken = urlSearchParams.get("token");
console.log("Resource Group:", resourceGroup);
console.log("Subcription: ", subscriptionId);
console.log("Account Name: ", accountName);
const nosqlRbacToken = urlSearchParams.get("nosqlRbacToken") || process.env.NOSQL_TESTACCOUNT_TOKEN || "";
const nosqlReadOnlyRbacToken =
urlSearchParams.get("nosqlReadOnlyRbacToken") || process.env.NOSQL_READONLY_TESTACCOUNT_TOKEN || "";
const tableRbacToken = urlSearchParams.get("tableRbacToken") || process.env.TABLE_TESTACCOUNT_TOKEN || "";
const gremlinRbacToken = urlSearchParams.get("gremlinRbacToken") || process.env.GREMLIN_TESTACCOUNT_TOKEN || "";
const initTestExplorer = async (): Promise<void> => {
updateUserContext({
authorizationToken: `bearer ${token}`,
authorizationToken: `bearer ${authToken}`,
});
const databaseAccount = await get(subscriptionId, resourceGroup, accountName);
const tags = databaseAccount?.tags;
const testAccountType = tags && tags["DataExplorer:TestAccountType"];
let rbacToken = "";
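  // Pick the RBAC token that matches the account's DataExplorer:TestAccountType tag; an empty token means key auth is used.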
switch (testAccountType) {
case "sql":
rbacToken = nosqlRbacToken;
break;
case "sql-readonly":
rbacToken = nosqlReadOnlyRbacToken;
break;
case "gremlin":
rbacToken = gremlinRbacToken;
break;
case "tables":
rbacToken = tableRbacToken;
break;
}
if (rbacToken.length > 0) {
updateUserContext({
dataPlaneRbacEnabled: true,
});
}
const keys = await listKeys(subscriptionId, resourceGroup, accountName);
// Disable the quickstart carousel.
@@ -33,7 +61,8 @@ const initTestExplorer = async (): Promise<void> => {
databaseAccount: databaseAccount,
subscriptionId,
resourceGroup,
authorizationToken: `Bearer ${token}`,
authorizationToken: `Bearer ${authToken}`,
aadToken: rbacToken,
features: {},
hasWriteAccess: true,
csmEndpoint: "https://management.azure.com",