Fix Playwright tests (#2285)

* Temporarily re-enable key-based auth for Mongo and Cassandra tests.

* Increase the number of shards for Playwright tests.

* Another small bump to test shard count.

* Click the global "New..." button and then "Collection" in Playwright tests

* Get the "New Table" button

* Create and delete a container for every individual scale test

* For Scale & Settings, don't create sample data in the container

* Run scale tests serially

* Refactor scale setup and teardown to be within each test

* Record network traces

* Record network calls on all retries

* When disposing of a database during a Playwright test, refresh the tree to remove the deleted database

* Refresh the tree before opening Scale & Settings

* When opening Scale & Settings, refresh databases

* Reload all databases before loading offers

* Increase the timeout for the change partition key request

* Increase the timeout for the change partition key request

* Refresh databases in the test instead of in product code

* When refreshing containers, open the console window to check for status completion

* Close the notification console window after seeing the desired log

* Create and delete a container for each individual test (a Playwright sketch of this pattern follows the commit message)

* Don't delete the database after every test; leave that to the CI

* Don't refresh databases when opening Scale & Settings, and only delete the database when running locally

* Only open Scale & Settings at the beginning of each test suite

* Get it back to a working state

* Change settings.spec.ts from serial to parallel

* Don't delete the database after each test

* Update container creation throughput to 5000

* Run tests with no throughput limit on the account

* Adjust the scale test to reflect no throughput limit on the account

* Remove the test container throughput

* Don't refresh collections when clicking Settings in product code

* Refactor and run cleanup during the PR check

* Copy cleanup accounts

* Run cleanup after Playwright tests

* Run cleanup every three hours

* Revert ci.yml

* Update cpk test

* Remove cpk

* Remove cleanup accounts and add cpk

* Add cpk

* Remove cpk changes

* Revert ci.yml

* Run cleanup every two hours

---------

Co-authored-by: Jade Welton <jawelton@microsoft.com>
Co-authored-by: Asier Isayas <aisayas@microsoft.com>
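
The Playwright commits above share one theme: isolating state per test. Each test provisions its own container, teardown no longer deletes the shared database (the scheduled cleanup job shown in the diff below handles that), and suites are switched between serial and parallel execution as their setup allows. A minimal sketch of that shape, assuming hypothetical createTestContainer/deleteTestContainer helpers in place of the repo's actual utilities:

import { test } from "@playwright/test";

// Hypothetical helpers standing in for the repo's real container utilities.
async function createTestContainer(throughput: number): Promise<string> {
  // ...provision a fresh container (e.g. via the Cosmos SDK) and return its id...
  return `test-container-${throughput}-${Date.now()}`;
}

async function deleteTestContainer(containerId: string): Promise<void> {
  // ...delete only the container; the parent database is left to the scheduled cleanup job...
  void containerId;
}

// Run the suite serially while tests still share UI state; switch the mode to
// "parallel" once every test is fully self-contained (as settings.spec.ts was).
test.describe.configure({ mode: "serial" });

test.describe("Scale & Settings", () => {
  let containerId: string;

  test.beforeEach(async () => {
    containerId = await createTestContainer(5000);
  });

  test.afterEach(async () => {
    await deleteTestContainer(containerId);
  });

  test("opens Scale & Settings for the fresh container", async ({ page }) => {
    // ...navigate to containerId in Data Explorer and assert on the Scale & Settings pane...
    void page;
  });
});

The "record network traces" and "record network calls on all retries" commits presumably correspond to a playwright.config.ts setting along the lines of trace: "on-all-retries", which retains the trace (network activity included) for every retried run; the exact configuration lives in the repo's config rather than in this sketch.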
Author: asier-isayas
Date: 2026-01-07 00:36:54 -05:00
Committed by: GitHub
Parent: 53288dec6f
Commit: 4ac8cd8fe4
16 changed files with 305 additions and 245 deletions


@@ -74,17 +74,50 @@ async function main() {
       }
     } else if (account.kind === "GlobalDocumentDB") {
       const sqlDatabases = await client.sqlResources.listSqlDatabases(resourceGroupName, account.name);
-      for (const database of sqlDatabases) {
-        const timestamp = Number(database.resource._ts) * 1000;
-        if (timestamp && timestamp < thirtyMinutesAgo) {
-          await client.sqlResources.deleteSqlDatabase(resourceGroupName, account.name, database.name);
-          console.log(`DELETED: ${account.name} | ${database.name} | Age: ${friendlyTime(Date.now() - timestamp)}`);
-        } else {
-          console.log(`SKIPPED: ${account.name} | ${database.name} | Age: ${friendlyTime(Date.now() - timestamp)}`);
-        }
-      }
+      const sqlDatabasesToDelete = sqlDatabases.map(async (database) => {
+        await deleteWithRetry(client, database, account.name);
+      });
+      await Promise.all(sqlDatabasesToDelete);
     }
   }
 }
+// Retry logic for handling throttling
+async function deleteWithRetry(client, database, accountName) {
+  const maxRetries = 5;
+  let attempt = 0;
+  let backoffTime = 1000; // Start with 1 second
+  while (attempt < maxRetries) {
+    try {
+      const timestamp = Number(database.resource._ts) * 1000;
+      if (timestamp && timestamp < thirtyMinutesAgo) {
+        await client.sqlResources.deleteSqlDatabase(resourceGroupName, accountName, database.name);
+        console.log(`DELETED: ${accountName} | ${database.name} | Age: ${friendlyTime(Date.now() - timestamp)}`);
+      } else {
+        console.log(`SKIPPED: ${accountName} | ${database.name} | Age: ${friendlyTime(Date.now() - timestamp)}`);
+      }
+      return;
+    } catch (error) {
+      if (error.statusCode === 429) {
+        // Throttling error (HTTP 429), apply exponential backoff
+        console.log(`Throttling detected, retrying ${database.name}... (Attempt ${attempt + 1})`);
+        await delay(backoffTime);
+        attempt++;
+        backoffTime *= 2; // Exponential backoff
+      } else {
+        // For other errors, log and break
+        console.error(`Error deleting ${database.name}:`, error);
+        break;
+      }
+    }
+  }
+  console.log(`Failed to delete ${database.name} after ${maxRetries} attempts.`);
+}
+// Helper function to delay the retry attempts
+function delay(ms) {
+  return new Promise(resolve => setTimeout(resolve, ms));
+}
 main()
@@ -96,4 +129,4 @@ main()
     console.log(err);
     console.log("Cleanup failed! Exiting with success. Cleanup should always fail safe.");
     process.exit(0);
-  });
+  });
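
For reference, the backoff in deleteWithRetry doubles from one second, so a delete that is throttled on every attempt waits 1 + 2 + 4 + 8 + 16 = 31 seconds in total before the failure is logged (including one last 16-second sleep after the fifth throttled attempt). The same pattern, sketched as a reusable helper; the name and shape are illustrative and not part of the PR, and unlike the script above it rethrows on final failure instead of logging:

async function withBackoff<T>(action: () => Promise<T>, maxRetries = 5): Promise<T> {
  let backoffTime = 1000; // 1s, then 2s, 4s, 8s, 16s
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await action();
    } catch (error: any) {
      // Only HTTP 429 (throttling) is retried; other errors and the final
      // failed attempt are rethrown to the caller.
      if (error?.statusCode !== 429 || attempt === maxRetries) {
        throw error;
      }
      console.log(`Throttling detected, retrying... (Attempt ${attempt})`);
      await new Promise((resolve) => setTimeout(resolve, backoffTime));
      backoffTime *= 2;
    }
  }
  throw new Error("unreachable"); // the loop always returns or throws first
}

// Example (hypothetical wiring): retry a single database delete.
// await withBackoff(() => client.sqlResources.deleteSqlDatabase(resourceGroupName, accountName, database.name));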