Mirror of https://github.com/Azure/cosmos-explorer.git (synced 2025-12-24 19:31:36 +00:00)

Comparing hosted-msa...remove-rup (2 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 123ec2e45c | |
| | 2f4abfa796 | |
.env.example (11 lines changed)
@@ -1,14 +1,7 @@
# These options are only needed if running end-to-end tests locally
PORTAL_RUNNER_USERNAME=
PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_CONNECTION_STRING=
NOTEBOOKS_TEST_RUNNER_TENANT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING=
DATA_EXPLORER_ENDPOINT=https://localhost:1234/hostedExplorer.html
PORTAL_RUNNER_CONNECTION_STRING=
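These variables reach the test runners through `process.env`. A minimal sketch of the wiring, assuming a dotenv-style loader (the actual loading mechanism is not shown in this diff):

```ts
// Sketch only: assumes dotenv populates process.env from .env before tests run.
const dotenv = require("dotenv");

dotenv.config();

const mongoConnectionString = process.env.MONGO_CONNECTION_STRING;
if (!mongoConnectionString) {
  // Fail fast with a pointer to .env.example instead of a cryptic login error.
  throw new Error("MONGO_CONNECTION_STRING is not set; copy .env.example to .env and fill it in");
}
```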
@@ -14,6 +14,9 @@ src/Common/DataAccessUtilityBase.ts
src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/ErrorParserUtility.test.ts
src/Common/ErrorParserUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
@@ -42,6 +45,7 @@ src/Contracts/ViewModels.ts
src/Controls/Heatmap/Heatmap.test.ts
src/Controls/Heatmap/Heatmap.ts
src/Controls/Heatmap/HeatmapDatatypes.ts
src/Definitions/adal.d.ts
src/Definitions/datatables.d.ts
src/Definitions/gif.d.ts
src/Definitions/globals.d.ts
@@ -200,6 +204,8 @@ src/Explorer/Tabs/QueryTab.test.ts
src/Explorer/Tabs/QueryTab.ts
src/Explorer/Tabs/QueryTablesTab.ts
src/Explorer/Tabs/ScriptTabBase.ts
src/Explorer/Tabs/SettingsTab.test.ts
src/Explorer/Tabs/SettingsTab.ts
src/Explorer/Tabs/SparkMasterTab.ts
src/Explorer/Tabs/StoredProcedureTab.ts
src/Explorer/Tabs/TabComponents.ts
@@ -286,6 +292,8 @@ src/Utils/DatabaseAccountUtils.ts
src/Utils/JunoUtils.ts
src/Utils/MessageValidation.ts
src/Utils/NotebookConfigurationUtils.ts
src/Utils/OfferUtils.test.ts
src/Utils/OfferUtils.ts
src/Utils/PricingUtils.test.ts
src/Utils/QueryUtils.test.ts
src/Utils/QueryUtils.ts
@@ -390,5 +398,19 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx
.github/workflows/ci.yml (vendored, 73 lines changed)
@@ -79,32 +79,32 @@ jobs:
          name: dist
          path: dist/
  endtoendemulator:
    name: "End To End Emulator Tests"
    name: "End To End Tests | Emulator | SQL"
    needs: [lint, format, compile, unittest]
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - uses: southpolesteve/cosmos-emulator-github-action@v1
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - uses: southpolesteve/cosmos-emulator-github-action@v1
      - name: Restore Cypress Binary Cache
        uses: actions/cache@v2
        with:
          path: ~/.cache/Cypress
          key: ${{ runner.os }}-cypress-binary-cache
      - name: End to End Tests
        run: |
          npm ci
          npm start &
          npm run wait-for-server
          npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
          npm ci --prefix ./cypress
          npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
        shell: bash
        env:
          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
          PLATFORM: "Emulator"
          EMULATOR_ENDPOINT: https://0.0.0.0:8081/
          NODE_TLS_REJECT_UNAUTHORIZED: 0
      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: screenshots
          path: failed-*
          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
  accessibility:
    name: "Accessibility | Hosted"
    needs: [lint, format, compile, unittest]
@@ -123,13 +123,13 @@ jobs:
          sudo sysctl -p
          npm ci
          npm start &
          npx wait-on -i 5000 https-get://0.0.0.0:1234/
          npx wait-on -i 5000 https-get://0.0.0.0:1234/
          node utils/accesibilityCheck.js
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
  endtoendhosted:
    name: "End to End Hosted Tests"
  endtoendpuppeteer:
    name: "End to end puppeteer tests"
    needs: [lint, format, compile, unittest]
    runs-on: ubuntu-latest
    steps:
@@ -138,7 +138,7 @@ jobs:
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: End to End Hosted Tests
      - name: End to End Puppeteer Tests
        run: |
          npm ci
          npm start &
@@ -147,27 +147,13 @@ jobs:
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
          PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
          PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
          PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
          PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
          NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
          NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
          NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
          PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
          MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
          CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
          TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: screenshots
          path: failed-*
  nuget:
    name: Publish Nuget
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -191,7 +177,7 @@ jobs:
  nugetmpac:
    name: Publish Nuget MPAC
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -213,28 +199,3 @@ jobs:
          name: packages
        with:
          path: "*.nupkg"
  nugetie:
    name: Publish Nuget IE
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
      AZURE_DEVOPS_PAT: ${{ secrets.AZURE_DEVOPS_PAT }}
    steps:
      - uses: nuget/setup-nuget@v1
        with:
          nuget-api-key: ${{ secrets.NUGET_API_KEY }}
      - name: Download Dist Folder
        uses: actions/download-artifact@v2
        with:
          name: dist
      - run: cp ./configs/prod.json config.json
      - run: sed -i 's/Azure.Cosmos.DB.Data.Explorer/Azure.Cosmos.DB.Data.Explorer.IE/g' DataExplorer.nuspec
      - run: nuget sources add -Name "ADO" -Source "$NUGET_SOURCE" -UserName "GitHub" -Password "$AZURE_DEVOPS_PAT"
      - run: nuget pack -Version "2.0.0-github-${GITHUB_SHA}"
      - run: nuget push -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
      - uses: actions/upload-artifact@v2
        name: packages
        with:
          path: "*.nupkg"
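In the emulator job above, the dev server is started in the background and `npm run wait-for-server` gates the jest step. The script itself is not part of this diff; a sketch of what such a gate has to do (the URL, retry count, and delay below are assumptions):

```ts
// Sketch: poll the dev server until it answers over TLS before running tests.
const https = require("https");

async function waitForServer(url: string, retries = 60, delayMs = 5000): Promise<void> {
  for (let i = 0; i < retries; i++) {
    const ok = await new Promise<boolean>(resolve => {
      // The dev server uses a self-signed cert, hence rejectUnauthorized: false.
      const req = https.get(url, { rejectUnauthorized: false }, (res: any) => {
        res.resume(); // drain the response so the socket is released
        resolve(true);
      });
      req.on("error", () => resolve(false));
    });
    if (ok) return;
    await new Promise(r => setTimeout(r, delayMs));
  }
  throw new Error(`Server at ${url} did not become ready`);
}

waitForServer("https://localhost:1234/").catch(err => {
  console.error(err);
  process.exit(1);
});
```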
.github/workflows/runners.yml (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
name: Runners
on:
  schedule:
    - cron: "0 * 1 * *"
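    # "0 * 1 * *" fires at minute 0 of every hour, but only on day 1 of the month (not every hour of every day)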
jobs:
  sqlcreatecollection:
    runs-on: ubuntu-latest
    name: "SQL | Create Collection"
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
      - run: npm ci
      - run: npm run test:e2e
        env:
          PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
          PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
          PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
          PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
          PORTAL_RUNNER_RESOURCE_GROUP: runners
          PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: screenshots
          path: failure.png
.gitignore (vendored, 3 lines changed)

@@ -9,6 +9,9 @@ pkg/DataExplorer/*
test/out/*
workers/**/*.js
*.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/*
Contracts/*
.DS_Store
.vs/slnx.sqlite (binary file not shown)
README.md (58 lines changed)
@@ -13,18 +13,29 @@ UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), ht

### Watch mode

Run `npm start` to start the development server and automatically rebuild on changes
Run `npm run watch` to start the development server and automatically rebuild on changes

### Hosted Development (https://cosmos.azure.com)
### Specifying Development Platform

- Visit: `https://localhost:1234/hostedExplorer.html`
- Local sign-in via AAD will NOT work; in dev mode you can only connect with a connection string. Use the Portal if you need AAD auth.
- The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
Setting the environment variable `PLATFORM` during the build process will force the explorer to load the specified platform. By default, development runs in `Hosted` mode. Valid options:

- Hosted
- Emulator
- Portal

`PLATFORM=Emulator npm run watch`

### Hosted Development

The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.

To run pure hosted mode, change the index HtmlWebpackPlugin in `webpack.config.js` to use hostedExplorer.html and change the entry for index to use HostedExplorer.ts.

### Emulator Development

- Start the Cosmos Emulator
- Visit: https://localhost:1234/index.html
In a Windows environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-Windows environment, you can specify an alternate endpoint using `EMULATOR_ENDPOINT`, and the webpack dev server will proxy requests for you, as sketched below.

`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
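A rough sketch of that proxy wiring; the option names are standard webpack-dev-server proxy settings, but the paths and shape of this repo's actual `webpack.config.js` are assumptions:

```ts
// Illustrative only: not the repo's actual config.
const emulatorEndpoint = process.env.EMULATOR_ENDPOINT || "https://localhost:8081/";

export const devServerProxy = {
  // Forward data-plane calls to the (possibly remote) emulator.
  "/dbs": {
    target: emulatorEndpoint,
    changeOrigin: true,
    secure: false // the emulator serves a self-signed certificate
  }
};
```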

#### Setting up a Remote Emulator

@@ -44,8 +55,16 @@ The Cosmos emulator currently only runs in Windows environments. You can still d

### Portal Development

- Visit: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
- You may have to manually visit https://localhost:1234/explorer.html first and click through any SSL certificate warnings
The Cosmos Portal that consumes this repo is not currently open source. If you have access to this project, `npm run build` will copy the built files over to the portal, where they will be loaded by the portal development environment.

You can, however, load a locally running instance of Data Explorer in the production portal:

1. Turn off browser SSL validation for localhost (chrome://flags/#allow-insecure-localhost) OR install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Whitelist the `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html

Live reload will occur, but Data Explorer will not properly re-integrate with the parent iframe. You will have to manually reload the page.

### Testing

@@ -57,17 +76,24 @@ Unit tests are located adjacent to the code under test and run with [Jest](https

#### End to End CI Tests

Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:
[Cypress](https://www.cypress.io/) is used for end to end tests, which are contained in `cypress/`. Currently it operates as a sub-project with its own TypeScript config and dependencies, and it only runs against the emulator. To run the Cypress tests:

1. Copy .env.example to .env
2. Update the values in .env, including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
3. Make sure all packages are installed: `npm install`
4. Run the server (`npm run start`) and wait for it to start
5. Run `npm run test:e2e`
1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into the `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run Cypress headless (`npm run test`) or in interactive mode (`npm run test:debug`)

#### End to End Production Runners

Jest and Puppeteer are used for end to end production runners and are contained in `test/`. To run these tests locally:

1. Copy .env.example to .env and fill in all variables
2. Run `npm run test:e2e`

### Releasing

We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See the linked documentation for more details.

# Contributing
cypress/.gitignore (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos
cypress/cleanup.js (new file, 51 lines)

@@ -0,0 +1,51 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");

// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");

async function cleanup() {
  const connectionString = process.env.CYPRESS_CONNECTION_STRING;
  if (!connectionString) {
    throw new Error("Connection string not provided");
  }

  let client;
  switch (true) {
    case connectionString.includes("mongodb://"): {
      const [, key, accountName] = connectionString.match(mongoRegex);
      client = new CosmosClient({
        key,
        endpoint: `https://${accountName}.documents.azure.com:443/`
      });
      break;
    }
    // TODO: Add support for other API connection strings
    default:
      client = new CosmosClient(connectionString);
      break;
  }

  const response = await client.databases.readAll().fetchAll();
  return Promise.all(
    response.resources.map(async db => {
      const dbTimestamp = new Date(db._ts * 1000);
      const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
      if (dbTimestamp < twentyMinutesAgo) {
        await client.database(db.id).delete();
        console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
      } else {
        console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
      }
    })
  );
}

cleanup()
  .then(() => {
    process.exit(0);
  })
  .catch(error => {
    console.error(error);
    process.exit(1);
  });
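To make the regex concrete: given a made-up Mongo connection string, the two capture groups yield the account key and account name, which are then recombined into the SQL endpoint that `@azure/cosmos` expects:

```ts
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");
// Hypothetical account name and key, for illustration only.
const example = "mongodb://myaccount:bXlGYWtlS2V5@myaccount.mongo.cosmos.azure.com:10255/?ssl=true";

const [, key, accountName] = example.match(mongoRegex);
// key === "bXlGYWtlS2V5", accountName === "myaccount"
// -> endpoint: https://myaccount.documents.azure.com:443/
```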
cypress/cypress.json (new file, 15 lines)

@@ -0,0 +1,15 @@
{
  "integrationFolder": "./integration",
  "pluginsFile": false,
  "fixturesFolder": false,
  "supportFile": "./support/index.js",
  "defaultCommandTimeout": 90000,
  "chromeWebSecurity": false,
  "reporter": "mochawesome",
  "reporterOptions": {
    "reportDir": "cypress/report",
    "json": true,
    "overwrite": false,
    "html": false
  }
}
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts (new file, 66 lines)

@@ -0,0 +1,66 @@
// 1. Click on "New Container" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Cassandra API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
  });

  it("Create a new table in Cassandra API", () => {
    const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
    const tableId = `TableId112`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Table"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[id="keyspace-id"]')
        .should("be.visible")
        .type(keyspaceId);

      cy.wrap($body)
        .find('input[class="textfontclr"]')
        .type(tableId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        // Fixed selector: the original `.find('data-test="..."')` was missing
        // the `input[...]` attribute-selector brackets and could never match.
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", tableId);
    });
  });
});
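The hosted explorer renders inside an iframe, which is why every spec in this directory starts with the same `cy.get("iframe")` dance. A shared helper could factor that out; the sketch below is hypothetical and not part of this diff:

```ts
// Hypothetical helper: run assertions against the explorer iframe's body.
function withExplorerBody(fn: (body: JQuery<HTMLElement>) => void): void {
  cy.get("iframe").then($element => {
    const $body = $element.contents().find("body");
    fn($body);
  });
}
```

Each spec body could then become `withExplorerBody($body => { ... })` instead of repeating the lookup.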
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
// 1. Click on "New Graph" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Graph API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.graph);
  });

  it("Create a new graph in Graph API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
    const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Graph"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .should("be.visible")
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(graphId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(partitionKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", graphId);
    });
  });
});
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
// 1. Click on "New Container" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find("#submitBtnAddCollection")
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
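The `utilities/connectionString` module these specs require is not included in the diff. Reconstructed from the call sites alone, its surface must look roughly like this (a sketch, not the real source):

```ts
// Reconstructed from usage in the specs; only the three constants actually
// referenced (cassandra, graph, table) are grounded in this diff.
interface ConnectionStringUtility {
  constants: {
    cassandra: string;
    graph: string;
    table: string;
  };
  // Logs into the hosted explorer with a connection string; when called with
  // no argument the specs appear to fall back to a default from the env vars.
  loginUsingConnectionString(apiKind?: string): void;
}
```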
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts (new file, 96 lines)

@@ -0,0 +1,96 @@
// 1. Click on "New Container" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in Mongo API - Autopilot", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('div[class="throughputModeContainer"]')
        .should("be.visible")
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
          expect(input.get(1).textContent, "second item").contains("Manual");
        });

      cy.wrap($body)
        .find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
        .check();

      cy.wrap($body)
        .find('select[name="autoPilotTiers"]')
        // .eq(1).should('contain', '4,000 RU/s');
        // .select('4,000 RU/s').should('have.value', '1');
        .find('option[value="2"]')
        .then($element => $element.get(1).setAttribute("selected", "selected"));

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
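The `setAttribute("selected", "selected")` call above mutates the DOM directly instead of going through Cypress. An equivalent, more idiomatic sketch using `cy.select` (note the original targets the second matching option via `$element.get(1)`, so scoping with `.eq(1)` may be needed):

```ts
// Sketch: choose the autopilot tier by <option> value instead of DOM mutation.
cy.wrap($body)
  .find('select[name="autoPilotTiers"]')
  .eq(1) // the page appears to render two matching selects; scope to the second
  .select("2");
```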
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts (new file, 67 lines)

@@ -0,0 +1,67 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in existing database in Mongo API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find('span[class="nodeLabel"]')
        .should("be.visible")
        .then($span => {
          const dbId1 = $span.text();
          cy.log("existing database id", dbId1);

          cy.wrap($body)
            .find('div[class="commandBarContainer"]')
            .should("be.visible")
            .find('button[data-test="New Collection"]')
            .should("be.visible")
            .click();

          cy.wrap($body)
            .find('div[class="contextual-pane-in"]')
            .should("be.visible")
            .find('span[id="containerTitle"]');

          cy.wrap($body)
            .find('input[data-test="addCollection-existingDatabase"]')
            .check();

          cy.wrap($body)
            .find('input[data-test="addCollection-existingDatabase"]')
            .type(dbId1);

          cy.wrap($body)
            .find('input[data-test="addCollection-collectionId"]')
            .type(collectionId);

          cy.wrap($body)
            .find('input[data-test="addCollection-partitionKeyValue"]')
            .type(sharedKey);

          cy.wrap($body)
            .find('input[data-test="addCollection-createCollection"]')
            .click();

          cy.wait(10000);

          cy.wrap($body)
            .find('div[data-test="resourceTreeId"]')
            .should("exist")
            .find('div[class="treeComponent dataResourceTree"]')
            .click()
            .should("contain", collectionId);
        });
    });
  });
});
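One caveat in the spec above: `$span.text()` on a multi-element selection concatenates the labels of every matched node, so the captured `dbId1` is only valid when exactly one database exists. A sketch that scopes the capture to a single node:

```ts
// Sketch: read only the first tree node's label before acting on it.
cy.wrap($body)
  .find('span[class="nodeLabel"]')
  .first()
  .invoke("text")
  .then(dbId => {
    cy.log("existing database id", dbId);
    // ...continue the test with dbId
  });
```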
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts (new file, 203 lines)

@@ -0,0 +1,203 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context.skip("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API - Provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find(".createNewDatabaseOrUseExisting")
        .should("have.length", 2)
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Create new");
          expect(input.get(1).textContent, "second item").contains("Use existing");
        });

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const collectionIdTitle = `Add Collection`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .uncheck();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[id="tab2"]')
        .check({ force: true });

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .uncheck();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[id="tab1"]')
        .check({ force: true });

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
cypress/integration/dataexplorer/SQL/addCollection.spec.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
// 1. Click on "New Container" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("SQL API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new container in SQL API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
    connectionString.loginUsingConnectionString();

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Container"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find("#submitBtnAddCollection")
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId);
    });
  });
});
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
// 1. Click on "New Container" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Table API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.table);
  });

  it("Create a new table in Table API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Table"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", collectionId);
    });
  });
});
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts (new file, 55 lines)

@@ -0,0 +1,55 @@
// 1. Click on "New Container" on the command bar.
// 2. A pane with the title "Add Container" should appear on the right side of the screen.
// 3. It includes an input box for the database id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter a new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter the minimum throughput value of 400.
// 8. Enter a container id in the container id text box.
// 9. Enter a partition key in the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.

let crypt = require("crypto");

context("Emulator - createDatabase", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
  const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
  const collectionIdTitle = `Add Collection`;
  const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;

  it("Create a new collection", () => {
    cy.contains("New Container").click();

    // cy.contains(collectionIdTitle);

    cy.get(".createNewDatabaseOrUseExisting")
      .should("have.length", 2)
      .and(input => {
        expect(input.get(0).textContent, "first item").contains("Create new");
        expect(input.get(1).textContent, "second item").contains("Use existing");
      });

    cy.get('input[data-test="addCollection-createNewDatabase"]').check();

    cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);

    cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);

    cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");

    cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);

    cy.get('input[data-test="addCollection-createCollection"]').click();

    cy.get('div[data-test="resourceTreeId"]').should("exist");

    cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);

    cy.get('div[data-test="databaseList"]').should("contain", collectionId);
  });
});
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts (new file, 65 lines)

@@ -0,0 +1,65 @@
// 1. Click on "New Database" on the command bar
// 2. A pane with the title "Add Database" should appear on the right side of the screen
//    i. It includes an input box for the database id.
//    ii. It includes a checkbox called "Provision throughput".
//    iii. When the checkbox is marked, a new input with a throughput control lets you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have a list item called "Scale" that, once clicked, shows the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permitted range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab, reopen it, and verify that the input contains the last saved value.

const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;

context("Emulator - Create database -> container -> item", () => {
  beforeEach(async () => {
    const { resources } = await client.databases.readAll().fetchAll();
    for (const database of resources) {
      await client.database(database.id).delete();
    }
  });

  it("creates a new database", () => {
    cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
    cy.contains("New Container").click();
    cy.get("[data-test=addCollection-newDatabaseId]").click();
    cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
    cy.get("[data-test=addCollection-collectionId]").click();
    cy.get("[data-test=addCollection-collectionId]").type(collectionId);
    cy.get("[data-test=addCollection-partitionKeyValue]").click();
    cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
    cy.get('input[name="createCollection"]').click();
    cy.get(".dataResourceTree").should("contain", databaseId);
    cy.get(".dataResourceTree")
      .contains(databaseId)
      .click();
    cy.get(".dataResourceTree").should("contain", collectionId);
    cy.get(".dataResourceTree")
      .contains(collectionId)
      .click();
    cy.get(".dataResourceTree")
      .contains("Items")
      .click();
    cy.get(".dataResourceTree")
      .contains("Items")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer")
      .contains("New Item")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer")
      .contains("Save")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
  });
});
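The fixed `cy.wait(1000)` sleeps above paper over React-inside-Knockout rendering races. A sketch of an assertion-based alternative that leans on Cypress's built-in retrying (the timeout value is an assumption):

```ts
// Sketch: let Cypress retry until the command bar entry is actually clickable,
// instead of sleeping a fixed second and hoping rendering has settled.
cy.get(".commandBarContainer", { timeout: 10000 })
  .contains("New Item")
  .should("be.visible")
  .click();
```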
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts (new file, 46 lines)

@@ -0,0 +1,46 @@
// 1. Click the last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select the "Delete Container" option in the dropdown
// 5. On selection, the Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click OK
// 7. Now the resource tree refreshes; the deleted collection should not appear under the database

let crypt = require("crypto");

context("Emulator - deleteCollection", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  it("Delete a collection", () => {
    cy.get(".databaseId")
      .last()
      .click();

    cy.get(".collectionList")
      .last()
      .then($id => {
        const collectionId = $id.text();

        cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");

        cy.get('span[data-test="collectionEllipsisMenu"]')
          .invoke("show")
          .last()
          .click();

        cy.get('div[data-test="collectionContextMenu"]')
          .contains("Delete Container")
          .click({ force: true });

        cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());

        cy.get('input[data-test="deleteCollection"]').click();

        cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);

        cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
      });
  });
});
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts (new file, 83 lines)

@@ -0,0 +1,83 @@
// 1. Click the last database in the resource tree
// 2. Select the context menu within the database
// 3. Select the "Delete Database" option in the dropdown
// 4. On selection, the Delete Database pane opens on the right side
// 5. Enter the same database id that is to be deleted and click OK
// 6. Now the resource tree refreshes; the deleted database should not appear in the resource tree

let crypt = require("crypto");

context("Emulator - deleteDatabase", () => {
  beforeEach(() => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    let db_rid = "";
    const date = new Date().toUTCString();
    let authToken = "";
    cy.visit("http://localhost:1234/explorer.html");

    // Creating auth token for collection creation
    cy.request({
      method: "GET",
      url: "https://localhost:8081/_explorer/authorization/post/dbs/",
      headers: {
        "x-ms-date": date,
        authorization: "-"
      }
    })
      .then(response => {
        authToken = response.body.Token; // Getting auth token for collection creation
        return new Cypress.Promise((resolve, reject) => {
          return resolve();
        });
      })
      .then(() => {
        cy.request({
          method: "POST",
          url: "https://localhost:8081/dbs",
          headers: {
            "x-ms-date": date,
            authorization: authToken,
            "x-ms-version": "2018-12-31"
          },
          body: {
            id: dbId
          }
        }).then(response => {
          cy.log("Response", response);
          db_rid = response.body._rid;
          return new Cypress.Promise((resolve, reject) => {
            cy.log("Rid", db_rid);
            return resolve();
          });
        });
      });
  });

  it("Delete a database", () => {
    cy.get('span[data-test="refreshTree"]').click();

    cy.get(".databaseId")
      .last()
      .then($id => {
        const dbId = $id.text();

        cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");

        cy.get('span[data-test="databaseEllipsisMenu"]')
          .invoke("show")
          .last()
          .click();

        cy.get('div[data-test="databaseContextMenu"]')
          .contains("Delete Database")
          .click({ force: true });

        cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());

        cy.get('input[data-test="deleteDatabase"]').click();

        cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
      });
  });
});
cypress/integration/notebook/README.md (new file, 35 lines)
@@ -0,0 +1,35 @@
# Notebook end-to-end tests

This document describes how to run the tests locally.

## Stand up a local notebook container instance

Instructions on how to build and run the container are [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)

## Run a local data explorer

Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).

Make sure you can run Data Explorer locally from the web browser.

## Run cypress tests

1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:

```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```

To run in debug mode:

```bash
npm run test:debug
```

This opens the Cypress UI.

## Troubleshooting

* The tests are recorded in the `videos` folder.
* Cypress does not support hover natively; workarounds are described [here](https://docs.cypress.io/api/commands/hover.html#Workarounds), and a minimal sketch follows below.
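Since there is no `cy.hover()` command, the usual workaround is to fire the underlying mouse event yourself or force-interact with the hidden element. A minimal sketch; the selectors are borrowed from `resourceTree.spec.ts` for illustration, not a prescribed pattern:

```ts
// Sketch of the standard Cypress "hover" workaround: fire the underlying
// mouse event, or force-interact with the element that hover would reveal.
cy.get(".treeNodeHeader")
  .first()
  .trigger("mouseover"); // simulates the pointer entering the element

// Controls that only appear on hover can be forced instead:
cy.get("button.treeMenuEllipsis").click({ force: true });
```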

## References

* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)
93
cypress/integration/notebook/newNotebook.spec.ts
Normal file
@@ -0,0 +1,93 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create a new notebook and run some code", () => {
    // Create new notebook
    cy.contains("New Notebook").click();

    // Check tab name
    cy.get("li.tabList .tabNavText").should($span => {
      const text = $span.text();
      expect(text).to.match(/^Untitled.*\.ipynb$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container")
      .as("cellContainer")
      .click();

    // Type in some code
    cy.get("@cellContainer").type("2+4");

    // Execute
    cy.get('[data-test="Run"]')
      .first()
      .click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "6");
    });

    // Restart kernel
    cy.get('[data-test="Run"] button')
      .eq(-1)
      .click();
    cy.get("li")
      .contains("Restart Kernel")
      .click();

    // Wait for python3 | restarting status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*restarting$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container")
      .as("cellContainer")
      .find(".input")
      .as("codeInput")
      .click();

    // Type in some code
    cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");

    // Execute
    cy.get('[data-test="Run"]')
      .first()
      .click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "9");
    });
  });
});
172
cypress/integration/notebook/resourceTree.spec.ts
Normal file
@@ -0,0 +1,172 @@
context("Resource tree notebook file manipulation", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  const clickContextMenuAndSelectOption = (nodeLabel, option) => {
    cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains(option)
      .click();
  };

  const createFolder = folder => {
    clickContextMenuAndSelectOption("My Notebooks/", "New Directory");

    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]').type(folder);
      cy.get("form").submit();
    });
  };

  const deleteItem = nodeName => {
    clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create and remove a directory", () => {
    const folder = "e2etest_folder1";
    createFolder(folder);
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
    deleteItem(`${folder}/`);
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
  });

  it("Create and rename a directory", () => {
    const folder = "e2etest_folder2";
    const renamedFolder = "e2etest_folder2_renamed";
    createFolder(folder);

    // Rename
    clickContextMenuAndSelectOption(`${folder}/`, "Rename");
    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]')
        .clear()
        .type(renamedFolder);
      cy.get("form").submit();
    });
    cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");

    deleteItem(`${renamedFolder}/`);
    cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
  });

  it("Create a notebook inside a directory", () => {
    const folder = "e2etest_folder3";
    const newNotebookName = "Untitled.ipynb";
    createFolder(folder);
    clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");

    // Verify tab is open
    cy.get(".tabList")
      .contains(newNotebookName)
      .should("exist");

    // Close tab
    cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
      .find(".cancelButton")
      .click();
    // When running from command line, closing the tab is too fast
    cy.get("body").then($body => {
      if ($body.find(".ms-Dialog-main").length) {
        // For some reason, this does not work:
        // cy.get(".ms-Dialog-main").contains("Close").click();
        cy.get(".ms-Dialog-main .ms-Button--primary").click();
      }
    });

    // Expand folder node
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");

    // Delete notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Delete")
      .click();

    // Confirm
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");

    deleteItem(`${folder}/`);
  });

  it("Create and rename a notebook inside a directory", () => {
    const folder = "e2etest_folder4";
    const newNotebookName = "Untitled.ipynb";
    const renamedNotebookName = "mynotebook.ipynb";
    createFolder(folder);
    clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");

    // Close tab
    cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
      .find(".cancelButton")
      .click();
    cy.get("body").then($body => {
      if ($body.find(".ms-Dialog-main").length) {
        // For some reason, this does not work:
        // cy.get(".ms-Dialog-main").contains("Close").click();
        cy.get(".ms-Dialog-main .ms-Button--primary").click();
      }
    });

    // Expand folder node
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");

    // Rename notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Rename")
      .click();

    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]')
        .clear()
        .type(renamedNotebookName);
      cy.get("form").submit();
    });
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");

    // Delete notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Delete")
      .click();

    // Confirm
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
    // Give it time to settle
    cy.wait(1000);
    deleteItem(`${folder}/`);
  });
});
3066
cypress/package-lock.json
generated
Normal file
File diff suppressed because it is too large
25
cypress/package.json
Normal file
@@ -0,0 +1,25 @@
{
  "name": "cosmos-explorer-cypress",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "cypress run",
    "wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
    "test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
    "test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
    "test:debug": "cypress open"
  },
  "devDependencies": {
    "cypress": "^4.8.0",
    "mocha": "^7.0.1",
    "mochawesome": "^4.1.0",
    "mochawesome-merge": "^4.0.1",
    "mochawesome-report-generator": "^4.1.0",
    "typescript": "3.4.3",
    "wait-on": "^4.0.2"
  },
  "dependencies": {
    "@microsoft/applicationinsights-web": "^2.5.2"
  }
}
23
cypress/support/index.js
Normal file
@@ -0,0 +1,23 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");

const appInsights = new appInsightsLib.ApplicationInsights({
  config: {
    instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
    /* ...Other Configuration Options... */
  }
});

appInsights.loadAppInsights();

Cypress.on("fail", (error, runnable) => {
  // App Insights records the failing test (e.g. Create Collection) as a trace
  let message = JSON.stringify(runnable.title);
  appInsights.trackTrace({
    message: `${message}`,
    properties: {
      passed: false,
      error: error
    }
  });
  throw error; // re-throw so the test still fails
});
11
cypress/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
{
  "compilerOptions": {
    "strict": true,
    "noEmit": true,
    "module": "commonjs",
    "target": "es5",
    "lib": ["es5", "dom", "es6"],
    "types": ["cypress", "node"]
  },
  "include": ["**/*.ts", "**/*.spec.ts"]
}
41
cypress/utilities/connectionString.js
Normal file
@@ -0,0 +1,41 @@
module.exports = {
  loginUsingConnectionString: function() {
    const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
    const timeout = 15000;

    cy.visit(prodUrl);
    cy.get('iframe[id="explorerMenu"]').should("be.visible");

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find("#connectExplorer")
        .should("exist")
        .find("div[class='connectExplorer']")
        .should("exist")
        .find("p[class='welcomeText']")
        .should("exist");

      cy.wrap($body.find("div > p.switchConnectTypeText"))
        .should("exist")
        .last()
        .click({ force: true });

      const secret = Cypress.env("CONNECTION_STRING");

      cy.wrap($body)
        .find("input[class='inputToken']")
        .should("exist")
        .type(secret, {
          force: true
        });

      cy.wrap($body.find("input[value='Connect']"), { timeout })
        .first()
        .click({ force: true });

      cy.wait(15000);
    });
  }
};
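A spec would consume this helper roughly as follows. This is a minimal sketch of a hypothetical spec file; it assumes the connection string is supplied to Cypress, e.g. via `CYPRESS_CONNECTION_STRING`:

```ts
// Hypothetical spec file consuming the helper above. Assumes the connection
// string is available as a Cypress environment variable.
const { loginUsingConnectionString } = require("../utilities/connectionString");

context("Hosted explorer smoke test", () => {
  beforeEach(() => {
    // Visits the hosted explorer and connects with the account connection string
    loginUsingConnectionString();
  });

  it("loads the explorer frame after connecting", () => {
    cy.get('iframe[id="explorerMenu"]').should("exist");
  });
});
```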
6
cypress/utilities/cosmosClient.js
Normal file
@@ -0,0 +1,6 @@
const { CosmosClient } = require("@azure/cosmos");

module.exports = new CosmosClient({
  endpoint: "https://0.0.0.0:8081",
  key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});
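The exported client can then seed or clean up emulator fixtures from test hooks. A minimal sketch using the public `@azure/cosmos` v3 API; the fixture ids are illustrative:

```ts
// Minimal seeding sketch against the emulator, reusing the shared client above.
// "TestDatabase" / "TestCollection" are illustrative fixture ids.
const client = require("./cosmosClient");

async function seedFixtures(): Promise<void> {
  // createIfNotExists keeps the hook idempotent across test runs
  const { database } = await client.databases.createIfNotExists({ id: "TestDatabase" });
  const { container } = await database.containers.createIfNotExists({ id: "TestCollection" });
  await container.items.upsert({ id: "doc1", pk: "1" });
}

seedFixtures().catch(console.error);
```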
1963
externals/adal.js
vendored
Normal file
File diff suppressed because it is too large
3663
package-lock.json
generated
File diff suppressed because it is too large
17
package.json
@@ -4,12 +4,8 @@
  "description": "Cosmos Explorer",
  "main": "index.js",
  "dependencies": {
    "@azure/arm-cosmosdb": "9.1.0",
    "@azure/cosmos": "3.9.0",
    "@azure/cosmos-language-service": "0.0.5",
    "@azure/identity": "1.1.0",
    "@azure/msal-browser": "2.8.0",
    "@azure/msal-react": "1.0.0-alpha.1",
    "@azure/cosmos-language-service": "0.0.4",
    "@jupyterlab/services": "6.0.0-rc.2",
    "@jupyterlab/terminal": "3.0.0-rc.2",
    "@microsoft/applicationinsights-web": "2.5.9",
@@ -25,7 +21,7 @@
    "@nteract/jupyter-widgets": "2.0.0",
    "@nteract/logos": "1.0.0",
    "@nteract/markdown": "4.4.0",
    "@nteract/monaco-editor": "3.2.2",
    "@nteract/monaco-editor": "3.2.0",
    "@nteract/octicons": "2.0.0",
    "@nteract/outputs": "3.0.9",
    "@nteract/presentational-components": "3.0.7",
@@ -70,7 +66,7 @@
    "jquery-ui-dist": "1.12.1",
    "knockout": "3.5.1",
    "mkdirp": "1.0.4",
    "monaco-editor": "0.18.1",
    "monaco-editor": "0.15.6",
    "object.entries": "1.1.0",
    "office-ui-fabric-react": "7.134.1",
    "p-retry": "4.2.0",
@@ -119,7 +115,7 @@
    "@types/prop-types": "15.5.8",
    "@types/puppeteer": "3.0.1",
    "@types/q": "1.5.1",
    "@types/react": "16.9.56",
    "@types/react": "16.9.49",
    "@types/react-dom": "16.0.7",
    "@types/react-notification-system": "0.2.39",
    "@types/react-redux": "7.1.7",
@@ -130,6 +126,7 @@
    "@types/webfontloader": "1.6.29",
    "@typescript-eslint/eslint-plugin": "4.0.1",
    "@typescript-eslint/parser": "4.0.1",
    "adal-angular": "1.0.15",
    "axe-puppeteer": "1.1.0",
    "babel-jest": "24.9.0",
    "babel-loader": "8.1.0",
@@ -197,8 +194,8 @@
    "compile": "tsc",
    "compile:contracts": "tsc -p ./tsconfig.contracts.json",
    "compile:strict": "tsc -p ./tsconfig.strict.json",
    "format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
    "format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
    "format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
    "format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
    "lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
    "build:contracts": "npm run compile:contracts",
    "strictEligibleFiles": "node ./strict-migration-tools/index.js",
@@ -12,4 +12,4 @@
    "g.V('1').addE('knows').to(g.V('2')).outV().addE('knows').to(g.V('3'))",
    "g.V('3').addE('knows').to(g.V('4'))"
  ]
}
}
@@ -113,6 +113,7 @@ export class Features {
  public static readonly enableTtl = "enablettl";
  public static readonly enableNotebooks = "enablenotebooks";
  public static readonly enableGalleryPublish = "enablegallerypublish";
  public static readonly enableCodeOfConduct = "enablecodeofconduct";
  public static readonly enableLinkInjection = "enablelinkinjection";
  public static readonly enableSpark = "enablespark";
  public static readonly livyEndpoint = "livyendpoint";
@@ -123,9 +124,7 @@
  public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
  public static readonly ttl90Days = "ttl90days";
  public static readonly enableRightPanelV2 = "enablerightpanelv2";
  public static readonly enableSchema = "enableschema";
  public static readonly enableSDKoperations = "enablesdkoperations";
  public static readonly showMinRUSurvey = "showminrusurvey";
}

// flight names returned from the portal are always lowercase
@@ -178,7 +177,6 @@ export class CassandraBackend {
  public static readonly schemaApi: string = "api/cassandra/schema";
  public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
}

export class Queries {
  public static CustomPageOption: string = "custom";
  public static UnlimitedPageOption: string = "unlimited";
@@ -1,7 +1,6 @@
import * as Cosmos from "@azure/cosmos";
import { RequestInfo, setAuthorizationTokenHeaderUsingMasterKey } from "@azure/cosmos";
import { configContext, Platform } from "../ConfigContext";
import { getErrorMessage } from "./ErrorHandlingUtils";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { EmulatorMasterKey, HttpHeaders } from "./Constants";
import { userContext } from "../UserContext";
@@ -70,7 +69,7 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
    const result = JSON.parse(await response.json());
    return result;
  } catch (error) {
    logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
    logConsoleError(`Failed to get authorization headers for ${resourceType}: ${error.message}`);
    return Promise.reject(error);
  }
}
@@ -1,13 +1,13 @@
import { getCommonQueryOptions } from "./queryDocuments";
import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";

describe("getCommonQueryOptions", () => {
  it("builds the correct default options objects", () => {
    expect(getCommonQueryOptions({})).toMatchSnapshot();
  });
  it("reads from localStorage", () => {
    LocalStorageUtility.setEntryNumber(StorageKey.ActualItemPerPage, 37);
    LocalStorageUtility.setEntryNumber(StorageKey.MaxDegreeOfParellism, 17);
    expect(getCommonQueryOptions({})).toMatchSnapshot();
  });
});
import { getCommonQueryOptions } from "./DataAccessUtilityBase";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";

describe("getCommonQueryOptions", () => {
  it("builds the correct default options objects", () => {
    expect(getCommonQueryOptions({})).toMatchSnapshot();
  });
  it("reads from localStorage", () => {
    LocalStorageUtility.setEntryNumber(StorageKey.ActualItemPerPage, 37);
    LocalStorageUtility.setEntryNumber(StorageKey.MaxDegreeOfParellism, 17);
    expect(getCommonQueryOptions({})).toMatchSnapshot();
  });
});
182
src/Common/DataAccessUtilityBase.ts
Normal file
@@ -0,0 +1,182 @@
import {
  ConflictDefinition,
  FeedOptions,
  ItemDefinition,
  OfferDefinition,
  QueryIterator,
  Resource
} from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import Q from "q";
import { configContext, Platform } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import { OfferUtils } from "../Utils/OfferUtils";
import * as Constants from "./Constants";
import { client } from "./CosmosClient";
import * as HeadersUtility from "./HeadersUtility";
import { sendCachedDataMessage } from "./MessageHandler";

export function getCommonQueryOptions(options: FeedOptions): any {
  const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
  options = options || {};
  options.populateQueryMetrics = true;
  options.enableScanInQuery = options.enableScanInQuery || true;
  if (!options.partitionKey) {
    options.forceQueryPlan = true;
  }
  options.maxItemCount =
    options.maxItemCount ||
    (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
    Constants.Queries.itemsPerPage;
  options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);

  return options;
}

export function queryDocuments(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
  options = getCommonQueryOptions(options);
  const documentsIterator = client()
    .database(databaseId)
    .container(containerId)
    .items.query(query, options);
  return Q(documentsIterator);
}

export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
  const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
  const partitionKeyValue: any = conflictId.partitionKeyValue;

  return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
}

export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
  if (!partitionKeyDefinition) {
    return undefined;
  }

  if (partitionKeyValue === undefined) {
    return [{}];
  }

  return [partitionKeyValue];
}

export function updateDocument(
  collection: ViewModels.CollectionBase,
  documentId: DocumentId,
  newDocument: any
): Q.Promise<any> {
  const partitionKey = documentId.partitionKeyValue;

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), partitionKey)
      .replace(newDocument)
      .then(response => response.resource)
  );
}

export function executeStoredProcedure(
  collection: ViewModels.Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: any,
  params: any[]
): Q.Promise<any> {
  // TODO remove this deferred. Kept it because of timeout code at bottom of function
  const deferred = Q.defer<any>();

  client()
    .database(collection.databaseId)
    .container(collection.id())
    .scripts.storedProcedure(storedProcedure.id())
    .execute(partitionKeyValue, params, { enableScriptLogging: true })
    .then(response =>
      deferred.resolve({
        result: response.resource,
        scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
      })
    )
    .catch(error => deferred.reject(error));

  return deferred.promise.timeout(
    Constants.ClientDefaults.requestTimeoutMs,
    `Request timed out while executing stored procedure ${storedProcedure.id()}`
  );
}

export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .items.create(newDocument)
      .then(response => response.resource)
  );
}

export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  const partitionKey = documentId.partitionKeyValue;

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), partitionKey)
      .read()
      .then(response => response.resource)
  );
}

export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  const partitionKey = documentId.partitionKeyValue;

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), partitionKey)
      .delete()
  );
}

export function deleteConflict(
  collection: ViewModels.CollectionBase,
  conflictId: ConflictId,
  options: any = {}
): Q.Promise<any> {
  options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .conflict(conflictId.id())
      .delete(options)
  );
}

export function queryConflicts(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
  const documentsIterator = client()
    .database(databaseId)
    .container(containerId)
    .conflicts.query(query, options);
  return Q(documentsIterator);
}
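To illustrate how these helpers compose: a caller obtains an iterator from `queryDocuments` (with `getCommonQueryOptions` filling in the paging defaults) and pulls a page from it. A minimal sketch; the database and container ids are illustrative:

```ts
// Minimal usage sketch of the helpers above; "mydb" / "mycoll" are illustrative ids.
import { queryDocuments } from "./DataAccessUtilityBase";

queryDocuments("mydb", "mycoll", "SELECT * FROM c", {})
  .then(iterator => iterator.fetchNext()) // QueryIterator comes from @azure/cosmos
  .then(response => {
    // populateQueryMetrics, maxItemCount, etc. were defaulted by getCommonQueryOptions
    console.log(`${response.resources.length} documents in the first page`);
  });
```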
217
src/Common/DocumentClientUtilityBase.ts
Normal file
@@ -0,0 +1,217 @@
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import Q from "q";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
import * as Constants from "./Constants";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
import { handleError } from "./ErrorHandlingUtils";

// TODO: Log all promise resolutions and errors with verbosity levels
export function queryDocuments(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
  return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
}

export function queryConflicts(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
  return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
}

export function getEntityName() {
  const defaultExperience =
    window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
  if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
    return "document";
  }
  return "item";
}

export function executeStoredProcedure(
  collection: ViewModels.Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: any,
  params: any[]
): Q.Promise<any> {
  var deferred = Q.defer<any>();

  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
  DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
    .then(
      (response: any) => {
        deferred.resolve(response);
        logConsoleInfo(
          `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
        );
      },
      (error: any) => {
        handleError(
          error,
          `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`,
          "ExecuteStoredProcedure"
        );
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function queryDocumentsPage(
  resourceName: string,
  documentsIterator: MinimalQueryIterator,
  firstItemIndex: number,
  options: any
): Q.Promise<ViewModels.QueryResults> {
  var deferred = Q.defer<ViewModels.QueryResults>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
  Q(nextPage(documentsIterator, firstItemIndex))
    .then(
      (result: ViewModels.QueryResults) => {
        const itemCount = (result.documents && result.documents.length) || 0;
        logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
        deferred.resolve(result);
      },
      (error: any) => {
        handleError(error, `Failed to query ${entityName} for container ${resourceName}`, "QueryDocumentsPage");
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
  DataAccessUtilityBase.readDocument(collection, documentId)
    .then(
      (document: any) => {
        deferred.resolve(document);
      },
      (error: any) => {
        handleError(error, `Failed to read ${entityName} ${documentId.id()}`, "ReadDocument");
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function updateDocument(
  collection: ViewModels.CollectionBase,
  documentId: DocumentId,
  newDocument: any
): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
  DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
    .then(
      (updatedDocument: any) => {
        logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
        deferred.resolve(updatedDocument);
      },
      (error: any) => {
        handleError(error, `Failed to update ${entityName} ${documentId.id()}`, "UpdateDocument");
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
  DataAccessUtilityBase.createDocument(collection, newDocument)
    .then(
      (savedDocument: any) => {
        logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
        deferred.resolve(savedDocument);
      },
      (error: any) => {
        handleError(error, `Error while creating new ${entityName} for container ${collection.id()}`, "CreateDocument");
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
  DataAccessUtilityBase.deleteDocument(collection, documentId)
    .then(
      (response: any) => {
        logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
        deferred.resolve(response);
      },
      (error: any) => {
        handleError(error, `Error while deleting ${entityName} ${documentId.id()}`, "DeleteDocument");
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function deleteConflict(
  collection: ViewModels.CollectionBase,
  conflictId: ConflictId,
  options?: any
): Q.Promise<any> {
  var deferred = Q.defer<any>();

  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
  DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
    .then(
      (response: any) => {
        logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
        deferred.resolve(response);
      },
      (error: any) => {
        handleError(error, `Error while deleting conflict ${conflictId.id()}`, "DeleteConflict");
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}
@@ -1,10 +0,0 @@
import { DefaultAccountExperienceType } from "../DefaultAccountExperienceType";
import { userContext } from "../UserContext";

export const getEntityName = (): string => {
  if (userContext.defaultExperience === DefaultAccountExperienceType.MongoDB) {
    return "document";
  }

  return "item";
};
@@ -1,6 +1,8 @@
export function normalizeArmEndpoint(uri: string): string {
  if (uri && uri.slice(-1) !== "/") {
    return `${uri}/`;
export default class EnvironmentUtility {
  public static normalizeArmEndpointUri(uri: string): string {
    if (uri && uri.slice(-1) !== "/") {
      return `${uri}/`;
    }
    return uri;
  }
  return uri;
}
@@ -1,56 +1,11 @@
import { ARMError } from "../Utils/arm/request";
import { HttpStatusCodes } from "./Constants";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/SubscriptionType";
import { CosmosError, sendNotificationForError } from "./dataAccess/sendNotificationForError";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler";
import { replaceKnownError } from "./ErrorParserUtility";

export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
  const errorMessage = getErrorMessage(error);
  const errorCode = error instanceof ARMError ? error.code : undefined;

  // logs error to data explorer console
  const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
  logConsoleError(consoleErrorMessage);

  // logs error to both app insight and kusto
  logError(errorMessage, area, errorCode);

  // checks for errors caused by firewall and sends them to portal to handle
  sendNotificationForError(errorMessage, errorCode);
};

export const getErrorMessage = (error: string | Error): string => {
  const errorMessage = typeof error === "string" ? error : error.message;
  return replaceKnownError(errorMessage);
};

export const getErrorStack = (error: string | Error): string => {
  return typeof error === "string" ? undefined : error.stack;
};

const sendNotificationForError = (errorMessage: string, errorCode: number | string): void => {
  if (errorCode === HttpStatusCodes.Forbidden) {
    if (errorMessage?.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
      return;
    }
    sendMessage({
      type: MessageTypes.ForbiddenError,
      reason: errorMessage
    });
  }
};

const replaceKnownError = (errorMessage: string): string => {
  if (
    window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
    errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0
  ) {
    return "Database throughput is not supported for internal subscriptions.";
  } else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) {
    return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
  }

  return errorMessage;
export const handleError = (error: CosmosError, consoleErrorPrefix: string, area: string): void => {
  const sanitizedErrorMsg = replaceKnownError(error.message);
  logConsoleError(`${consoleErrorPrefix}:\n ${sanitizedErrorMsg}`);
  logError(sanitizedErrorMsg, area, error.code);
  sendNotificationForError(error);
};
24
src/Common/ErrorParserUtility.test.ts
Normal file
@@ -0,0 +1,24 @@
import * as ErrorParserUtility from "./ErrorParserUtility";

describe("Error Parser Utility", () => {
  describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
    it("should parse a backend error correctly", () => {
      // A fake error matching what is thrown by the SDK on a bad collection create request
      const innerMessage =
        "The partition key component definition path '/asdwqr31 @#$#$WRadf' could not be accepted, failed near position '10'. Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
      const message = `Message: {\"Errors\":[\"${innerMessage}\"]}\r\nActivityId: 97b2e684-7505-4921-85f6-2513b9b28220, Request URI: /apps/89fdcf25-2a0b-4d2a-aab6-e161e565b26f/services/54911149-7bb1-4e7d-a1fa-22c8b36a4bb9/partitions/cc2a7a04-5f5a-4709-bcf7-8509b264963f/replicas/132304018743619218p, RequestStats: , SDK: Microsoft.Azure.Documents.Common/2.10.0`;
      const err = new Error(message) as any;
      err.code = 400;
      err.body = {
        code: "BadRequest",
        message
      };
      err.headers = {};
      err.activityId = "97b2e684-7505-4921-85f6-2513b9b28220";

      const parsedError = ErrorParserUtility.parse(err);
      expect(parsedError.length).toBe(1);
      expect(parsedError[0].message).toBe(innerMessage);
    });
  });
});
69
src/Common/ErrorParserUtility.ts
Normal file
@@ -0,0 +1,69 @@
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";

export function replaceKnownError(err: string): string {
  if (
    window.dataExplorer.subscriptionType() === ViewModels.SubscriptionType.Internal &&
    err.indexOf("SharedOffer is Disabled for your account") >= 0
  ) {
    return "Database throughput is not supported for internal subscriptions.";
  } else if (err.indexOf("Partition key paths must contain only valid") >= 0) {
    return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
  }

  return err;
}

export function parse(err: any): DataModels.ErrorDataModel[] {
  try {
    return _parse(err);
  } catch (e) {
    return [<DataModels.ErrorDataModel>{ message: JSON.stringify(err) }];
  }
}

function _parse(err: any): DataModels.ErrorDataModel[] {
  var normalizedErrors: DataModels.ErrorDataModel[] = [];
  if (err.message && !err.code) {
    normalizedErrors.push(err);
  } else {
    const innerErrors: any[] = _getInnerErrors(err.message);
    normalizedErrors = innerErrors.map(innerError =>
      typeof innerError === "string" ? { message: innerError } : innerError
    );
  }

  return normalizedErrors;
}

function _getInnerErrors(message: string): any[] {
  /*
  The backend error message has an inner-message which is a stringified object.

  For SQL errors, the "errors" property is an array of SqlErrorDataModel.
  Example:
    "Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p"
  For non-SQL errors the "Errors" property is an array of strings.
  Example:
    "Message: {"errors":[{"severity":"Error","location":{"start":7,"end":8},"code":"SC1001","message":"Syntax error, incorrect syntax near '.'."}]}\r\nActivityId: d3300016d4084e310a, Request URI: /apps/12401f9e1df77/services/dc100232b1f44545/partitions/f86f3bc0001a2f78/replicas/13085003638s"
  */

  let innerMessage: any = null;

  const singleLineMessage = message.replace(/[\r\n]|\r|\n/g, "");
  try {
    // Multi-partition error flavor
    const regExp = /^(.*)ActivityId: (.*)/g;
    const regString = regExp.exec(singleLineMessage);
    const innerMessageString = regString[1];
    innerMessage = JSON.parse(innerMessageString);
  } catch (e) {
    // Single-partition error flavor
    const regExp = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g;
    const regString = regExp.exec(singleLineMessage);
    const innerMessageString = regString[1];
    innerMessage = JSON.parse(innerMessageString);
  }

  return innerMessage.errors ? innerMessage.errors : innerMessage.Errors;
}
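To make the two flavors concrete, here is a small sketch of running a backend-style error through `parse()`, reusing the example message from the comment above (the `code` value is illustrative):

```ts
// Minimal sketch of parse() on a backend-style error; the message string is the
// example quoted in _getInnerErrors above, and code 409 is illustrative.
import * as ErrorParserUtility from "./ErrorParserUtility";

const err = new Error(
  'Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p'
) as any;
err.code = 409; // Conflict; presence of code routes the message through _getInnerErrors

const parsed = ErrorParserUtility.parse(err);
// parsed[0].message === "Resource with specified id or name already exists"
```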
@@ -21,8 +21,14 @@ export function logWarning(message: string, area: string, code?: number): void {
  return _logEntry(entry);
}

export function logError(errorMessage: string, area: string, code?: number | string): void {
  const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, errorMessage, area, code);
export function logError(message: string | Error, area: string, code?: number): void {
  let logMessage: string;
  if (typeof message === "string") {
    logMessage = message;
  } else {
    logMessage = JSON.stringify(message, Object.getOwnPropertyNames(message));
  }
  const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, logMessage, area, code);
  return _logEntry(entry);
}

@@ -53,7 +59,7 @@ function _generateLogEntry(
  level: Diagnostics.LogEntryLevel,
  message: string,
  area: string,
  code?: number | string
  code?: number
): Diagnostics.LogEntry {
  return {
    timestamp: new Date().getUTCSeconds(),
@@ -1,64 +0,0 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";

describe("parseSDKOfferResponse", () => {
  it("manual throughput", () => {
    const mockOfferDefinition = {
      content: {
        offerThroughput: 500,
        collectionThroughputInfo: {
          minimumRUForCollection: 400,
          numPhysicalPartitions: 1
        }
      },
      id: "test"
    } as SDKOfferDefinition;

    const mockResponse = {
      resource: mockOfferDefinition
    } as OfferResponse;

    const expectedResult: Offer = {
      manualThroughput: 500,
      autoscaleMaxThroughput: undefined,
      minimumThroughput: 400,
      id: "test",
      offerDefinition: mockOfferDefinition,
      offerReplacePending: false
    };

    expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
  });

  it("autoscale throughput", () => {
    const mockOfferDefinition = {
      content: {
        offerThroughput: 400,
        collectionThroughputInfo: {
          minimumRUForCollection: 400,
          numPhysicalPartitions: 1
        },
        offerAutopilotSettings: {
          maxThroughput: 5000
        }
      },
      id: "test"
    } as SDKOfferDefinition;

    const mockResponse = {
      resource: mockOfferDefinition
    } as OfferResponse;

    const expectedResult: Offer = {
      manualThroughput: undefined,
      autoscaleMaxThroughput: 5000,
      minimumThroughput: 400,
      id: "test",
      offerDefinition: mockOfferDefinition,
      offerReplacePending: false
    };

    expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
  });
});
@@ -1,34 +0,0 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
import { HttpHeaders } from "./Constants";

export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
  const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
  const offerContent = offerDefinition.content;
  if (!offerContent) {
    return undefined;
  }

  const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
  const autopilotSettings = offerContent.offerAutopilotSettings;

  if (autopilotSettings) {
    return {
      id: offerDefinition.id,
      autoscaleMaxThroughput: autopilotSettings.maxThroughput,
      manualThroughput: undefined,
      minimumThroughput,
      offerDefinition,
      offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
    };
  }

  return {
    id: offerDefinition.id,
    autoscaleMaxThroughput: undefined,
    manualThroughput: offerContent.offerThroughput,
    minimumThroughput,
    offerDefinition,
    offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
  };
};
@@ -16,7 +16,7 @@ const notificationsPath = () => {
};

export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
  if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
  if (configContext.platform === Platform.Emulator) {
    return [];
  }
@@ -3,18 +3,17 @@ import * as _ from "underscore";
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
import Explorer from "../Explorer/Explorer";
|
||||
import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
|
||||
import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
|
||||
import DocumentId from "../Explorer/Tree/DocumentId";
|
||||
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
|
||||
import { QueryUtils } from "../Utils/QueryUtils";
|
||||
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
|
||||
import { userContext } from "../UserContext";
|
||||
import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
|
||||
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
|
||||
import { createCollection } from "./dataAccess/createCollection";
|
||||
import { handleError } from "./ErrorHandlingUtils";
|
||||
import { createDocument } from "./dataAccess/createDocument";
|
||||
import { deleteDocument } from "./dataAccess/deleteDocument";
|
||||
import { queryDocuments } from "./dataAccess/queryDocuments";
|
||||
import * as ErrorParserUtility from "./ErrorParserUtility";
|
||||
import * as Logger from "./Logger";
|
||||
|
||||
export class QueriesClient {
|
||||
private static readonly PartitionKey: DataModels.PartitionKey = {
|
||||
@@ -33,7 +32,10 @@ export class QueriesClient {
|
||||
return Promise.resolve(queriesCollection.rawDataModel);
|
||||
}
|
||||
|
||||
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
|
||||
const id = NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.InProgress,
|
||||
"Setting up account for saving queries"
|
||||
);
|
||||
return createCollection({
|
||||
collectionId: SavedQueries.CollectionName,
|
||||
createNewDatabase: true,
|
||||
@@ -44,22 +46,33 @@ export class QueriesClient {
|
||||
})
|
||||
.then(
|
||||
(collection: DataModels.Collection) => {
|
||||
NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Info,
|
||||
"Successfully set up account for saving queries"
|
||||
);
|
||||
return Promise.resolve(collection);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
|
||||
return Promise.reject(error);
|
||||
const stringifiedError: string = error.message;
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to set up account for saving queries: ${stringifiedError}`
|
||||
);
|
||||
Logger.logError(stringifiedError, "setupQueriesCollection");
|
||||
return Promise.reject(stringifiedError);
|
||||
}
|
||||
)
|
||||
.finally(() => clearMessage());
|
||||
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
|
||||
}
|
||||
|
||||
public async saveQuery(query: DataModels.Query): Promise<void> {
|
||||
const queriesCollection = this.findQueriesCollection();
|
||||
if (!queriesCollection) {
|
||||
const errorMessage: string = "Account not set up to perform saved query operations";
|
||||
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to save query ${query.queryName}: ${errorMessage}`
|
||||
);
|
||||
return Promise.reject(errorMessage);
|
||||
}
|
||||
|
||||
@@ -67,85 +80,121 @@ export class QueriesClient {
|
||||
this.validateQuery(query);
|
||||
} catch (error) {
|
||||
const errorMessage: string = "Invalid query specified";
|
||||
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to save query ${query.queryName}: ${errorMessage}`
|
||||
);
|
||||
return Promise.reject(errorMessage);
|
||||
}
|
||||
|
||||
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
|
||||
const id = NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.InProgress,
|
||||
`Saving query ${query.queryName}`
|
||||
);
|
||||
query.id = query.queryName;
|
||||
return createDocument(queriesCollection, query)
|
||||
.then(
|
||||
(savedQuery: DataModels.Query) => {
|
||||
NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Info,
|
||||
`Successfully saved query ${query.queryName}`
|
||||
);
|
||||
return Promise.resolve();
|
||||
},
|
||||
(error: any) => {
|
||||
if (error.code === HttpStatusCodes.Conflict.toString()) {
|
||||
error = `Query ${query.queryName} already exists`;
|
||||
let errorMessage: string;
|
||||
const parsedError: DataModels.ErrorDataModel = ErrorParserUtility.parse(error)[0];
|
||||
if (parsedError.code === HttpStatusCodes.Conflict.toString()) {
|
||||
errorMessage = `Query ${query.queryName} already exists`;
|
||||
} else {
|
||||
errorMessage = parsedError.message;
|
||||
}
|
||||
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
|
||||
return Promise.reject(error);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to save query ${query.queryName}: ${errorMessage}`
|
||||
);
|
||||
Logger.logError(JSON.stringify(parsedError), "saveQuery");
|
||||
return Promise.reject(errorMessage);
|
||||
}
|
||||
)
|
||||
.finally(() => clearMessage());
|
||||
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
|
||||
}
|
||||
|
||||
  public async getQueries(): Promise<DataModels.Query[]> {
    const queriesCollection = this.findQueriesCollection();
    if (!queriesCollection) {
      const errorMessage: string = "Account not set up to perform saved query operations";
      NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to fetch saved queries: ${errorMessage}`
      );
      return Promise.reject(errorMessage);
    }

    const options: any = { enableCrossPartitionQuery: true };
    const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
    const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
      SavedQueries.DatabaseName,
      SavedQueries.CollectionName,
      this.fetchQueriesQuery(),
      options
    );
    const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
      await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
    return QueryUtils.queryAllPages(fetchQueries)
    const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries");
    return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options)
      .then(
        (results: ViewModels.QueryResults) => {
          let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
            if (!document) {
              return undefined;
        (queryIterator: QueryIterator<ItemDefinition & Resource>) => {
          const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> =>
            queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options);
          return QueryUtils.queryAllPages(fetchQueries).then(
            (results: ViewModels.QueryResults) => {
              let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
                if (!document) {
                  return undefined;
                }
                const { id, resourceId, query, queryName } = document;
                const parsedQuery: DataModels.Query = {
                  resourceId: resourceId,
                  queryName: queryName,
                  query: query,
                  id: id
                };
                try {
                  this.validateQuery(parsedQuery);
                  return parsedQuery;
                } catch (error) {
                  return undefined;
                }
              });
              queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
              NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries");
              return Promise.resolve(queries);
            },
            (error: any) => {
              const stringifiedError: string = error.message;
              NotificationConsoleUtils.logConsoleMessage(
                ConsoleDataType.Error,
                `Failed to fetch saved queries: ${stringifiedError}`
              );
              Logger.logError(stringifiedError, "getSavedQueries");
              return Promise.reject(stringifiedError);
            }
            const { id, resourceId, query, queryName } = document;
            const parsedQuery: DataModels.Query = {
              resourceId: resourceId,
              queryName: queryName,
              query: query,
              id: id
            };
            try {
              this.validateQuery(parsedQuery);
              return parsedQuery;
            } catch (error) {
              return undefined;
            }
          });
          queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
          NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
          return Promise.resolve(queries);
          );
        },
        (error: any) => {
          handleError(error, "getSavedQueries", "Failed to fetch saved queries");
          return Promise.reject(error);
          // should never get into this state but we handle this regardless
          const stringifiedError: string = error.message;
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Error,
            `Failed to fetch saved queries: ${stringifiedError}`
          );
          Logger.logError(stringifiedError, "getSavedQueries");
          return Promise.reject(stringifiedError);
        }
      )
      .finally(() => clearMessage());
      .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
  }

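getQueries drains the iterator through QueryUtils.queryAllPages, handing it a fetchQueries(firstItemIndex) page callback. A sketch of how such an accumulation loop can work; the QueryResults field names (documents, hasMoreResults, itemCount) are assumptions taken from how this diff uses them, not a copy of src/Utils/QueryUtils.ts:

  // Sketch only: drain a paged fetcher into one combined result set.
  interface QueryResults {
    documents: unknown[];
    hasMoreResults: boolean;
    itemCount: number;
  }

  export async function queryAllPages(
    fetchPage: (firstItemIndex: number) => Promise<QueryResults>
  ): Promise<QueryResults> {
    const all: QueryResults = { documents: [], hasMoreResults: false, itemCount: 0 };
    let page: QueryResults;
    do {
      page = await fetchPage(all.itemCount); // each page starts where the previous one ended
      all.documents.push(...page.documents);
      all.itemCount += page.documents.length;
    } while (page.hasMoreResults);
    return all;
  }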
  public async deleteQuery(query: DataModels.Query): Promise<void> {
    const queriesCollection = this.findQueriesCollection();
    if (!queriesCollection) {
      const errorMessage: string = "Account not set up to perform saved query operations";
      NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to fetch saved queries: ${errorMessage}`
      );
      return Promise.reject(errorMessage);
    }

@@ -153,10 +202,16 @@ export class QueriesClient {
      this.validateQuery(query);
    } catch (error) {
      const errorMessage: string = "Invalid query specified";
      NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to delete query ${query.queryName}: ${errorMessage}`
      );
    }

    const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
    const id = NotificationConsoleUtils.logConsoleMessage(
      ConsoleDataType.InProgress,
      `Deleting query ${query.queryName}`
    );
    query.id = query.queryName;
    const documentId = new DocumentId(
      {
@@ -170,15 +225,23 @@ export class QueriesClient {
    return deleteDocument(queriesCollection, documentId)
      .then(
        () => {
          NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Info,
            `Successfully deleted query ${query.queryName}`
          );
          return Promise.resolve();
        },
        (error: any) => {
          handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
          return Promise.reject(error);
          const stringifiedError: string = error.message;
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Error,
            `Failed to delete query ${query.queryName}: ${stringifiedError}`
          );
          Logger.logError(stringifiedError, "deleteQuery");
          return Promise.reject(stringifiedError);
        }
      )
      .finally(() => clearMessage());
      .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
  }

  public getResourceId(): string {

@@ -1,5 +1,6 @@
jest.mock("../../Utils/arm/request");
jest.mock("../CosmosClient");
jest.mock("../DataAccessUtilityBase");
import { AuthType } from "../../AuthType";
import { CreateCollectionParams, DatabaseAccount } from "../../Contracts/DataModels";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";

@@ -55,7 +55,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
    logConsoleInfo(`Successfully created container ${params.collectionId}`);
    return collection;
  } catch (error) {
    handleError(error, "CreateCollection", `Error while creating container ${params.collectionId}`);
    handleError(error, `Error while creating container ${params.collectionId}`, "CreateCollection");
    throw error;
  } finally {
    clearMessage();

@@ -41,7 +41,7 @@ export async function createDatabase(params: DataModels.CreateDatabaseParams): P
    logConsoleInfo(`Successfully created database ${params.databaseId}`);
    return database;
  } catch (error) {
    handleError(error, "CreateDatabase", `Error while creating database ${params.databaseId}`);
    handleError(error, `Error while creating database ${params.databaseId}`, "CreateDatabase");
    throw error;
  } finally {
    clearMessage();

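Nearly every hunk from here down is the same mechanical edit: the second and third arguments of handleError trade places, so the human-readable console message and the telemetry area tag swap positions (which order counts as "old" depends on which side of the compare you treat as the base; the error object always stays first). A compilable sketch of the two call shapes, with a deliberately loose declared signature; this is an illustration, not the repo's ErrorHandlingUtils:

  // Sketch only: the two argument orders seen on either side of these hunks.
  declare function handleError(error: unknown, second: string, third?: string): void;

  const error = new Error("example");
  const collectionId = "container1";
  handleError(error, "CreateCollection", `Error while creating container ${collectionId}`); // area tag first
  handleError(error, `Error while creating container ${collectionId}`, "CreateCollection"); // message first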
@@ -1,25 +0,0 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

export const createDocument = async (collection: CollectionBase, newDocument: unknown): Promise<unknown> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .items.create(newDocument);

    logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
    return response?.resource;
  } catch (error) {
    handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
@@ -70,7 +70,7 @@ export async function createStoredProcedure(
      .scripts.storedProcedures.create(storedProcedure);
    return response?.resource;
  } catch (error) {
    handleError(error, "CreateStoredProcedure", `Error while creating stored procedure ${storedProcedure.id}`);
    handleError(error, `Error while creating stored procedure ${storedProcedure.id}`, "CreateStoredProcedure");
    throw error;
  } finally {
    clearMessage();

@@ -7,8 +7,9 @@ import {
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function createTrigger(
@@ -65,7 +66,9 @@ export async function createTrigger(
      .scripts.triggers.create(trigger);
    return response.resource;
  } catch (error) {
    handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
    logConsoleError(`Error while creating trigger ${trigger.id}:\n ${error.message}`);
    logError(error.message, "CreateTrigger", error.code);
    sendNotificationForError(error);
    throw error;
  } finally {
    clearMessage();

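This hunk shows the other direction of the same refactor: one side calls the centralized handleError, the other inlines its three ingredients at the call site, a notification-console entry, a telemetry log, and the portal notification. A sketch of how a handleError built from those pieces could look (the declared stubs are assumptions standing in for NotificationConsoleUtils, Logger, and sendNotificationForError.ts):

  // Sketch only: a centralized handler composed of the three inlined calls above.
  declare function logConsoleError(message: string): void;
  declare function logError(message: string, area: string, code?: number): void;
  declare function sendNotificationForError(error: { code?: number; message?: string }): void;

  export function handleError(error: { code?: number; message?: string }, consolePrefix: string, area: string): void {
    logConsoleError(`${consolePrefix}:\n ${error.message}`); // user-facing console entry
    logError(error.message ?? "", area, error.code);         // telemetry
    sendNotificationForError(error);                         // portal message for 403s
  }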
@@ -72,8 +72,8 @@ export async function createUserDefinedFunction(
  } catch (error) {
    handleError(
      error,
      "CreateUserupdateUserDefinedFunction",
      `Error while creating user defined function ${userDefinedFunction.id}`
      `Error while creating user defined function ${userDefinedFunction.id}`,
      "CreateUserupdateUserDefinedFunction"
    );
    throw error;
  } finally {

@@ -23,7 +23,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
    }
    logConsoleInfo(`Successfully deleted container ${collectionId}`);
  } catch (error) {
    handleError(error, "DeleteCollection", `Error while deleting container ${collectionId}`);
    handleError(error, `Error while deleting container ${collectionId}`, "DeleteCollection");
    throw error;
  } finally {
    clearMessage();

@@ -1,36 +0,0 @@
import ConflictId from "../../Explorer/Tree/ConflictId";
import { CollectionBase } from "../../Contracts/ViewModels";
import { RequestOptions } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";

export const deleteConflict = async (collection: CollectionBase, conflictId: ConflictId): Promise<void> => {
  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);

  try {
    const options = {
      partitionKey: getPartitionKeyHeaderForConflict(conflictId)
    };

    await client()
      .database(collection.databaseId)
      .container(collection.id())
      .conflict(conflictId.id())
      .delete(options as RequestOptions);
    logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
  } catch (error) {
    handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};

const getPartitionKeyHeaderForConflict = (conflictId: ConflictId): unknown => {
  if (!conflictId.partitionKey) {
    return undefined;
  }

  return conflictId.partitionKeyValue === undefined ? [{}] : [conflictId.partitionKeyValue];
};
@@ -25,7 +25,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
    }
    logConsoleInfo(`Successfully deleted database ${databaseId}`);
  } catch (error) {
    handleError(error, "DeleteDatabase", `Error while deleting database ${databaseId}`);
    handleError(error, `Error while deleting database ${databaseId}`, "DeleteDatabase");
    throw error;
  } finally {
    clearMessage();

@@ -1,25 +0,0 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
  const entityName: string = getEntityName();
  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);

  try {
    await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .delete();
    logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
  } catch (error) {
    handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
@@ -34,7 +34,7 @@ export async function deleteStoredProcedure(
        .delete();
    }
  } catch (error) {
    handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
    handleError(error, `Error while deleting stored procedure ${storedProcedureId}`, "DeleteStoredProcedure");
    throw error;
  } finally {
    clearMessage();

@@ -30,7 +30,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
        .delete();
    }
  } catch (error) {
    handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
    handleError(error, `Error while deleting trigger ${triggerId}`, "DeleteTrigger");
    throw error;
  } finally {
    clearMessage();

@@ -30,7 +30,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
        .delete();
    }
  } catch (error) {
    handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
    handleError(error, `Error while deleting user defined function ${id}`, "DeleteUserDefinedFunction");
    throw error;
  } finally {
    clearMessage();

@@ -1,48 +0,0 @@
import { Collection } from "../../Contracts/ViewModels";
import { ClientDefaults, HttpHeaders } from "../Constants";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import StoredProcedure from "../../Explorer/Tree/StoredProcedure";

export interface ExecuteSprocResult {
  result: StoredProcedure;
  scriptLogs: string;
}

export const executeStoredProcedure = async (
  collection: Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: string,
  params: string[]
): Promise<ExecuteSprocResult> => {
  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
  const timeout = setTimeout(() => {
    throw Error(`Request timed out while executing stored procedure ${storedProcedure.id()}`);
  }, ClientDefaults.requestTimeoutMs);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .scripts.storedProcedure(storedProcedure.id())
      .execute(partitionKeyValue, params, { enableScriptLogging: true });
    clearTimeout(timeout);
    logConsoleInfo(
      `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
    );
    return {
      result: response.resource,
      scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string
    };
  } catch (error) {
    handleError(
      error,
      "ExecuteStoredProcedure",
      `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
    );
    throw error;
  } finally {
    clearMessage();
  }
};
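One detail worth flagging in this removed file: the setTimeout callback throws, but an exception raised from a timer callback cannot reject the promise the caller is awaiting; it surfaces as an unhandled error instead. A hedged sketch of a timeout that actually races the request (same ClientDefaults.requestTimeoutMs idea as above; the Promise.race shape is an assumption, not the repo's replacement):

  // Sketch only: a timeout that rejects the awaited promise rather than
  // throwing from inside a timer callback, where the throw is unreachable.
  async function withTimeout<T>(work: Promise<T>, ms: number, label: string): Promise<T> {
    let timer: ReturnType<typeof setTimeout>;
    const timeout = new Promise<never>((_, reject) => {
      timer = setTimeout(() => reject(new Error(`Request timed out while ${label}`)), ms);
    });
    try {
      return await Promise.race([work, timeout]); // whichever settles first wins
    } finally {
      clearTimeout(timer!); // always cancel the pending timer
    }
  }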
@@ -1,92 +0,0 @@
import { AuthType } from "../../AuthType";
import { armRequest } from "../../Utils/arm/request";
import { configContext } from "../../ConfigContext";
import { handleError } from "../ErrorHandlingUtils";
import { userContext } from "../../UserContext";

interface TimeSeriesData {
  data: {
    timeStamp: string;
    total: number;
  }[];
  metadatavalues: {
    name: {
      localizedValue: string;
      value: string;
    };
    value: string;
  };
}

interface MetricsData {
  displayDescription: string;
  errorCode: string;
  id: string;
  name: {
    value: string;
    localizedValue: string;
  };
  timeseries: TimeSeriesData[];
  type: string;
  unit: string;
}

interface MetricsResponse {
  cost: number;
  interval: string;
  namespace: string;
  resourceregion: string;
  timespan: string;
  value: MetricsData[];
}

export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }

  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;
  const filter = `DatabaseName eq '${databaseName}' and CollectionName eq '${containerName}'`;
  const metricNames = "DataUsage,IndexUsage";
  const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/providers/microsoft.insights/metrics`;

  try {
    const metricsResponse: MetricsResponse = await armRequest({
      host: configContext.ARM_ENDPOINT,
      path,
      method: "GET",
      apiVersion: "2018-01-01",
      queryParams: {
        filter,
        metricNames
      }
    });

    if (metricsResponse?.value?.length !== 2) {
      return undefined;
    }

    const dataUsageData: MetricsData = metricsResponse.value[0];
    const indexUsagedata: MetricsData = metricsResponse.value[1];
    const dataUsageSizeInKb: number = getUsageSizeInKb(dataUsageData);
    const indexUsageSizeInKb: number = getUsageSizeInKb(indexUsagedata);

    return dataUsageSizeInKb + indexUsageSizeInKb;
  } catch (error) {
    handleError(error, "getCollectionUsageSize");
    throw error;
  }
};

const getUsageSizeInKb = (metricsData: MetricsData): number => {
  if (metricsData?.errorCode !== "Success") {
    throw Error(`Get collection usage size failed: ${metricsData.errorCode}`);
  }

  const timeSeriesData: TimeSeriesData = metricsData?.timeseries?.[0];
  const usageSizeInBytes: number = timeSeriesData?.data?.[0]?.total;

  return usageSizeInBytes ? usageSizeInBytes / 1024 : 0;
};
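For context, the removed helper asks the Azure Monitor metrics endpoint for two series (DataUsage and IndexUsage), converts each latest total from bytes to KB by dividing by 1024, and returns their sum. A hedged usage sketch; the database and container names are placeholders:

  // Sketch only: calling the removed helper.
  async function logContainerUsage(): Promise<void> {
    const totalKb = await getCollectionUsageSizeInKB("SampleDB", "SampleContainer");
    // e.g. 2048 bytes of data + 1024 bytes of index => (2048 + 1024) / 1024 = 3 KB
    console.log(`Container uses ${totalKb} KB (data + index)`);
  }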
@@ -1,28 +0,0 @@
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { AuthType } from "../../AuthType";

export async function getIndexTransformationProgress(databaseId: string, collectionId: string): Promise<number> {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }
  let indexTransformationPercentage: number;
  const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
  try {
    const response = await client()
      .database(databaseId)
      .container(collectionId)
      .read({ populateQuotaInfo: true });

    indexTransformationPercentage = parseInt(
      response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
    );
  } catch (error) {
    handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
    throw error;
  }
  clearMessage();
  return indexTransformationPercentage;
}
@@ -1,14 +0,0 @@
import { ConflictDefinition, FeedOptions, QueryIterator, Resource } from "@azure/cosmos";
import { client } from "../CosmosClient";

export const queryConflicts = (
  databaseId: string,
  containerId: string,
  query: string,
  options: FeedOptions
): QueryIterator<ConflictDefinition & Resource> => {
  return client()
    .database(databaseId)
    .container(containerId)
    .conflicts.query(query, options);
};
@@ -1,34 +0,0 @@
import { Queries } from "../Constants";
import { FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
import { client } from "../CosmosClient";

export const queryDocuments = (
  databaseId: string,
  containerId: string,
  query: string,
  options: FeedOptions
): QueryIterator<ItemDefinition & Resource> => {
  options = getCommonQueryOptions(options);
  return client()
    .database(databaseId)
    .container(containerId)
    .items.query(query, options);
};

export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {
  const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
  options = options || {};
  options.populateQueryMetrics = true;
  options.enableScanInQuery = options.enableScanInQuery || true;
  if (!options.partitionKey) {
    options.forceQueryPlan = true;
  }
  options.maxItemCount =
    options.maxItemCount ||
    (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
    Queries.itemsPerPage;
  options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);

  return options;
};
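A small wart in this removed helper: options.enableScanInQuery = options.enableScanInQuery || true always evaluates to true, so a caller passing false is silently overridden. If a default was the intent, nullish coalescing expresses it without clobbering an explicit false (a sketch, not the repo's fix):

  // Sketch only: default undefined/null to true while preserving an explicit false.
  options.enableScanInQuery = options.enableScanInQuery ?? true;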
@@ -1,26 +0,0 @@
import { QueryResults } from "../../Contracts/ViewModels";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { MinimalQueryIterator, nextPage } from "../IteratorUtilities";
import { handleError } from "../ErrorHandlingUtils";
import { getEntityName } from "../DocumentUtility";

export const queryDocumentsPage = async (
  resourceName: string,
  documentsIterator: MinimalQueryIterator,
  firstItemIndex: number
): Promise<QueryResults> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);

  try {
    const result: QueryResults = await nextPage(documentsIterator, firstItemIndex);
    const itemCount = (result.documents && result.documents.length) || 0;
    logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
    return result;
  } catch (error) {
    handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
    throw error;
  } finally {
    clearMessage();
  }
};
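The two removed helpers were designed to be used together: queryDocuments builds the configured iterator, queryDocumentsPage pulls one page at a time from it. A hedged usage sketch, assuming QueryResults exposes documents and hasMoreResults as this diff suggests; database, container, and query strings are placeholders:

  // Sketch only: paging through a query with the two removed helpers.
  async function readAll(): Promise<void> {
    const iterator = queryDocuments("SampleDB", "SampleContainer", "SELECT * FROM c", { maxItemCount: 100 });
    let firstItemIndex = 0;
    let page;
    do {
      page = await queryDocumentsPage("SampleContainer", iterator, firstItemIndex);
      firstItemIndex += page.documents.length; // advance so console messages report the right window
    } while (page.hasMoreResults);
  }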
@@ -13,7 +13,7 @@ export async function readCollection(databaseId: string, collectionId: string):
      .read();
    collection = response.resource as DataModels.Collection;
  } catch (error) {
    handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
    handleError(error, `Error while querying container ${collectionId}`, "ReadCollection");
    throw error;
  }
  clearMessage();

@@ -1,6 +1,9 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadCollectionOfferParams } from "../../Contracts/DataModels";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
@@ -8,116 +11,113 @@ import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/20
import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK";
import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext";

export const readCollectionOffer = async (params: ReadCollectionOfferParams): Promise<Offer> => {
export const readCollectionOffer = async (
  params: DataModels.ReadCollectionOfferParams
): Promise<DataModels.OfferWithHeaders> => {
  const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`);
  let offerId = params.offerId;
  if (!offerId) {
    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
      try {
        offerId = await getCollectionOfferIdWithARM(params.databaseId, params.collectionId);
      } catch (error) {
        clearMessage();
        if (error.code !== "NotFound") {
          throw error;
        }
        return undefined;
      }
    } else {
      offerId = await getCollectionOfferIdWithSDK(params.collectionResourceId);
      if (!offerId) {
        clearMessage();
        return undefined;
      }
    }
  }

  const options: RequestOptions = {
    initialHeaders: {
      [HttpHeaders.populateCollectionThroughputInfo]: true
    }
  };

  try {
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience !== DefaultAccountExperienceType.Table
    ) {
      return await readCollectionOfferWithARM(params.databaseId, params.collectionId);
    }

    return await readOfferWithSDK(params.offerId, params.collectionResourceId);
    const response = await client()
      .offer(offerId)
      .read(options);
    return (
      response && {
        ...response.resource,
        headers: response.headers
      }
    );
  } catch (error) {
    handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
    handleError(error, `Error while querying offer for collection ${params.collectionId}`, "ReadCollectionOffer");
    throw error;
  } finally {
    clearMessage();
  }
};

const readCollectionOfferWithARM = async (databaseId: string, collectionId: string): Promise<Offer> => {
const getCollectionOfferIdWithARM = async (databaseId: string, collectionId: string): Promise<string> => {
  let rpResponse;
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;
  const defaultExperience = userContext.defaultExperience;

  let rpResponse;
  try {
    switch (defaultExperience) {
      case DefaultAccountExperienceType.DocumentDB:
        rpResponse = await getSqlContainerThroughput(
          subscriptionId,
          resourceGroup,
          accountName,
          databaseId,
          collectionId
        );
        break;
      case DefaultAccountExperienceType.MongoDB:
        rpResponse = await getMongoDBCollectionThroughput(
          subscriptionId,
          resourceGroup,
          accountName,
          databaseId,
          collectionId
        );
        break;
      case DefaultAccountExperienceType.Cassandra:
        rpResponse = await getCassandraTableThroughput(
          subscriptionId,
          resourceGroup,
          accountName,
          databaseId,
          collectionId
        );
        break;
      case DefaultAccountExperienceType.Graph:
        rpResponse = await getGremlinGraphThroughput(
          subscriptionId,
          resourceGroup,
          accountName,
          databaseId,
          collectionId
        );
        break;
      case DefaultAccountExperienceType.Table:
        rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
        break;
      default:
        throw new Error(`Unsupported default experience type: ${defaultExperience}`);
    }
  } catch (error) {
    if (error.code !== "NotFound") {
      throw error;
    }

    return undefined;
  switch (defaultExperience) {
    case DefaultAccountExperienceType.DocumentDB:
      rpResponse = await getSqlContainerThroughput(
        subscriptionId,
        resourceGroup,
        accountName,
        databaseId,
        collectionId
      );
      break;
    case DefaultAccountExperienceType.MongoDB:
      rpResponse = await getMongoDBCollectionThroughput(
        subscriptionId,
        resourceGroup,
        accountName,
        databaseId,
        collectionId
      );
      break;
    case DefaultAccountExperienceType.Cassandra:
      rpResponse = await getCassandraTableThroughput(
        subscriptionId,
        resourceGroup,
        accountName,
        databaseId,
        collectionId
      );
      break;
    case DefaultAccountExperienceType.Graph:
      rpResponse = await getGremlinGraphThroughput(
        subscriptionId,
        resourceGroup,
        accountName,
        databaseId,
        collectionId
      );
      break;
    case DefaultAccountExperienceType.Table:
      rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
      break;
    default:
      throw new Error(`Unsupported default experience type: ${defaultExperience}`);
  }

  const resource = rpResponse?.properties?.resource;
  if (resource) {
    const offerId: string = rpResponse.name;
    const minimumThroughput: number =
      typeof resource.minimumThroughput === "string"
        ? parseInt(resource.minimumThroughput)
        : resource.minimumThroughput;
    const autoscaleSettings = resource.autoscaleSettings;

    if (autoscaleSettings) {
      return {
        id: offerId,
        autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
        manualThroughput: undefined,
        minimumThroughput,
        offerReplacePending: resource.offerReplacePending === "true"
      };
    }

    return {
      id: offerId,
      autoscaleMaxThroughput: undefined,
      manualThroughput: resource.throughput,
      minimumThroughput,
      offerReplacePending: resource.offerReplacePending === "true"
    };
  }

  return undefined;
  return rpResponse?.name;
};

const getCollectionOfferIdWithSDK = async (collectionResourceId: string): Promise<string> => {
  const offers = await readOffers();
  const offer = offers.find(offer => offer.resource === collectionResourceId);
  return offer?.id;
};

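Net effect of this rewrite: instead of mapping the ARM throughput response into an Offer shape itself, one side only resolves the offer id (via ARM throughput GETs or by scanning SDK offers) and then a single SDK offer(id).read(options) returns the throughput document plus its headers. A hedged usage sketch of that version of readCollectionOffer; ids and names are placeholders:

  // Sketch only: calling the rewritten helper.
  async function showOffer(): Promise<void> {
    const offer = await readCollectionOffer({
      databaseId: "SampleDB",
      collectionId: "SampleContainer",
      collectionResourceId: "dbs/abc==/colls/def==/", // used by the SDK fallback path
      offerId: undefined // forces the offer-id lookup branch above
    });
    if (offer) {
      console.log(Object.keys(offer.headers)); // headers ride along on OfferWithHeaders
    }
  }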
45 src/Common/dataAccess/readCollectionQuotaInfo.ts Normal file
@@ -0,0 +1,45 @@
import * as DataModels from "../../Contracts/DataModels";
import * as HeadersUtility from "../HeadersUtility";
import * as ViewModels from "../../Contracts/ViewModels";
import { ContainerDefinition, Resource } from "@azure/cosmos";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

interface ResourceWithStatistics {
  statistics: DataModels.Statistic[];
}

export const readCollectionQuotaInfo = async (
  collection: ViewModels.Collection
): Promise<DataModels.CollectionQuotaInfo> => {
  const clearMessage = logConsoleProgress(`Querying containers for database ${collection.id}`);
  const options: RequestOptions = {};
  options.populateQuotaInfo = true;
  options.initialHeaders = options.initialHeaders || {};
  options.initialHeaders[HttpHeaders.populatePartitionStatistics] = true;

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .read(options);
    const quota: DataModels.CollectionQuotaInfo = HeadersUtility.getQuota(response.headers);
    const resource = response.resource as ContainerDefinition & Resource & ResourceWithStatistics;
    quota["usageSizeInKB"] = resource.statistics.reduce(
      (previousValue: number, currentValue: DataModels.Statistic) => previousValue + currentValue.sizeInKB,
      0
    );
    quota["numPartitions"] = resource.statistics.length;
    quota["uniqueKeyPolicy"] = collection.uniqueKeyPolicy; // TODO: Remove after refactoring (#119617)

    return quota;
  } catch (error) {
    handleError(error, `Error while querying quota info for container ${collection.id}`, "ReadCollectionQuotaInfo");
    throw error;
  } finally {
    clearMessage();
  }
};
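The quota math in this new file is a straight fold over the per-partition statistics the read returns: usage is the sum of each partition's sizeInKB, and the partition count is just the array length. A quick worked sketch of that reduce (the statistics values are made up):

  // Sketch only: the reduce above on illustrative data.
  const statistics = [{ sizeInKB: 512 }, { sizeInKB: 256 }, { sizeInKB: 256 }];
  const usageSizeInKB = statistics.reduce((sum, s) => sum + s.sizeInKB, 0); // 1024
  const numPartitions = statistics.length;                                  // 3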
@@ -29,7 +29,7 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
      .fetchAll();
    return sdkResponse.resources as DataModels.Collection[];
  } catch (error) {
    handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
    handleError(error, `Error while querying containers for database ${databaseId}`, "ReadCollections");
    throw error;
  } finally {
    clearMessage();

@@ -1,43 +1,66 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadDatabaseOfferParams } from "../../Contracts/DataModels";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK";
import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext";

export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promise<Offer> => {
export const readDatabaseOffer = async (
  params: DataModels.ReadDatabaseOfferParams
): Promise<DataModels.OfferWithHeaders> => {
  const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`);
  let offerId = params.offerId;
  if (!offerId) {
    offerId = await (window.authType === AuthType.AAD &&
    !userContext.useSDKOperations &&
    userContext.defaultExperience !== DefaultAccountExperienceType.Table
      ? getDatabaseOfferIdWithARM(params.databaseId)
      : getDatabaseOfferIdWithSDK(params.databaseResourceId));
    if (!offerId) {
      clearMessage();
      return undefined;
    }
  }

  const options: RequestOptions = {
    initialHeaders: {
      [HttpHeaders.populateCollectionThroughputInfo]: true
    }
  };

  try {
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience !== DefaultAccountExperienceType.Table
    ) {
      return await readDatabaseOfferWithARM(params.databaseId);
    }

    return await readOfferWithSDK(params.offerId, params.databaseResourceId);
    const response = await client()
      .offer(offerId)
      .read(options);
    return (
      response && {
        ...response.resource,
        headers: response.headers
      }
    );
  } catch (error) {
    handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
    handleError(error, `Error while querying offer for database ${params.databaseId}`, "ReadDatabaseOffer");
    throw error;
  } finally {
    clearMessage();
  }
};

const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
const getDatabaseOfferIdWithARM = async (databaseId: string): Promise<string> => {
  let rpResponse;
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;
  const defaultExperience = userContext.defaultExperience;

  let rpResponse;
  try {
    switch (defaultExperience) {
      case DefaultAccountExperienceType.DocumentDB:
@@ -55,41 +78,18 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
      default:
        throw new Error(`Unsupported default experience type: ${defaultExperience}`);
    }

    return rpResponse?.name;
  } catch (error) {
    if (error.code !== "NotFound") {
      throw error;
    }

    return undefined;
  }

  const resource = rpResponse?.properties?.resource;
  if (resource) {
    const offerId: string = rpResponse.name;
    const minimumThroughput: number =
      typeof resource.minimumThroughput === "string"
        ? parseInt(resource.minimumThroughput)
        : resource.minimumThroughput;
    const autoscaleSettings = resource.autoscaleSettings;

    if (autoscaleSettings) {
      return {
        id: offerId,
        autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
        manualThroughput: undefined,
        minimumThroughput,
        offerReplacePending: resource.offerReplacePending === "true"
      };
    }

    return {
      id: offerId,
      autoscaleMaxThroughput: undefined,
      manualThroughput: resource.throughput,
      minimumThroughput,
      offerReplacePending: resource.offerReplacePending === "true"
    };
  }

  return undefined;
};

const getDatabaseOfferIdWithSDK = async (databaseResourceId: string): Promise<string> => {
  const offers = await readOffers();
  const offer = offers.find(offer => offer.resource === databaseResourceId);
  return offer?.id;
};

@@ -27,7 +27,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
      databases = sdkResponse.resources as DataModels.Database[];
    }
  } catch (error) {
    handleError(error, "ReadDatabases", `Error while querying databases`);
    handleError(error, `Error while querying databases`, "ReadDatabases");
    throw error;
  }
  clearMessage();

@@ -1,27 +0,0 @@
import { Item } from "@azure/cosmos";
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const readDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<Item> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .read();

    return response?.resource;
  } catch (error) {
    handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
@@ -2,6 +2,8 @@ import { userContext } from "../../UserContext";
import { getMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { MongoDBCollectionResource } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { AuthType } from "../../AuthType";

@@ -22,9 +24,35 @@ export async function readMongoDBCollectionThroughRP(
    const response = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
    collection = response.properties.resource;
  } catch (error) {
    handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
    handleError(error, `Error while reading container ${collectionId}`, "ReadMongoDBCollection");
    throw error;
  }
  clearMessage();
  return collection;
}

export async function getMongoDBCollectionIndexTransformationProgress(
  databaseId: string,
  collectionId: string
): Promise<number> {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }
  let indexTransformationPercentage: number;
  const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
  try {
    const response = await client()
      .database(databaseId)
      .container(collectionId)
      .read({ populateQuotaInfo: true });

    indexTransformationPercentage = parseInt(
      response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
    );
  } catch (error) {
    handleError(error, `Error while reading container ${collectionId}`, "ReadMongoDBCollection");
    throw error;
  }
  clearMessage();
  return indexTransformationPercentage;
}

@@ -1,29 +0,0 @@
import { HttpHeaders } from "../Constants";
import { Offer } from "../../Contracts/DataModels";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readOffers } from "./readOffers";

export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
  if (!offerId) {
    const offers = await readOffers();
    const offer = offers.find(offer => offer.resource === resourceId);

    if (!offer) {
      return undefined;
    }
    offerId = offer.id;
  }

  const options: RequestOptions = {
    initialHeaders: {
      [HttpHeaders.populateCollectionThroughputInfo]: true
    }
  };
  const response = await client()
    .offer(offerId)
    .read(options);

  return parseSDKOfferResponse(response);
};
@@ -1,9 +1,9 @@
import { SDKOfferDefinition } from "../../Contracts/DataModels";
import { Offer } from "../../Contracts/DataModels";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { client } from "../CosmosClient";
import { handleError, getErrorMessage } from "../ErrorHandlingUtils";
import { handleError } from "../ErrorHandlingUtils";

export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
export const readOffers = async (): Promise<Offer[]> => {
  const clearMessage = logConsoleProgress(`Querying offers`);

  try {
@@ -13,11 +13,11 @@ export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
    return response?.resources;
  } catch (error) {
    // This should be removed when we can correctly identify if an account is serverless when connected using connection string too.
    if (getErrorMessage(error)?.includes("Reading or replacing offers is not supported for serverless accounts")) {
    if (error.message.includes("Reading or replacing offers is not supported for serverless accounts")) {
      return [];
    }

    handleError(error, "ReadOffers", `Error while querying offers`);
    handleError(error, `Error while querying offers`, "ReadOffers");
    throw error;
  } finally {
    clearMessage();

@@ -35,7 +35,7 @@ export async function readStoredProcedures(
      .fetchAll();
    return response?.resources;
  } catch (error) {
    handleError(error, "ReadStoredProcedures", `Failed to query stored procedures for container ${collectionId}`);
    handleError(error, `Failed to query stored procedures for container ${collectionId}`, "ReadStoredProcedures");
    throw error;
  } finally {
    clearMessage();

@@ -35,7 +35,7 @@ export async function readTriggers(
      .fetchAll();
    return response?.resources;
  } catch (error) {
    handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
    handleError(error, `Failed to query triggers for container ${collectionId}`, "ReadTriggers");
    throw error;
  } finally {
    clearMessage();

@@ -37,8 +37,8 @@ export async function readUserDefinedFunctions(
  } catch (error) {
    handleError(
      error,
      "ReadUserDefinedFunctions",
      `Failed to query user defined functions for container ${collectionId}`
      `Failed to query user defined functions for container ${collectionId}`,
      "ReadUserDefinedFunctions"
    );
    throw error;
  } finally {

20 src/Common/dataAccess/sendNotificationForError.ts Normal file
@@ -0,0 +1,20 @@
import * as Constants from "../Constants";
import { sendMessage } from "../MessageHandler";
import { MessageTypes } from "../../Contracts/ExplorerContracts";

export interface CosmosError {
  code: number;
  message?: string;
}

export function sendNotificationForError(error: CosmosError): void {
  if (error && error.code === Constants.HttpStatusCodes.Forbidden) {
    if (error.message && error.message.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
      return;
    }
    sendMessage({
      type: MessageTypes.ForbiddenError,
      reason: error && error.message ? error.message : error
    });
  }
}
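One subtlety in this new file: indexOf(...) > 0 skips a match at position 0, so if the whole message is exactly the shared-offer text the notification still fires. If the intent was "contains anywhere", includes is the usual idiom; a sketch, not the repo's fix:

  // Sketch only: match the shared-offer message anywhere in the string,
  // including position 0, which the `> 0` comparison above would miss.
  if (error.message?.toLowerCase().includes("sharedoffer is disabled for your account")) {
    return;
  }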
@@ -59,7 +59,7 @@ export async function updateCollection(
    logConsoleInfo(`Successfully updated container ${collectionId}`);
    return collection;
  } catch (error) {
    handleError(error, "UpdateCollection", `Failed to update container ${collectionId}`);
    handleError(error, `Failed to update container ${collectionId}`, "UpdateCollection");
    throw error;
  } finally {
    clearMessage();

@@ -1,32 +0,0 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { Item } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const updateDocument = async (
  collection: CollectionBase,
  documentId: DocumentId,
  newDocument: Item
): Promise<Item> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .replace(newDocument);

    logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
    return response?.resource;
  } catch (error) {
    handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
@@ -1,14 +1,13 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { HttpHeaders } from "../Constants";
import { Offer, SDKOfferDefinition, UpdateOfferParams } from "../../Contracts/DataModels";
import { Offer, UpdateOfferParams } from "../../Contracts/DataModels";
import { OfferDefinition } from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readCollectionOffer } from "./readCollectionOffer";
import { readDatabaseOffer } from "./readDatabaseOffer";
import {
@@ -73,7 +72,7 @@ export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> =>
    logConsoleInfo(`Successfully updated offer for ${offerResourceText}`);
    return updatedOffer;
  } catch (error) {
    handleError(error, "UpdateCollection", `Error updating offer for ${offerResourceText}`);
    handleError(error, `Error updating offer for ${offerResourceText}`, "UpdateCollection");
    throw error;
  } finally {
    clearMessage();
@@ -374,21 +373,20 @@ const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpd
};

const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => {
  const sdkOfferDefinition = params.currentOffer.offerDefinition;
  const newOffer: SDKOfferDefinition = {
  const currentOffer = params.currentOffer;
  const newOffer: Offer = {
    content: {
      offerThroughput: undefined,
      offerIsRUPerMinuteThroughputEnabled: false
      offerThroughput: undefined
    },
    _etag: undefined,
    _ts: undefined,
    _rid: sdkOfferDefinition._rid,
    _self: sdkOfferDefinition._self,
    id: sdkOfferDefinition.id,
    offerResourceId: sdkOfferDefinition.offerResourceId,
    offerVersion: sdkOfferDefinition.offerVersion,
    offerType: sdkOfferDefinition.offerType,
    resource: sdkOfferDefinition.resource
    _rid: currentOffer._rid,
    _self: currentOffer._self,
    id: currentOffer.id,
    offerResourceId: currentOffer.offerResourceId,
    offerVersion: currentOffer.offerVersion,
    offerType: currentOffer.offerType,
    resource: currentOffer.resource
  };

  if (params.autopilotThroughput) {
@@ -416,6 +414,5 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
    .offer(params.currentOffer.id)
    // TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
    .replace((newOffer as unknown) as OfferDefinition, options);

  return parseSDKOfferResponse(sdkResponse);
  return sdkResponse?.resource;
};

24 src/Common/dataAccess/updateOfferThroughputBeyondLimit.test.ts Normal file
@@ -0,0 +1,24 @@
import { updateOfferThroughputBeyondLimit } from "./updateOfferThroughputBeyondLimit";

describe("updateOfferThroughputBeyondLimit", () => {
  it("should call fetch", async () => {
    window.fetch = jest.fn(() => {
      return {
        ok: true
      };
    });
    window.dataExplorer = {
      logConsoleData: jest.fn(),
      deleteInProgressConsoleDataWithId: jest.fn()
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    } as any;
    await updateOfferThroughputBeyondLimit({
      subscriptionId: "foo",
      resourceGroup: "foo",
      databaseAccountName: "foo",
      databaseName: "foo",
      throughput: 1000000000
    });
    expect(window.fetch).toHaveBeenCalled();
  });
});
50
src/Common/dataAccess/updateOfferThroughputBeyondLimit.ts
Normal file
50
src/Common/dataAccess/updateOfferThroughputBeyondLimit.ts
Normal file
@@ -0,0 +1,50 @@
import { Platform, configContext } from "../../ConfigContext";
import { getAuthorizationHeader } from "../../Utils/AuthorizationUtils";
import { AutoPilotOfferSettings } from "../../Contracts/DataModels";
import { logConsoleProgress, logConsoleInfo, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { HttpHeaders } from "../Constants";

interface UpdateOfferThroughputRequest {
  subscriptionId: string;
  resourceGroup: string;
  databaseAccountName: string;
  databaseName: string;
  collectionName?: string;
  throughput: number;
  offerAutopilotSettings?: AutoPilotOfferSettings;
}

export async function updateOfferThroughputBeyondLimit(request: UpdateOfferThroughputRequest): Promise<void> {
  if (configContext.platform !== Platform.Portal) {
    throw new Error("Updating throughput beyond specified limit is not supported on this platform");
  }

  const resourceDescriptionInfo = request.collectionName
    ? `database ${request.databaseName} and container ${request.collectionName}`
    : `database ${request.databaseName}`;

  const clearMessage = logConsoleProgress(
    `Requesting increase in throughput to ${request.throughput} for ${resourceDescriptionInfo}`
  );

  const url = `${configContext.BACKEND_ENDPOINT}/api/offerthroughputrequest/updatebeyondspecifiedlimit`;
  const authorizationHeader = getAuthorizationHeader();

  const response = await fetch(url, {
    method: "POST",
    body: JSON.stringify(request),
    headers: { [authorizationHeader.header]: authorizationHeader.token, [HttpHeaders.contentType]: "application/json" }
  });

  if (response.ok) {
    logConsoleInfo(
      `Successfully requested an increase in throughput to ${request.throughput} for ${resourceDescriptionInfo}`
    );
    clearMessage();
    return undefined;
  }
  const error = await response.json();
  logConsoleError(`Failed to request an increase in throughput for ${request.throughput}: ${error.message}`);
  clearMessage();
  throw new Error(error.message);
}
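A hypothetical call site for the new function (all IDs are placeholders; outside the Portal platform the function throws before issuing any request):

import { updateOfferThroughputBeyondLimit } from "./updateOfferThroughputBeyondLimit";

async function requestHigherThroughput(): Promise<void> {
  await updateOfferThroughputBeyondLimit({
    subscriptionId: "00000000-0000-0000-0000-000000000000", // placeholder
    resourceGroup: "my-resource-group", // placeholder
    databaseAccountName: "my-account",
    databaseName: "my-database",
    collectionName: "my-container", // omit for shared database throughput
    throughput: 2000000
  });
}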
@@ -64,7 +64,7 @@ export async function updateStoredProcedure(
       .replace(storedProcedure);
     return response?.resource;
   } catch (error) {
-    handleError(error, "UpdateStoredProcedure", `Error while updating stored procedure ${storedProcedure.id}`);
+    handleError(error, `Error while updating stored procedure ${storedProcedure.id}`, "UpdateStoredProcedure");
     throw error;
   } finally {
     clearMessage();
@@ -61,7 +61,7 @@ export async function updateTrigger(
       .replace(trigger);
     return response?.resource;
   } catch (error) {
-    handleError(error, "UpdateTrigger", `Error while updating trigger ${trigger.id}`);
+    handleError(error, `Error while updating trigger ${trigger.id}`, "UpdateTrigger");
     throw error;
   } finally {
     clearMessage();
@@ -66,8 +66,8 @@ export async function updateUserDefinedFunction(
   } catch (error) {
     handleError(
       error,
-      "UpdateUserupdateUserDefinedFunction",
-      `Error while updating user defined function ${userDefinedFunction.id}`
+      `Error while updating user defined function ${userDefinedFunction.id}`,
+      "UpdateUserupdateUserDefinedFunction"
     );
     throw error;
   } finally {
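The three hunks above reorder handleError's arguments so the console message now comes before the telemetry area name. A signature consistent with the new call sites might look like this (a sketch, not the repo's actual helper):

// Sketch only: the message precedes the telemetry area in the new order.
function handleError(error: unknown, consoleErrorMessage: string, area: string): void {
  console.error(`[${area}] ${consoleErrorMessage}`, error);
}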
@@ -88,38 +88,6 @@ export interface Resource {
   id: string;
 }
-
-export interface IType {
-  name: string;
-  code: number;
-}
-
-export interface IDataField {
-  dataType: IType;
-  hasNulls: boolean;
-  isArray: boolean;
-  schemaType: IType;
-  name: string;
-  path: string;
-  maxRepetitionLevel: number;
-  maxDefinitionLevel: number;
-}
-
-export interface ISchema {
-  id: string;
-  accountName: string;
-  resource: string;
-  fields: IDataField[];
-}
-
-export interface ISchemaRequest {
-  id: string;
-  subscriptionId: string;
-  resourceGroup: string;
-  accountName: string;
-  resource: string;
-  status: string;
-}

 export interface Collection extends Resource {
   defaultTtl?: number;
   indexingPolicy?: IndexingPolicy;
@@ -130,8 +98,6 @@ export interface Collection extends Resource {
   changeFeedPolicy?: ChangeFeedPolicy;
   analyticalStorageTtl?: number;
   geospatialConfig?: GeospatialConfig;
-  schema?: ISchema;
-  requestSchema?: () => void;
 }

 export interface Database extends Resource {
@@ -208,21 +174,11 @@ export interface QueryMetrics {
   vmExecutionTime: any;
 }

-export interface Offer {
-  id: string;
-  autoscaleMaxThroughput: number;
-  manualThroughput: number;
-  minimumThroughput: number;
-  offerDefinition?: SDKOfferDefinition;
-  offerReplacePending: boolean;
-}
-
-export interface SDKOfferDefinition extends Resource {
+export interface Offer extends Resource {
   offerVersion?: string;
   offerType?: string;
   content?: {
     offerThroughput: number;
-    offerIsRUPerMinuteThroughputEnabled?: boolean;
     collectionThroughputInfo?: OfferThroughputInfo;
     offerAutopilotSettings?: AutoPilotOfferSettings;
   };
@@ -230,6 +186,22 @@ export interface SDKOfferDefinition extends Resource {
   offerResourceId?: string;
 }

+export interface OfferWithHeaders extends Offer {
+  headers: any;
+}
+
+export interface CollectionQuotaInfo {
+  storedProcedures: number;
+  triggers: number;
+  functions: number;
+  documentsSize: number;
+  collectionSize: number;
+  documentsCount: number;
+  usageSizeInKB: number;
+  numPartitions: number;
+  uniqueKeyPolicy?: UniqueKeyPolicy; // TODO: This should ideally not be a part of the collection quota. Remove after refactoring. (#119617)
+}
+
 export interface OfferThroughputInfo {
   minimumRUForCollection: number;
   numPhysicalPartitions: number;
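With the parsed-offer abstraction gone, consumers read throughput off the raw offer content again. A sketch under that assumption (the autopilot max-throughput field name is a guess, not confirmed by this diff):

function getOfferThroughput(offer: Offer): number | undefined {
  if (offer.content?.offerAutopilotSettings) {
    // "maxThroughput" is an assumed field name on AutoPilotOfferSettings.
    return offer.content.offerAutopilotSettings.maxThroughput;
  }
  return offer.content?.offerThroughput;
}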
@@ -243,6 +215,18 @@ export interface UniqueKey {
   paths: string[];
 }

+// Returned by DocumentDb client proxy
+// Inner errors in BackendErrorDataModel when error is in SQL syntax
+export interface ErrorDataModel {
+  message: string;
+  severity?: string;
+  location?: {
+    start: string;
+    end: string;
+  };
+  code?: string;
+}
+
 export interface CreateDatabaseAndCollectionRequest {
   databaseId: string;
   collectionId: string;
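For illustration only, a hypothetical formatter for these inner SQL-syntax errors (not code from this diff):

function formatSyntaxError(error: ErrorDataModel): string {
  // location start/end are string offsets per the interface above
  const range = error.location ? ` at ${error.location.start}-${error.location.end}` : "";
  return `${error.severity || "Error"}${range}: ${error.message}`;
}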
@@ -46,7 +46,7 @@ export interface LogEntry {
   /**
    * The message code.
    */
-  code?: number | string;
+  code?: number;
   /**
    * Any additional data to be logged.
    */
@@ -32,8 +32,7 @@ export enum MessageTypes {
   GetArcadiaToken,
   CreateWorkspace,
   CreateSparkPool,
-  RefreshDatabaseAccount,
-  InitTestExplorer
+  RefreshDatabaseAccount
 }

 export { Versions, ActionContracts, Diagnostics };
@@ -1,7 +0,0 @@
-export enum SubscriptionType {
-  Benefits,
-  EA,
-  Free,
-  Internal,
-  PAYG
-}
@@ -17,7 +17,6 @@ import Trigger from "../Explorer/Tree/Trigger";
 import UserDefinedFunction from "../Explorer/Tree/UserDefinedFunction";
 import { UploadDetails } from "../workers/upload/definitions";
 import * as DataModels from "./DataModels";
-import { SubscriptionType } from "./SubscriptionType";

 export interface TokenProvider {
   getAuthHeader(): Promise<Headers>;
@@ -116,11 +115,9 @@ export interface CollectionBase extends TreeNode {
 export interface Collection extends CollectionBase {
   defaultTtl: ko.Observable<number>;
   analyticalStorageTtl: ko.Observable<number>;
-  schema?: DataModels.ISchema;
-  requestSchema?: () => void;
   indexingPolicy: ko.Observable<DataModels.IndexingPolicy>;
   uniqueKeyPolicy: DataModels.UniqueKeyPolicy;
-  usageSizeInKB: ko.Observable<number>;
+  quotaInfo: ko.Observable<DataModels.CollectionQuotaInfo>;
   offer: ko.Observable<DataModels.Offer>;
   conflictResolutionPolicy: ko.Observable<DataModels.ConflictResolutionPolicy>;
   changeFeedPolicy: ko.Observable<DataModels.ChangeFeedPolicy>;
@@ -361,8 +358,7 @@ export enum CollectionTabKind {
   SparkMasterTab = 16,
   Gallery = 17,
   NotebookViewer = 18,
-  Schema = 19,
-  SettingsV2 = 20
+  SettingsV2 = 19
 }

 export enum TerminalKind {
@@ -416,6 +412,14 @@ export interface ThroughputDefaults {
   shared: number;
 }

+export enum SubscriptionType {
+  Benefits,
+  EA,
+  Free,
+  Internal,
+  PAYG
+}
+
 export class MonacoEditorSettings {
   public readonly language: string;
   public readonly readOnly: boolean;
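Paired with the deletion of src/Contracts/SubscriptionType.ts above, the enum now lives in the ViewModels contract, so imports move with it (a sketch; the relative path depends on the importing file):

// Hypothetical consumer, previously importing from "./Contracts/SubscriptionType":
import { SubscriptionType } from "./Contracts/ViewModels";

const subscription: SubscriptionType = SubscriptionType.Free;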
383 src/Definitions/adal.d.ts vendored Normal file
@@ -0,0 +1,383 @@
// Type definitions for adal-angular 1.0.1.1
// Project: https://github.com/AzureAD/azure-activedirectory-library-for-js#readme
// Definitions by: Daniel Perez Alvarez <https://github.com/unindented>
//                 Anthony Ciccarello <https://github.com/aciccarello>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

// This is a customized version of adal, on top of version 1.0.1, which does not support multi-tenant.
// The customized version adds tenantId to stored tokens, so when the tenant changes, adal will refetch instead of reading from sessionStorage.

// In module contexts the class constructor function is the exported object
// export = AuthenticationContext;

// This class is defined globally, not in a module context
declare class AuthenticationContext {
  instance: string;
  config: AuthenticationContext.Options;
  callback: AuthenticationContext.TokenCallback;
  popUp: boolean;
  isAngular: boolean;

  /**
   * Enum for request type
   */
  REQUEST_TYPE: AuthenticationContext.RequestType;
  RESPONSE_TYPE: AuthenticationContext.ResponseType;
  CONSTANTS: AuthenticationContext.Constants;

  constructor(options: AuthenticationContext.Options);
  /**
   * Initiates the login process by redirecting the user to Azure AD authorization endpoint.
   */
  login(): void;
  /**
   * Returns whether a login is in progress.
   */
  loginInProgress(): boolean;
  /**
   * Gets token for the specified resource from the cache.
   * @param resource A URI that identifies the resource for which the token is requested.
   * @param tenantId The tenant id.
   */
  getCachedToken(resource: string, tenantId: string): string;
  /**
   * If user object exists, returns it. Else creates a new user object by decoding `id_token` from the cache.
   */
  getCachedUser(): AuthenticationContext.UserInfo;
  /**
   * Adds the passed callback to the array of callbacks for the specified resource.
   * @param resource A URI that identifies the resource for which the token is requested.
   * @param expectedState A unique identifier (guid).
   * @param callback The callback provided by the caller. It will be called with token or error.
   */
  registerCallback(
    expectedState: string,
    resource: string,
    callback: AuthenticationContext.TokenCallback,
    tenantId: string
  ): void;
  /**
   * Acquires token from the cache if it is not expired. Otherwise sends request to AAD to obtain a new token.
   * @param resource Resource URI identifying the target resource.
   * @param callback The callback provided by the caller. It will be called with token or error.
   */
  acquireToken(resource: string, tenantId: string, callback: AuthenticationContext.TokenCallback): void;
  /**
   * Acquires token (interactive flow using a popup window) by sending request to AAD to obtain a new token.
   * @param resource Resource URI identifying the target resource.
   * @param extraQueryParameters Query parameters to add to the authentication request.
   * @param claims Claims to add to the authentication request.
   * @param callback The callback provided by the caller. It will be called with token or error.
   */
  acquireTokenPopup(
    resource: string,
    tenantId: string,
    extraQueryParameters: string | null | undefined,
    claims: string | null | undefined,
    callback: AuthenticationContext.TokenCallback
  ): void;
  /**
   * Acquires token (interactive flow using a redirect) by sending request to AAD to obtain a new token. In this case the callback passed in the authentication request constructor will be called.
   * @param resource Resource URI identifying the target resource.
   * @param extraQueryParameters Query parameters to add to the authentication request.
   * @param claims Claims to add to the authentication request.
   */
  acquireTokenRedirect(
    resource: string,
    tenantId: string,
    extraQueryParameters?: string | null,
    claims?: string | null
  ): void;
  /**
   * Redirects the browser to Azure AD authorization endpoint.
   * @param urlNavigate URL of the authorization endpoint.
   */
  promptUser(urlNavigate: string): void;
  /**
   * Clears cache items.
   */
  clearCache(): void;
  /**
   * Clears cache items for a given resource.
   * @param resource Resource URI identifying the target resource.
   */
  clearCacheForResource(resource: string): void;
  /**
   * Redirects user to logout endpoint. After logout, it will redirect to `postLogoutRedirectUri` if added as a property on the config object.
   */
  logOut(): void;
  /**
   * Calls the passed in callback with the user object or error message related to the user.
   * @param callback The callback provided by the caller. It will be called with user or error.
   */
  getUser(callback: AuthenticationContext.UserCallback): void;
  /**
   * Checks if the URL fragment contains access token, id token or error description.
   * @param hash Hash passed from redirect page.
   */
  isCallback(hash: string): boolean;
  /**
   * Gets login error.
   */
  getLoginError(): string;
  /**
   * Creates a request info object from the URL fragment and returns it.
   */
  getRequestInfo(hash: string): AuthenticationContext.RequestInfo;
  /**
   * Saves token or error received in the response from AAD in the cache. In case of `id_token`, it also creates the user object.
   */
  saveTokenFromHash(requestInfo: AuthenticationContext.RequestInfo): void;
  /**
   * Gets resource for given endpoint if mapping is provided with config.
   * @param endpoint Resource URI identifying the target resource.
   */
  getResourceForEndpoint(resource: string): string;
  /**
   * This method must be called for processing the response received from AAD. It extracts the hash, processes the token or error, saves it in the cache and calls the callbacks with the result.
   * @param hash Hash fragment of URL. Defaults to `window.location.hash`.
   */
  handleWindowCallback(hash?: string): void;

  /**
   * Checks the logging level, constructs the log message and logs it. Users need to implement/override this method to turn on logging.
   * @param level Level can be set 0, 1, 2 and 3 which turns on 'error', 'warning', 'info' or 'verbose' level logging respectively.
   * @param message Message to log.
   * @param error Error to log.
   */
  log(level: AuthenticationContext.LoggingLevel, message: string, error: any): void;
  /**
   * Logs messages when logging level is set to 0.
   * @param message Message to log.
   * @param error Error to log.
   */
  error(message: string, error: any): void;
  /**
   * Logs messages when logging level is set to 1.
   * @param message Message to log.
   */
  warn(message: string): void;
  /**
   * Logs messages when logging level is set to 2.
   * @param message Message to log.
   */
  info(message: string): void;
  /**
   * Logs messages when logging level is set to 3.
   * @param message Message to log.
   */
  verbose(message: string): void;

  /**
   * Logs PII messages when logging level is set to 0 and window.piiLoggingEnabled is set to true.
   * @param message Message to log.
   * @param error Error to log.
   */
  errorPii(message: string, error: any): void;

  /**
   * Logs PII messages when logging level is set to 1 and window.piiLoggingEnabled is set to true.
   * @param message Message to log.
   */
  warnPii(message: string): void;

  /**
   * Logs PII messages when logging level is set to 2 and window.piiLoggingEnabled is set to true.
   * @param message Message to log.
   */
  infoPii(message: string): void;

  /**
   * Logs PII messages when logging level is set to 3 and window.piiLoggingEnabled is set to true.
   * @param message Message to log.
   */
  verbosePii(message: string): void;
}

declare namespace AuthenticationContext {
  function inject(config: Options): AuthenticationContext;

  type LoggingLevel = 0 | 1 | 2 | 3;

  type RequestType = "LOGIN" | "RENEW_TOKEN" | "UNKNOWN";

  type ResponseType = "id_token token" | "token";

  interface RequestInfo {
    /**
     * Object comprising fields such as id_token/error, session_state, state, etc.
     */
    parameters: any;
    /**
     * Request type.
     */
    requestType: RequestType;
    /**
     * Whether state is valid.
     */
    stateMatch: boolean;
    /**
     * Unique guid used to match the response with the request.
     */
    stateResponse: string;
    /**
     * Whether `requestType` contains `id_token`, `access_token` or error.
     */
    valid: boolean;
  }

  interface UserInfo {
    /**
     * Username assigned from UPN or email.
     */
    userName: string;
    /**
     * Properties parsed from `id_token`.
     */
    profile: any;
  }

  type TokenCallback = (errorDesc: string | null, token: string | null, error: any) => void;

  type UserCallback = (errorDesc: string | null, user: UserInfo | null) => void;

  /**
   * Configuration options for Authentication Context.
   */
  interface Options {
    /**
     * Client ID assigned to your app by Azure Active Directory.
     */
    clientId: string;
    /**
     * Endpoint at which you expect to receive tokens. Defaults to `window.location.href`.
     */
    redirectUri?: string;
    /**
     * Azure Active Directory instance. Defaults to `https://login.microsoftonline.com/`.
     */
    instance?: string;
    /**
     * Your target tenant. Defaults to `common`.
     */
    tenant?: string;
    /**
     * Query parameters to add to the authentication request.
     */
    extraQueryParameter?: string;
    /**
     * Unique identifier used to map the request with the response. Defaults to RFC4122 version 4 guid (128 bits).
     */
    correlationId?: string;
    /**
     * User-defined function for handling the navigation to Azure AD authorization endpoint in case of login.
     */
    displayCall?: (url: string) => void;
    /**
     * Set this to true to enable login in a popup window instead of a full redirect. Defaults to `false`.
     */
    popUp?: boolean;
    /**
     * Set this to the resource to request on login. Defaults to `clientId`.
     */
    loginResource?: string;
    /**
     * Set this to redirect the user to a custom login page.
     */
    localLoginUrl?: string;
    /**
     * Redirects to start page after login. Defaults to `true`.
     */
    navigateToLoginRequestUrl?: boolean;
    /**
     * Set this to redirect the user to a custom logout page.
     */
    logOutUri?: string;
    /**
     * Redirects the user to postLogoutRedirectUri after logout. Defaults to `redirectUri`.
     */
    postLogoutRedirectUri?: string;
    /**
     * Sets browser storage to either 'localStorage' or 'sessionStorage'. Defaults to `sessionStorage`.
     */
    cacheLocation?: "localStorage" | "sessionStorage";
    /**
     * Array of keywords or URIs. Adal will attach a token to outgoing requests that have these keywords or URIs.
     */
    endpoints?: { [resource: string]: string };
    /**
     * Array of keywords or URIs. Adal will not attach a token to outgoing requests that have these keywords or URIs.
     */
    anonymousEndpoints?: string[];
    /**
     * If the cached token is about to expire within expireOffsetSeconds (in seconds), Adal will renew the token instead of using the cached token. Defaults to 300 seconds.
     */
    expireOffsetSeconds?: number;
    /**
     * The number of milliseconds of inactivity before a token renewal response from AAD should be considered timed out. Defaults to 6 seconds.
     */
    loadFrameTimeout?: number;
    /**
     * Callback to be invoked when a token is acquired.
     */
    callback?: TokenCallback;
  }

  interface LoggingConfig {
    level: LoggingLevel;
    log: (message: string) => void;
    piiLoggingEnabled: boolean;
  }

  /**
   * Enum for storage constants.
   */
  interface Constants {
    ACCESS_TOKEN: "access_token";
    EXPIRES_IN: "expires_in";
    ID_TOKEN: "id_token";
    ERROR_DESCRIPTION: "error_description";
    SESSION_STATE: "session_state";
    STORAGE: {
      TOKEN_KEYS: "adal.token.keys";
      ACCESS_TOKEN_KEY: "adal.access.token.key";
      EXPIRATION_KEY: "adal.expiration.key";
      STATE_LOGIN: "adal.state.login";
      STATE_RENEW: "adal.state.renew";
      NONCE_IDTOKEN: "adal.nonce.idtoken";
      SESSION_STATE: "adal.session.state";
      USERNAME: "adal.username";
      IDTOKEN: "adal.idtoken";
      ERROR: "adal.error";
      ERROR_DESCRIPTION: "adal.error.description";
      LOGIN_REQUEST: "adal.login.request";
      LOGIN_ERROR: "adal.login.error";
      RENEW_STATUS: "adal.token.renew.status";
    };
    RESOURCE_DELIMETER: "|";
    LOADFRAME_TIMEOUT: "6000";
    TOKEN_RENEW_STATUS_CANCELED: "Canceled";
    TOKEN_RENEW_STATUS_COMPLETED: "Completed";
    TOKEN_RENEW_STATUS_IN_PROGRESS: "In Progress";
    LOGGING_LEVEL: {
      ERROR: 0;
      WARN: 1;
      INFO: 2;
      VERBOSE: 3;
    };
    LEVEL_STRING_MAP: {
      0: "ERROR:";
      1: "WARNING:";
      2: "INFO:";
      3: "VERBOSE:";
    };
    POPUP_WIDTH: 483;
    POPUP_HEIGHT: 600;
  }
}

// declare global {
//   interface Window {
//     Logging: AuthenticationContext.LoggingConfig;
//   }
// }
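To make the customization concrete, here is a hypothetical consumer that threads the extra tenantId parameter through the cache lookup and the renewal (IDs are placeholders):

const authContext = new AuthenticationContext({
  clientId: "00000000-0000-0000-0000-000000000000", // placeholder
  cacheLocation: "localStorage"
});

const armResource = "https://management.core.windows.net/";
const tenantId = "my-tenant-id"; // the parameter this customized adal adds

// Cached tokens are keyed by tenant, so switching tenants forces a refetch.
const token = authContext.getCachedToken(armResource, tenantId);
if (!token) {
  authContext.acquireToken(armResource, tenantId, (errorDesc, newToken) => {
    if (newToken) {
      // attach newToken to outgoing ARM requests
    }
  });
}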
@@ -44,6 +44,10 @@ describe("Component Registerer", () => {
   expect(ko.components.isRegistered("user-defined-function-tab")).toBe(true);
 });

+it("should register settings-tab component", () => {
+  expect(ko.components.isRegistered("settings-tab")).toBe(true);
+});
+
 it("should register settings-tab-v2 component", () => {
   expect(ko.components.isRegistered("settings-tab-v2")).toBe(true);
 });
Some files were not shown because too many files have changed in this diff.