Mirror of https://github.com/Azure/cosmos-explorer.git (synced 2026-01-06 11:11:23 +00:00)

Compare commits: users/srna ... jbunster/r (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 3dd25bdb4a | |
| | a293a87528 | |
.env.example (11 lines changed)

@@ -1,14 +1,7 @@
# These options are only needed when if running end to end tests locally
PORTAL_RUNNER_USERNAME=
PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_CONNECTION_STRING=
NOTEBOOKS_TEST_RUNNER_TENANT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING=
DATA_EXPLORER_ENDPOINT=https://localhost:1234/hostedExplorer.html
PORTAL_RUNNER_CONNECTION_STRING=
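These variables are read from `.env` by the local test runners. As a quick sanity check before a run, a minimal sketch like the following works, assuming the repo loads the file with `dotenv` (the helper name and the chosen variables are illustrative, not part of this change):

```js
// check-env.js — hypothetical helper, not part of the repository
require("dotenv").config();

const required = ["DATA_EXPLORER_ENDPOINT", "PORTAL_RUNNER_CONNECTION_STRING"];
const missing = required.filter(name => !process.env[name]);

if (missing.length > 0) {
  console.error(`Missing .env values: ${missing.join(", ")}`);
  process.exit(1);
}
console.log("All required test runner variables are set.");
```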
@@ -15,6 +15,8 @@ src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/ErrorParserUtility.test.ts
src/Common/ErrorParserUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
@@ -202,6 +204,8 @@ src/Explorer/Tabs/QueryTab.test.ts
src/Explorer/Tabs/QueryTab.ts
src/Explorer/Tabs/QueryTablesTab.ts
src/Explorer/Tabs/ScriptTabBase.ts
src/Explorer/Tabs/SettingsTab.test.ts
src/Explorer/Tabs/SettingsTab.ts
src/Explorer/Tabs/SparkMasterTab.ts
src/Explorer/Tabs/StoredProcedureTab.ts
src/Explorer/Tabs/TabComponents.ts
@@ -394,5 +398,19 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx
@@ -42,13 +42,6 @@ module.exports = {
|
||||
"no-null/no-null": "error",
|
||||
"@typescript-eslint/no-explicit-any": "error",
|
||||
"prefer-arrow/prefer-arrow-functions": ["error", { allowStandaloneDeclarations: true }],
|
||||
eqeqeq: "error",
|
||||
"no-restricted-syntax": [
|
||||
"error",
|
||||
{
|
||||
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
|
||||
message: "Do not use JSON.stringify(error). It will print '{}'"
|
||||
}
|
||||
]
|
||||
eqeqeq: "error"
|
||||
}
|
||||
};
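The motivation for the `no-restricted-syntax` rule above is that `Error` instances keep `message` and `stack` as non-enumerable properties, so serializing one yields an empty object. A small illustration (not part of the change):

```js
const err = new Error("Something failed");

console.log(JSON.stringify(err));   // "{}" — message and stack are non-enumerable
console.log(err.message);           // "Something failed"
// Pick the fields explicitly when you need a serializable shape:
console.log(JSON.stringify({ message: err.message, stack: err.stack }));
```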
.github/workflows/ci.yml (46 lines changed, vendored)

@@ -79,31 +79,32 @@ jobs:
          name: dist
          path: dist/
  endtoendemulator:
    name: "End To End Emulator Tests"
    name: "End To End Tests | Emulator | SQL"
    needs: [lint, format, compile, unittest]
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - uses: southpolesteve/cosmos-emulator-github-action@v1
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - uses: southpolesteve/cosmos-emulator-github-action@v1
      - name: Restore Cypress Binary Cache
        uses: actions/cache@v2
        with:
          path: ~/.cache/Cypress
          key: ${{ runner.os }}-cypress-binary-cache
      - name: End to End Tests
        run: |
          npm ci
          npm start &
          npm run wait-for-server
          npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
          npm ci --prefix ./cypress
          npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
        shell: bash
        env:
          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
          PLATFORM: "Emulator"
          EMULATOR_ENDPOINT: https://0.0.0.0:8081/
          NODE_TLS_REJECT_UNAUTHORIZED: 0
      - uses: actions/upload-artifact@v2
        with:
          name: screenshots
          path: failed-*
          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
  accessibility:
    name: "Accessibility | Hosted"
    needs: [lint, format, compile, unittest]
@@ -122,13 +123,13 @@ jobs:
          sudo sysctl -p
          npm ci
          npm start &
          npx wait-on -i 5000 https-get://0.0.0.0:1234/
          npx wait-on -i 5000 https-get://0.0.0.0:1234/
          node utils/accesibilityCheck.js
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
  endtoendhosted:
    name: "End to End Hosted Tests"
  endtoendpuppeteer:
    name: "End to end puppeteer tests"
    needs: [lint, format, compile, unittest]
    runs-on: ubuntu-latest
    steps:
@@ -137,7 +138,7 @@ jobs:
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: End to End Hosted Tests
      - name: End to End Puppeteer Tests
        run: |
          npm ci
          npm start &
@@ -146,26 +147,13 @@ jobs:
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
          PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
          PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
          PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
          PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
          NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
          NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
          NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
          PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
          MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
          CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
          TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
      - uses: actions/upload-artifact@v2
        with:
          name: screenshots
          path: failed-*
  nuget:
    name: Publish Nuget
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -189,7 +177,7 @@ jobs:
  nugetmpac:
    name: Publish Nuget MPAC
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
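The emulator job above starts the dev server in the background and blocks on `npm run wait-for-server` before running Jest and Cypress. The script itself is not part of this diff; a minimal sketch of one way such a readiness check could be implemented (the file name and timeout are assumptions) is:

```js
// wait-for-server.js — hypothetical sketch, not the repository's actual script
const https = require("https");

const url = process.env.DATA_EXPLORER_ENDPOINT || "https://localhost:1234/";
const deadline = Date.now() + 5 * 60 * 1000; // give up after five minutes

function poll() {
  const req = https.get(url, { rejectUnauthorized: false }, res => {
    res.resume(); // drain the response body
    if (res.statusCode && res.statusCode < 500) {
      console.log(`Server is up (${res.statusCode})`);
      process.exit(0);
    }
    retry();
  });
  req.on("error", retry);
}

function retry() {
  if (Date.now() > deadline) {
    console.error("Timed out waiting for the dev server");
    process.exit(1);
  }
  setTimeout(poll, 5000); // poll every five seconds
}

poll();
```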
.gitignore (3 lines changed, vendored)

@@ -9,6 +9,9 @@ pkg/DataExplorer/*
test/out/*
workers/**/*.js
*.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/*
Contracts/*
.DS_Store
README.md (25 lines changed)

@@ -33,7 +33,7 @@ To run pure hosted mode, in `webpack.config.js` change index HtmlWebpackPlugin t

### Emulator Development

In a window environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.
In a window environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-windows enironment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.

`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
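The paragraph above relies on webpack dev server proxying emulator requests when `EMULATOR_ENDPOINT` is set. The repository's actual `webpack.config.js` is not part of this diff; a minimal sketch of how such a proxy could be wired up (the proxied path and option shape are assumptions) looks like:

```js
// webpack.config.js (excerpt) — illustrative sketch only
module.exports = {
  // ...other configuration...
  devServer: {
    proxy: {
      // Forward emulator API calls to the endpoint given on the command line
      "/proxy": {
        target: process.env.EMULATOR_ENDPOINT || "https://localhost:8081",
        changeOrigin: true,
        secure: false // the emulator uses a self-signed certificate
      }
    }
  }
};
```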
@@ -60,7 +60,7 @@ The Cosmos Portal that consumes this repo is not currently open source. If you h
You can however load a local running instance of data explorer in the production portal.

1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Allowlist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
2. Whitelist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html

@@ -76,17 +76,24 @@ Unit tests are located adjacent to the code under test and run with [Jest](https

#### End to End CI Tests

Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:
[Cypress](https://www.cypress.io/) is used for end to end tests and are contained in `cypress/`. Currently, it operates as sub project with its own typescript config and dependencies. It also only operates against the emulator. To run cypress tests:

1. Copy .env.example to .env
2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
3. Make sure all packages are installed `npm install`
4. Run the server `npm run start` and wait for it to start
5. Run `npm run test:e2e`
1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run cypress headless(`npm run test`) or in interactive mode(`npm run test:debug`)
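Beyond the emulator-only flow above, the account-backed specs in `cypress/integration/dataexplorer/` log in with a connection string. A hedged sketch of how a spec can pick that value up from `cypress.env.json` or a `CYPRESS_`-prefixed environment variable (the key name here is an assumption, not the repository's exact one):

```js
// Cypress surfaces values from cypress.env.json and from CYPRESS_* environment
// variables through Cypress.env(), e.g. CYPRESS_CONNECTION_STRING -> "CONNECTION_STRING".
const connectionString = Cypress.env("CONNECTION_STRING");

if (!connectionString) {
  throw new Error("Set CYPRESS_CONNECTION_STRING or add it to cypress.env.json");
}
```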
#### End to End Production Runners

Jest and Puppeteer are used for end to end production runners and are contained in `test/`. To run these tests locally:

1. Copy .env.example to .env and fill in all variables
2. Run `npm run test:e2e`

### Releasing

We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
We generally adhear to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.

# Contributing
cypress/.gitignore (new file, 4 lines, vendored)

@@ -0,0 +1,4 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos
cypress/cleanup.js (new file, 51 lines)

@@ -0,0 +1,51 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");

// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");

async function cleanup() {
  const connectionString = process.env.CYPRESS_CONNECTION_STRING;
  if (!connectionString) {
    throw new Error("Connection string not provided");
  }

  let client;
  switch (true) {
    case connectionString.includes("mongodb://"): {
      const [, key, accountName] = connectionString.match(mongoRegex);
      client = new CosmosClient({
        key,
        endpoint: `https://${accountName}.documents.azure.com:443/`
      });
      break;
    }
    // TODO: Add support for other API connection strings
    default:
      client = new CosmosClient(connectionString);
      break;
  }

  const response = await client.databases.readAll().fetchAll();
  return Promise.all(
    response.resources.map(async db => {
      const dbTimestamp = new Date(db._ts * 1000);
      const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
      if (dbTimestamp < twentyMinutesAgo) {
        await client.database(db.id).delete();
        console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
      } else {
        console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
      }
    })
  );
}

cleanup()
  .then(() => {
    process.exit(0);
  })
  .catch(error => {
    console.error(error);
    process.exit(1);
  });
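The script above maps a Mongo API connection string back to the account's SQL endpoint so the `@azure/cosmos` client can delete stale databases. A quick illustration of what the regex extracts, using an obviously fake connection string:

```js
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");

// Fake values for illustration only
const example = "mongodb://my-account:fakeKey123@my-account.mongo.cosmos.azure.com:10255/?ssl=true";
const [, key, accountName] = example.match(mongoRegex);

console.log(key);         // "fakeKey123"
console.log(accountName); // "my-account"
// cleanup() then talks to https://my-account.documents.azure.com:443/ with that key
```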
cypress/cypress.json (new file, 15 lines)

@@ -0,0 +1,15 @@
{
  "integrationFolder": "./integration",
  "pluginsFile": false,
  "fixturesFolder": false,
  "supportFile": "./support/index.js",
  "defaultCommandTimeout": 90000,
  "chromeWebSecurity": false,
  "reporter": "mochawesome",
  "reporterOptions": {
    "reportDir": "cypress/report",
    "json": true,
    "overwrite": false,
    "html": false
  }
}
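With `overwrite: false` and `html: false`, each spec leaves its own JSON file under `cypress/report`. A small sketch of an assumed follow-up step (not part of this change) that summarizes those files after a run:

```js
// summarize-report.js — hypothetical helper for the mochawesome JSON output
const fs = require("fs");
const path = require("path");

const reportDir = "cypress/report";
let passes = 0;
let failures = 0;

for (const file of fs.readdirSync(reportDir)) {
  if (!file.endsWith(".json")) continue;
  const report = JSON.parse(fs.readFileSync(path.join(reportDir, file), "utf8"));
  // mochawesome JSON carries Mocha's aggregate counts under "stats"
  passes += report.stats.passes;
  failures += report.stats.failures;
}

console.log(`Passed: ${passes}, Failed: ${failures}`);
process.exit(failures > 0 ? 1 : 0);
```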
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts (new file, 66 lines)

@@ -0,0 +1,66 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Cassandra API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
  });

  it("Create a new table in Cassandra API", () => {
    const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
    const tableId = `TableId112`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Table"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[id="keyspace-id"]').should("be.visible").type(keyspaceId);
      cy.wrap($body).find('input[class="textfontclr"]').type(tableId);
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('data-test="addCollection-createCollection"').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", tableId);
    });
  });
});
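Every account-backed spec in this change calls `loginUsingConnectionString` from `../../../utilities/connectionString`, a helper that is not included in this diff. Purely as a hypothetical illustration of what such a helper could do (the constants, selectors, and environment variable names are assumptions, not the repository's code):

```js
// utilities/connectionString.js — hypothetical sketch only
const constants = { cassandra: "cassandra", graph: "graph", table: "table" };

// Visits hosted explorer and submits a connection string for the requested API.
const loginUsingConnectionString = (api = "sql") => {
  const connectionString = Cypress.env(`CONNECTION_STRING_${api.toUpperCase()}`); // assumed key
  cy.visit(Cypress.env("DATA_EXPLORER_ENDPOINT") || "https://localhost:1234/hostedExplorer.html");
  cy.get("input.inputToken").type(connectionString, { log: false }); // assumed selector
  cy.contains("Connect").click();
};

module.exports = { constants, loginUsingConnectionString };
```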
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts (new file, 81 lines)

@@ -0,0 +1,81 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Graph API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.graph);
  });

  it("Create a new graph in Graph API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
    const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Graph"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').should("be.visible").type(dbId);
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').check();
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(graphId);
      cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
      cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId).click().should("contain", graphId);
    });
  });
});
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts (new file, 80 lines)

@@ -0,0 +1,80 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// // 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Collection"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').type(dbId);
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').check();
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(sharedKey);
      cy.wrap($body).find("#submitBtnAddCollection").click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId).click().should("contain", collectionId);
    });
  });
});
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts (new file, 96 lines)

@@ -0,0 +1,96 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in Mongo API - Autopilot", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Collection"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').type(dbId);
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').check();

      cy.wrap($body)
        .find('div[class="throughputModeContainer"]')
        .should("be.visible")
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
          expect(input.get(1).textContent, "second item").contains("Manual");
        });

      cy.wrap($body).find('input[id="newContainer-databaseThroughput-autoPilotRadio"]').check();

      cy.wrap($body)
        .find('select[name="autoPilotTiers"]')
        // .eq(1).should('contain', '4,000 RU/s');
        // // .select('4,000 RU/s').should('have.value', '1');
        .find('option[value="2"]')
        .then($element => $element.get(1).setAttribute("selected", "selected"));

      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(sharedKey);
      cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId).click().should("contain", collectionId);
    });
  });
});
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts (new file, 67 lines)

@@ -0,0 +1,67 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in existing database in Mongo API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find('span[class="nodeLabel"]')
        .should("be.visible")
        .then($span => {
          const dbId1 = $span.text();
          cy.log("DBBB", dbId1);

          cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Collection"]').should("be.visible").click();
          cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
          cy.wrap($body).find('input[data-test="addCollection-existingDatabase"]').check();
          cy.wrap($body).find('input[data-test="addCollection-existingDatabase"]').type(dbId1);
          cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
          cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(sharedKey);
          cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

          cy.wait(10000);

          cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').click().should("contain", collectionId);
        });
    });
  });
});
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts (new file, 203 lines)

@@ -0,0 +1,203 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context.skip("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API - Provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Collection"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');

      cy.wrap($body)
        .find(".createNewDatabaseOrUseExisting")
        .should("have.length", 2)
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Create new");
          expect(input.get(1).textContent, "second item").contains("Use existing");
        });

      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').type(dbId);
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').check();
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(sharedKey);
      cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId).click().should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const collectionIdTitle = `Add Collection`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Collection"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').type(dbId);
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').uncheck();
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[id="tab2"]').check({ force: true });
      cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(sharedKey);
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId).click().should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Collection"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').type(dbId);
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').uncheck();
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[id="tab1"]').check({ force: true });
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId).click().should("contain", collectionId);
    });
  });
});
cypress/integration/dataexplorer/SQL/addCollection.spec.ts (new file, 79 lines)

@@ -0,0 +1,79 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("SQL API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new container in SQL API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
    connectionString.loginUsingConnectionString();

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Container"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-createNewDatabase"]').check();
      cy.wrap($body).find('input[data-test="addCollectionPane-databaseSharedThroughput"]').check();
      cy.wrap($body).find('input[data-test="addCollection-newDatabaseId"]').type(dbId);
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-partitionKeyValue"]').type(sharedKey);
      cy.wrap($body).find("#submitBtnAddCollection").click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", dbId);
    });
  });
});
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts (new file, 60 lines)

@@ -0,0 +1,60 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Table API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.table);
  });

  it("Create a new table in Table API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body).find('div[class="commandBarContainer"]').should("be.visible").find('button[data-test="New Table"]').should("be.visible").click();
      cy.wrap($body).find('div[class="contextual-pane-in"]').should("be.visible").find('span[id="containerTitle"]');
      cy.wrap($body).find('input[data-test="addCollection-collectionId"]').type(collectionId);
      cy.wrap($body).find('input[data-test="databaseThroughputValue"]').should("have.value", "400");
      cy.wrap($body).find('input[data-test="addCollection-createCollection"]').click();

      cy.wait(10000);

      cy.wrap($body).find('div[data-test="resourceTreeId"]').should("exist").find('div[class="treeComponent dataResourceTree"]').should("contain", collectionId);
    });
  });
});
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts (new file, 55 lines)

@@ -0,0 +1,55 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

let crypt = require("crypto");

context("Emulator - createDatabase", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
  const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
  const collectionIdTitle = `Add Collection`;
  const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;

  it("Create a new collection", () => {
    cy.contains("New Container").click();

    // cy.contains(collectionIdTitle);

    cy.get(".createNewDatabaseOrUseExisting")
      .should("have.length", 2)
      .and(input => {
        expect(input.get(0).textContent, "first item").contains("Create new");
        expect(input.get(1).textContent, "second item").contains("Use existing");
      });

    cy.get('input[data-test="addCollection-createNewDatabase"]').check();
    cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);
    cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);
    cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");
    cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
    cy.get('input[data-test="addCollection-createCollection"]').click();

    cy.get('div[data-test="resourceTreeId"]').should("exist");
    cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);
    cy.get('div[data-test="databaseList"]').should("contain", collectionId);
  });
});
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts (new file, 65 lines)

@@ -0,0 +1,65 @@
// 1. Click on "New Database" on the command bar
// 2. a Pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. Whe the checkbox is marked, a new input with a throughput control let's you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have the list item called "Scale", that once clicked, it should show the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permited range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab and reopen it and verify that the input contains the last saved value.%

const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;

context("Emulator - Create database -> container -> item", () => {
  beforeEach(async () => {
    const { resources } = await client.databases.readAll().fetchAll();
    for (const database of resources) {
      await client.database(database.id).delete();
    }
  });

  it("creates a new database", () => {
    cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
    cy.contains("New Container").click();
    cy.get("[data-test=addCollection-newDatabaseId]").click();
    cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
    cy.get("[data-test=addCollection-collectionId]").click();
    cy.get("[data-test=addCollection-collectionId]").type(collectionId);
    cy.get("[data-test=addCollection-partitionKeyValue]").click();
    cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
    cy.get('input[name="createCollection"]').click();
    cy.get(".dataResourceTree").should("contain", databaseId);
    cy.get(".dataResourceTree").contains(databaseId).click();
    cy.get(".dataResourceTree").should("contain", collectionId);
    cy.get(".dataResourceTree").contains(collectionId).click();
    cy.get(".dataResourceTree").contains("Items").click();
    cy.get(".dataResourceTree").contains("Items").click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer").contains("New Item").click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer").contains("Save").click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
  });
});
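This spec drives the emulator directly through `../../../utilities/cosmosClient`, which is not included in this diff. A hypothetical sketch of such a helper (the endpoint and key handling are assumptions; the emulator ships with a documented default key that could be supplied here):

```js
// utilities/cosmosClient.js — hypothetical sketch, not the file from the repository
const { CosmosClient } = require("@azure/cosmos");

// Ignore the emulator's self-signed certificate for local test runs only.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

module.exports = new CosmosClient({
  endpoint: process.env.EMULATOR_ENDPOINT || "https://localhost:8081/",
  key: process.env.EMULATOR_KEY // e.g. the emulator's well-known default key
});
```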
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts (new file, 46 lines)

@@ -0,0 +1,46 @@
// 1. Click last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select "Delete Container" option in the dropdown
// 5. On Selection, Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted collection should not appear under the database

let crypt = require("crypto");

context("Emulator - deleteCollection", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  it("Delete a collection", () => {
    cy.get(".databaseId").last().click();

    cy.get(".collectionList")
      .last()
      .then($id => {
        const collectionId = $id.text();

        cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");
        cy.get('span[data-test="collectionEllipsisMenu"]').invoke("show").last().click();
        cy.get('div[data-test="collectionContextMenu"]').contains("Delete Container").click({ force: true });
        cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());
        cy.get('input[data-test="deleteCollection"]').click();
        cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);
        cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
      });
  });
});
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts (new file, 83 lines)

@@ -0,0 +1,83 @@
// 1. Click last database in the resource tree
// 2. Select the context menu within the database
// 4. Select "Delete Database" option in the dropdown
// 5. On Selection, Delete Database pane opens on the right side
// 6. Enter the same database id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted database should not appear in the resource tree

let crypt = require("crypto");

context("Emulator - deleteDatabase", () => {
  beforeEach(() => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    let db_rid = "";
    const date = new Date().toUTCString();
    let authToken = "";
    cy.visit("http://localhost:1234/explorer.html");

    // Creating auth token for collection creation
    cy.request({
      method: "GET",
      url: "https://localhost:8081/_explorer/authorization/post/dbs/",
      headers: {
        "x-ms-date": date,
        authorization: "-"
      }
    })
      .then(response => {
        authToken = response.body.Token; // Getting auth token for collection creation
        return new Cypress.Promise((resolve, reject) => {
          return resolve();
        });
      })
      .then(() => {
        cy.request({
          method: "POST",
          url: "https://localhost:8081/dbs",
          headers: {
            "x-ms-date": date,
            authorization: authToken,
            "x-ms-version": "2018-12-31"
          },
          body: {
            id: dbId
          }
        }).then(response => {
          cy.log("Response", response);
          db_rid = response.body._rid;
          return new Cypress.Promise((resolve, reject) => {
            cy.log("Rid", db_rid);
            return resolve();
          });
        });
      });
  });

  it("Delete a database", () => {
    cy.get('span[data-test="refreshTree"]').click();

    cy.get(".databaseId")
      .last()
      .then($id => {
        const dbId = $id.text();

        cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");
        cy.get('span[data-test="databaseEllipsisMenu"]').invoke("show").last().click();
        cy.get('div[data-test="databaseContextMenu"]').contains("Delete Database").click({ force: true });
        cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());
        cy.get('input[data-test="deleteDatabase"]').click();
        cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
      });
  });
});
cypress/integration/notebook/README.md (new file, 35 lines)

@@ -0,0 +1,35 @@
# Notebook end-to-end tests
This describes how to run the tests locally

## Stand up a local notebook container instance:
Instructions on how to build and run the container [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)

## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).

Make sure you can run Data Explorer locally from the web browser.

## Run cypress tests
1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```

To run in Debug mode:
```
npm run test:debug
```
This opens Cypress UI

## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).

## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)
cypress/integration/notebook/newNotebook.spec.ts (new file, 93 lines)

@@ -0,0 +1,93 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create a new notebook and run some code", () => {
    // Create new notebook
    cy.contains("New Notebook").click();

    // Check tab name
    cy.get("li.tabList .tabNavText").should($span => {
      const text = $span.text();
      expect(text).to.match(/^Untitled.*\.ipynb$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container").as("cellContainer").click();

    // Type in some code
    cy.get("@cellContainer").type("2+4");

    // Execute
    cy.get('[data-test="Run"]').first().click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "6");
    });

    // Restart kernel
    cy.get('[data-test="Run"] button').eq(-1).click();
    cy.get("li").contains("Restart Kernel").click();

    // Wait for python3 | restarting status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*restarting$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container").as("cellContainer").find(".input").as("codeInput").click();

    // Type in some code
    cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");

    // Execute
    cy.get('[data-test="Run"]').first().click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "9");
    });
  });
});
|
||||
172
cypress/integration/notebook/resourceTree.spec.ts
Normal file
@@ -0,0 +1,172 @@
context("Resource tree notebook file manipulation", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  const clickContextMenuAndSelectOption = (nodeLabel, option) => {
    cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains(option)
      .click();
  };

  const createFolder = folder => {
    clickContextMenuAndSelectOption("My Notebooks/", "New Directory");

    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]').type(folder);
      cy.get("form").submit();
    });
  };

  const deleteItem = nodeName => {
    clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create and remove a directory", () => {
    const folder = "e2etest_folder1";
    createFolder(folder);
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
    deleteItem(`${folder}/`);
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
  });

  it("Create and rename a directory", () => {
    const folder = "e2etest_folder2";
    const renamedFolder = "e2etest_folder2_renamed";
    createFolder(folder);

    // Rename
    clickContextMenuAndSelectOption(`${folder}/`, "Rename");
    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]')
        .clear()
        .type(renamedFolder);
      cy.get("form").submit();
    });
    cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");

    deleteItem(`${renamedFolder}/`);
    cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
  });

  it("Create a notebook inside a directory", () => {
    const folder = "e2etest_folder3";
    const newNotebookName = "Untitled.ipynb";
    createFolder(folder);
    clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");

    // Verify tab is open
    cy.get(".tabList")
      .contains(newNotebookName)
      .should("exist");

    // Close tab
    cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
      .find(".cancelButton")
      .click();
    // When running from command line, closing the tab is too fast
    cy.get("body").then($body => {
      if ($body.find(".ms-Dialog-main").length) {
        // For some reason, this does not work
        // cy.get(".ms-Dialog-main").contains("Close").click();
        cy.get(".ms-Dialog-main .ms-Button--primary").click();
      }
    });

    // Expand folder node
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");

    // Delete notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Delete")
      .click();

    // Confirm
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");

    deleteItem(`${folder}/`);
  });

  it("Create and rename a notebook inside a directory", () => {
    const folder = "e2etest_folder4";
    const newNotebookName = "Untitled.ipynb";
    const renamedNotebookName = "mynotebook.ipynb";
    createFolder(folder);
    clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");

    // Close tab
    cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
      .find(".cancelButton")
      .click();
    cy.get("body").then($body => {
      if ($body.find(".ms-Dialog-main").length) {
        // For some reason, this does not work
        // cy.get(".ms-Dialog-main").contains("Close").click();
        cy.get(".ms-Dialog-main .ms-Button--primary").click();
      }
    });

    // Expand folder node
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");

    // Rename notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Rename")
      .click();

    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]')
        .clear()
        .type(renamedNotebookName);
      cy.get("form").submit();
    });
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");

    // Delete notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Delete")
      .click();

    // Confirm
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
    // Give it time to settle
    cy.wait(1000);
    deleteItem(`${folder}/`);
  });
});
3066
cypress/package-lock.json
generated
Normal file
File diff suppressed because it is too large
25
cypress/package.json
Normal file
@@ -0,0 +1,25 @@
{
  "name": "cosmos-explorer-cypress",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "cypress run",
    "wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
    "test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
    "test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
    "test:debug": "cypress open"
  },
  "devDependencies": {
    "cypress": "^4.8.0",
    "mocha": "^7.0.1",
    "mochawesome": "^4.1.0",
    "mochawesome-merge": "^4.0.1",
    "mochawesome-report-generator": "^4.1.0",
    "typescript": "3.4.3",
    "wait-on": "^4.0.2"
  },
  "dependencies": {
    "@microsoft/applicationinsights-web": "^2.5.2"
  }
}
23
cypress/support/index.js
Normal file
@@ -0,0 +1,23 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");

const appInsights = new appInsightsLib.ApplicationInsights({
  config: {
    instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
    /* ...Other Configuration Options... */
  }
});

appInsights.loadAppInsights();

Cypress.on("fail", (error, runnable) => {
  // App Insights will record the fail tests for Create Collection
  let message = JSON.stringify(runnable.title);
  appInsights.trackTrace({
    message: `${message}`,
    properties: {
      passed: false,
      error: error
    }
  });
  throw error; // throw error to have test still fail
});
11
cypress/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
{
  "compilerOptions": {
    "strict": true,
    "noEmit": true,
    "module": "commonjs",
    "target": "es5",
    "lib": ["es5", "dom", "es6"],
    "types": ["cypress", "node"]
  },
  "include": ["**/*.ts", "**/*.spec.ts"]
}
41
cypress/utilities/connectionString.js
Normal file
@@ -0,0 +1,41 @@
module.exports = {
  loginUsingConnectionString: function() {
    const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
    const timeout = 15000;

    cy.visit(prodUrl);
    cy.get('iframe[id="explorerMenu"]').should("be.visible");

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find("#connectExplorer")
        .should("exist")
        .find("div[class='connectExplorer']")
        .should("exist")
        .find("p[class='welcomeText']")
        .should("exist");

      cy.wrap($body.find("div > p.switchConnectTypeText"))
        .should("exist")
        .last()
        .click({ force: true });

      const secret = Cypress.env("CONNECTION_STRING");

      cy.wrap($body)
        .find("input[class='inputToken']")
        .should("exist")
        .type(secret, {
          force: true
        });

      cy.wrap($body.find("input[value='Connect']"), { timeout })
        .first()
        .click({ force: true });

      cy.wait(15000);
    });
  }
};
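For context, a spec might consume this helper roughly as sketched below; the spec name and the post-login assertion are assumptions, not part of this change:

```ts
// Hypothetical spec using the shared connection-string login helper.
const connection = require("../../utilities/connectionString");

context("Hosted explorer (connection string)", () => {
  beforeEach(() => {
    // Requires CONNECTION_STRING to be set in the Cypress environment.
    connection.loginUsingConnectionString();
  });

  it("loads the explorer after connecting", () => {
    // The explorerMenu iframe is the element the helper itself waits on.
    cy.get('iframe[id="explorerMenu"]').should("exist");
  });
});
```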
6
cypress/utilities/cosmosClient.js
Normal file
@@ -0,0 +1,6 @@
const { CosmosClient } = require("@azure/cosmos");

module.exports = new CosmosClient({
  endpoint: "https://0.0.0.0:8081",
  key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});
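One possible use of this client is seeding emulator data before a spec runs; the database and container ids below are made up for illustration:

```ts
// Hypothetical seeding helper built on the exported emulator client.
const client = require("../../utilities/cosmosClient");

async function seedTestData(): Promise<void> {
  // createIfNotExists keeps the helper idempotent across test runs.
  const { database } = await client.databases.createIfNotExists({ id: "e2e" });
  const { container } = await database.containers.createIfNotExists({ id: "items" });
  await container.items.upsert({ id: "doc1", pk: "pk1" });
}
```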
@@ -3023,7 +3023,3 @@ settings-pane {
.infoBoxContent a {
  color: @AccentMediumHigh
}

.collapsibleSection :hover {
  cursor: pointer;
}
3686
package-lock.json
generated
File diff suppressed because it is too large
14
package.json
@@ -4,10 +4,8 @@
|
||||
"description": "Cosmos Explorer",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"@azure/arm-cosmosdb": "9.1.0",
|
||||
"@azure/cosmos": "3.9.0",
|
||||
"@azure/identity": "1.1.0",
|
||||
"@azure/cosmos-language-service": "0.0.5",
|
||||
"@azure/cosmos-language-service": "0.0.4",
|
||||
"@jupyterlab/services": "6.0.0-rc.2",
|
||||
"@jupyterlab/terminal": "3.0.0-rc.2",
|
||||
"@microsoft/applicationinsights-web": "2.5.9",
|
||||
@@ -23,7 +21,7 @@
|
||||
"@nteract/jupyter-widgets": "2.0.0",
|
||||
"@nteract/logos": "1.0.0",
|
||||
"@nteract/markdown": "4.4.0",
|
||||
"@nteract/monaco-editor": "3.2.2",
|
||||
"@nteract/monaco-editor": "3.2.0",
|
||||
"@nteract/octicons": "2.0.0",
|
||||
"@nteract/outputs": "3.0.9",
|
||||
"@nteract/presentational-components": "3.0.7",
|
||||
@@ -68,7 +66,7 @@
|
||||
"jquery-ui-dist": "1.12.1",
|
||||
"knockout": "3.5.1",
|
||||
"mkdirp": "1.0.4",
|
||||
"monaco-editor": "0.18.1",
|
||||
"monaco-editor": "0.15.6",
|
||||
"object.entries": "1.1.0",
|
||||
"office-ui-fabric-react": "7.134.1",
|
||||
"p-retry": "4.2.0",
|
||||
@@ -117,7 +115,7 @@
|
||||
"@types/prop-types": "15.5.8",
|
||||
"@types/puppeteer": "3.0.1",
|
||||
"@types/q": "1.5.1",
|
||||
"@types/react": "16.9.56",
|
||||
"@types/react": "16.9.49",
|
||||
"@types/react-dom": "16.0.7",
|
||||
"@types/react-notification-system": "0.2.39",
|
||||
"@types/react-redux": "7.1.7",
|
||||
@@ -196,8 +194,8 @@
|
||||
"compile": "tsc",
|
||||
"compile:contracts": "tsc -p ./tsconfig.contracts.json",
|
||||
"compile:strict": "tsc -p ./tsconfig.strict.json",
|
||||
"format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
|
||||
"build:contracts": "npm run compile:contracts",
|
||||
"strictEligibleFiles": "node ./strict-migration-tools/index.js",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AutopilotTier } from "../Contracts/DataModels";
|
||||
import { HashMap } from "./HashMap";
|
||||
|
||||
export class AuthorizationEndpoints {
|
||||
@@ -123,17 +124,15 @@ export class Features {
|
||||
public static readonly notebookBasePath = "notebookbasepath";
|
||||
public static readonly canExceedMaximumValue = "canexceedmaximumvalue";
|
||||
public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
|
||||
public static readonly enableAutoPilotV2 = "enableautopilotv2";
|
||||
public static readonly ttl90Days = "ttl90days";
|
||||
public static readonly enableRightPanelV2 = "enablerightpanelv2";
|
||||
public static readonly enableSchema = "enableschema";
|
||||
public static readonly enableSDKoperations = "enablesdkoperations";
|
||||
public static readonly showMinRUSurvey = "showminrusurvey";
|
||||
}
|
||||
|
||||
// flight names returned from the portal are always lowercase
|
||||
export class Flights {
|
||||
public static readonly SettingsV2 = "settingsv2";
|
||||
public static readonly MongoIndexEditor = "mongoindexeditor";
|
||||
}
|
||||
|
||||
export class AfecFeatures {
|
||||
@@ -262,6 +261,7 @@ export class HttpHeaders {
|
||||
public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere";
|
||||
public static autoPilotThroughput = "autoscaleSettings";
|
||||
public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings";
|
||||
public static autoPilotTier = "x-ms-cosmos-offer-autopilot-tier";
|
||||
public static partitionKey: string = "x-ms-documentdb-partitionkey";
|
||||
public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput";
|
||||
public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot";
|
||||
@@ -406,6 +406,54 @@ export enum ConflictOperationType {
|
||||
Delete = "delete"
|
||||
}
|
||||
|
||||
export class AutoPilot {
|
||||
public static tier1Text: string = "4,000 RU/s";
|
||||
public static tier2Text: string = "20,000 RU/s";
|
||||
public static tier3Text: string = "100,000 RU/s";
|
||||
public static tier4Text: string = "500,000 RU/s";
|
||||
|
||||
public static tierText = {
|
||||
[AutopilotTier.Tier1]: "Tier 1",
|
||||
[AutopilotTier.Tier2]: "Tier 2",
|
||||
[AutopilotTier.Tier3]: "Tier 3",
|
||||
[AutopilotTier.Tier4]: "Tier 4"
|
||||
};
|
||||
|
||||
public static tierMaxRus = {
|
||||
[AutopilotTier.Tier1]: 2000,
|
||||
[AutopilotTier.Tier2]: 20000,
|
||||
[AutopilotTier.Tier3]: 100000,
|
||||
[AutopilotTier.Tier4]: 500000
|
||||
};
|
||||
|
||||
public static tierMinRus = {
|
||||
[AutopilotTier.Tier1]: 0,
|
||||
[AutopilotTier.Tier2]: 0,
|
||||
[AutopilotTier.Tier3]: 0,
|
||||
[AutopilotTier.Tier4]: 0
|
||||
};
|
||||
|
||||
public static tierStorageInGB = {
|
||||
[AutopilotTier.Tier1]: 50,
|
||||
[AutopilotTier.Tier2]: 200,
|
||||
[AutopilotTier.Tier3]: 1000,
|
||||
[AutopilotTier.Tier4]: 5000
|
||||
};
|
||||
}
|
||||
|
||||
export class DataExplorerVersions {
|
||||
public static readonly v_1_0_0: string = "1.0.0";
|
||||
public static readonly v_1_0_1: string = "1.0.1";
|
||||
}
|
||||
|
||||
export class DataExplorerFeatures {
|
||||
public static offerCache: string = "OfferCache";
|
||||
}
|
||||
|
||||
export const DataExplorerFeaturesVersions: any = {
|
||||
OfferCache: DataExplorerVersions.v_1_0_1
|
||||
};
|
||||
|
||||
export const EmulatorMasterKey =
|
||||
//[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")]
|
||||
"C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import * as Cosmos from "@azure/cosmos";
|
||||
import { RequestInfo, setAuthorizationTokenHeaderUsingMasterKey } from "@azure/cosmos";
|
||||
import { configContext, Platform } from "../ConfigContext";
|
||||
import { getErrorMessage } from "./ErrorHandlingUtils";
|
||||
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
|
||||
import { EmulatorMasterKey, HttpHeaders } from "./Constants";
|
||||
import { userContext } from "../UserContext";
|
||||
@@ -70,7 +69,7 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
|
||||
const result = JSON.parse(await response.json());
|
||||
return result;
|
||||
} catch (error) {
|
||||
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
|
||||
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${JSON.stringify(error)}`);
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -168,6 +168,22 @@ export function deleteConflict(
|
||||
);
|
||||
}
|
||||
|
||||
export function refreshCachedOffers(): Q.Promise<void> {
|
||||
if (configContext.platform === Platform.Portal) {
|
||||
return sendCachedDataMessage(MessageTypes.RefreshOffers, []);
|
||||
} else {
|
||||
return Q();
|
||||
}
|
||||
}
|
||||
|
||||
export function refreshCachedResources(options?: any): Q.Promise<void> {
|
||||
if (configContext.platform === Platform.Portal) {
|
||||
return sendCachedDataMessage(MessageTypes.RefreshResources, []);
|
||||
} else {
|
||||
return Q();
|
||||
}
|
||||
}
|
||||
|
||||
export function queryConflicts(
|
||||
databaseId: string,
|
||||
containerId: string,
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
|
||||
import { RequestOptions } from "@azure/cosmos/dist-esm";
|
||||
import Q from "q";
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
import ConflictId from "../Explorer/Tree/ConflictId";
|
||||
import DocumentId from "../Explorer/Tree/DocumentId";
|
||||
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
|
||||
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
|
||||
import * as Constants from "./Constants";
|
||||
import { sendNotificationForError } from "./dataAccess/sendNotificationForError";
|
||||
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
|
||||
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
|
||||
import { handleError } from "./ErrorHandlingUtils";
|
||||
import * as Logger from "./Logger";
|
||||
|
||||
// TODO: Log all promise resolutions and errors with verbosity levels
|
||||
export function queryDocuments(
|
||||
@@ -56,11 +59,13 @@ export function executeStoredProcedure(
|
||||
);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(
|
||||
error,
|
||||
"ExecuteStoredProcedure",
|
||||
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
|
||||
logConsoleError(
|
||||
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
);
|
||||
Logger.logError(JSON.stringify(error), "ExecuteStoredProcedure", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -88,7 +93,9 @@ export function queryDocumentsPage(
|
||||
deferred.resolve(result);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
|
||||
logConsoleError(`Failed to query ${entityName} for container ${resourceName}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "QueryDocumentsPage", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -109,7 +116,9 @@ export function readDocument(collection: ViewModels.CollectionBase, documentId:
|
||||
deferred.resolve(document);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
|
||||
logConsoleError(`Failed to read ${entityName} ${documentId.id()}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "ReadDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -135,7 +144,9 @@ export function updateDocument(
|
||||
deferred.resolve(updatedDocument);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
|
||||
logConsoleError(`Failed to update ${entityName} ${documentId.id()}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "UpdateDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -157,7 +168,11 @@ export function createDocument(collection: ViewModels.CollectionBase, newDocumen
|
||||
deferred.resolve(savedDocument);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
|
||||
logConsoleError(
|
||||
`Error while creating new ${entityName} for container ${collection.id()}:\n ${JSON.stringify(error)}`
|
||||
);
|
||||
Logger.logError(JSON.stringify(error), "CreateDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -179,7 +194,9 @@ export function deleteDocument(collection: ViewModels.CollectionBase, documentId
|
||||
deferred.resolve(response);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
|
||||
logConsoleError(`Error while deleting ${entityName} ${documentId.id()}:\n ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "DeleteDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -205,7 +222,9 @@ export function deleteConflict(
|
||||
deferred.resolve(response);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
|
||||
logConsoleError(`Error while deleting conflict ${conflictId.id()}:\n ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "DeleteConflict", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -215,3 +234,11 @@ export function deleteConflict(
|
||||
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
export function refreshCachedResources(options: any = {}): Q.Promise<void> {
|
||||
return DataAccessUtilityBase.refreshCachedResources(options);
|
||||
}
|
||||
|
||||
export function refreshCachedOffers(): Q.Promise<void> {
|
||||
return DataAccessUtilityBase.refreshCachedOffers();
|
||||
}
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
import { ARMError } from "../Utils/arm/request";
|
||||
import { HttpStatusCodes } from "./Constants";
|
||||
import { MessageTypes } from "../Contracts/ExplorerContracts";
|
||||
import { SubscriptionType } from "../Contracts/SubscriptionType";
|
||||
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "./Logger";
|
||||
import { sendMessage } from "./MessageHandler";
|
||||
|
||||
export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
|
||||
const errorMessage = getErrorMessage(error);
|
||||
const errorCode = error instanceof ARMError ? error.code : undefined;
|
||||
|
||||
// logs error to data explorer console
|
||||
const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
|
||||
logConsoleError(consoleErrorMessage);
|
||||
|
||||
// logs error to both app insight and kusto
|
||||
logError(errorMessage, area, errorCode);
|
||||
|
||||
// checks for errors caused by firewall and sends them to portal to handle
|
||||
sendNotificationForError(errorMessage, errorCode);
|
||||
};
|
||||
|
||||
export const getErrorMessage = (error: string | Error): string => {
|
||||
const errorMessage = typeof error === "string" ? error : error.message;
|
||||
return replaceKnownError(errorMessage);
|
||||
};
|
||||
|
||||
export const getErrorStack = (error: string | Error): string => {
|
||||
return typeof error === "string" ? undefined : error.stack;
|
||||
};
|
||||
|
||||
const sendNotificationForError = (errorMessage: string, errorCode: number | string): void => {
|
||||
if (errorCode === HttpStatusCodes.Forbidden) {
|
||||
if (errorMessage?.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
|
||||
return;
|
||||
}
|
||||
sendMessage({
|
||||
type: MessageTypes.ForbiddenError,
|
||||
reason: errorMessage
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const replaceKnownError = (errorMessage: string): string => {
|
||||
if (
|
||||
window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
|
||||
errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0
|
||||
) {
|
||||
return "Database throughput is not supported for internal subscriptions.";
|
||||
} else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) {
|
||||
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
|
||||
}
|
||||
|
||||
return errorMessage;
|
||||
};
|
||||
24
src/Common/ErrorParserUtility.test.ts
Normal file
24
src/Common/ErrorParserUtility.test.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import * as ErrorParserUtility from "./ErrorParserUtility";
|
||||
|
||||
describe("Error Parser Utility", () => {
|
||||
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
|
||||
it("should parse a backend error correctly", () => {
|
||||
// A fake error matching what is thrown by the SDK on a bad collection create request
|
||||
const innerMessage =
|
||||
"The partition key component definition path '/asdwqr31 @#$#$WRadf' could not be accepted, failed near position '10'. Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
|
||||
const message = `Message: {\"Errors\":[\"${innerMessage}\"]}\r\nActivityId: 97b2e684-7505-4921-85f6-2513b9b28220, Request URI: /apps/89fdcf25-2a0b-4d2a-aab6-e161e565b26f/services/54911149-7bb1-4e7d-a1fa-22c8b36a4bb9/partitions/cc2a7a04-5f5a-4709-bcf7-8509b264963f/replicas/132304018743619218p, RequestStats: , SDK: Microsoft.Azure.Documents.Common/2.10.0`;
|
||||
const err = new Error(message) as any;
|
||||
err.code = 400;
|
||||
err.body = {
|
||||
code: "BadRequest",
|
||||
message
|
||||
};
|
||||
err.headers = {};
|
||||
err.activityId = "97b2e684-7505-4921-85f6-2513b9b28220";
|
||||
|
||||
const parsedError = ErrorParserUtility.parse(err);
|
||||
expect(parsedError.length).toBe(1);
|
||||
expect(parsedError[0].message).toBe(innerMessage);
|
||||
});
|
||||
});
|
||||
});
|
||||
69
src/Common/ErrorParserUtility.ts
Normal file
69
src/Common/ErrorParserUtility.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
|
||||
export function replaceKnownError(err: string): string {
|
||||
if (
|
||||
window.dataExplorer.subscriptionType() === ViewModels.SubscriptionType.Internal &&
|
||||
err.indexOf("SharedOffer is Disabled for your account") >= 0
|
||||
) {
|
||||
return "Database throughput is not supported for internal subscriptions.";
|
||||
} else if (err.indexOf("Partition key paths must contain only valid") >= 0) {
|
||||
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
export function parse(err: any): DataModels.ErrorDataModel[] {
|
||||
try {
|
||||
return _parse(err);
|
||||
} catch (e) {
|
||||
return [<DataModels.ErrorDataModel>{ message: JSON.stringify(err) }];
|
||||
}
|
||||
}
|
||||
|
||||
function _parse(err: any): DataModels.ErrorDataModel[] {
|
||||
var normalizedErrors: DataModels.ErrorDataModel[] = [];
|
||||
if (err.message && !err.code) {
|
||||
normalizedErrors.push(err);
|
||||
} else {
|
||||
const innerErrors: any[] = _getInnerErrors(err.message);
|
||||
normalizedErrors = innerErrors.map(innerError =>
|
||||
typeof innerError === "string" ? { message: innerError } : innerError
|
||||
);
|
||||
}
|
||||
|
||||
return normalizedErrors;
|
||||
}
|
||||
|
||||
function _getInnerErrors(message: string): any[] {
|
||||
/*
|
||||
The backend error message has an inner-message which is a stringified object.
|
||||
|
||||
For SQL errors, the "errors" property is an array of SqlErrorDataModel.
|
||||
Example:
|
||||
"Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p"
|
||||
For non-SQL errors the "Errors" property is an array of strings.
|
||||
Example:
|
||||
"Message: {"errors":[{"severity":"Error","location":{"start":7,"end":8},"code":"SC1001","message":"Syntax error, incorrect syntax near '.'."}]}\r\nActivityId: d3300016d4084e310a, Request URI: /apps/12401f9e1df77/services/dc100232b1f44545/partitions/f86f3bc0001a2f78/replicas/13085003638s"
|
||||
*/
|
||||
|
||||
let innerMessage: any = null;
|
||||
|
||||
const singleLineMessage = message.replace(/[\r\n]|\r|\n/g, "");
|
||||
try {
|
||||
// Multi-Partition error flavor
|
||||
const regExp = /^(.*)ActivityId: (.*)/g;
|
||||
const regString = regExp.exec(singleLineMessage);
|
||||
const innerMessageString = regString[1];
|
||||
innerMessage = JSON.parse(innerMessageString);
|
||||
} catch (e) {
|
||||
// Single-partition error flavor
|
||||
const regExp = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g;
|
||||
const regString = regExp.exec(singleLineMessage);
|
||||
const innerMessageString = regString[1];
|
||||
innerMessage = JSON.parse(innerMessageString);
|
||||
}
|
||||
|
||||
return innerMessage.errors ? innerMessage.errors : innerMessage.Errors;
|
||||
}
|
||||
@@ -21,8 +21,14 @@ export function logWarning(message: string, area: string, code?: number): void {
|
||||
return _logEntry(entry);
|
||||
}
|
||||
|
||||
export function logError(errorMessage: string, area: string, code?: number | string): void {
|
||||
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, errorMessage, area, code);
|
||||
export function logError(message: string | Error, area: string, code?: number): void {
|
||||
let logMessage: string;
|
||||
if (typeof message === "string") {
|
||||
logMessage = message;
|
||||
} else {
|
||||
logMessage = JSON.stringify(message, Object.getOwnPropertyNames(message));
|
||||
}
|
||||
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, logMessage, area, code);
|
||||
return _logEntry(entry);
|
||||
}
|
||||
|
||||
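As an aside, the variant of `logError` above that serializes with `Object.getOwnPropertyNames` does so because a bare `JSON.stringify` on an `Error` drops its non-enumerable properties; a small illustration (not part of the diff):

```ts
const err = new Error("Partition key paths must contain only valid characters");

JSON.stringify(err);
// "{}" — message and stack are non-enumerable, so they are lost

JSON.stringify(err, Object.getOwnPropertyNames(err));
// includes "message" (and typically "stack")
```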
@@ -53,7 +59,7 @@ function _generateLogEntry(
|
||||
level: Diagnostics.LogEntryLevel,
|
||||
message: string,
|
||||
area: string,
|
||||
code?: number | string
|
||||
code?: number
|
||||
): Diagnostics.LogEntry {
|
||||
return {
|
||||
timestamp: new Date().getUTCSeconds(),
|
||||
|
||||
@@ -1,41 +1,41 @@
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
|
||||
import { userContext } from "../UserContext";
|
||||
import { configContext, Platform } from "../ConfigContext";
|
||||
|
||||
const notificationsPath = () => {
|
||||
switch (configContext.platform) {
|
||||
case Platform.Hosted:
|
||||
return "/api/guest/notifications";
|
||||
case Platform.Portal:
|
||||
return "/api/notifications";
|
||||
default:
|
||||
throw new Error(`Unknown platform: ${configContext.platform}`);
|
||||
}
|
||||
};
|
||||
|
||||
export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
|
||||
if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const databaseAccount = userContext.databaseAccount;
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const url = `${configContext.BACKEND_ENDPOINT}${notificationsPath()}?accountName=${
|
||||
databaseAccount.name
|
||||
}&subscriptionId=${subscriptionId}&resourceGroup=${resourceGroup}`;
|
||||
const authorizationHeader: ViewModels.AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
|
||||
const headers = { [authorizationHeader.header]: authorizationHeader.token };
|
||||
|
||||
const response = await window.fetch(url, {
|
||||
headers
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(await response.text());
|
||||
}
|
||||
|
||||
return (await response.json()) as DataModels.Notification[];
|
||||
};
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
|
||||
import { userContext } from "../UserContext";
|
||||
import { configContext, Platform } from "../ConfigContext";
|
||||
|
||||
const notificationsPath = () => {
|
||||
switch (configContext.platform) {
|
||||
case Platform.Hosted:
|
||||
return "/api/guest/notifications";
|
||||
case Platform.Portal:
|
||||
return "/api/notifications";
|
||||
default:
|
||||
throw new Error(`Unknown platform: ${configContext.platform}`);
|
||||
}
|
||||
};
|
||||
|
||||
export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
|
||||
if (configContext.platform === Platform.Emulator) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const databaseAccount = userContext.databaseAccount;
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const url = `${configContext.BACKEND_ENDPOINT}${notificationsPath()}?accountName=${
|
||||
databaseAccount.name
|
||||
}&subscriptionId=${subscriptionId}&resourceGroup=${resourceGroup}`;
|
||||
const authorizationHeader: ViewModels.AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
|
||||
const headers = { [authorizationHeader.header]: authorizationHeader.token };
|
||||
|
||||
const response = await window.fetch(url, {
|
||||
headers
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(await response.text());
|
||||
}
|
||||
|
||||
return (await response.json()) as DataModels.Notification[];
|
||||
};
|
||||
|
||||
@@ -12,7 +12,8 @@ import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
|
||||
import { userContext } from "../UserContext";
|
||||
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
|
||||
import { createCollection } from "./dataAccess/createCollection";
|
||||
import { handleError } from "./ErrorHandlingUtils";
|
||||
import * as ErrorParserUtility from "./ErrorParserUtility";
|
||||
import * as Logger from "./Logger";
|
||||
|
||||
export class QueriesClient {
|
||||
private static readonly PartitionKey: DataModels.PartitionKey = {
|
||||
@@ -52,8 +53,13 @@ export class QueriesClient {
|
||||
return Promise.resolve(collection);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
|
||||
return Promise.reject(error);
|
||||
const stringifiedError: string = JSON.stringify(error);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to set up account for saving queries: ${stringifiedError}`
|
||||
);
|
||||
Logger.logError(stringifiedError, "setupQueriesCollection");
|
||||
return Promise.reject(stringifiedError);
|
||||
}
|
||||
)
|
||||
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
|
||||
@@ -96,11 +102,19 @@ export class QueriesClient {
|
||||
return Promise.resolve();
|
||||
},
|
||||
(error: any) => {
|
||||
if (error.code === HttpStatusCodes.Conflict.toString()) {
|
||||
error = `Query ${query.queryName} already exists`;
|
||||
let errorMessage: string;
|
||||
const parsedError: DataModels.ErrorDataModel = ErrorParserUtility.parse(error)[0];
|
||||
if (parsedError.code === HttpStatusCodes.Conflict.toString()) {
|
||||
errorMessage = `Query ${query.queryName} already exists`;
|
||||
} else {
|
||||
errorMessage = parsedError.message;
|
||||
}
|
||||
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
|
||||
return Promise.reject(error);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to save query ${query.queryName}: ${errorMessage}`
|
||||
);
|
||||
Logger.logError(JSON.stringify(parsedError), "saveQuery");
|
||||
return Promise.reject(errorMessage);
|
||||
}
|
||||
)
|
||||
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
|
||||
@@ -149,15 +163,25 @@ export class QueriesClient {
|
||||
return Promise.resolve(queries);
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
|
||||
return Promise.reject(error);
|
||||
const stringifiedError: string = JSON.stringify(error);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to fetch saved queries: ${stringifiedError}`
|
||||
);
|
||||
Logger.logError(stringifiedError, "getSavedQueries");
|
||||
return Promise.reject(stringifiedError);
|
||||
}
|
||||
);
|
||||
},
|
||||
(error: any) => {
|
||||
// should never get into this state but we handle this regardless
|
||||
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
|
||||
return Promise.reject(error);
|
||||
const stringifiedError: string = JSON.stringify(error);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to fetch saved queries: ${stringifiedError}`
|
||||
);
|
||||
Logger.logError(stringifiedError, "getSavedQueries");
|
||||
return Promise.reject(stringifiedError);
|
||||
}
|
||||
)
|
||||
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
|
||||
@@ -208,8 +232,13 @@ export class QueriesClient {
|
||||
return Promise.resolve();
|
||||
},
|
||||
(error: any) => {
|
||||
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
|
||||
return Promise.reject(error);
|
||||
const stringifiedError: string = JSON.stringify(error);
|
||||
NotificationConsoleUtils.logConsoleMessage(
|
||||
ConsoleDataType.Error,
|
||||
`Failed to delete query ${query.queryName}: ${stringifiedError}`
|
||||
);
|
||||
Logger.logError(stringifiedError, "deleteQuery");
|
||||
return Promise.reject(stringifiedError);
|
||||
}
|
||||
)
|
||||
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as DataModels from "../../Contracts/DataModels";
|
||||
import * as ErrorParserUtility from "../ErrorParserUtility";
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { ContainerResponse, DatabaseResponse } from "@azure/cosmos";
|
||||
import { ContainerRequest } from "@azure/cosmos/dist-esm/client/Container/ContainerRequest";
|
||||
@@ -22,19 +23,21 @@ import {
|
||||
getGremlinGraph
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
|
||||
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleProgress, logConsoleError, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { refreshCachedResources } from "../DataAccessUtilityBase";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { userContext } from "../../UserContext";
|
||||
import { createDatabase } from "./createDatabase";
|
||||
import * as TelemetryProcessor from "../../Shared/Telemetry/TelemetryProcessor";
|
||||
import { Action, ActionModifiers } from "../../Shared/Telemetry/TelemetryConstants";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
|
||||
export const createCollection = async (params: DataModels.CreateCollectionParams): Promise<DataModels.Collection> => {
|
||||
let collection: DataModels.Collection;
|
||||
const clearMessage = logConsoleProgress(
|
||||
`Creating a new container ${params.collectionId} for database ${params.databaseId}`
|
||||
);
|
||||
try {
|
||||
let collection: DataModels.Collection;
|
||||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
if (params.createNewDatabase) {
|
||||
const createDatabaseParams: DataModels.CreateDatabaseParams = {
|
||||
@@ -51,15 +54,18 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
|
||||
} else {
|
||||
collection = await createCollectionWithSDK(params);
|
||||
}
|
||||
|
||||
logConsoleInfo(`Successfully created container ${params.collectionId}`);
|
||||
return collection;
|
||||
} catch (error) {
|
||||
handleError(error, "CreateCollection", `Error while creating container ${params.collectionId}`);
|
||||
throw error;
|
||||
} finally {
|
||||
const sanitizedError = ErrorParserUtility.replaceKnownError(JSON.stringify(error));
|
||||
logConsoleError(`Error while creating container ${params.collectionId}:\n ${sanitizedError}`);
|
||||
logError(JSON.stringify(error), "CreateCollection", error.code);
|
||||
sendNotificationForError(error);
|
||||
clearMessage();
|
||||
throw error;
|
||||
}
|
||||
logConsoleInfo(`Successfully created container ${params.collectionId}`);
|
||||
await refreshCachedResources();
|
||||
clearMessage();
|
||||
return collection;
|
||||
};
|
||||
|
||||
const createCollectionWithARM = async (params: DataModels.CreateCollectionParams): Promise<DataModels.Collection> => {
|
||||
|
||||
@@ -24,28 +24,36 @@ import {
|
||||
createUpdateGremlinDatabase,
|
||||
getGremlinDatabase
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleProgress, logConsoleError, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { refreshCachedOffers, refreshCachedResources } from "../DataAccessUtilityBase";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function createDatabase(params: DataModels.CreateDatabaseParams): Promise<DataModels.Database> {
|
||||
let database: DataModels.Database;
|
||||
const clearMessage = logConsoleProgress(`Creating a new database ${params.databaseId}`);
|
||||
try {
|
||||
if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
|
||||
throw new Error("Creating database resources is not allowed for tables accounts");
|
||||
}
|
||||
const database: DataModels.Database = await (window.authType === AuthType.AAD && !userContext.useSDKOperations
|
||||
? createDatabaseWithARM(params)
|
||||
: createDatabaseWithSDK(params));
|
||||
|
||||
logConsoleInfo(`Successfully created database ${params.databaseId}`);
|
||||
return database;
|
||||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
database = await createDatabaseWithARM(params);
|
||||
} else {
|
||||
database = await createDatabaseWithSDK(params);
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(error, "CreateDatabase", `Error while creating database ${params.databaseId}`);
|
||||
throw error;
|
||||
} finally {
|
||||
logConsoleError(`Error while creating database ${params.databaseId}:\n ${error.message}`);
|
||||
logError(JSON.stringify(error), "CreateDatabase", error.code);
|
||||
sendNotificationForError(error);
|
||||
clearMessage();
|
||||
throw error;
|
||||
}
|
||||
logConsoleInfo(`Successfully created database ${params.databaseId}`);
|
||||
await refreshCachedResources();
|
||||
await refreshCachedOffers();
|
||||
clearMessage();
|
||||
return database;
|
||||
}
|
||||
|
||||
async function createDatabaseWithARM(params: DataModels.CreateDatabaseParams): Promise<DataModels.Database> {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlStoredProcedureCreateUpdateParameters,
|
||||
@@ -10,8 +9,9 @@ import {
|
||||
createUpdateSqlStoredProcedure,
|
||||
getSqlStoredProcedure
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function createStoredProcedure(
|
||||
@@ -21,11 +21,7 @@ export async function createStoredProcedure(
|
||||
): Promise<StoredProcedureDefinition & Resource> {
|
||||
const clearMessage = logConsoleProgress(`Creating stored procedure ${storedProcedure.id}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
try {
|
||||
const getResponse = await getSqlStoredProcedure(
|
||||
userContext.subscriptionId,
|
||||
@@ -70,7 +66,9 @@ export async function createStoredProcedure(
|
||||
.scripts.storedProcedures.create(storedProcedure);
|
||||
return response?.resource;
|
||||
} catch (error) {
|
||||
handleError(error, "CreateStoredProcedure", `Error while creating stored procedure ${storedProcedure.id}`);
|
||||
logConsoleError(`Error while creating stored procedure ${storedProcedure.id}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "CreateStoredProcedure", error.code);
|
||||
sendNotificationForError(error);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, TriggerDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlTriggerCreateUpdateParameters,
|
||||
@@ -7,8 +6,9 @@ import {
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function createTrigger(
|
||||
@@ -18,11 +18,7 @@ export async function createTrigger(
|
||||
): Promise<TriggerDefinition & Resource> {
|
||||
const clearMessage = logConsoleProgress(`Creating trigger ${trigger.id}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
try {
|
||||
const getResponse = await getSqlTrigger(
|
||||
userContext.subscriptionId,
|
||||
@@ -33,7 +29,7 @@ export async function createTrigger(
|
||||
trigger.id
|
||||
);
|
||||
if (getResponse?.properties?.resource) {
|
||||
throw new Error(`Create trigger failed: ${trigger.id} already exists`);
|
||||
throw new Error(`Create trigger failed: trigger with id ${trigger.id} already exists`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code !== "NotFound") {
|
||||
@@ -65,7 +61,9 @@ export async function createTrigger(
|
||||
.scripts.triggers.create(trigger);
|
||||
return response.resource;
|
||||
} catch (error) {
|
||||
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
|
||||
logConsoleError(`Error while creating trigger ${trigger.id}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "CreateTrigger", error.code);
|
||||
sendNotificationForError(error);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlUserDefinedFunctionCreateUpdateParameters,
|
||||
@@ -10,8 +9,9 @@ import {
|
||||
createUpdateSqlUserDefinedFunction,
|
||||
getSqlUserDefinedFunction
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function createUserDefinedFunction(
|
||||
@@ -21,11 +21,7 @@ export async function createUserDefinedFunction(
|
||||
): Promise<UserDefinedFunctionDefinition & Resource> {
|
||||
const clearMessage = logConsoleProgress(`Creating user defined function ${userDefinedFunction.id}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
try {
|
||||
const getResponse = await getSqlUserDefinedFunction(
|
||||
userContext.subscriptionId,
|
||||
@@ -70,11 +66,9 @@ export async function createUserDefinedFunction(
|
||||
.scripts.userDefinedFunctions.create(userDefinedFunction);
|
||||
return response?.resource;
|
||||
} catch (error) {
|
||||
handleError(
|
||||
error,
|
||||
"CreateUserupdateUserDefinedFunction",
|
||||
`Error while creating user defined function ${userDefinedFunction.id}`
|
||||
);
|
||||
logConsoleError(`Error while creating user defined function ${userDefinedFunction.id}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "CreateUserupdateUserDefinedFunction", error.code);
|
||||
sendNotificationForError(error);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
|
||||
@@ -7,6 +7,7 @@ import { AuthType } from "../../AuthType";
|
||||
import { client } from "../CosmosClient";
|
||||
import { updateUserContext } from "../../UserContext";
|
||||
import { DatabaseAccount } from "../../Contracts/DataModels";
|
||||
import { sendCachedDataMessage } from "../MessageHandler";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
|
||||
describe("deleteCollection", () => {
|
||||
@@ -17,6 +18,7 @@ describe("deleteCollection", () => {
|
||||
} as DatabaseAccount,
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB
|
||||
});
|
||||
(sendCachedDataMessage as jest.Mock).mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
it("should call ARM if logged in with AAD", async () => {
|
||||
|
||||
@@ -5,10 +5,12 @@ import { deleteCassandraTable } from "../../Utils/arm/generatedClients/2020-04-0
|
||||
import { deleteMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
|
||||
import { deleteGremlinGraph } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { deleteTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { userContext } from "../../UserContext";
|
||||
import { client } from "../CosmosClient";
|
||||
import { refreshCachedResources } from "../DataAccessUtilityBase";
|
||||
|
||||
export async function deleteCollection(databaseId: string, collectionId: string): Promise<void> {
|
||||
const clearMessage = logConsoleProgress(`Deleting container ${collectionId}`);
|
||||
@@ -21,13 +23,15 @@ export async function deleteCollection(databaseId: string, collectionId: string)
|
||||
.container(collectionId)
|
||||
.delete();
|
||||
}
|
||||
logConsoleInfo(`Successfully deleted container ${collectionId}`);
|
||||
} catch (error) {
|
||||
handleError(error, "DeleteCollection", `Error while deleting container ${collectionId}`);
|
||||
logConsoleError(`Error while deleting container ${collectionId}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "DeleteCollection", error.code);
|
||||
sendNotificationForError(error);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
logConsoleInfo(`Successfully deleted container ${collectionId}`);
|
||||
clearMessage();
|
||||
await refreshCachedResources();
|
||||
}
|
||||
|
||||
function deleteCollectionWithARM(databaseId: string, collectionId: string): Promise<void> {
|
||||
|
||||
@@ -7,6 +7,7 @@ import { AuthType } from "../../AuthType";
import { client } from "../CosmosClient";
import { updateUserContext } from "../../UserContext";
import { DatabaseAccount } from "../../Contracts/DataModels";
import { sendCachedDataMessage } from "../MessageHandler";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";

describe("deleteDatabase", () => {
@@ -17,6 +18,7 @@ describe("deleteDatabase", () => {
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
(sendCachedDataMessage as jest.Mock).mockResolvedValue(undefined);
});

it("should call ARM if logged in with AAD", async () => {

@@ -4,10 +4,12 @@ import { deleteSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/s
import { deleteCassandraKeyspace } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { deleteMongoDBDatabase } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { deleteGremlinDatabase } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { client } from "../CosmosClient";
import { refreshCachedResources } from "../DataAccessUtilityBase";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";

export async function deleteDatabase(databaseId: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting database ${databaseId}`);
@@ -23,13 +25,15 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
.database(databaseId)
.delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {
handleError(error, "DeleteDatabase", `Error while deleting database ${databaseId}`);
logConsoleError(`Error while deleting database ${databaseId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteDatabase", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
clearMessage();
await refreshCachedResources();
}

function deleteDatabaseWithARM(databaseId: string): Promise<void> {

@@ -1,9 +1,9 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { deleteSqlStoredProcedure } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function deleteStoredProcedure(
@@ -13,11 +13,7 @@ export async function deleteStoredProcedure(
): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting stored procedure ${storedProcedureId}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteSqlStoredProcedure(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -34,7 +30,9 @@ export async function deleteStoredProcedure(
.delete();
}
} catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
logConsoleError(`Error while deleting stored procedure ${storedProcedureId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteStoredProcedure", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,19 +1,15 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { deleteSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function deleteTrigger(databaseId: string, collectionId: string, triggerId: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting trigger ${triggerId}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteSqlTrigger(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -30,7 +26,9 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
.delete();
}
} catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
logConsoleError(`Error while deleting trigger ${triggerId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteTrigger", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,19 +1,15 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { deleteSqlUserDefinedFunction } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function deleteUserDefinedFunction(databaseId: string, collectionId: string, id: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting user defined function ${id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteSqlUserDefinedFunction(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -30,7 +26,9 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
.delete();
}
} catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
logConsoleError(`Error while deleting user defined function ${id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteUserDefinedFunction", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,28 +0,0 @@
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { AuthType } from "../../AuthType";

export async function getIndexTransformationProgress(databaseId: string, collectionId: string): Promise<number> {
if (window.authType !== AuthType.AAD) {
return undefined;
}
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });

indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
);
} catch (error) {
handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
throw error;
}
clearMessage();
return indexTransformationPercentage;
}
@@ -1,7 +1,8 @@
import * as DataModels from "../../Contracts/DataModels";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";

export async function readCollection(databaseId: string, collectionId: string): Promise<DataModels.Collection> {
let collection: DataModels.Collection;
@@ -13,7 +14,9 @@ export async function readCollection(databaseId: string, collectionId: string):
.read();
collection = response.resource as DataModels.Collection;
} catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
logConsoleError(`Error while querying container ${collectionId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollection", error.code);
sendNotificationForError(error);
throw error;
}
clearMessage();

@@ -4,14 +4,15 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { readOffers } from "./readOffers";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export const readCollectionOffer = async (
@@ -56,7 +57,9 @@ export const readCollectionOffer = async (
}
);
} catch (error) {
handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
logConsoleError(`Error while querying offer for collection ${params.collectionId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollectionOffer", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -5,8 +5,9 @@ import { ContainerDefinition, Resource } from "@azure/cosmos";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";

interface ResourceWithStatistics {
statistics: DataModels.Statistic[];
@@ -37,7 +38,9 @@ export const readCollectionQuotaInfo = async (

return quota;
} catch (error) {
handleError(error, "ReadCollectionQuotaInfo", `Error while querying quota info for container ${collection.id}`);
logConsoleError(`Error while querying quota info for container ${collection.id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollectionQuotaInfo", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -2,16 +2,18 @@ import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { listSqlContainers } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { listCassandraTables } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { listMongoDBCollections } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { listGremlinGraphs } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { listTables } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function readCollections(databaseId: string): Promise<DataModels.Collection[]> {
let collections: DataModels.Collection[];
const clearMessage = logConsoleProgress(`Querying containers for database ${databaseId}`);
try {
if (
@@ -20,20 +22,22 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
) {
return await readCollectionsWithARM(databaseId);
collections = await readCollectionsWithARM(databaseId);
} else {
const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
collections = sdkResponse.resources as DataModels.Collection[];
}

const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
logConsoleError(`Error while querying containers for database ${databaseId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollections", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();
}
clearMessage();
return collections;
}

async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Collection[]> {

@@ -8,9 +8,10 @@ import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-
import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { readOffers } from "./readOffers";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export const readDatabaseOffer = async (
@@ -47,7 +48,9 @@ export const readDatabaseOffer = async (
}
);
} catch (error) {
handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
logConsoleError(`Error while querying offer for database ${params.databaseId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadDatabaseOffer", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -2,12 +2,13 @@ import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { listSqlDatabases } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { listCassandraKeyspaces } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { listMongoDBDatabases } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { listGremlinDatabases } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function readDatabases(): Promise<DataModels.Database[]> {
@@ -27,7 +28,9 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {
handleError(error, "ReadDatabases", `Error while querying databases`);
logConsoleError(`Error while querying databases:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadDatabases", error.code);
sendNotificationForError(error);
throw error;
}
clearMessage();

@@ -1,30 +0,0 @@
import { userContext } from "../../UserContext";
import { getMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { MongoDBCollectionResource } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { handleError } from "../ErrorHandlingUtils";
import { AuthType } from "../../AuthType";

export async function readMongoDBCollectionThroughRP(
databaseId: string,
collectionId: string
): Promise<MongoDBCollectionResource> {
if (window.authType !== AuthType.AAD) {
return undefined;
}
let collection: MongoDBCollectionResource;
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;

const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
collection = response.properties.resource;
} catch (error) {
handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
throw error;
}
clearMessage();
return collection;
}
@@ -1,10 +1,29 @@
import { Offer } from "../../Contracts/DataModels";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { ClientDefaults } from "../Constants";
import { MessageTypes } from "../../Contracts/ExplorerContracts";
import { Platform, configContext } from "../../ConfigContext";
import { client } from "../CosmosClient";
import { handleError, getErrorMessage } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendCachedDataMessage } from "../MessageHandler";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export const readOffers = async (): Promise<Offer[]> => {
const clearMessage = logConsoleProgress(`Querying offers`);
try {
if (configContext.platform === Platform.Portal) {
const offers = sendCachedDataMessage<Offer[]>(MessageTypes.AllOffers, [
userContext.databaseAccount.id,
ClientDefaults.portalCacheTimeoutMs
]);
clearMessage();

return offers;
}
} catch (error) {
// If error getting cached Offers, continue on and read via SDK
}

try {
const response = await client()
@@ -13,11 +32,13 @@ export const readOffers = async (): Promise<Offer[]> => {
return response?.resources;
} catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too.
if (getErrorMessage(error)?.includes("Reading or replacing offers is not supported for serverless accounts")) {
if (error.message.includes("Reading or replacing offers is not supported for serverless accounts")) {
return [];
}

handleError(error, "ReadOffers", `Error while querying offers`);
logConsoleError(`Error while querying offers:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadOffers", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,10 +1,10 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { listSqlStoredProcedures } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function readStoredProcedures(
@@ -13,11 +13,7 @@ export async function readStoredProcedures(
): Promise<(StoredProcedureDefinition & Resource)[]> {
const clearMessage = logConsoleProgress(`Querying stored procedures for container ${collectionId}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
const rpResponse = await listSqlStoredProcedures(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -35,7 +31,9 @@ export async function readStoredProcedures(
.fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadStoredProcedures", `Failed to query stored procedures for container ${collectionId}`);
logConsoleError(`Failed to query stored procedures for container ${collectionId}: ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadStoredProcedures", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,11 +1,11 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, TriggerDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { listSqlTriggers } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";
import { handleError } from "../ErrorHandlingUtils";

export async function readTriggers(
databaseId: string,
@@ -13,11 +13,7 @@ export async function readTriggers(
): Promise<(TriggerDefinition & Resource)[]> {
const clearMessage = logConsoleProgress(`Querying triggers for container ${collectionId}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
const rpResponse = await listSqlTriggers(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -35,7 +31,9 @@ export async function readTriggers(
.fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
logConsoleError(`Failed to query triggers for container ${collectionId}: ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadTriggers", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,10 +1,10 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { listSqlUserDefinedFunctions } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function readUserDefinedFunctions(
@@ -13,11 +13,7 @@ export async function readUserDefinedFunctions(
): Promise<(UserDefinedFunctionDefinition & Resource)[]> {
const clearMessage = logConsoleProgress(`Querying user defined functions for container ${collectionId}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
const rpResponse = await listSqlUserDefinedFunctions(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -35,11 +31,9 @@ export async function readUserDefinedFunctions(
.fetchAll();
return response?.resources;
} catch (error) {
handleError(
error,
"ReadUserDefinedFunctions",
`Failed to query user defined functions for container ${collectionId}`
);
logConsoleError(`Failed to query user defined functions for container ${collectionId}: ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadUserDefinedFunctions", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

20
src/Common/dataAccess/sendNotificationForError.ts
Normal file
@@ -0,0 +1,20 @@
import * as Constants from "../Constants";
import { sendMessage } from "../MessageHandler";
import { MessageTypes } from "../../Contracts/ExplorerContracts";

interface CosmosError {
code: number;
message?: string;
}

export function sendNotificationForError(error: CosmosError): void {
if (error && error.code === Constants.HttpStatusCodes.Forbidden) {
if (error.message && error.message.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
return;
}
sendMessage({
type: MessageTypes.ForbiddenError,
reason: error && error.message ? error.message : error
});
}
}
@@ -3,10 +3,7 @@ import { Collection } from "../../Contracts/DataModels";
import { ContainerDefinition } from "@azure/cosmos";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import {
CreateUpdateOptions,
ExtendedResourceProperties,
MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource,
SqlContainerCreateUpdateParameters,
SqlContainerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
@@ -26,8 +23,10 @@ import {
getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { refreshCachedResources } from "../DataAccessUtilityBase";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function updateCollection(
@@ -52,14 +51,16 @@ export async function updateCollection(
.database(databaseId)
.container(collectionId)
.replace(newCollection as ContainerDefinition, options);

collection = sdkResponse.resource as Collection;
}

logConsoleInfo(`Successfully updated container ${collectionId}`);
await refreshCachedResources();
return collection;
} catch (error) {
handleError(error, "UpdateCollection", `Failed to update container ${collectionId}`);
logConsoleError(`Failed to update container ${collectionId}: ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "UpdateCollection", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();
@@ -79,6 +80,15 @@ async function updateCollectionWithARM(
switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB:
return updateSqlContainer(databaseId, collectionId, subscriptionId, resourceGroup, accountName, newCollection);
case DefaultAccountExperienceType.MongoDB:
return updateMongoDBCollection(
databaseId,
collectionId,
subscriptionId,
resourceGroup,
accountName,
newCollection
);
case DefaultAccountExperienceType.Cassandra:
return updateCassandraTable(databaseId, collectionId, subscriptionId, resourceGroup, accountName, newCollection);
case DefaultAccountExperienceType.Graph:
@@ -115,35 +125,26 @@ async function updateSqlContainer(
throw new Error(`Sql container to update does not exist. Database id: ${databaseId} Collection id: ${collectionId}`);
}

export async function updateMongoDBCollectionThroughRP(
async function updateMongoDBCollection(
databaseId: string,
collectionId: string,
newCollection: MongoDBCollectionResource,
updateOptions?: CreateUpdateOptions
): Promise<MongoDBCollectionResource> {
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;

subscriptionId: string,
resourceGroup: string,
accountName: string,
newCollection: Collection
): Promise<Collection> {
const getResponse = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
if (getResponse && getResponse.properties && getResponse.properties.resource) {
const updateParams: MongoDBCollectionCreateUpdateParameters = {
properties: {
resource: newCollection,
options: updateOptions
}
};

getResponse.properties.resource = newCollection as SqlContainerResource & ExtendedResourceProperties;
const updateResponse = await createUpdateMongoDBCollection(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId,
updateParams
getResponse as SqlContainerCreateUpdateParameters
);

return updateResponse && (updateResponse.properties.resource as MongoDBCollectionResource);
return updateResponse && (updateResponse.properties.resource as Collection);
}

throw new Error(

@@ -6,10 +6,12 @@ import { OfferDefinition } from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { readCollectionOffer } from "./readCollectionOffer";
import { readDatabaseOffer } from "./readDatabaseOffer";
import { refreshCachedOffers, refreshCachedResources } from "../DataAccessUtilityBase";
import { sendNotificationForError } from "./sendNotificationForError";
import {
updateSqlDatabaseThroughput,
migrateSqlDatabaseToAutoscale,
@@ -69,10 +71,14 @@ export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> =>
} else {
updatedOffer = await updateOfferWithSDK(params);
}
await refreshCachedOffers();
await refreshCachedResources();
logConsoleInfo(`Successfully updated offer for ${offerResourceText}`);
return updatedOffer;
} catch (error) {
handleError(error, "UpdateCollection", `Error updating offer for ${offerResourceText}`);
logConsoleError(`Error updating offer for ${offerResourceText}: ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "UpdateCollection", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,9 +1,8 @@
import { Platform, configContext } from "../../ConfigContext";
import { getAuthorizationHeader } from "../../Utils/AuthorizationUtils";
import { AutoPilotOfferSettings } from "../../Contracts/DataModels";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleInfo, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { HttpHeaders } from "../Constants";
import { handleError } from "../ErrorHandlingUtils";

interface UpdateOfferThroughputRequest {
subscriptionId: string;
@@ -45,13 +44,8 @@ export async function updateOfferThroughputBeyondLimit(request: UpdateOfferThrou
clearMessage();
return undefined;
}

const error = await response.json();
handleError(
error,
"updateOfferThroughputBeyondLimit",
`Failed to request an increase in throughput for ${request.throughput}`
);
logConsoleError(`Failed to request an increase in throughput for ${request.throughput}: ${error.message}`);
clearMessage();
throw error;
throw new Error(error.message);
}

@@ -1,5 +1,4 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
@@ -10,8 +9,9 @@ import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function updateStoredProcedure(
@@ -21,11 +21,7 @@ export async function updateStoredProcedure(
): Promise<StoredProcedureDefinition & Resource> {
const clearMessage = logConsoleProgress(`Updating stored procedure ${storedProcedure.id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
const getResponse = await getSqlStoredProcedure(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -64,7 +60,10 @@ export async function updateStoredProcedure(
.replace(storedProcedure);
return response?.resource;
} catch (error) {
handleError(error, "UpdateStoredProcedure", `Error while updating stored procedure ${storedProcedure.id}`);
const errorMessage = error.code === "NotFound" ? `${storedProcedure.id} does not exist.` : JSON.stringify(error);
logConsoleError(`Error while updating stored procedure ${storedProcedure.id}:\n ${errorMessage}`);
logError(errorMessage, "UpdateStoredProcedure", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,5 +1,4 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
@@ -7,8 +6,9 @@ import {
import { TriggerDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function updateTrigger(
@@ -18,11 +18,7 @@ export async function updateTrigger(
): Promise<TriggerDefinition> {
const clearMessage = logConsoleProgress(`Updating trigger ${trigger.id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
const getResponse = await getSqlTrigger(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -61,7 +57,10 @@ export async function updateTrigger(
.replace(trigger);
return response?.resource;
} catch (error) {
handleError(error, "UpdateTrigger", `Error while updating trigger ${trigger.id}`);
const errorMessage = error.code === "NotFound" ? `${trigger.id} does not exist.` : JSON.stringify(error);
logConsoleError(`Error while updating trigger ${trigger.id}:\n ${errorMessage}`);
logError(errorMessage, "UpdateTrigger", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -1,5 +1,4 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
@@ -10,8 +9,9 @@ import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export async function updateUserDefinedFunction(
@@ -21,11 +21,7 @@ export async function updateUserDefinedFunction(
): Promise<UserDefinedFunctionDefinition & Resource> {
const clearMessage = logConsoleProgress(`Updating user defined function ${userDefinedFunction.id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
const getResponse = await getSqlUserDefinedFunction(
userContext.subscriptionId,
userContext.resourceGroup,
@@ -64,11 +60,11 @@ export async function updateUserDefinedFunction(
.replace(userDefinedFunction);
return response?.resource;
} catch (error) {
handleError(
error,
"UpdateUserupdateUserDefinedFunction",
`Error while updating user defined function ${userDefinedFunction.id}`
);
const errorMessage =
error.code === "NotFound" ? `${userDefinedFunction.id} does not exist.` : JSON.stringify(error);
logConsoleError(`Error while updating user defined function ${userDefinedFunction.id}:\n ${errorMessage}`);
logError(errorMessage, "UpdateUserupdateUserDefinedFunction", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

@@ -88,38 +88,6 @@ export interface Resource {
id: string;
}

export interface IType {
name: string;
code: number;
}

export interface IDataField {
dataType: IType;
hasNulls: boolean;
isArray: boolean;
schemaType: IType;
name: string;
path: string;
maxRepetitionLevel: number;
maxDefinitionLevel: number;
}

export interface ISchema {
id: string;
accountName: string;
resource: string;
fields: IDataField[];
}

export interface ISchemaRequest {
id: string;
subscriptionId: string;
resourceGroup: string;
accountName: string;
resource: string;
status: string;
}

export interface Collection extends Resource {
defaultTtl?: number;
indexingPolicy?: IndexingPolicy;
@@ -130,8 +98,6 @@ export interface Collection extends Resource {
changeFeedPolicy?: ChangeFeedPolicy;
analyticalStorageTtl?: number;
geospatialConfig?: GeospatialConfig;
schema?: ISchema;
requestSchema?: () => void;
}

export interface Database extends Resource {
@@ -250,6 +216,18 @@ export interface UniqueKey {
paths: string[];
}

// Returned by DocumentDb client proxy
// Inner errors in BackendErrorDataModel when error is in SQL syntax
export interface ErrorDataModel {
message: string;
severity?: string;
location?: {
start: string;
end: string;
};
code?: string;
}

export interface CreateDatabaseAndCollectionRequest {
databaseId: string;
collectionId: string;
@@ -261,12 +239,21 @@ export interface CreateDatabaseAndCollectionRequest {
uniqueKeyPolicy?: UniqueKeyPolicy;
autoPilot?: AutoPilotCreationSettings;
analyticalStorageTtl?: number;
hasAutoPilotV2FeatureFlag?: boolean;
}

export interface AutoPilotCreationSettings {
autopilotTier?: AutopilotTier;
maxThroughput?: number;
}

export enum AutopilotTier {
Tier1 = 1,
Tier2 = 2,
Tier3 = 3,
Tier4 = 4
}

export interface Query {
id: string;
resourceId: string;
@@ -275,7 +262,9 @@ export interface Query {
}

export interface AutoPilotOfferSettings {
tier?: AutopilotTier;
maximumTierThroughput?: number;
targetTier?: AutopilotTier;
maxThroughput?: number;
targetMaxThroughput?: number;
}
@@ -502,6 +491,7 @@ export interface MongoParameters extends RpParameters {
rid?: string;
rtype?: string;
isAutoPilot?: Boolean;
autoPilotTier?: string;
autoPilotThroughput?: string;
analyticalStorageTtl?: number;
}

@@ -46,7 +46,7 @@ export interface LogEntry {
/**
* The message code.
*/
code?: number | string;
code?: number;
/**
* Any additional data to be logged.
*/

@@ -32,8 +32,7 @@ export enum MessageTypes {
GetArcadiaToken,
CreateWorkspace,
CreateSparkPool,
RefreshDatabaseAccount,
InitTestExplorer
RefreshDatabaseAccount
}

export { Versions, ActionContracts, Diagnostics };

@@ -1,7 +0,0 @@
export enum SubscriptionType {
Benefits,
EA,
Free,
Internal,
PAYG
}
@@ -17,7 +17,6 @@ import Trigger from "../Explorer/Tree/Trigger";
import UserDefinedFunction from "../Explorer/Tree/UserDefinedFunction";
import { UploadDetails } from "../workers/upload/definitions";
import * as DataModels from "./DataModels";
import { SubscriptionType } from "./SubscriptionType";

export interface TokenProvider {
getAuthHeader(): Promise<Headers>;
@@ -116,8 +115,6 @@ export interface CollectionBase extends TreeNode {
export interface Collection extends CollectionBase {
defaultTtl: ko.Observable<number>;
analyticalStorageTtl: ko.Observable<number>;
schema?: DataModels.ISchema;
requestSchema?: () => void;
indexingPolicy: ko.Observable<DataModels.IndexingPolicy>;
uniqueKeyPolicy: DataModels.UniqueKeyPolicy;
quotaInfo: ko.Observable<DataModels.CollectionQuotaInfo>;
@@ -292,10 +289,6 @@ export interface DocumentsTabOptions extends TabOptions {
resourceTokenPartitionKey?: string;
}

export interface SettingsTabV2Options extends TabOptions {
getPendingNotification: Q.Promise<DataModels.Notification>;
}

export interface ConflictsTabOptions extends TabOptions {
partitionKey: DataModels.PartitionKey;
conflictIds: ko.ObservableArray<ConflictId>;
@@ -361,7 +354,6 @@ export enum CollectionTabKind {
SparkMasterTab = 16,
Gallery = 17,
NotebookViewer = 18,
Schema = 19,
SettingsV2 = 19
}

@@ -416,6 +408,14 @@ export interface ThroughputDefaults {
shared: number;
}

export enum SubscriptionType {
Benefits,
EA,
Free,
Internal,
PAYG
}

export class MonacoEditorSettings {
public readonly language: string;
public readonly readOnly: boolean;

@@ -44,6 +44,10 @@ describe("Component Registerer", () => {
expect(ko.components.isRegistered("user-defined-function-tab")).toBe(true);
});

it("should register settings-tab component", () => {
expect(ko.components.isRegistered("settings-tab")).toBe(true);
});

it("should register settings-tab-v2 component", () => {
expect(ko.components.isRegistered("settings-tab-v2")).toBe(true);
});
@@ -119,4 +123,8 @@ describe("Component Registerer", () => {
it("should register dynamic-list component", () => {
expect(ko.components.isRegistered("dynamic-list")).toBe(true);
});

it("should register throughput-input component", () => {
expect(ko.components.isRegistered("throughput-input")).toBe(true);
});
});

@@ -11,6 +11,7 @@ import { InputTypeaheadComponent } from "./Controls/InputTypeahead/InputTypeahea
import { JsonEditorComponent } from "./Controls/JsonEditor/JsonEditorComponent";
import { NewVertexComponent } from "./Graph/NewVertexComponent/NewVertexComponent";
import { TabsManagerKOComponent } from "./Tabs/TabsManager";
import { ThroughputInputComponent } from "./Controls/ThroughputInput/ThroughputInputComponent";
import { ThroughputInputComponentAutoPilotV3 } from "./Controls/ThroughputInput/ThroughputInputComponentAutoPilotV3";

ko.components.register("input-typeahead", new InputTypeaheadComponent());
@@ -22,6 +23,7 @@ ko.components.register("editor", new EditorComponent());
ko.components.register("json-editor", new JsonEditorComponent());
ko.components.register("diff-editor", new DiffEditorComponent());
ko.components.register("dynamic-list", DynamicListComponent);
ko.components.register("throughput-input", ThroughputInputComponent);
ko.components.register("throughput-input-autopilot-v3", ThroughputInputComponentAutoPilotV3);
ko.components.register("tabs-manager", TabsManagerKOComponent());

@@ -31,6 +33,7 @@ ko.components.register("mongo-documents-tab", new TabComponents.MongoDocumentsTa
ko.components.register("stored-procedure-tab", new TabComponents.StoredProcedureTab());
ko.components.register("trigger-tab", new TabComponents.TriggerTab());
ko.components.register("user-defined-function-tab", new TabComponents.UserDefinedFunctionTab());
ko.components.register("settings-tab", new TabComponents.SettingsTab());
ko.components.register("settings-tab-v2", new TabComponents.SettingsTabV2());
ko.components.register("query-tab", new TabComponents.QueryTab());
ko.components.register("tables-query-tab", new TabComponents.QueryTablesTab());

@@ -1,9 +1,12 @@
import * as React from "react";
import { ArcadiaWorkspace, SparkPool } from "../../../Contracts/DataModels";
import { DefaultButton, IButtonStyles } from "office-ui-fabric-react/lib/Button";
import { IContextualMenuItem, IContextualMenuProps } from "office-ui-fabric-react/lib/ContextualMenu";
import {
IContextualMenuItem,
IContextualMenuProps,
ContextualMenuItemType
} from "office-ui-fabric-react/lib/ContextualMenu";
import * as Logger from "../../../Common/Logger";
import { getErrorMessage } from "../../../Common/ErrorHandlingUtils";

export interface ArcadiaMenuPickerProps {
selectText?: string;
@@ -44,7 +47,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
selectedSparkPool: item.text
});
} catch (error) {
Logger.logError(getErrorMessage(error), "ArcadiaMenuPicker/_onSparkPoolClicked");
Logger.logError(error, "ArcadiaMenuPicker/_onSparkPoolClicked");
throw error;
}
};

@@ -1,14 +0,0 @@
import { shallow } from "enzyme";
import React from "react";
import { CollapsibleSectionComponent, CollapsibleSectionProps } from "./CollapsibleSectionComponent";

describe("CollapsibleSectionComponent", () => {
it("renders", () => {
const props: CollapsibleSectionProps = {
title: "Sample title"
};

const wrapper = shallow(<CollapsibleSectionComponent {...props} />);
expect(wrapper).toMatchSnapshot();
});
});
@@ -1,36 +0,0 @@
import { Icon, Label, Stack } from "office-ui-fabric-react";
import * as React from "react";
import { accordionIconStyles, accordionStackTokens } from "../Settings/SettingsRenderUtils";

export interface CollapsibleSectionProps {
title: string;
}

export interface CollapsibleSectionState {
isExpanded: boolean;
}

export class CollapsibleSectionComponent extends React.Component<CollapsibleSectionProps, CollapsibleSectionState> {
constructor(props: CollapsibleSectionProps) {
super(props);
this.state = {
isExpanded: true
};
}

private toggleCollapsed = (): void => {
this.setState({ isExpanded: !this.state.isExpanded });
};

public render(): JSX.Element {
return (
<>
<Stack className="collapsibleSection" horizontal tokens={accordionStackTokens} onClick={this.toggleCollapsed}>
<Icon iconName={this.state.isExpanded ? "ChevronDown" : "ChevronRight"} styles={accordionIconStyles} />
<Label>{this.props.title}</Label>
</Stack>
{this.state.isExpanded && this.props.children}
</>
);
}
}
@@ -1,30 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`CollapsibleSectionComponent renders 1`] = `
<Fragment>
  <Stack
    className="collapsibleSection"
    horizontal={true}
    onClick={[Function]}
    tokens={
      Object {
        "childrenGap": 10,
      }
    }
  >
    <StyledIconBase
      iconName="ChevronDown"
      styles={
        Object {
          "root": Object {
            "paddingTop": 7,
          },
        }
      }
    />
    <StyledLabelBase>
      Sample title
    </StyledLabelBase>
  </Stack>
</Fragment>
`;
@@ -4,10 +4,12 @@

import * as React from "react";
import * as DataModels from "../../../Contracts/DataModels";
import * as Logger from "../../../Common/Logger";
import * as NotificationConsoleUtils from "../../../Utils/NotificationConsoleUtils";
import { ConsoleDataType } from "../../Menus/NotificationConsole/NotificationConsoleComponent";
import { StringUtils } from "../../../Utils/StringUtils";
import { userContext } from "../../../UserContext";
import { TerminalQueryParams } from "../../../Common/Constants";
import { handleError } from "../../../Common/ErrorHandlingUtils";

export interface NotebookTerminalComponentProps {
notebookServerInfo: DataModels.NotebookWorkspaceConnectionInfo;
@@ -69,10 +71,9 @@ export class NotebookTerminalComponent extends React.Component<NotebookTerminalC
params: Map<string, string>
): string {
if (!serverInfo.notebookServerEndpoint) {
handleError(
"Notebook server endpoint not defined. Terminal will fail to connect to jupyter server.",
"NotebookTerminalComponent/createNotebookAppSrc"
);
const error = "Notebook server endpoint not defined. Terminal will fail to connect to jupyter server.";
Logger.logError(error, "NotebookTerminalComponent/createNotebookAppSrc");
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, error);
return "";
}

@@ -1,8 +1,9 @@
|
||||
import * as React from "react";
|
||||
import { JunoClient } from "../../../Juno/JunoClient";
|
||||
import { HttpStatusCodes, CodeOfConductEndpoints } from "../../../Common/Constants";
|
||||
import * as Logger from "../../../Common/Logger";
|
||||
import { logConsoleError } from "../../../Utils/NotificationConsoleUtils";
|
||||
import { Stack, Text, Checkbox, PrimaryButton, Link } from "office-ui-fabric-react";
|
||||
import { handleError } from "../../../Common/ErrorHandlingUtils";
|
||||
|
||||
export interface CodeOfConductComponentProps {
|
||||
junoClient: JunoClient;
|
||||
@@ -44,7 +45,9 @@ export class CodeOfConductComponent extends React.Component<CodeOfConductCompone
|
||||
|
||||
this.props.onAcceptCodeOfConduct(response.data);
|
||||
} catch (error) {
|
||||
handleError(error, "CodeOfConductComponent/acceptCodeOfConduct", "Failed to accept code of conduct");
|
||||
const message = `Failed to accept code of conduct: ${error}`;
|
||||
Logger.logError(message, "CodeOfConductComponent/acceptCodeOfConduct");
|
||||
logConsoleError(message);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -16,8 +16,11 @@ import {
|
||||
Text
|
||||
} from "office-ui-fabric-react";
|
||||
import * as React from "react";
|
||||
import * as Logger from "../../../Common/Logger";
|
||||
import { IGalleryItem, JunoClient, IJunoResponse, IPublicGalleryData } from "../../../Juno/JunoClient";
|
||||
import * as GalleryUtils from "../../../Utils/GalleryUtils";
|
||||
import * as NotificationConsoleUtils from "../../../Utils/NotificationConsoleUtils";
|
||||
import { ConsoleDataType } from "../../Menus/NotificationConsole/NotificationConsoleComponent";
|
||||
import { DialogComponent, DialogProps } from "../DialogReactComponent/DialogComponent";
|
||||
import { GalleryCardComponent, GalleryCardComponentProps } from "./Cards/GalleryCardComponent";
|
||||
import "./GalleryViewerComponent.less";
|
||||
@@ -25,7 +28,6 @@ import { HttpStatusCodes } from "../../../Common/Constants";
|
||||
import Explorer from "../../Explorer";
|
||||
import { CodeOfConductComponent } from "./CodeOfConductComponent";
|
||||
import { InfoComponent } from "./InfoComponent/InfoComponent";
|
||||
import { handleError } from "../../../Common/ErrorHandlingUtils";
|
||||
|
||||
export interface GalleryViewerComponentProps {
|
||||
container?: Explorer;
|
||||
@@ -352,7 +354,9 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
|
||||
|
||||
this.sampleNotebooks = response.data;
|
||||
} catch (error) {
|
||||
handleError(error, "GalleryViewerComponent/loadSampleNotebooks", "Failed to load sample notebooks");
|
||||
const message = `Failed to load sample notebooks: ${error}`;
|
||||
Logger.logError(message, "GalleryViewerComponent/loadSampleNotebooks");
|
||||
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -378,7 +382,9 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
|
||||
throw new Error(`Received HTTP ${response.status} when loading public notebooks`);
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(error, "GalleryViewerComponent/loadPublicNotebooks", "Failed to load public notebooks");
|
||||
const message = `Failed to load public notebooks: ${error}`;
|
||||
Logger.logError(message, "GalleryViewerComponent/loadPublicNotebooks");
|
||||
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -398,7 +404,9 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
|
||||
|
||||
this.favoriteNotebooks = response.data;
|
||||
} catch (error) {
|
||||
handleError(error, "GalleryViewerComponent/loadFavoriteNotebooks", "Failed to load favorite notebooks");
|
||||
const message = `Failed to load favorite notebooks: ${error}`;
|
||||
Logger.logError(message, "GalleryViewerComponent/loadFavoriteNotebooks");
|
||||
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -424,7 +432,9 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
|
||||
|
||||
this.publishedNotebooks = response.data;
|
||||
} catch (error) {
|
||||
handleError(error, "GalleryViewerComponent/loadPublishedNotebooks", "Failed to load published notebooks");
|
||||
const message = `Failed to load published notebooks: ${error}`;
|
||||
Logger.logError(message, "GalleryViewerComponent/loadPublishedNotebooks");
|
||||
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@ import Explorer from "../../Explorer";
|
||||
import { NotebookV4 } from "@nteract/commutable/lib/v4";
|
||||
import { SessionStorageUtility } from "../../../Shared/StorageUtility";
|
||||
import { DialogHost } from "../../../Utils/GalleryUtils";
|
||||
import { getErrorMessage, handleError } from "../../../Common/ErrorHandlingUtils";
|
||||
|
||||
export interface NotebookViewerComponentProps {
|
||||
container?: Explorer;
|
||||
@@ -101,7 +100,9 @@ export class NotebookViewerComponent extends React.Component<NotebookViewerCompo
|
||||
}
|
||||
} catch (error) {
|
||||
this.setState({ showProgressBar: false });
|
||||
handleError(error, "NotebookViewerComponent/loadNotebookContent", "Failed to load notebook content");
|
||||
const message = `Failed to load notebook content: ${error}`;
|
||||
Logger.logError(message, "NotebookViewerComponent/loadNotebookContent");
|
||||
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -28,7 +28,6 @@ import * as TelemetryProcessor from "../../../Shared/Telemetry/TelemetryProcesso
|
||||
|
||||
import SaveQueryBannerIcon from "../../../../images/save_query_banner.png";
|
||||
import { QueriesClient } from "../../../Common/QueriesClient";
|
||||
import { getErrorMessage, getErrorStack } from "../../../Common/ErrorHandlingUtils";
|
||||
|
||||
export interface QueriesGridComponentProps {
|
||||
queriesClient: QueriesClient;
|
||||
@@ -245,9 +244,7 @@ export class QueriesGridComponent extends React.Component<QueriesGridComponentPr
|
||||
databaseAccountName: container && container.databaseAccount().name,
|
||||
defaultExperience: container && container.defaultExperience(),
|
||||
dataExplorerArea: Constants.Areas.ContextualPane,
|
||||
paneTitle: container && container.browseQueriesPane.title(),
|
||||
error: getErrorMessage(error),
|
||||
errorStack: getErrorStack(error)
|
||||
paneTitle: container && container.browseQueriesPane.title()
|
||||
},
|
||||
startKey
|
||||
);
|
||||
|
||||
@@ -8,10 +8,7 @@ import * as DataModels from "../../../Contracts/DataModels";
|
||||
import ko from "knockout";
|
||||
import { TtlType, isDirty } from "./SettingsUtils";
|
||||
import Explorer from "../../Explorer";
|
||||
jest.mock("../../../Common/dataAccess/getIndexTransformationProgress", () => ({
|
||||
getIndexTransformationProgress: jest.fn().mockReturnValue(undefined)
|
||||
}));
|
||||
import { updateCollection, updateMongoDBCollectionThroughRP } from "../../../Common/dataAccess/updateCollection";
|
||||
import { updateCollection } from "../../../Common/dataAccess/updateCollection";
|
||||
jest.mock("../../../Common/dataAccess/updateCollection", () => ({
|
||||
updateCollection: jest.fn().mockReturnValue({
|
||||
id: undefined,
|
||||
@@ -21,17 +18,9 @@ jest.mock("../../../Common/dataAccess/updateCollection", () => ({
|
||||
changeFeedPolicy: undefined,
|
||||
analyticalStorageTtl: undefined,
|
||||
geospatialConfig: undefined
|
||||
} as DataModels.Collection),
|
||||
updateMongoDBCollectionThroughRP: jest.fn().mockReturnValue({
|
||||
id: undefined,
|
||||
shardKey: undefined,
|
||||
indexes: [],
|
||||
analyticalStorageTtl: undefined
|
||||
} as MongoDBCollectionResource)
|
||||
} as DataModels.Collection)
|
||||
}));
|
||||
import { updateOffer } from "../../../Common/dataAccess/updateOffer";
|
||||
import { MongoDBCollectionResource } from "../../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import Q from "q";
|
||||
jest.mock("../../../Common/dataAccess/updateOffer", () => ({
|
||||
updateOffer: jest.fn().mockReturnValue({} as DataModels.Offer)
|
||||
}));
|
||||
@@ -46,10 +35,7 @@ describe("SettingsComponent", () => {
|
||||
node: undefined,
|
||||
hashLocation: "settings",
|
||||
isActive: ko.observable(false),
|
||||
onUpdateTabsButtons: undefined,
|
||||
getPendingNotification: Q.Promise<DataModels.Notification>(() => {
|
||||
return;
|
||||
})
|
||||
onUpdateTabsButtons: undefined
|
||||
})
|
||||
};
|
||||
|
||||
@@ -202,17 +188,13 @@ describe("SettingsComponent", () => {
|
||||
expect(settingsComponentInstance.isOfferReplacePending()).toEqual(true);
|
||||
});
|
||||
|
||||
it("save calls updateCollection, updateMongoDBCollectionThroughRP and updateOffer", async () => {
|
||||
it("save calls updateCollection and updateOffer", async () => {
|
||||
const wrapper = shallow(<SettingsComponent {...baseProps} />);
|
||||
wrapper.setState({ isSubSettingsSaveable: true, isScaleSaveable: true, isMongoIndexingPolicySaveable: true });
|
||||
wrapper.setState({ isSubSettingsSaveable: true, isScaleSaveable: true });
|
||||
wrapper.update();
|
||||
const settingsComponentInstance = wrapper.instance() as SettingsComponent;
|
||||
settingsComponentInstance.mongoDBCollectionResource = {
|
||||
id: "id"
|
||||
};
|
||||
await settingsComponentInstance.onSaveClick();
|
||||
expect(updateCollection).toBeCalled();
|
||||
expect(updateMongoDBCollectionThroughRP).toBeCalled();
|
||||
expect(updateOffer).toBeCalled();
|
||||
});
|
||||
|
||||
|
||||
@@ -11,17 +11,13 @@ import { Action, ActionModifiers } from "../../../Shared/Telemetry/TelemetryCons
|
||||
import { RequestOptions } from "@azure/cosmos/dist-esm";
|
||||
import Explorer from "../../Explorer";
|
||||
import { updateOffer } from "../../../Common/dataAccess/updateOffer";
|
||||
import { updateCollection, updateMongoDBCollectionThroughRP } from "../../../Common/dataAccess/updateCollection";
|
||||
import { updateCollection } from "../../../Common/dataAccess/updateCollection";
|
||||
import { CommandButtonComponentProps } from "../../Controls/CommandButton/CommandButtonComponent";
|
||||
import { userContext } from "../../../UserContext";
|
||||
import { updateOfferThroughputBeyondLimit } from "../../../Common/dataAccess/updateOfferThroughputBeyondLimit";
|
||||
import SettingsTab from "../../Tabs/SettingsTabV2";
|
||||
import { mongoIndexingPolicyAADError, throughputUnit } from "./SettingsRenderUtils";
|
||||
import { throughputUnit } from "./SettingsRenderUtils";
|
||||
import { ScaleComponent, ScaleComponentProps } from "./SettingsSubComponents/ScaleComponent";
|
||||
import {
|
||||
MongoIndexingPolicyComponent,
|
||||
MongoIndexingPolicyComponentProps
|
||||
} from "./SettingsSubComponents/MongoIndexingPolicy/MongoIndexingPolicyComponent";
|
||||
import {
|
||||
getMaxRUs,
|
||||
hasDatabaseSharedThroughput,
|
||||
@@ -31,11 +27,8 @@ import {
|
||||
SettingsV2TabTypes,
|
||||
getTabTitle,
|
||||
isDirty,
|
||||
AddMongoIndexProps,
|
||||
MongoIndexTypes,
|
||||
parseConflictResolutionMode,
|
||||
parseConflictResolutionProcedure,
|
||||
getMongoNotification
|
||||
parseConflictResolutionProcedure
|
||||
} from "./SettingsUtils";
|
||||
import {
|
||||
ConflictResolutionComponent,
|
||||
@@ -45,11 +38,6 @@ import { SubSettingsComponent, SubSettingsComponentProps } from "./SettingsSubCo
|
||||
import { Pivot, PivotItem, IPivotProps, IPivotItemProps } from "office-ui-fabric-react";
|
||||
import "./SettingsComponent.less";
|
||||
import { IndexingPolicyComponent, IndexingPolicyComponentProps } from "./SettingsSubComponents/IndexingPolicyComponent";
|
||||
import { MongoDBCollectionResource, MongoIndex } from "../../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { readMongoDBCollectionThroughRP } from "../../../Common/dataAccess/readMongoDBCollection";
|
||||
import { getIndexTransformationProgress } from "../../../Common/dataAccess/getIndexTransformationProgress";
|
||||
import { getErrorMessage, getErrorStack } from "../../../Common/ErrorHandlingUtils";
|
||||
import { isEmpty } from "underscore";
|
||||
|
||||
interface SettingsV2TabInfo {
|
||||
tab: SettingsV2TabTypes;
|
||||
@@ -96,13 +84,6 @@ export interface SettingsComponentState {
|
||||
shouldDiscardIndexingPolicy: boolean;
|
||||
isIndexingPolicyDirty: boolean;
|
||||
|
||||
isMongoIndexingPolicySaveable: boolean;
|
||||
isMongoIndexingPolicyDiscardable: boolean;
|
||||
currentMongoIndexes: MongoIndex[];
|
||||
indexesToDrop: number[];
|
||||
indexesToAdd: AddMongoIndexProps[];
|
||||
indexTransformationProgress: number;
|
||||
|
||||
conflictResolutionPolicyMode: DataModels.ConflictResolutionMode;
|
||||
conflictResolutionPolicyModeBaseline: DataModels.ConflictResolutionMode;
|
||||
conflictResolutionPolicyPath: string;
|
||||
@@ -117,16 +98,17 @@ export interface SettingsComponentState {
|
||||
|
||||
export class SettingsComponent extends React.Component<SettingsComponentProps, SettingsComponentState> {
|
||||
private static readonly sixMonthsInSeconds = 15768000;
|
||||
private saveSettingsButton: ButtonV2;
|
||||
private discardSettingsChangesButton: ButtonV2;
|
||||
|
||||
private isAnalyticalStorageEnabled: boolean;
|
||||
public saveSettingsButton: ButtonV2;
|
||||
public discardSettingsChangesButton: ButtonV2;
|
||||
|
||||
public isAnalyticalStorageEnabled: boolean;
|
||||
private collection: ViewModels.Collection;
|
||||
private container: Explorer;
|
||||
private changeFeedPolicyVisible: boolean;
|
||||
private isFixedContainer: boolean;
|
||||
private autoPilotTiersList: ViewModels.DropdownOption<DataModels.AutopilotTier>[];
|
||||
private shouldShowIndexingPolicyEditor: boolean;
|
||||
public mongoDBCollectionResource: MongoDBCollectionResource;
|
||||
|
||||
constructor(props: SettingsComponentProps) {
|
||||
super(props);
|
||||
@@ -176,13 +158,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
shouldDiscardIndexingPolicy: false,
|
||||
isIndexingPolicyDirty: false,
|
||||
|
||||
indexesToDrop: [],
|
||||
indexesToAdd: [],
|
||||
currentMongoIndexes: undefined,
|
||||
isMongoIndexingPolicySaveable: false,
|
||||
isMongoIndexingPolicyDiscardable: false,
|
||||
indexTransformationProgress: undefined,
|
||||
|
||||
conflictResolutionPolicyMode: undefined,
|
||||
conflictResolutionPolicyModeBaseline: undefined,
|
||||
conflictResolutionPolicyPath: undefined,
|
||||
@@ -211,8 +186,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
}
|
||||
|
||||
componentDidMount(): void {
|
||||
this.refreshIndexTransformationProgress();
|
||||
this.loadMongoIndexes();
|
||||
this.setAutoPilotStates();
|
||||
this.setBaseline();
|
||||
if (this.props.settingsTab.isActive()) {
|
||||
@@ -226,30 +199,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
}
|
||||
}
|
||||
|
||||
public loadMongoIndexes = async (): Promise<void> => {
|
||||
if (
|
||||
this.container.isPreferredApiMongoDB() &&
|
||||
this.container.isEnableMongoCapabilityPresent() &&
|
||||
this.container.databaseAccount()
|
||||
) {
|
||||
this.mongoDBCollectionResource = await readMongoDBCollectionThroughRP(
|
||||
this.collection.databaseId,
|
||||
this.collection.id()
|
||||
);
|
||||
|
||||
if (this.mongoDBCollectionResource) {
|
||||
this.setState({
|
||||
currentMongoIndexes: [...this.mongoDBCollectionResource.indexes]
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public refreshIndexTransformationProgress = async (): Promise<void> => {
|
||||
const currentProgress = await getIndexTransformationProgress(this.collection.databaseId, this.collection.id());
|
||||
this.setState({ indexTransformationProgress: currentProgress });
|
||||
};
|
||||
|
||||
public isSaveSettingsButtonEnabled = (): boolean => {
|
||||
if (this.isOfferReplacePending()) {
|
||||
return false;
|
||||
@@ -259,8 +208,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
this.state.isScaleSaveable ||
|
||||
this.state.isSubSettingsSaveable ||
|
||||
this.state.isIndexingPolicyDirty ||
|
||||
this.state.isConflictResolutionDirty ||
|
||||
(!!this.state.currentMongoIndexes && this.state.isMongoIndexingPolicySaveable)
|
||||
this.state.isConflictResolutionDirty
|
||||
);
|
||||
};
|
||||
|
||||
@@ -269,8 +217,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
this.state.isScaleDiscardable ||
|
||||
this.state.isSubSettingsDiscardable ||
|
||||
this.state.isIndexingPolicyDirty ||
|
||||
this.state.isConflictResolutionDirty ||
|
||||
(!!this.state.currentMongoIndexes && this.state.isMongoIndexingPolicyDiscardable)
|
||||
this.state.isConflictResolutionDirty
|
||||
);
|
||||
};
|
||||
|
||||
@@ -346,7 +293,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
break;
|
||||
}
|
||||
|
||||
const wasIndexingPolicyModified = this.state.isIndexingPolicyDirty;
|
||||
newCollection.defaultTtl = defaultTtl;
|
||||
|
||||
newCollection.indexingPolicy = this.state.indexingPolicyContent;
|
||||
@@ -382,11 +328,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
this.collection.conflictResolutionPolicy(updatedCollection.conflictResolutionPolicy);
|
||||
this.collection.changeFeedPolicy(updatedCollection.changeFeedPolicy);
|
||||
this.collection.geospatialConfig(updatedCollection.geospatialConfig);
|
||||
|
||||
if (wasIndexingPolicyModified) {
|
||||
await this.refreshIndexTransformationProgress();
|
||||
}
|
||||
|
||||
this.setState({
|
||||
isSubSettingsSaveable: false,
|
||||
isSubSettingsDiscardable: false,
|
||||
@@ -395,58 +336,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
});
|
||||
}
|
||||
|
||||
if (this.state.isMongoIndexingPolicySaveable && this.mongoDBCollectionResource) {
|
||||
try {
|
||||
const newMongoIndexes = this.getMongoIndexesToSave();
|
||||
const newMongoCollection: MongoDBCollectionResource = {
|
||||
...this.mongoDBCollectionResource,
|
||||
indexes: newMongoIndexes
|
||||
};
|
||||
|
||||
this.mongoDBCollectionResource = await updateMongoDBCollectionThroughRP(
|
||||
this.collection.databaseId,
|
||||
this.collection.id(),
|
||||
newMongoCollection
|
||||
);
|
||||
|
||||
await this.refreshIndexTransformationProgress();
|
||||
this.setState({
|
||||
isMongoIndexingPolicySaveable: false,
|
||||
indexesToDrop: [],
|
||||
indexesToAdd: [],
|
||||
currentMongoIndexes: [...this.mongoDBCollectionResource.indexes]
|
||||
});
|
||||
traceSuccess(
|
||||
Action.MongoIndexUpdated,
|
||||
{
|
||||
databaseAccountName: this.container.databaseAccount()?.name,
|
||||
databaseName: this.collection?.databaseId,
|
||||
collectionName: this.collection?.id(),
|
||||
defaultExperience: this.container.defaultExperience(),
|
||||
dataExplorerArea: Constants.Areas.Tab,
|
||||
tabTitle: this.props.settingsTab.tabTitle()
|
||||
},
|
||||
startKey
|
||||
);
|
||||
} catch (error) {
|
||||
traceFailure(
|
||||
Action.MongoIndexUpdated,
|
||||
{
|
||||
databaseAccountName: this.container.databaseAccount()?.name,
|
||||
databaseName: this.collection?.databaseId,
|
||||
collectionName: this.collection?.id(),
|
||||
defaultExperience: this.container.defaultExperience(),
|
||||
dataExplorerArea: Constants.Areas.Tab,
|
||||
tabTitle: this.props.settingsTab.tabTitle(),
|
||||
error: getErrorMessage(error),
|
||||
errorStack: getErrorStack(error)
|
||||
},
|
||||
startKey
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state.isScaleSaveable) {
|
||||
const newThroughput = this.state.throughput;
|
||||
const newOffer: DataModels.Offer = { ...this.collection.offer() };
|
||||
@@ -503,18 +392,33 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
throughput: newThroughput,
|
||||
offerIsRUPerMinuteThroughputEnabled: false
|
||||
};
|
||||
|
||||
await updateOfferThroughputBeyondLimit(requestPayload);
|
||||
this.collection.offer().content.offerThroughput = originalThroughputValue;
|
||||
this.setState({
|
||||
isScaleSaveable: false,
|
||||
isScaleDiscardable: false,
|
||||
throughput: originalThroughputValue,
|
||||
throughputBaseline: originalThroughputValue,
|
||||
initialNotification: {
|
||||
description: `Throughput update for ${newThroughput} ${throughputUnit}`
|
||||
} as DataModels.Notification
|
||||
});
|
||||
try {
|
||||
await updateOfferThroughputBeyondLimit(requestPayload);
|
||||
this.collection.offer().content.offerThroughput = originalThroughputValue;
|
||||
this.setState({
|
||||
throughput: originalThroughputValue,
|
||||
throughputBaseline: originalThroughputValue,
|
||||
initialNotification: {
|
||||
description: `Throughput update for ${newThroughput} ${throughputUnit}`
|
||||
} as DataModels.Notification
|
||||
});
|
||||
this.setState({ isScaleSaveable: false, isScaleDiscardable: false });
|
||||
} catch (error) {
|
||||
traceFailure(
|
||||
Action.SettingsV2Updated,
|
||||
{
|
||||
databaseAccountName: this.container.databaseAccount().name,
|
||||
databaseName: this.collection?.databaseId,
|
||||
collectionName: this.collection?.id(),
|
||||
defaultExperience: this.container.defaultExperience(),
|
||||
dataExplorerArea: Constants.Areas.Tab,
|
||||
tabTitle: this.props.settingsTab.tabTitle(),
|
||||
error: error
|
||||
},
|
||||
startKey
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
} else {
|
||||
const updateOfferParams: DataModels.UpdateOfferParams = {
|
||||
databaseId: this.collection.databaseId,
|
||||
@@ -561,10 +465,10 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
},
|
||||
startKey
|
||||
);
|
||||
} catch (error) {
|
||||
} catch (reason) {
|
||||
this.container.isRefreshingExplorer(false);
|
||||
this.props.settingsTab.isExecutionError(true);
|
||||
console.error(error);
|
||||
console.error(reason);
|
||||
traceFailure(
|
||||
Action.SettingsV2Updated,
|
||||
{
|
||||
@@ -574,8 +478,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
defaultExperience: this.container.defaultExperience(),
|
||||
dataExplorerArea: Constants.Areas.Tab,
|
||||
tabTitle: this.props.settingsTab.tabTitle(),
|
||||
error: getErrorMessage(error),
|
||||
errorStack: getErrorStack(error)
|
||||
error: reason
|
||||
},
|
||||
startKey
|
||||
);
|
||||
@@ -594,8 +497,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
timeToLiveSeconds: this.state.timeToLiveSecondsBaseline,
|
||||
geospatialConfigType: this.state.geospatialConfigTypeBaseline,
|
||||
indexingPolicyContent: this.state.indexingPolicyContentBaseline,
|
||||
indexesToAdd: [],
|
||||
indexesToDrop: [],
|
||||
conflictResolutionPolicyMode: this.state.conflictResolutionPolicyModeBaseline,
|
||||
conflictResolutionPolicyPath: this.state.conflictResolutionPolicyPathBaseline,
|
||||
conflictResolutionPolicyProcedure: this.state.conflictResolutionPolicyProcedureBaseline,
|
||||
@@ -610,23 +511,10 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
isSubSettingsSaveable: false,
|
||||
isSubSettingsDiscardable: false,
|
||||
isIndexingPolicyDirty: false,
|
||||
isMongoIndexingPolicySaveable: false,
|
||||
isMongoIndexingPolicyDiscardable: false,
|
||||
isConflictResolutionDirty: false
|
||||
});
|
||||
};
|
||||
|
||||
private getMongoIndexesToSave = (): MongoIndex[] => {
|
||||
let finalIndexes: MongoIndex[] = [];
|
||||
this.state.currentMongoIndexes?.map((mongoIndex: MongoIndex, index: number) => {
|
||||
if (!this.state.indexesToDrop.includes(index)) {
|
||||
finalIndexes.push(mongoIndex);
|
||||
}
|
||||
});
|
||||
finalIndexes = finalIndexes.concat(this.state.indexesToAdd.map((m: AddMongoIndexProps) => m.mongoIndex));
|
||||
return finalIndexes;
|
||||
};
|
||||
|
||||
private onScaleSaveableChange = (isScaleSaveable: boolean): void =>
|
||||
this.setState({ isScaleSaveable: isScaleSaveable });
|
||||
|
||||
@@ -656,36 +544,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
}
|
||||
};
|
||||
|
||||
private onIndexDrop = (index: number): void => this.setState({ indexesToDrop: [...this.state.indexesToDrop, index] });
|
||||
|
||||
private onRevertIndexDrop = (index: number): void => {
|
||||
const indexesToDrop = [...this.state.indexesToDrop];
|
||||
indexesToDrop.splice(index, 1);
|
||||
this.setState({ indexesToDrop });
|
||||
};
|
||||
|
||||
private onRevertIndexAdd = (index: number): void => {
|
||||
const indexesToAdd = [...this.state.indexesToAdd];
|
||||
indexesToAdd.splice(index, 1);
|
||||
this.setState({ indexesToAdd });
|
||||
};
|
||||
|
||||
private onIndexAddOrChange = (index: number, description: string, type: MongoIndexTypes): void => {
|
||||
const newIndexesToAdd = [...this.state.indexesToAdd];
|
||||
const notification = getMongoNotification(description, type);
|
||||
const newMongoIndexWithType: AddMongoIndexProps = {
|
||||
mongoIndex: { key: { keys: [description] } } as MongoIndex,
|
||||
type: type,
|
||||
notification: notification
|
||||
};
|
||||
if (index === newIndexesToAdd.length) {
|
||||
newIndexesToAdd.push(newMongoIndexWithType);
|
||||
} else {
|
||||
newIndexesToAdd[index] = newMongoIndexWithType;
|
||||
}
|
||||
this.setState({ indexesToAdd: newIndexesToAdd });
|
||||
};
|
||||
|
||||
private onConflictResolutionPolicyModeChange = (newMode: DataModels.ConflictResolutionMode): void =>
|
||||
this.setState({ conflictResolutionPolicyMode: newMode });
|
||||
|
||||
@@ -724,12 +582,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
private onIndexingPolicyDirtyChange = (isIndexingPolicyDirty: boolean): void =>
|
||||
this.setState({ isIndexingPolicyDirty: isIndexingPolicyDirty });
|
||||
|
||||
private onMongoIndexingPolicySaveableChange = (isMongoIndexingPolicySaveable: boolean): void =>
|
||||
this.setState({ isMongoIndexingPolicySaveable });
|
||||
|
||||
private onMongoIndexingPolicyDiscardableChange = (isMongoIndexingPolicyDiscardable: boolean): void =>
|
||||
this.setState({ isMongoIndexingPolicyDiscardable });
|
||||
|
||||
public getAnalyticalStorageTtl = (): number => {
|
||||
if (this.isAnalyticalStorageEnabled) {
|
||||
if (this.state.analyticalStorageTtlSelection === TtlType.On) {
|
||||
@@ -900,6 +752,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
collection: this.collection,
|
||||
container: this.container,
|
||||
isFixedContainer: this.isFixedContainer,
|
||||
autoPilotTiersList: this.autoPilotTiersList,
|
||||
onThroughputChange: this.onThroughputChange,
|
||||
throughput: this.state.throughput,
|
||||
throughputBaseline: this.state.throughputBaseline,
|
||||
@@ -948,25 +801,9 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
indexingPolicyContentBaseline: this.state.indexingPolicyContentBaseline,
|
||||
onIndexingPolicyContentChange: this.onIndexingPolicyContentChange,
|
||||
logIndexingPolicySuccessMessage: this.logIndexingPolicySuccessMessage,
|
||||
indexTransformationProgress: this.state.indexTransformationProgress,
|
||||
refreshIndexTransformationProgress: this.refreshIndexTransformationProgress,
|
||||
onIndexingPolicyDirtyChange: this.onIndexingPolicyDirtyChange
|
||||
};
|
||||
|
||||
const mongoIndexingPolicyComponentProps: MongoIndexingPolicyComponentProps = {
|
||||
mongoIndexes: this.state.currentMongoIndexes,
|
||||
onIndexDrop: this.onIndexDrop,
|
||||
indexesToDrop: this.state.indexesToDrop,
|
||||
onRevertIndexDrop: this.onRevertIndexDrop,
|
||||
indexesToAdd: this.state.indexesToAdd,
|
||||
onRevertIndexAdd: this.onRevertIndexAdd,
|
||||
onIndexAddOrChange: this.onIndexAddOrChange,
|
||||
indexTransformationProgress: this.state.indexTransformationProgress,
|
||||
refreshIndexTransformationProgress: this.refreshIndexTransformationProgress,
|
||||
onMongoIndexingPolicySaveableChange: this.onMongoIndexingPolicySaveableChange,
|
||||
onMongoIndexingPolicyDiscardableChange: this.onMongoIndexingPolicyDiscardableChange
|
||||
};
|
||||
|
||||
const conflictResolutionPolicyComponentProps: ConflictResolutionComponentProps = {
|
||||
collection: this.collection,
|
||||
container: this.container,
|
||||
@@ -983,7 +820,7 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
};
|
||||
|
||||
const tabs: SettingsV2TabInfo[] = [];
|
||||
if (!hasDatabaseSharedThroughput(this.collection) && this.collection.offer()) {
|
||||
if (!hasDatabaseSharedThroughput(this.collection)) {
|
||||
tabs.push({
|
||||
tab: SettingsV2TabTypes.ScaleTab,
|
||||
content: <ScaleComponent {...scaleComponentProps} />
|
||||
@@ -1000,18 +837,6 @@ export class SettingsComponent extends React.Component<SettingsComponentProps, S
|
||||
tab: SettingsV2TabTypes.IndexingPolicyTab,
|
||||
content: <IndexingPolicyComponent {...indexingPolicyComponentProps} />
|
||||
});
|
||||
} else if (this.container.isPreferredApiMongoDB()) {
|
||||
if (isEmpty(this.container.features())) {
|
||||
tabs.push({
|
||||
tab: SettingsV2TabTypes.IndexingPolicyTab,
|
||||
content: mongoIndexingPolicyAADError
|
||||
});
|
||||
} else if (this.container.isEnableMongoCapabilityPresent()) {
|
||||
tabs.push({
|
||||
tab: SettingsV2TabTypes.IndexingPolicyTab,
|
||||
content: <MongoIndexingPolicyComponent {...mongoIndexingPolicyComponentProps} />
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (this.hasConflictResolution()) {
|
||||
|
||||
@@ -4,11 +4,17 @@ import { ReactAdapter } from "../../../Bindings/ReactBindingHandler";
|
||||
import { SettingsComponent, SettingsComponentProps } from "./SettingsComponent";
|
||||
|
||||
export class SettingsComponentAdapter implements ReactAdapter {
|
||||
public parameters: ko.Computed<boolean>;
|
||||
public parameters: ko.Observable<number>;
|
||||
|
||||
constructor(private props: SettingsComponentProps) {}
|
||||
constructor(private props: SettingsComponentProps) {
|
||||
this.parameters = ko.observable<number>(Date.now());
|
||||
}
|
||||
|
||||
public renderComponent(): JSX.Element {
|
||||
return this.parameters() ? <SettingsComponent {...this.props} /> : <></>;
|
||||
return <SettingsComponent {...this.props} />;
|
||||
}
|
||||
|
||||
public triggerRender(): void {
|
||||
window.requestAnimationFrame(() => this.parameters(Date.now()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ import {
|
||||
getEstimatedAutoscaleSpendElement,
|
||||
manualToAutoscaleDisclaimerElement,
|
||||
ttlWarning,
|
||||
indexingPolicynUnsavedWarningMessage,
|
||||
indexingPolicyTTLWarningMessage,
|
||||
updateThroughputBeyondLimitWarningMessage,
|
||||
updateThroughputDelayedApplyWarningMessage,
|
||||
getThroughputApplyDelayedMessage,
|
||||
@@ -15,11 +15,7 @@ import {
|
||||
getToolTipContainer,
|
||||
conflictResolutionCustomToolTip,
|
||||
changeFeedPolicyToolTip,
|
||||
conflictResolutionLwwTooltip,
|
||||
mongoIndexingPolicyDisclaimer,
|
||||
mongoIndexingPolicyAADError,
|
||||
mongoIndexTransformationRefreshingMessage,
|
||||
renderMongoIndexTransformationRefreshMessage
|
||||
conflictResolutionLwwTooltip
|
||||
} from "./SettingsRenderUtils";
|
||||
|
||||
class SettingsRenderUtilsTestComponent extends React.Component {
|
||||
@@ -37,7 +33,7 @@ class SettingsRenderUtilsTestComponent extends React.Component {
|
||||
|
||||
{manualToAutoscaleDisclaimerElement}
|
||||
{ttlWarning}
|
||||
{indexingPolicynUnsavedWarningMessage}
|
||||
{indexingPolicyTTLWarningMessage}
|
||||
{updateThroughputBeyondLimitWarningMessage}
|
||||
{updateThroughputDelayedApplyWarningMessage}
|
||||
|
||||
@@ -49,16 +45,6 @@ class SettingsRenderUtilsTestComponent extends React.Component {
|
||||
{conflictResolutionLwwTooltip}
|
||||
{conflictResolutionCustomToolTip}
|
||||
{changeFeedPolicyToolTip}
|
||||
|
||||
{mongoIndexingPolicyDisclaimer}
|
||||
{mongoIndexingPolicyAADError}
|
||||
{mongoIndexTransformationRefreshingMessage}
|
||||
{renderMongoIndexTransformationRefreshMessage(0, () => {
|
||||
return;
|
||||
})}
|
||||
{renderMongoIndexTransformationRefreshMessage(90, () => {
|
||||
return;
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -21,24 +21,13 @@ import {
|
||||
Link,
|
||||
Text,
|
||||
IMessageBarStyles,
|
||||
ITextStyles,
|
||||
IDetailsRowStyles,
|
||||
IStackStyles,
|
||||
IIconStyles,
|
||||
IDetailsListStyles,
|
||||
IDropdownStyles,
|
||||
ISeparatorStyles,
|
||||
MessageBar,
|
||||
MessageBarType,
|
||||
Stack,
|
||||
Spinner,
|
||||
SpinnerSize
|
||||
ITextStyles
|
||||
} from "office-ui-fabric-react";
|
||||
import { isDirtyTypes, isDirty } from "./SettingsUtils";
|
||||
|
||||
export const infoAndToolTipTextStyle: ITextStyles = { root: { fontSize: 12 } };
|
||||
const infoAndToolTipTextStyle: ITextStyles = { root: { fontSize: 12 } };
|
||||
|
||||
export const noLeftPaddingCheckBoxStyle: ICheckboxStyles = {
|
||||
export const spendAckCheckBoxStyle: ICheckboxStyles = {
|
||||
label: {
|
||||
margin: 0,
|
||||
padding: "2 0 2 0"
|
||||
@@ -56,20 +45,6 @@ export const titleAndInputStackProps: Partial<IStackProps> = {
|
||||
tokens: { childrenGap: 5 }
|
||||
};
|
||||
|
||||
export const mongoWarningStackProps: Partial<IStackProps> = {
|
||||
tokens: { childrenGap: 5 }
|
||||
};
|
||||
|
||||
export const mongoErrorMessageStyles: Partial<IMessageBarStyles> = { root: { marginLeft: 10 } };
|
||||
|
||||
export const createAndAddMongoIndexStackProps: Partial<IStackProps> = {
|
||||
tokens: { childrenGap: 5 }
|
||||
};
|
||||
|
||||
export const addMongoIndexStackProps: Partial<IStackProps> = {
|
||||
tokens: { childrenGap: 10 }
|
||||
};
|
||||
|
||||
export const checkBoxAndInputStackProps: Partial<IStackProps> = {
|
||||
tokens: { childrenGap: 10 }
|
||||
};
|
||||
@@ -78,54 +53,6 @@ export const toolTipLabelStackTokens: IStackTokens = {
|
||||
childrenGap: 6
|
||||
};
|
||||
|
||||
export const accordionStackTokens: IStackTokens = {
|
||||
childrenGap: 10
|
||||
};
|
||||
|
||||
export const addMongoIndexSubElementsTokens: IStackTokens = {
|
||||
childrenGap: 20
|
||||
};
|
||||
|
||||
export const accordionIconStyles: IIconStyles = { root: { paddingTop: 7 } };
|
||||
|
||||
export const mediumWidthStackStyles: IStackStyles = { root: { width: 600 } };
|
||||
|
||||
export const shortWidthTextFieldStyles: Partial<ITextFieldStyles> = { root: { paddingLeft: 10, width: 210 } };
|
||||
|
||||
export const shortWidthDropDownStyles: Partial<IDropdownStyles> = { dropdown: { paddingleft: 10, width: 202 } };
|
||||
|
||||
export const transparentDetailsRowStyles: Partial<IDetailsRowStyles> = {
|
||||
root: {
|
||||
selectors: {
|
||||
":hover": {
|
||||
background: "transparent"
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const customDetailsListStyles: Partial<IDetailsListStyles> = {
|
||||
root: {
|
||||
selectors: {
|
||||
".ms-FocusZone": {
|
||||
paddingTop: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const separatorStyles: Partial<ISeparatorStyles> = {
|
||||
root: [
|
||||
{
|
||||
selectors: {
|
||||
"::before": {
|
||||
background: StyleConstants.BaseMedium
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
export const messageBarStyles: Partial<IMessageBarStyles> = { root: { marginTop: "5px" } };
|
||||
|
||||
export const throughputUnit = "RU/s";
|
||||
@@ -245,9 +172,15 @@ export const ttlWarning: JSX.Element = (
|
||||
</Text>
|
||||
);
|
||||
|
||||
export const indexingPolicynUnsavedWarningMessage: JSX.Element = (
|
||||
export const indexingPolicyTTLWarningMessage: JSX.Element = (
|
||||
<Text styles={infoAndToolTipTextStyle}>
|
||||
You have not saved the latest changes made to your indexing policy. Please click save to confirm the changes.
|
||||
Changing the Indexing Policy impacts query results while the index transformation occurs. When a change is made and
|
||||
the indexing mode is set to consistent or lazy, queries return eventual results until the operation completes. For
|
||||
more information see,{" "}
|
||||
<Link target="_blank" href="https://aka.ms/cosmosdb/modify-index-policy">
|
||||
Modifying Indexing Policies
|
||||
</Link>
|
||||
.
|
||||
</Text>
|
||||
);
|
||||
|
||||
@@ -380,56 +313,6 @@ export const changeFeedPolicyToolTip: JSX.Element = (
|
||||
</Text>
|
||||
);
|
||||
|
||||
export const mongoIndexingPolicyDisclaimer: JSX.Element = (
|
||||
<Text>
|
||||
For queries that filter on multiple properties, create multiple single field indexes instead of a compound index.
|
||||
<Link href="https://docs.microsoft.com/azure/cosmos-db/mongodb-indexing#index-types" target="_blank">
|
||||
{` Compound indexes `}
|
||||
</Link>
|
||||
are only used for sorting query results. If you need to add a compound index, you can create one using the Mongo
|
||||
shell.
|
||||
</Text>
|
||||
);
|
||||
|
||||
export const mongoIndexingPolicyAADError: JSX.Element = (
|
||||
<MessageBar messageBarType={MessageBarType.error}>
|
||||
<Text>
|
||||
To use the indexing policy editor, please login to the
|
||||
<Link target="_blank" href="https://portal.azure.com">
|
||||
{"azure portal."}
|
||||
</Link>
|
||||
</Text>
|
||||
</MessageBar>
|
||||
);
|
||||
|
||||
export const mongoIndexTransformationRefreshingMessage: JSX.Element = (
|
||||
<Stack horizontal {...mongoWarningStackProps}>
|
||||
<Text styles={infoAndToolTipTextStyle}>Refreshing index transformation progress</Text>
|
||||
<Spinner size={SpinnerSize.small} />
|
||||
</Stack>
|
||||
);
|
||||
|
||||
export const renderMongoIndexTransformationRefreshMessage = (
|
||||
progress: number,
|
||||
performRefresh: () => void
|
||||
): JSX.Element => {
|
||||
if (progress === 0) {
|
||||
return (
|
||||
<Text styles={infoAndToolTipTextStyle}>
|
||||
{"You can make more indexing changes once the current index transformation is complete. "}
|
||||
<Link onClick={performRefresh}>{"Refresh to check if it has completed."}</Link>
|
||||
</Text>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<Text styles={infoAndToolTipTextStyle}>
|
||||
{`You can make more indexing changes once the current index transformation has completed. It is ${progress}% complete. `}
|
||||
<Link onClick={performRefresh}>{"Refresh to check the progress."}</Link>
|
||||
</Text>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const getTextFieldStyles = (current: isDirtyTypes, baseline: isDirtyTypes): Partial<ITextFieldStyles> => ({
|
||||
fieldGroup: {
|
||||
height: 25,
|
||||
|
||||
@@ -25,9 +25,7 @@ describe("IndexingPolicyComponent", () => {
|
||||
},
|
||||
onIndexingPolicyDirtyChange: () => {
|
||||
return;
|
||||
},
|
||||
indexTransformationProgress: undefined,
|
||||
refreshIndexTransformationProgress: () => new Promise(jest.fn())
|
||||
}
|
||||
};
|
||||
|
||||
it("renders", () => {
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import * as React from "react";
|
||||
import * as DataModels from "../../../../Contracts/DataModels";
|
||||
import * as monaco from "monaco-editor";
|
||||
import { isDirty, isIndexTransforming } from "../SettingsUtils";
|
||||
import { isDirty } from "../SettingsUtils";
|
||||
import { MessageBar, MessageBarType, Stack } from "office-ui-fabric-react";
|
||||
import { indexingPolicynUnsavedWarningMessage, titleAndInputStackProps } from "../SettingsRenderUtils";
|
||||
import { IndexingPolicyRefreshComponent } from "./IndexingPolicyRefresh/IndexingPolicyRefreshComponent";
|
||||
import { indexingPolicyTTLWarningMessage, titleAndInputStackProps } from "../SettingsRenderUtils";
|
||||
|
||||
export interface IndexingPolicyComponentProps {
|
||||
shouldDiscardIndexingPolicy: boolean;
|
||||
@@ -13,8 +12,6 @@ export interface IndexingPolicyComponentProps {
|
||||
indexingPolicyContentBaseline: DataModels.IndexingPolicy;
|
||||
onIndexingPolicyContentChange: (newIndexingPolicy: DataModels.IndexingPolicy) => void;
|
||||
logIndexingPolicySuccessMessage: () => void;
|
||||
indexTransformationProgress: number;
|
||||
refreshIndexTransformationProgress: () => Promise<void>;
|
||||
onIndexingPolicyDirtyChange: (isIndexingPolicyDirty: boolean) => void;
|
||||
}
|
||||
|
||||
@@ -54,9 +51,6 @@ export class IndexingPolicyComponent extends React.Component<
|
||||
if (!this.indexingPolicyEditor) {
|
||||
this.createIndexingPolicyEditor();
|
||||
} else {
|
||||
this.indexingPolicyEditor.updateOptions({
|
||||
readOnly: isIndexTransforming(this.props.indexTransformationProgress)
|
||||
});
|
||||
const indexingPolicyEditorModel = this.indexingPolicyEditor.getModel();
|
||||
const value: string = JSON.stringify(this.props.indexingPolicyContent, undefined, 4);
|
||||
indexingPolicyEditorModel.setValue(value);
|
||||
@@ -90,7 +84,7 @@ export class IndexingPolicyComponent extends React.Component<
|
||||
this.indexingPolicyEditor = monaco.editor.create(this.indexingPolicyDiv.current, {
|
||||
value: value,
|
||||
language: "json",
|
||||
readOnly: isIndexTransforming(this.props.indexTransformationProgress),
|
||||
readOnly: false,
|
||||
ariaLabel: "Indexing Policy"
|
||||
});
|
||||
if (this.indexingPolicyEditor) {
|
||||
@@ -114,12 +108,8 @@ export class IndexingPolicyComponent extends React.Component<
|
||||
public render(): JSX.Element {
|
||||
return (
|
||||
<Stack {...titleAndInputStackProps}>
|
||||
<IndexingPolicyRefreshComponent
|
||||
indexTransformationProgress={this.props.indexTransformationProgress}
|
||||
refreshIndexTransformationProgress={this.props.refreshIndexTransformationProgress}
|
||||
/>
|
||||
{isDirty(this.props.indexingPolicyContent, this.props.indexingPolicyContentBaseline) && (
|
||||
<MessageBar messageBarType={MessageBarType.warning}>{indexingPolicynUnsavedWarningMessage}</MessageBar>
|
||||
<MessageBar messageBarType={MessageBarType.warning}>{indexingPolicyTTLWarningMessage}</MessageBar>
|
||||
)}
|
||||
<div className="settingsV2IndexingPolicyEditor" tabIndex={0} ref={this.indexingPolicyDiv}></div>
|
||||
</Stack>
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
import { shallow } from "enzyme";
|
||||
import React from "react";
|
||||
import { IndexingPolicyRefreshComponentProps, IndexingPolicyRefreshComponent } from "./IndexingPolicyRefreshComponent";
|
||||
|
||||
describe("IndexingPolicyRefreshComponent", () => {
|
||||
it("renders", () => {
|
||||
const props: IndexingPolicyRefreshComponentProps = {
|
||||
indexTransformationProgress: 90,
|
||||
refreshIndexTransformationProgress: () => new Promise(jest.fn())
|
||||
};
|
||||
|
||||
const wrapper = shallow(<IndexingPolicyRefreshComponent {...props} />);
|
||||
expect(wrapper).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
@@ -1,62 +0,0 @@
|
||||
import * as React from "react";
|
||||
import { MessageBar, MessageBarType } from "office-ui-fabric-react";
|
||||
import {
|
||||
mongoIndexTransformationRefreshingMessage,
|
||||
renderMongoIndexTransformationRefreshMessage
|
||||
} from "../../SettingsRenderUtils";
|
||||
import { handleError } from "../../../../../Common/ErrorHandlingUtils";
|
||||
import { isIndexTransforming } from "../../SettingsUtils";
|
||||
|
||||
export interface IndexingPolicyRefreshComponentProps {
|
||||
indexTransformationProgress: number;
|
||||
refreshIndexTransformationProgress: () => Promise<void>;
|
||||
}
|
||||
|
||||
interface IndexingPolicyRefreshComponentState {
|
||||
isRefreshing: boolean;
|
||||
}
|
||||
|
||||
export class IndexingPolicyRefreshComponent extends React.Component<
|
||||
IndexingPolicyRefreshComponentProps,
|
||||
IndexingPolicyRefreshComponentState
|
||||
> {
|
||||
constructor(props: IndexingPolicyRefreshComponentProps) {
|
||||
super(props);
|
||||
this.state = {
|
||||
isRefreshing: false
|
||||
};
|
||||
}
|
||||
|
||||
private onClickRefreshIndexingTransformationLink = async () => await this.refreshIndexTransformationProgress();
|
||||
|
||||
private renderIndexTransformationWarning = (): JSX.Element => {
|
||||
if (this.state.isRefreshing) {
|
||||
return mongoIndexTransformationRefreshingMessage;
|
||||
} else if (isIndexTransforming(this.props.indexTransformationProgress)) {
|
||||
return renderMongoIndexTransformationRefreshMessage(
|
||||
this.props.indexTransformationProgress,
|
||||
this.onClickRefreshIndexingTransformationLink
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
private refreshIndexTransformationProgress = async () => {
|
||||
this.setState({ isRefreshing: true });
|
||||
try {
|
||||
await this.props.refreshIndexTransformationProgress();
|
||||
} catch (error) {
|
||||
handleError(error, "RefreshIndexTransformationProgress", "Refreshing index transformation progress failed");
|
||||
} finally {
|
||||
this.setState({ isRefreshing: false });
|
||||
}
|
||||
};
|
||||
|
||||
public render(): JSX.Element {
|
||||
return this.renderIndexTransformationWarning() ? (
|
||||
<MessageBar messageBarType={MessageBarType.warning}>{this.renderIndexTransformationWarning()}</MessageBar>
|
||||
) : (
|
||||
<></>
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`IndexingPolicyRefreshComponent renders 1`] = `
|
||||
<StyledMessageBarBase
|
||||
messageBarType={5}
|
||||
>
|
||||
<Text
|
||||
styles={
|
||||
Object {
|
||||
"root": Object {
|
||||
"fontSize": 12,
|
||||
},
|
||||
}
|
||||
}
|
||||
>
|
||||
You can make more indexing changes once the current index transformation has completed. It is 90% complete.
|
||||
<StyledLinkBase
|
||||
onClick={[Function]}
|
||||
>
|
||||
Refresh to check the progress.
|
||||
</StyledLinkBase>
|
||||
</Text>
|
||||
</StyledMessageBarBase>
|
||||
`;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user