Compare commits


2 Commits

Author | SHA1 | Message | Date
Steve Faulkner | 627c346559 | More cleanup | 2020-11-11 15:45:32 -06:00
Steve Faulkner | 415ebc505b | Remove RU Max Limits | 2020-11-11 13:50:17 -06:00
537 changed files with 53179 additions and 47997 deletions


@@ -3,11 +3,7 @@ PORTAL_RUNNER_PASSWORD=
 PORTAL_RUNNER_SUBSCRIPTION=
 PORTAL_RUNNER_RESOURCE_GROUP=
 PORTAL_RUNNER_DATABASE_ACCOUNT=
-PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
 PORTAL_RUNNER_CONNECTION_STRING=
-NOTEBOOKS_TEST_RUNNER_TENANT_ID=
-NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
-NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
 CASSANDRA_CONNECTION_STRING=
 MONGO_CONNECTION_STRING=
 TABLES_CONNECTION_STRING=


@@ -202,6 +202,8 @@ src/Explorer/Tabs/QueryTab.test.ts
 src/Explorer/Tabs/QueryTab.ts
 src/Explorer/Tabs/QueryTablesTab.ts
 src/Explorer/Tabs/ScriptTabBase.ts
+src/Explorer/Tabs/SettingsTab.test.ts
+src/Explorer/Tabs/SettingsTab.ts
 src/Explorer/Tabs/SparkMasterTab.ts
 src/Explorer/Tabs/StoredProcedureTab.ts
 src/Explorer/Tabs/TabComponents.ts
@@ -288,6 +290,8 @@ src/Utils/DatabaseAccountUtils.ts
 src/Utils/JunoUtils.ts
 src/Utils/MessageValidation.ts
 src/Utils/NotebookConfigurationUtils.ts
+src/Utils/OfferUtils.test.ts
+src/Utils/OfferUtils.ts
 src/Utils/PricingUtils.test.ts
 src/Utils/QueryUtils.test.ts
 src/Utils/QueryUtils.ts
@@ -392,5 +396,19 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
 src/GalleryViewer/Cards/GalleryCardComponent.tsx
 src/GalleryViewer/GalleryViewer.tsx
 src/GalleryViewer/GalleryViewerComponent.tsx
+cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
+cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
+cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
+cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
+cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
+cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
+cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
+cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
+cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
+cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
+cypress/integration/dataexplorer/SQL/addCollection.spec.ts
+cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
+cypress/integration/notebook/newNotebook.spec.ts
+cypress/integration/notebook/resourceTree.spec.ts
 __mocks__/monaco-editor.ts
 src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx


@@ -1,39 +1,39 @@
 module.exports = {
   env: {
     browser: true,
-    es6: true,
+    es6: true
   },
   plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
   extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
   globals: {
     Atomics: "readonly",
-    SharedArrayBuffer: "readonly",
+    SharedArrayBuffer: "readonly"
   },
   parser: "@typescript-eslint/parser",
   parserOptions: {
     ecmaFeatures: {
-      jsx: true,
+      jsx: true
     },
     ecmaVersion: 2018,
-    sourceType: "module",
+    sourceType: "module"
   },
   overrides: [
     {
       files: ["**/*.tsx"],
       env: {
-        jest: true,
+        jest: true
       },
       extends: ["plugin:react/recommended"],
-      plugins: ["react"],
+      plugins: ["react"]
     },
     {
       files: ["**/*.{test,spec}.{ts,tsx}"],
       env: {
-        jest: true,
+        jest: true
       },
       extends: ["plugin:jest/recommended"],
-      plugins: ["jest"],
-    },
+      plugins: ["jest"]
+    }
   ],
   rules: {
     curly: "error",
@@ -47,8 +47,8 @@ module.exports = {
"error", "error",
{ {
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]", selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'", message: "Do not use JSON.stringify(error). It will print '{}'"
}, }
], ]
}, }
}; };


@@ -79,31 +79,32 @@ jobs:
           name: dist
           path: dist/
   endtoendemulator:
-    name: "End To End Emulator Tests"
+    name: "End To End Tests | Emulator | SQL"
     needs: [lint, format, compile, unittest]
     runs-on: windows-latest
     steps:
       - uses: actions/checkout@v2
+      - uses: southpolesteve/cosmos-emulator-github-action@v1
       - name: Use Node.js 12.x
         uses: actions/setup-node@v1
         with:
           node-version: 12.x
-      - uses: southpolesteve/cosmos-emulator-github-action@v1
+      - name: Restore Cypress Binary Cache
+        uses: actions/cache@v2
+        with:
+          path: ~/.cache/Cypress
+          key: ${{ runner.os }}-cypress-binary-cache
       - name: End to End Tests
         run: |
           npm ci
           npm start &
-          npm run wait-for-server
-          npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
+          npm ci --prefix ./cypress
+          npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
         shell: bash
         env:
-          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
-          PLATFORM: "Emulator"
           NODE_TLS_REJECT_UNAUTHORIZED: 0
-      - uses: actions/upload-artifact@v2
-        with:
-          name: screenshots
-          path: failed-*
+          EMULATOR_ENDPOINT: https://0.0.0.0:8081/
+          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
   accessibility:
     name: "Accessibility | Hosted"
     needs: [lint, format, compile, unittest]
@@ -127,8 +128,8 @@ jobs:
         shell: bash
         env:
           NODE_TLS_REJECT_UNAUTHORIZED: 0
-  endtoendhosted:
-    name: "End to End Hosted Tests"
+  endtoendpuppeteer:
+    name: "End to end puppeteer tests"
     needs: [lint, format, compile, unittest]
     runs-on: ubuntu-latest
     steps:
@@ -137,7 +138,7 @@ jobs:
         uses: actions/setup-node@v1
         with:
           node-version: 12.x
-      - name: End to End Hosted Tests
+      - name: End to End Puppeteer Tests
         run: |
           npm ci
           npm start &
@@ -146,13 +147,6 @@ jobs:
         shell: bash
         env:
           NODE_TLS_REJECT_UNAUTHORIZED: 0
-          PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
-          PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
-          PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
-          PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
-          NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
-          NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
-          NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
           PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
           MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
           CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
@@ -165,7 +159,7 @@ jobs:
   nuget:
     name: Publish Nuget
     if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
+    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
     runs-on: ubuntu-latest
     env:
       NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -189,7 +183,7 @@ jobs:
   nugetmpac:
     name: Publish Nuget MPAC
     if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
+    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
     runs-on: ubuntu-latest
     env:
       NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}

.github/workflows/runners.yml (new file)

@@ -0,0 +1,25 @@
name: Runners
on:
  schedule:
    - cron: "0 * 1 * *"
jobs:
  sqlcreatecollection:
    runs-on: ubuntu-latest
    name: "SQL | Create Collection"
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
      - run: npm ci
      - run: npm run test:e2e
        env:
          PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
          PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
          PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
          PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
          PORTAL_RUNNER_RESOURCE_GROUP: runners
          PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: screenshots
          path: failure.png

.gitignore

@@ -9,6 +9,9 @@ pkg/DataExplorer/*
 test/out/*
 workers/**/*.js
 *.trx
+cypress/videos
+cypress/screenshots
+cypress/fixtures
 notebookapp/*
 Contracts/*
 .DS_Store


@@ -76,7 +76,17 @@ Unit tests are located adjacent to the code under test and run with [Jest](https
 #### End to End CI Tests
-Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:
+[Cypress](https://www.cypress.io/) is used for end to end tests, which are contained in `cypress/`. Currently, it operates as a sub-project with its own TypeScript config and dependencies, and it only runs against the emulator. To run the Cypress tests:
+
+1. Ensure the emulator is running
+2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
+3. Move into the `cypress/` folder: `cd cypress`
+4. Install dependencies: `npm install`
+5. Run Cypress headless (`npm run test`) or in interactive mode (`npm run test:debug`)
+
+#### End to End Production Tests
+
+Jest and Puppeteer are used for the end to end production runners and are contained in `test/`. To run these tests locally:
 1. Copy .env.example to .env
 2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
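Taken together, the Cypress steps above reduce to a short shell session; a minimal sketch, assuming the emulator is already running and the dev server comes up on its default port:

```bash
# Terminal 1: serve cosmos explorer against the emulator
PLATFORM=Emulator npm run watch

# Terminal 2: run the Cypress sub-project
cd cypress
npm install
npm run test          # headless; `npm run test:debug` opens the interactive UI
```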


@@ -1,3 +1,3 @@
 module.exports = {
-  presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
+  presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
 };

cypress/.gitignore (new file)

@@ -0,0 +1,4 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos

cypress/cleanup.js (new file)

@@ -0,0 +1,51 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");
// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");
async function cleanup() {
const connectionString = process.env.CYPRESS_CONNECTION_STRING;
if (!connectionString) {
throw new Error("Connection string not provided");
}
let client;
switch (true) {
case connectionString.includes("mongodb://"): {
const [, key, accountName] = connectionString.match(mongoRegex);
client = new CosmosClient({
key,
endpoint: `https://${accountName}.documents.azure.com:443/`
});
break;
}
// TODO: Add support for other API connection strings
default:
client = new CosmosClient(connectionString);
break;
}
const response = await client.databases.readAll().fetchAll();
return Promise.all(
response.resources.map(async db => {
const dbTimestamp = new Date(db._ts * 1000);
const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
if (dbTimestamp < twentyMinutesAgo) {
await client.database(db.id).delete();
console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
} else {
console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
}
})
);
}
cleanup()
.then(() => {
process.exit(0);
})
.catch(error => {
console.error(error);
process.exit(1);
});
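The script reads `CYPRESS_CONNECTION_STRING` from the environment; a hedged example of a local run (account name and key are placeholders):

```bash
# Deletes test databases older than twenty minutes on the given account.
CYPRESS_CONNECTION_STRING="AccountEndpoint=https://<account>.documents.azure.com:443/;AccountKey=<key>;" \
  node cypress/cleanup.js
```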

cypress/cypress.json (new file)

@@ -0,0 +1,15 @@
{
"integrationFolder": "./integration",
"pluginsFile": false,
"fixturesFolder": false,
"supportFile": "./support/index.js",
"defaultCommandTimeout": 90000,
"chromeWebSecurity": false,
"reporter": "mochawesome",
"reporterOptions": {
"reportDir": "cypress/report",
"json": true,
"overwrite": false,
"html": false
}
}
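With `json: true` and `overwrite: false`, each spec leaves its own JSON file under the report directory. The sub-project's devDependencies include `mochawesome-merge` and `mochawesome-report-generator` (`marge`), which are conventionally chained like this from inside `cypress/` (a sketch; no such script ships in the repo):

```bash
# Merge per-spec mochawesome JSON output, then render one HTML report.
npx mochawesome-merge cypress/report/*.json > cypress/report/index.json
npx marge cypress/report/index.json --reportDir cypress/report
```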


@@ -0,0 +1,66 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Cassandra API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
});
it("Create a new table in Cassandra API", () => {
const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
const tableId = `TableId112`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[id="keyspace-id"]')
.should("be.visible")
.type(keyspaceId);
cy.wrap($body)
.find('input[class="textfontclr"]')
.type(tableId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", tableId);
});
});
});


@@ -0,0 +1,81 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Graph API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.graph);
});
it("Create a new graph in Graph API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Graph"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.should("be.visible")
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(graphId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(partitionKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", graphId);
});
});
});


@@ -0,0 +1,80 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// // 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});


@@ -0,0 +1,96 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in Mongo API - Autopilot", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('div[class="throughputModeContainer"]')
.should("be.visible")
.and(input => {
expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
expect(input.get(1).textContent, "second item").contains("Manual");
});
cy.wrap($body)
.find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
.check();
cy.wrap($body)
.find('select[name="autoPilotTiers"]')
// .eq(1).should('contain', '4,000 RU/s');
// // .select('4,000 RU/s').should('have.value', '1');
.find('option[value="2"]')
.then($element => $element.get(1).setAttribute("selected", "selected"));
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});


@@ -0,0 +1,67 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in existing database in Mongo API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('span[class="nodeLabel"]')
.should("be.visible")
.then($span => {
const dbId1 = $span.text();
cy.log("DBBB", dbId1);
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.type(dbId1);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.click()
.should("contain", collectionId);
});
});
});
});


@@ -0,0 +1,203 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context.skip("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API - Provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab2"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab1"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});


@@ -0,0 +1,79 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("SQL API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new container in SQL API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Container"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId);
});
});
});


@@ -0,0 +1,60 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Table API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.table);
});
it("Create a new table in Table API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", collectionId);
});
});
});


@@ -0,0 +1,55 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
let crypt = require("crypto");
context("Emulator - createDatabase", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;
it("Create a new collection", () => {
cy.contains("New Container").click();
// cy.contains(collectionIdTitle);
cy.get(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.get('input[data-test="addCollection-createNewDatabase"]').check();
cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);
cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);
cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");
cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
cy.get('input[data-test="addCollection-createCollection"]').click();
cy.get('div[data-test="resourceTreeId"]').should("exist");
cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);
cy.get('div[data-test="databaseList"]').should("contain", collectionId);
});
});


@@ -0,0 +1,65 @@
// 1. Click on "New Database" on the command bar
// 2. a Pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. Whe the checkbox is marked, a new input with a throughput control let's you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have the list item called "Scale", that once clicked, it should show the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permited range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab and reopen it and verify that the input contains the last saved value.%
const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;
context("Emulator - Create database -> container -> item", () => {
beforeEach(async () => {
const { resources } = await client.databases.readAll().fetchAll();
for (const database of resources) {
await client.database(database.id).delete();
}
});
it("creates a new database", () => {
cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
cy.contains("New Container").click();
cy.get("[data-test=addCollection-newDatabaseId]").click();
cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
cy.get("[data-test=addCollection-collectionId]").click();
cy.get("[data-test=addCollection-collectionId]").type(collectionId);
cy.get("[data-test=addCollection-partitionKeyValue]").click();
cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
cy.get('input[name="createCollection"]').click();
cy.get(".dataResourceTree").should("contain", databaseId);
cy.get(".dataResourceTree")
.contains(databaseId)
.click();
cy.get(".dataResourceTree").should("contain", collectionId);
cy.get(".dataResourceTree")
.contains(collectionId)
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("New Item")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("Save")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
});
});


@@ -0,0 +1,46 @@
// 1. Click last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select "Delete Container" option in the dropdown
// 5. On Selection, Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted collection should not appear under the database
let crypt = require("crypto");
context("Emulator - deleteCollection", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
it("Delete a collection", () => {
cy.get(".databaseId")
.last()
.click();
cy.get(".collectionList")
.last()
.then($id => {
const collectionId = $id.text();
cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");
cy.get('span[data-test="collectionEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="collectionContextMenu"]')
.contains("Delete Container")
.click({ force: true });
cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());
cy.get('input[data-test="deleteCollection"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);
cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
});
});
});


@@ -0,0 +1,83 @@
// 1. Click last database in the resource tree
// 2. Select the context menu within the database
// 4. Select "Delete Database" option in the dropdown
// 5. On Selection, Delete Database pane opens on the right side
// 6. Enter the same database id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted database should not appear in the resource tree
let crypt = require("crypto");
context("Emulator - deleteDatabase", () => {
beforeEach(() => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
let db_rid = "";
const date = new Date().toUTCString();
let authToken = "";
cy.visit("http://localhost:1234/explorer.html");
// Creating auth token for collection creation
cy.request({
method: "GET",
url: "https://localhost:8081/_explorer/authorization/post/dbs/",
headers: {
"x-ms-date": date,
authorization: "-"
}
})
.then(response => {
authToken = response.body.Token; // Getting auth token for collection creation
return new Cypress.Promise((resolve, reject) => {
return resolve();
});
})
.then(() => {
cy.request({
method: "POST",
url: "https://localhost:8081/dbs",
headers: {
"x-ms-date": date,
authorization: authToken,
"x-ms-version": "2018-12-31"
},
body: {
id: dbId
}
}).then(response => {
cy.log("Response", response);
db_rid = response.body._rid;
return new Cypress.Promise((resolve, reject) => {
cy.log("Rid", db_rid);
return resolve();
});
});
});
});
it("Delete a database", () => {
cy.get('span[data-test="refreshTree"]').click();
cy.get(".databaseId")
.last()
.then($id => {
const dbId = $id.text();
cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");
cy.get('span[data-test="databaseEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="databaseContextMenu"]')
.contains("Delete Database")
.click({ force: true });
cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());
cy.get('input[data-test="deleteDatabase"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
});
});
});


@@ -0,0 +1,35 @@
# Notebook end-to-end tests
This describes how to run the tests locally
## Stand up a local notebook container instance:
Instructions on how to build and run the container [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)
## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).
Make sure you can run Data Explorer locally from the web browser.
## Run cypress tests
1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```
To run in Debug mode:
```bash
npm run test:debug
```
This opens the Cypress UI.
## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).
## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)


@@ -0,0 +1,93 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create a new notebook and run some code", () => {
// Create new notebook
cy.contains("New Notebook").click();
// Check tab name
cy.get("li.tabList .tabNavText").should($span => {
const text = $span.text();
expect(text).to.match(/^Untitled.*\.ipynb$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.click();
// Type in some code
cy.get("@cellContainer").type("2+4");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "6");
});
// Restart kernel
cy.get('[data-test="Run"] button')
.eq(-1)
.click();
cy.get("li")
.contains("Restart Kernel")
.click();
// Wait for python3 | restarting status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*restarting$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.find(".input")
.as("codeInput")
.click();
// Type in some code
cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "9");
});
});
});


@@ -0,0 +1,172 @@
context("Resource tree notebook file manipulation", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
const clickContextMenuAndSelectOption = (nodeLabel, option) => {
cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains(option)
.click();
};
const createFolder = folder => {
clickContextMenuAndSelectOption("My Notebooks/", "New Directory");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]').type(folder);
cy.get("form").submit();
});
};
const deleteItem = nodeName => {
clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create and remove a directory", () => {
const folder = "e2etest_folder1";
createFolder(folder);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
deleteItem(`${folder}/`);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
});
it("Create and rename a directory", () => {
const folder = "e2etest_folder2";
const renamedFolder = "e2etest_folder2_renamed";
createFolder(folder);
// Rename
clickContextMenuAndSelectOption(`${folder}/`, "Rename");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedFolder);
cy.get("form").submit();
});
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
deleteItem(`${renamedFolder}/`);
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
});
it("Create a notebook inside a directory", () => {
const folder = "e2etest_folder3";
const newNotebookName = "Untitled.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Verify tab is open
cy.get(".tabList")
.contains(newNotebookName)
.should("exist");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
// When running from command line, closing the tab is too fast
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
deleteItem(`${folder}/`);
});
it("Create and rename a notebook inside a directory", () => {
const folder = "e2etest_folder4";
const newNotebookName = "Untitled.ipynb";
const renamedNotebookName = "mynotebook.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Rename notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Rename")
.click();
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedNotebookName);
cy.get("form").submit();
});
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
// Give it time to settle
cy.wait(1000);
deleteItem(`${folder}/`);
});
});

cypress/package-lock.json (generated, new file): diff suppressed because it is too large

cypress/package.json (new file)

@@ -0,0 +1,25 @@
{
"name": "cosmos-explorer-cypress",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "cypress run",
"wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
"test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
"test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
"test:debug": "cypress open"
},
"devDependencies": {
"cypress": "^4.8.0",
"mocha": "^7.0.1",
"mochawesome": "^4.1.0",
"mochawesome-merge": "^4.0.1",
"mochawesome-report-generator": "^4.1.0",
"typescript": "3.4.3",
"wait-on": "^4.0.2"
},
"dependencies": {
"@microsoft/applicationinsights-web": "^2.5.2"
}
}
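`test:ci` blocks until both the explorer dev server and the emulator answer, then runs the suite headless in Edge; CI narrows it to a single spec, which also works locally from inside `cypress/`:

```bash
# Same invocation the emulator CI job uses.
npm run test:ci -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
```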

cypress/support/index.js (new file)

@@ -0,0 +1,23 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");
const appInsights = new appInsightsLib.ApplicationInsights({
config: {
instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
/* ...Other Configuration Options... */
}
});
appInsights.loadAppInsights();
Cypress.on("fail", (error, runnable) => {
// App Insights will record the fail tests for Create Collection
let message = JSON.stringify(runnable.title);
appInsights.trackTrace({
message: `${message}`,
properties: {
passed: false,
error: error
}
});
throw error; // throw error to have test still fail
});

cypress/tsconfig.json (new file)

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"strict": true,
"noEmit": true,
"module": "commonjs",
"target": "es5",
"lib": ["es5", "dom", "es6"],
"types": ["cypress", "node"]
},
"include": ["**/*.ts", "**/*.spec.ts"]
}
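Since `noEmit` is set, this config exists purely to type-check the specs; a plausible check from inside `cypress/`:

```bash
npx tsc -p tsconfig.json   # exits non-zero on type errors, writes no output
```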

cypress/utilities/connectionString.js (new file)

@@ -0,0 +1,41 @@
module.exports = {
loginUsingConnectionString: function() {
const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
const timeout = 15000;
cy.visit(prodUrl);
cy.get('iframe[id="explorerMenu"]').should("be.visible");
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find("#connectExplorer")
.should("exist")
.find("div[class='connectExplorer']")
.should("exist")
.find("p[class='welcomeText']")
.should("exist");
cy.wrap($body.find("div > p.switchConnectTypeText"))
.should("exist")
.last()
.click({ force: true });
const secret = Cypress.env("CONNECTION_STRING");
cy.wrap($body)
.find("input[class='inputToken']")
.should("exist")
.type(secret, {
force: true
});
cy.wrap($body.find("input[value='Connect']"), { timeout })
.first()
.click({ force: true });
cy.wait(15000);
});
}
};
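`Cypress.env("CONNECTION_STRING")` and `Cypress.env("TEST_ENDPOINT")` resolve from `CYPRESS_`-prefixed process environment variables (or from the gitignored `cypress.env.json`), so a local run against a real account looks roughly like this (values are placeholders):

```bash
CYPRESS_CONNECTION_STRING="AccountEndpoint=...;AccountKey=...;" \
CYPRESS_TEST_ENDPOINT="https://localhost:1234/hostedExplorer.html" \
  npx cypress run --spec ./integration/dataexplorer/SQL/addCollection.spec.ts
```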

cypress/utilities/cosmosClient.js (new file)

@@ -0,0 +1,6 @@
const { CosmosClient } = require("@azure/cosmos");
module.exports = new CosmosClient({
endpoint: "https://0.0.0.0:8081",
key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});
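The endpoint and key here are the emulator's well-known defaults, and its certificate is self-signed, which is why the CI env sets `NODE_TLS_REJECT_UNAUTHORIZED: 0`. A quick smoke check (the module path is assumed):

```bash
# Counts databases visible through the shared emulator client.
NODE_TLS_REJECT_UNAUTHORIZED=0 node -e \
  "require('./cypress/utilities/cosmosClient').databases.readAll().fetchAll().then(r => console.log(r.resources.length), console.error)"
```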


@@ -6,6 +6,6 @@ module.exports = {
     slowMo: 55,
     defaultViewport: null,
     ignoreHTTPSErrors: true,
-    args: ["--disable-web-security"],
-  },
+    args: ["--disable-web-security"]
+  }
 };


@@ -1,5 +1,5 @@
 module.exports = {
   preset: "jest-puppeteer",
   testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
-  setupFiles: ["dotenv/config"],
+  setupFiles: ["dotenv/config"]
 };


@@ -42,8 +42,8 @@ module.exports = {
       branches: 20,
       functions: 24,
       lines: 30,
-      statements: 29.0,
-    },
+      statements: 29.0
+    }
   },
   // Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes "office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs", "^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs", "^react-dnd$": "react-dnd/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs", "^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
}, },
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers // A map from regular expressions to paths to transformers
transform: { transform: {
"^.+\\.html?$": "html-loader-jest", "^.+\\.html?$": "html-loader-jest",
"^.+\\.[t|j]sx?$": "babel-jest", "^.+\\.[t|j]sx?$": "babel-jest"
}, },
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/", "/externals/"], transformIgnorePatterns: ["/node_modules/", "/externals/"]
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined, // unmockedModulePathPatterns: undefined,

package-lock.json generated

File diff suppressed because it is too large

@@ -4,10 +4,8 @@
   "description": "Cosmos Explorer",
   "main": "index.js",
   "dependencies": {
-    "@azure/arm-cosmosdb": "9.1.0",
     "@azure/cosmos": "3.9.0",
-    "@azure/identity": "1.1.0",
-    "@azure/cosmos-language-service": "0.0.5",
+    "@azure/cosmos-language-service": "0.0.4",
     "@jupyterlab/services": "6.0.0-rc.2",
     "@jupyterlab/terminal": "3.0.0-rc.2",
     "@microsoft/applicationinsights-web": "2.5.9",
@@ -68,7 +66,7 @@
     "jquery-ui-dist": "1.12.1",
     "knockout": "3.5.1",
     "mkdirp": "1.0.4",
-    "monaco-editor": "0.18.1",
+    "monaco-editor": "0.15.6",
     "object.entries": "1.1.0",
     "office-ui-fabric-react": "7.134.1",
     "p-retry": "4.2.0",
@@ -117,7 +115,7 @@
     "@types/prop-types": "15.5.8",
     "@types/puppeteer": "3.0.1",
     "@types/q": "1.5.1",
-    "@types/react": "16.9.56",
+    "@types/react": "16.9.49",
     "@types/react-dom": "16.0.7",
     "@types/react-notification-system": "0.2.39",
     "@types/react-redux": "7.1.7",
@@ -160,7 +158,7 @@
     "mini-css-extract-plugin": "0.4.3",
     "monaco-editor-webpack-plugin": "1.7.0",
     "node-fetch": "2.6.1",
-    "prettier": "2.2.1",
+    "prettier": "1.19.1",
     "puppeteer": "4.0.0",
     "raw-loader": "0.5.1",
     "rimraf": "3.0.0",
@@ -170,7 +168,7 @@
     "ts-loader": "6.2.2",
     "tslint": "5.11.0",
     "tslint-microsoft-contrib": "6.0.0",
-    "typescript": "4.1.2",
+    "typescript": "4.0.2",
     "url-loader": "1.1.1",
     "wait-on": "4.0.2",
     "webpack": "4.43.0",
@@ -196,8 +194,8 @@
     "compile": "tsc",
     "compile:contracts": "tsc -p ./tsconfig.contracts.json",
     "compile:strict": "tsc -p ./tsconfig.strict.json",
-    "format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
-    "format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
+    "format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
+    "format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
     "lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
     "build:contracts": "npm run compile:contracts",
     "strictEligibleFiles": "node ./strict-migration-tools/index.js",

@@ -3,6 +3,7 @@
   "offerThroughput": 400,
   "databaseLevelThroughput": false,
   "collectionId": "Persons",
+  "rupmEnabled": false,
   "partitionKey": { "kind": "Hash", "paths": ["/name"] },
   "data": [
     "g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)",

@@ -2,5 +2,5 @@ export enum AuthType {
   AAD = "aad",
   EncryptedToken = "encryptedtoken",
   MasterKey = "masterkey",
-  ResourceToken = "resourcetoken",
+  ResourceToken = "resourcetoken"
 }

@@ -13,7 +13,7 @@ export class BindingHandlersRegisterer {
     ) {
       const value = ko.unwrap(wrappedValueAccessor());
       bindingContext?.$data.isTemplateReady(value);
-    },
+    }
   } as ko.BindingHandler;
 ReactBindingHandler.Registerer.register();

@@ -42,7 +42,7 @@ export class Registerer {
         // Initial rendering at mount point
         ReactDOM.render(adapter.renderComponent(), element);
-      },
+      }
     } as ko.BindingHandler;
   }
 }

@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
   public forEach(key: string, iteratorFct: (value: T) => void) {
     const values = this.store.get(key);
     if (values) {
-      values.forEach((value) => iteratorFct(value));
+      values.forEach(value => iteratorFct(value));
     }
   }

@@ -14,7 +14,7 @@ export class CodeOfConductEndpoints {
 export class EndpointsRegex {
   public static readonly cassandra = [
     "AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
-    "HostName=(.*).cassandra.cosmos.azure.com",
+    "HostName=(.*).cassandra.cosmos.azure.com"
   ];
   public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
   public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -108,6 +108,7 @@ export class CapabilityNames {
 export class Features {
   public static readonly cosmosdb = "cosmosdb";
   public static readonly enableChangeFeedPolicy = "enablechangefeedpolicy";
+  public static readonly enableRupm = "enablerupm";
   public static readonly executeSproc = "dataexplorerexecutesproc";
   public static readonly hostedDataExplorer = "hosteddataexplorerenabled";
   public static readonly enableTtl = "enablettl";
@@ -124,9 +125,7 @@ export class Features {
   public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
   public static readonly ttl90Days = "ttl90days";
   public static readonly enableRightPanelV2 = "enablerightpanelv2";
-  public static readonly enableSchema = "enableschema";
   public static readonly enableSDKoperations = "enablesdkoperations";
-  public static readonly showMinRUSurvey = "showminrusurvey";
 }
 // flight names returned from the portal are always lowercase
@@ -150,7 +149,7 @@ export class Spark {
     "Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
     "Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
     "Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
-    "Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM",
+    "Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
   });
 }
@@ -165,7 +164,7 @@ export class MongoDBAccounts {
 export enum MongoBackendEndpointType {
   local,
-  remote,
+  remote
 }
 // TODO: 435619 Add default endpoints per cloud and use regional only when available
@@ -180,6 +179,11 @@ export class CassandraBackend {
   public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
 }
+
+export class RUPMStates {
+  public static on: string = "on";
+  public static off: string = "off";
+}
 export class Queries {
   public static CustomPageOption: string = "custom";
   public static UnlimitedPageOption: string = "unlimited";
@@ -292,7 +296,7 @@ export class HttpStatusCodes {
     HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
     HttpStatusCodes.BadGateway,
     HttpStatusCodes.ServiceUnavailable,
-    HttpStatusCodes.GatewayTimeout,
+    HttpStatusCodes.GatewayTimeout
   ];
 }
@@ -348,7 +352,10 @@ export class HashRoutePrefixes {
   public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
     const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
-    return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
+    return transformedDatabasePrefix
+      .replace("{coll_id}", collectionId)
+      .replace("{doc_id}", docId)
+      .replace("/", ""); // strip the first slash since hasher adds it
   }
 }
@@ -394,7 +401,7 @@ export class OfferVersions {
 export enum ConflictOperationType {
   Replace = "replace",
   Create = "create",
-  Delete = "delete",
+  Delete = "delete"
 }
 export const EmulatorMasterKey =

@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
     resourceId: "",
     resourceType: "dbs" as ResourceType,
     headers: {},
-    getAuthorizationTokenUsingMasterKey: () => "",
+    getAuthorizationTokenUsingMasterKey: () => ""
   };
   beforeEach(() => {
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
     window.fetch = jest.fn().mockImplementation(() => {
       return {
         json: () => "{}",
-        headers: new Map(),
+        headers: new Map()
       };
     });
   });
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
   it("does not call the auth service if a master key is set", async () => {
     updateUserContext({
-      masterKey: "foo",
+      masterKey: "foo"
     });
     await tokenProvider(options);
     expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
     window.fetch = jest.fn().mockImplementation(() => {
       return {
         json: () => "{}",
-        headers: new Map(),
+        headers: new Map()
       };
     });
   });
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
   it("builds the correct URL in production", () => {
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
     getTokenFromAuthService("GET", "dbs", "foo");
     expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
   it("builds the correct URL in dev", () => {
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://localhost:1234",
+      BACKEND_ENDPOINT: "https://localhost:1234"
     });
     getTokenFromAuthService("GET", "dbs", "foo");
     expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
         documentEndpoint: "bar",
         gremlinEndpoint: "foo",
         tableEndpoint: "foo",
-        cassandraEndpoint: "foo",
-      },
-    },
+        cassandraEndpoint: "foo"
+      }
+    }
     });
     expect(endpoint()).toEqual("bar");
   });
   it("uses _endpoint if set", () => {
     updateUserContext({
-      endpoint: "baz",
+      endpoint: "baz"
     });
     expect(endpoint()).toEqual("baz");
   });
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
     updateConfigContext({
       platform: Platform.Hosted,
       BACKEND_ENDPOINT: "https://localhost:1234",
-      PROXY_PATH: "/proxy",
+      PROXY_PATH: "/proxy"
     });
     const headers = {};
     const endpoint = "https://docs.azure.com";

@@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
     method: "POST",
     headers: {
       "content-type": "application/json",
-      "x-ms-encrypted-auth-token": userContext.accessToken,
+      "x-ms-encrypted-auth-token": userContext.accessToken
     },
     body: JSON.stringify({
       verb,
       resourceType,
-      resourceId,
-    }),
+      resourceId
+    })
   });
   //TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
   const result = JSON.parse(await response.json());
@@ -81,9 +81,9 @@ export function client(): Cosmos.CosmosClient {
     key: userContext.masterKey,
     tokenProvider,
     connectionPolicy: {
-      enableEndpointDiscovery: false,
+      enableEndpointDiscovery: false
     },
-    userAgentSuffix: "Azure Portal",
+    userAgentSuffix: "Azure Portal"
   };
   if (configContext.PROXY_PATH !== undefined) {

@@ -1,13 +1,26 @@
-import { ConflictDefinition, FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
+import {
+  ConflictDefinition,
+  FeedOptions,
+  ItemDefinition,
+  OfferDefinition,
+  QueryIterator,
+  Resource
+} from "@azure/cosmos";
+import { RequestOptions } from "@azure/cosmos/dist-esm";
 import Q from "q";
+import { configContext, Platform } from "../ConfigContext";
 import * as DataModels from "../Contracts/DataModels";
+import { MessageTypes } from "../Contracts/ExplorerContracts";
 import * as ViewModels from "../Contracts/ViewModels";
 import ConflictId from "../Explorer/Tree/ConflictId";
 import DocumentId from "../Explorer/Tree/DocumentId";
 import StoredProcedure from "../Explorer/Tree/StoredProcedure";
 import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
+import { OfferUtils } from "../Utils/OfferUtils";
 import * as Constants from "./Constants";
 import { client } from "./CosmosClient";
+import * as HeadersUtility from "./HeadersUtility";
+import { sendCachedDataMessage } from "./MessageHandler";
 export function getCommonQueryOptions(options: FeedOptions): any {
   const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
@@ -33,7 +46,10 @@ export function queryDocuments(
   options: any
 ): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
   options = getCommonQueryOptions(options);
-  const documentsIterator = client().database(databaseId).container(containerId).items.query(query, options);
+  const documentsIterator = client()
+    .database(databaseId)
+    .container(containerId)
+    .items.query(query, options);
   return Q(documentsIterator);
 }
@@ -69,7 +85,7 @@ export function updateDocument(
       .container(collection.id())
       .item(documentId.id(), partitionKey)
       .replace(newDocument)
-      .then((response) => response.resource)
+      .then(response => response.resource)
   );
 }
@@ -87,13 +103,13 @@ export function executeStoredProcedure(
       .container(collection.id())
       .scripts.storedProcedure(storedProcedure.id())
       .execute(partitionKeyValue, params, { enableScriptLogging: true })
-    .then((response) =>
+    .then(response =>
       deferred.resolve({
         result: response.resource,
-        scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults],
+        scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
       })
     )
-    .catch((error) => deferred.reject(error));
+    .catch(error => deferred.reject(error));
   return deferred.promise.timeout(
     Constants.ClientDefaults.requestTimeoutMs,
@@ -107,7 +123,7 @@ export function createDocument(collection: ViewModels.CollectionBase, newDocumen
     .database(collection.databaseId)
     .container(collection.id())
     .items.create(newDocument)
-    .then((response) => response.resource)
+    .then(response => response.resource)
   );
 }
@@ -120,7 +136,7 @@ export function readDocument(collection: ViewModels.CollectionBase, documentId:
     .container(collection.id())
     .item(documentId.id(), partitionKey)
     .read()
-    .then((response) => response.resource)
+    .then(response => response.resource)
   );
 }
@@ -128,7 +144,11 @@ export function deleteDocument(collection: ViewModels.CollectionBase, documentId
   const partitionKey = documentId.partitionKeyValue;
   return Q(
-    client().database(collection.databaseId).container(collection.id()).item(documentId.id(), partitionKey).delete()
+    client()
+      .database(collection.databaseId)
+      .container(collection.id())
+      .item(documentId.id(), partitionKey)
+      .delete()
   );
 }
@@ -140,7 +160,11 @@ export function deleteConflict(
   options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
   return Q(
-    client().database(collection.databaseId).container(collection.id()).conflict(conflictId.id()).delete(options)
+    client()
+      .database(collection.databaseId)
+      .container(collection.id())
+      .conflict(conflictId.id())
+      .delete(options)
   );
 }
@@ -150,6 +174,9 @@ export function queryConflicts(
   query: string,
   options: any
 ): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
-  const documentsIterator = client().database(databaseId).container(containerId).conflicts.query(query, options);
+  const documentsIterator = client()
+    .database(databaseId)
+    .container(containerId)
+    .conflicts.query(query, options);
   return Q(documentsIterator);
 }

@@ -73,7 +73,7 @@ export default class EditableUtility {
       return false;
     });
-    observable.subscribe((edit) => {
+    observable.subscribe(edit => {
       var edits = observable.edits && observable.edits();
       if (!edits) {
         return;
@@ -83,9 +83,9 @@
     });
     observable.editableIsValid = ko.observable<boolean>(true);
-    observable.subscribe((value) => {
+    observable.subscribe(value => {
       const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
-      const isValid = validations.every((validate) => validate(value));
+      const isValid = validations.every(validate => validate(value));
       observable.editableIsValid(isValid);
     });

@@ -1,7 +1,7 @@
 import { ARMError } from "../Utils/arm/request";
 import { HttpStatusCodes } from "./Constants";
 import { MessageTypes } from "../Contracts/ExplorerContracts";
-import { SubscriptionType } from "../Contracts/SubscriptionType";
+import { SubscriptionType } from "../Contracts/ViewModels";
 import { logConsoleError } from "../Utils/NotificationConsoleUtils";
 import { logError } from "./Logger";
 import { sendMessage } from "./MessageHandler";
@@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
     }
     sendMessage({
       type: MessageTypes.ForbiddenError,
-      reason: errorMessage,
+      reason: errorMessage
     });
   }
 };

@@ -11,8 +11,8 @@ describe("nextPage", () => {
       queryMetrics: {},
       requestCharge: 1,
       headers: {},
-      activityId: "foo",
-    }),
+      activityId: "foo"
+    })
   };
   expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();

@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
 // Pick<QueryIterator<any>, "fetchNext">;
 export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
-  return documentsIterator.fetchNext().then((response) => {
+  return documentsIterator.fetchNext().then(response => {
     const documents = response.resources;
     const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
     const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
       lastItemIndex: Number(firstItemIndex) + Number(itemCount),
       headers,
       activityId: response.activityId,
-      requestCharge: response.requestCharge,
+      requestCharge: response.requestCharge
     };
   });
 }

@@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
 function _logEntry(entry: Diagnostics.LogEntry): void {
   sendMessage({
     type: MessageTypes.LogInfo,
-    data: JSON.stringify(entry),
+    data: JSON.stringify(entry)
   });
   const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -60,6 +60,6 @@ function _generateLogEntry(
     level,
     message,
     area,
-    code,
+    code
   };
 }

@@ -6,7 +6,7 @@ describe("Message Handler", () => {
     let mockPromise = {
       id: "123",
       startTime: new Date(),
-      deferred: Q.defer<any>(),
+      deferred: Q.defer<any>()
     };
     let mockMessage = { message: { id: "123", data: "{}" } };
     MessageHandler.RequestMap[mockPromise.id] = mockPromise;
@@ -18,7 +18,7 @@ describe("Message Handler", () => {
     let message = {
       id: "123",
       startTime: new Date(),
-      deferred: Q.defer<any>(),
+      deferred: Q.defer<any>()
     };
     MessageHandler.handleCachedDataMessage(message);

@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
   let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
     deferred: Q.defer<TResponseDataModel>(),
     startTime: new Date(),
-    id: _.uniqueId(),
+    id: _.uniqueId()
   };
   RequestMap[cachedDataPromise.id] = cachedDataPromise;
   sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
     portalChildWindow.parent.postMessage(
       {
         signature: "pcIframe",
-        data: data,
+        data: data
       },
       portalChildWindow.document.referrer
     );

@@ -14,7 +14,7 @@ const fetchMock = () => {
     ok: true,
     text: () => "{}",
     json: () => "{}",
-    headers: new Map(),
+    headers: new Map()
   });
 };
@@ -27,8 +27,8 @@ const collection = {
   partitionKey: {
     paths: ["/pk"],
     kind: "Hash",
-    version: 1,
-  },
+    version: 1
+  }
 } as Collection;
 const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
   partitionKey: {
     paths: ["/pk"],
     kind: "Hash",
-    version: 1,
-  },
+    version: 1
+  }
 } as unknown) as DocumentId;
 const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
     documentEndpoint: "bar",
     gremlinEndpoint: "foo",
     tableEndpoint: "foo",
-    cassandraEndpoint: "foo",
-  },
+    cassandraEndpoint: "foo"
+  }
 } as DatabaseAccount;
 describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
   beforeEach(() => {
     resetConfigContext();
     updateUserContext({
-      databaseAccount,
+      databaseAccount
     });
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
     window.fetch = jest.fn().mockImplementation(fetchMock);
   });
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
   beforeEach(() => {
     resetConfigContext();
     updateUserContext({
-      databaseAccount,
+      databaseAccount
    });
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
     window.fetch = jest.fn().mockImplementation(fetchMock);
   });
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
   beforeEach(() => {
     resetConfigContext();
     updateUserContext({
-      databaseAccount,
+      databaseAccount
     });
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
     window.fetch = jest.fn().mockImplementation(fetchMock);
   });
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
   beforeEach(() => {
     resetConfigContext();
     updateUserContext({
-      databaseAccount,
+      databaseAccount
     });
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
     window.fetch = jest.fn().mockImplementation(fetchMock);
   });
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
   beforeEach(() => {
     resetConfigContext();
     updateUserContext({
-      databaseAccount,
+      databaseAccount
     });
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
    });
     window.fetch = jest.fn().mockImplementation(fetchMock);
   });
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
     resetConfigContext();
     delete window.authType;
     updateUserContext({
-      databaseAccount,
+      databaseAccount
     });
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
     });
   });

@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
 const defaultHeaders = {
   [HttpHeaders.apiType]: ApiType.MongoDB.toString(),
   [CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
-  [CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
+  [CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
 };
 function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
   let continuationToken: string;
   return {
     fetchNext: () => {
-      return queryDocuments(databaseId, collection, false, query).then((response) => {
+      return queryDocuments(databaseId, collection, false, query).then(response => {
         continuationToken = response.continuationToken;
         const headers: { [key: string]: string | number } = {};
         response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
           headers,
           requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
           activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
-          hasMoreResults: !!continuationToken,
+          hasMoreResults: !!continuationToken
         };
       });
-    },
+    }
   };
 }
@@ -74,9 +74,7 @@ export function queryDocuments(
     rg: userContext.resourceGroup,
     dba: databaseAccount.name,
     pk:
-      collection && collection.partitionKey && !collection.partitionKey.systemKey
-        ? collection.partitionKeyProperty
-        : "",
+      collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
   };
   const endpoint = getEndpoint() || "";
@@ -89,7 +87,7 @@ export function queryDocuments(
     [CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
     [CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
     [CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
-    [HttpHeaders.contentType]: "application/query+json",
+    [HttpHeaders.contentType]: "application/query+json"
   };
   if (continuationToken) {
@@ -102,14 +100,14 @@ export function queryDocuments(
     .fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
       method: "POST",
       body: JSON.stringify({ query }),
-      headers,
+      headers
     })
-    .then(async (response) => {
+    .then(async response => {
       if (response.ok) {
         return {
           continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
          documents: (await response.json()).Documents as DataModels.DocumentId[],
-          headers: response.headers,
+          headers: response.headers
         };
       }
       errorHandling(response, "querying documents", params);
@@ -137,9 +135,7 @@ export function readDocument(
     rg: userContext.resourceGroup,
     dba: databaseAccount.name,
     pk:
-      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
-        ? documentId.partitionKeyProperty
-        : "",
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
   };
   const endpoint = getEndpoint();
@@ -151,10 +147,10 @@ export function readDocument(
       ...authHeaders(),
       [CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
         JSON.stringify(documentId.partitionKeyHeader())
-      ),
-    },
+      )
+    }
   })
-    .then((response) => {
+    .then(response => {
       if (response.ok) {
         return response.json();
       }
@@ -179,7 +175,7 @@ export function createDocument(
     sid: userContext.subscriptionId,
     rg: userContext.resourceGroup,
     dba: databaseAccount.name,
-    pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
+    pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
   };
   const endpoint = getEndpoint();
@@ -190,10 +186,10 @@ export function createDocument(
     body: JSON.stringify(documentContent),
     headers: {
       ...defaultHeaders,
-      ...authHeaders(),
-    },
+      ...authHeaders()
+    }
   })
-    .then((response) => {
+    .then(response => {
       if (response.ok) {
         return response.json();
       }
@@ -222,9 +218,7 @@ export function updateDocument(
     rg: userContext.resourceGroup,
     dba: databaseAccount.name,
     pk:
-      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
-        ? documentId.partitionKeyProperty
-        : "",
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
   };
   const endpoint = getEndpoint();
@@ -236,10 +230,10 @@ export function updateDocument(
       ...defaultHeaders,
       ...authHeaders(),
       [HttpHeaders.contentType]: "application/json",
-      [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
-    },
+      [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
+    }
   })
-    .then((response) => {
+    .then(response => {
       if (response.ok) {
         return response.json();
       }
@@ -263,9 +257,7 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
     rg: userContext.resourceGroup,
     dba: databaseAccount.name,
     pk:
-      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
-        ? documentId.partitionKeyProperty
-        : "",
+      documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
   };
   const endpoint = getEndpoint();
@@ -276,10 +268,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
       ...defaultHeaders,
       ...authHeaders(),
       [HttpHeaders.contentType]: "application/json",
-      [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
-    },
+      [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
+    }
   })
-    .then((response) => {
+    .then(response => {
       if (response.ok) {
         return undefined;
       }
@@ -307,7 +299,7 @@ export function createMongoCollectionWithProxy(
     rg: userContext.resourceGroup,
     dba: databaseAccount.name,
     isAutoPilot: !!params.autoPilotMaxThroughput,
-    autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
+    autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
   };
   const endpoint = getEndpoint();
@@ -322,11 +314,11 @@ export function createMongoCollectionWithProxy(
       headers: {
         ...defaultHeaders,
         ...authHeaders(),
-        [HttpHeaders.contentType]: "application/json",
-      },
+        [HttpHeaders.contentType]: "application/json"
+      }
     }
   )
-    .then((response) => {
+    .then(response => {
       if (response.ok) {
         return response.json();
       }


@@ -1,62 +0,0 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
describe("parseSDKOfferResponse", () => {
it("manual throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: 500,
autoscaleMaxThroughput: undefined,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
it("autoscale throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
maxThroughput: 5000,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: undefined,
autoscaleMaxThroughput: 5000,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
});


@@ -1,33 +0,0 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
const offerContent = offerDefinition.content;
if (!offerContent) {
return undefined;
}
const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
const autopilotSettings = offerContent.offerAutopilotSettings;
if (autopilotSettings) {
return {
id: offerDefinition.id,
autoscaleMaxThroughput: autopilotSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
headers: offerResponse.headers,
};
}
return {
id: offerDefinition.id,
autoscaleMaxThroughput: undefined,
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
headers: offerResponse.headers,
};
};

@@ -16,7 +16,7 @@ const notificationsPath = () => {
 };
 export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
-  if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
+  if (configContext.platform === Platform.Emulator) {
     return [];
   }
@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
   const headers = { [authorizationHeader.header]: authorizationHeader.token };
   const response = await window.fetch(url, {
-    headers,
+    headers
   });
   if (!response.ok) {

@@ -18,7 +18,7 @@ export class QueriesClient {
   private static readonly PartitionKey: DataModels.PartitionKey = {
     paths: [`/${SavedQueries.PartitionKeyProperty}`],
     kind: BackendDefaults.partitionKeyKind,
-    version: BackendDefaults.partitionKeyVersion,
+    version: BackendDefaults.partitionKeyVersion
   };
   private static readonly FetchQuery: string = "SELECT * FROM c";
   private static readonly FetchMongoQuery: string = "{}";
@@ -41,7 +41,7 @@ export class QueriesClient {
         databaseId: SavedQueries.DatabaseName,
         partitionKey: QueriesClient.PartitionKey,
         offerThroughput: SavedQueries.OfferThroughput,
-        databaseLevelThroughput: false,
+        databaseLevelThroughput: false
       })
       .then(
         (collection: DataModels.Collection) => {
@@ -135,7 +135,7 @@ export class QueriesClient {
       resourceId: resourceId,
       queryName: queryName,
       query: query,
-      id: id,
+      id: id
     };
     try {
       this.validateQuery(parsedQuery);
@@ -192,7 +192,7 @@ export class QueriesClient {
     const documentId = new DocumentId(
       {
         partitionKey: QueriesClient.PartitionKey,
-        partitionKeyProperty: "id",
+        partitionKeyProperty: "id"
       } as DocumentsTab,
       query,
      query.queryName

@@ -4,7 +4,7 @@ import { SplitterMetrics } from "./Constants";
 export enum SplitterDirection {
   Horizontal = "horizontal",
-  Vertical = "vertical",
+  Vertical = "vertical"
 }
 export interface SplitterBounds {
@@ -50,7 +50,7 @@ export class Splitter {
       animate: true,
       animateDuration: "fast",
       start: this.onResizeStart,
-      stop: this.onResizeStop,
+      stop: this.onResizeStop
     };
     if (isVerticalSplitter) {
@@ -90,7 +90,9 @@ export class Splitter {
     this.lastWidth = $(this.leftSide).width();
     $(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
     $(this.leftSide).css("width", "");
-    $(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
+    $(this.leftSide)
+      .resizable("option", "disabled", true)
+      .removeClass("ui-resizable-disabled"); // remove class so splitter is visible
     $(this.splitter).removeClass("ui-resizable-e");
     this.isCollapsed(true);
   }

@@ -32,8 +32,8 @@ export default class UrlUtility {
     type: type,
     objectBody: {
       id: id,
-      self: resourcePath,
-    },
+      self: resourcePath
+    }
   };
   return result;

@@ -15,15 +15,15 @@ describe("createCollection", () => {
     collectionId: "testContainer",
     databaseId: "testDatabase",
     databaseLevelThroughput: true,
-    offerThroughput: 400,
+    offerThroughput: 400
   };
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test",
+        name: "test"
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB,
+      defaultExperience: DefaultAccountExperienceType.DocumentDB
     });
   });
@@ -41,12 +41,12 @@ describe("createCollection", () => {
       return {
        database: {
          containers: {
-            create: () => ({}),
-          },
-        },
+            create: () => ({})
+          }
+        }
       };
-    },
-  },
+    }
+  }
   });
   await createCollection(createCollectionParams);
   expect(client).toHaveBeenCalled();
@@ -60,7 +60,7 @@ describe("createCollection", () => {
     collectionId: "testContainer",
     databaseId: "testDatabase",
     databaseLevelThroughput: false,
-    offerThroughput: 400,
+    offerThroughput: 400
   };
   expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -70,12 +70,12 @@ describe("createCollection", () => {
     databaseId: "testDatabase",
     databaseLevelThroughput: false,
     offerThroughput: 400,
-    autoPilotMaxThroughput: 4000,
+    autoPilotMaxThroughput: 4000
   };
   expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
     autoscaleSettings: {
-      maxThroughput: 4000,
-    },
+      maxThroughput: 4000
+    }
   });
 });
 });

View File

@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { import {
createUpdateCassandraTable, createUpdateCassandraTable,
getCassandraTable, getCassandraTable
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { import {
createUpdateMongoDBCollection, createUpdateMongoDBCollection,
getMongoDBCollection, getMongoDBCollection
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { import {
createUpdateGremlinGraph, createUpdateGremlinGraph,
getGremlinGraph, getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources"; import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
autoPilotMaxThroughput: params.autoPilotMaxThroughput, autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId, databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput, databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput, offerThroughput: params.offerThroughput
}; };
await createDatabase(createDatabaseParams); await createDatabase(createDatabaseParams);
} }
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = { const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.analyticalStorageTtl) { if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl; resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = { const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateSqlContainer( const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = { const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.analyticalStorageTtl) { if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl; resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = { const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateMongoDBCollection( const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) { if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, { TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields.", message: "Mongo Collection created with wildcard index on all fields."
}); });
} }
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = { const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.analyticalStorageTtl) { if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl; resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = { const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateCassandraTable( const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = { const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.indexingPolicy) { if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = { const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateGremlinGraph( const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = { const resource: ARMTypes.TableResource = {
id: params.collectionId, id: params.collectionId
}; };
const rpPayload: ARMTypes.TableCreateUpdateParameters = { const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateTable( const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) { if (params.autoPilotMaxThroughput) {
return { return {
autoscaleSettings: { autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput, maxThroughput: params.autoPilotMaxThroughput
}, }
}; };
} }
return { return {
throughput: params.offerThroughput, throughput: params.offerThroughput
}; };
}; };
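constructRpOptions above reduces to a single either/or; a condensed sketch with the generated CreateUpdateOptions type inlined for illustration:

// Inlined stand-in for the generated ARM CreateUpdateOptions type.
interface CreateUpdateOptionsSketch {
  throughput?: number;
  autoscaleSettings?: { maxThroughput: number };
}

// Autoscale wins when a max is set; otherwise fall back to manual throughput.
const constructRpOptionsSketch = (params: {
  autoPilotMaxThroughput?: number;
  offerThroughput?: number;
}): CreateUpdateOptionsSketch =>
  params.autoPilotMaxThroughput
    ? { autoscaleSettings: { maxThroughput: params.autoPilotMaxThroughput } }
    : { throughput: params.offerThroughput };

// constructRpOptionsSketch({ autoPilotMaxThroughput: 4000 })
//   => { autoscaleSettings: { maxThroughput: 4000 } }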
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined, partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined, indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined, uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl, analyticalStorageTtl: params.analyticalStorageTtl
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed } as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {}; const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId }; const createDatabaseBody: DatabaseRequest = { id: params.databaseId };

View File

@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters, GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters, MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters, SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions, CreateUpdateOptions
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { import {
createUpdateCassandraKeyspace, createUpdateCassandraKeyspace,
getCassandraKeyspace, getCassandraKeyspace
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { import {
createUpdateMongoDBDatabase, createUpdateMongoDBDatabase,
getMongoDBDatabase, getMongoDBDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { import {
createUpdateGremlinDatabase, createUpdateGremlinDatabase,
getGremlinDatabase, getGremlinDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = { const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
},
options,
}, },
options
}
}; };
const createResponse = await createUpdateSqlDatabase( const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId, userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = { const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
},
options,
}, },
options
}
}; };
const createResponse = await createUpdateMongoDBDatabase( const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId, userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = { const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
},
options,
}, },
options
}
}; };
const createResponse = await createUpdateCassandraKeyspace( const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId, userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = { const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
},
options,
}, },
options
}
}; };
const createResponse = await createUpdateGremlinDatabase( const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId, userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) { if (params.autoPilotMaxThroughput) {
return { return {
autoscaleSettings: { autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput, maxThroughput: params.autoPilotMaxThroughput
}, }
}; };
} }
return { return {
throughput: params.offerThroughput, throughput: params.offerThroughput
}; };
} }

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos"; import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import { import {
SqlStoredProcedureCreateUpdateParameters, SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource, SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { import {
createUpdateSqlStoredProcedure, createUpdateSqlStoredProcedure,
getSqlStoredProcedure, getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = { const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: { properties: {
resource: storedProcedure as SqlStoredProcedureResource, resource: storedProcedure as SqlStoredProcedureResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlStoredProcedure( const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos"; import { Resource, TriggerDefinition } from "@azure/cosmos";
import { import {
SqlTriggerCreateUpdateParameters, SqlTriggerCreateUpdateParameters,
SqlTriggerResource, SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
@@ -44,8 +44,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = { const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: { properties: {
resource: trigger as SqlTriggerResource, resource: trigger as SqlTriggerResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlTrigger( const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId, userContext.subscriptionId,
@@ -59,7 +59,10 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource); return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
} }
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger); const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
return response.resource; return response.resource;
} catch (error) { } catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`); handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos"; import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import { import {
SqlUserDefinedFunctionCreateUpdateParameters, SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource, SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { import {
createUpdateSqlUserDefinedFunction, createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction, getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = { const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: { properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource, resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlUserDefinedFunction( const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
return { return {
container: () => { container: () => {
return { return {
delete: (): unknown => undefined, delete: (): unknown => undefined
}; };
}, }
}; };
}, }
}); });
await deleteCollection("database", "collection"); await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -16,7 +16,10 @@ export async function deleteCollection(databaseId: string, collectionId: string)
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) { if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId); await deleteCollectionWithARM(databaseId, collectionId);
} else { } else {
await client().database(databaseId).container(collectionId).delete(); await client()
.database(databaseId)
.container(collectionId)
.delete();
} }
logConsoleInfo(`Successfully deleted container ${collectionId}`); logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) { } catch (error) {
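The delete and read helpers in this diff all branch on the same predicate; a one-line sketch of it (the AuthType union here is a stand-in for the repo's enum):

// RP (ARM) path only for AAD sign-in with the SDK-operations override off;
// master key / resource token sessions always go through the Cosmos SDK.
type AuthTypeSketch = "AAD" | "MasterKey" | "ResourceToken";

const shouldUseARM = (authType: AuthTypeSketch, useSDKOperations: boolean): boolean =>
  authType === "AAD" && !useSDKOperations;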

View File

@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
(client as jest.Mock).mockReturnValue({ (client as jest.Mock).mockReturnValue({
database: () => { database: () => {
return { return {
delete: (): unknown => undefined, delete: (): unknown => undefined
}; };
}, }
}); });
await deleteDatabase("database"); await deleteDatabase("database");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -19,7 +19,9 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) { if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId); await deleteDatabaseWithARM(databaseId);
} else { } else {
await client().database(databaseId).delete(); await client()
.database(databaseId)
.delete();
} }
logConsoleInfo(`Successfully deleted database ${databaseId}`); logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) { } catch (error) {

View File

@@ -27,7 +27,11 @@ export async function deleteStoredProcedure(
storedProcedureId storedProcedureId
); );
} else { } else {
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete(); await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
} }
} catch (error) { } catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`); handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);

View File

@@ -23,7 +23,11 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
triggerId triggerId
); );
} else { } else {
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete(); await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
} }
} catch (error) { } catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`); handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);

View File

@@ -23,7 +23,11 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
id id
); );
} else { } else {
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete(); await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
} }
} catch (error) { } catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`); handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);

View File

@@ -1,87 +0,0 @@
import { armRequest } from "../../Utils/arm/request";
import { configContext } from "../../ConfigContext";
import { handleError } from "../ErrorHandlingUtils";
import { userContext } from "../../UserContext";
interface TimeSeriesData {
data: {
timeStamp: string;
total: number;
}[];
metadatavalues: {
name: {
localizedValue: string;
value: string;
};
value: string;
};
}
interface MetricsData {
displayDescription: string;
errorCode: string;
id: string;
name: {
value: string;
localizedValue: string;
};
timeseries: TimeSeriesData[];
type: string;
unit: string;
}
interface MetricsResponse {
cost: number;
interval: string;
namespace: string;
resourceregion: string;
timespan: string;
value: MetricsData[];
}
export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;
const filter = `DatabaseName eq '${databaseName}' and CollectionName eq '${containerName}'`;
const metricNames = "DataUsage,IndexUsage";
const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/providers/microsoft.insights/metrics`;
try {
const metricsResponse: MetricsResponse = await armRequest({
host: configContext.ARM_ENDPOINT,
path,
method: "GET",
apiVersion: "2018-01-01",
queryParams: {
filter,
metricNames,
},
});
if (metricsResponse?.value?.length !== 2) {
return undefined;
}
const dataUsageData: MetricsData = metricsResponse.value[0];
const indexUsagedata: MetricsData = metricsResponse.value[1];
const dataUsageSizeInKb: number = getUsageSizeInKb(dataUsageData);
const indexUsageSizeInKb: number = getUsageSizeInKb(indexUsagedata);
return dataUsageSizeInKb + indexUsageSizeInKb;
} catch (error) {
handleError(error, "getCollectionUsageSize");
throw error;
}
};
const getUsageSizeInKb = (metricsData: MetricsData): number => {
if (metricsData?.errorCode !== "Success") {
throw Error(`Get collection usage size failed: ${metricsData.errorCode}`);
}
const timeSeriesData: TimeSeriesData = metricsData?.timeseries?.[0];
const usageSizeInBytes: number = timeSeriesData?.data?.[0]?.total;
return usageSizeInBytes ? usageSizeInBytes / 1024 : 0;
};
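The deleted helper above boils down to one reduction over the Azure Monitor response; a self-contained sketch using the shapes declared in the removed file:

interface TimeSeriesDataSketch {
  data: { timeStamp: string; total: number }[];
}
interface MetricsDataSketch {
  errorCode: string;
  timeseries: TimeSeriesDataSketch[];
}

// First data point of the first series, bytes -> KB; 0 when nothing reported.
// Total usage is DataUsage + IndexUsage, hence the value.length === 2 check.
const usageSizeInKb = (m: MetricsDataSketch): number => {
  if (m?.errorCode !== "Success") {
    throw Error(`Get collection usage size failed: ${m.errorCode}`);
  }
  const bytes = m?.timeseries?.[0]?.data?.[0]?.total;
  return bytes ? bytes / 1024 : 0;
};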

View File

@@ -11,7 +11,10 @@ export async function getIndexTransformationProgress(databaseId: string, collect
let indexTransformationPercentage: number; let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`); const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try { try {
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true }); const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt( indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
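For reference, the parse this hunk reformats targets a string header; a sketch assuming the x-ms-documentdb-collection-index-transformation-progress name that the repo's constant wraps:

// The service reports progress as a string percentage ("0".."100");
// a missing header makes parseInt return NaN, i.e. "not reported".
const parseIndexTransformationProgress = (headers: Record<string, unknown>): number =>
  parseInt(headers["x-ms-documentdb-collection-index-transformation-progress"] as string);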

View File

@@ -10,9 +10,9 @@ describe("readCollection", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -23,11 +23,11 @@ describe("readCollection", () => {
return { return {
container: () => { container: () => {
return { return {
read: (): unknown => ({}), read: (): unknown => ({})
}; };
}, }
}; };
}, }
}); });
await readCollection("database", "collection"); await readCollection("database", "collection");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -7,7 +7,10 @@ export async function readCollection(databaseId: string, collectionId: string):
let collection: DataModels.Collection; let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`); const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
try { try {
const response = await client().database(databaseId).container(collectionId).read(); const response = await client()
.database(databaseId)
.container(collectionId)
.read();
collection = response.resource as DataModels.Collection; collection = response.resource as DataModels.Collection;
} catch (error) { } catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`); handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);

View File

@@ -1,6 +1,9 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType"; import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType"; import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadCollectionOfferParams } from "../../Contracts/DataModels"; import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
@@ -8,22 +11,50 @@ import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/20
import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources"; import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK"; import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext"; import { userContext } from "../../UserContext";
export const readCollectionOffer = async (params: ReadCollectionOfferParams): Promise<Offer> => { export const readCollectionOffer = async (
params: DataModels.ReadCollectionOfferParams
): Promise<DataModels.OfferWithHeaders> => {
const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`); const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`);
let offerId = params.offerId;
if (!offerId) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
try { try {
if ( offerId = await getCollectionOfferIdWithARM(params.databaseId, params.collectionId);
window.authType === AuthType.AAD && } catch (error) {
!userContext.useSDKOperations && clearMessage();
userContext.defaultExperience !== DefaultAccountExperienceType.Table if (error.code !== "NotFound") {
) { throw error;
return await readCollectionOfferWithARM(params.databaseId, params.collectionId); }
return undefined;
}
} else {
offerId = await getCollectionOfferIdWithSDK(params.collectionResourceId);
if (!offerId) {
clearMessage();
return undefined;
}
}
} }
return await readOfferWithSDK(params.offerId, params.collectionResourceId); const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
};
try {
const response = await client()
.offer(offerId)
.read(options);
return (
response && {
...response.resource,
headers: response.headers
}
);
} catch (error) { } catch (error) {
handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`); handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
throw error; throw error;
@@ -32,14 +63,12 @@ export const readCollectionOffer = async (params: ReadCollectionOfferParams): Pr
} }
}; };
const readCollectionOfferWithARM = async (databaseId: string, collectionId: string): Promise<Offer> => { const getCollectionOfferIdWithARM = async (databaseId: string, collectionId: string): Promise<string> => {
let rpResponse;
const subscriptionId = userContext.subscriptionId; const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup; const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name; const accountName = userContext.databaseAccount.name;
const defaultExperience = userContext.defaultExperience; const defaultExperience = userContext.defaultExperience;
let rpResponse;
try {
switch (defaultExperience) { switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB: case DefaultAccountExperienceType.DocumentDB:
rpResponse = await getSqlContainerThroughput( rpResponse = await getSqlContainerThroughput(
@@ -83,39 +112,12 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
default: default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`); throw new Error(`Unsupported default experience type: ${defaultExperience}`);
} }
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
return undefined; return rpResponse?.name;
}
const resource = rpResponse?.properties?.resource;
if (resource) {
const offerId: string = rpResponse.name;
const minimumThroughput: number =
typeof resource.minimumThroughput === "string"
? parseInt(resource.minimumThroughput)
: resource.minimumThroughput;
const autoscaleSettings = resource.autoscaleSettings;
if (autoscaleSettings) {
return {
id: offerId,
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
}; };
}
return { const getCollectionOfferIdWithSDK = async (collectionResourceId: string): Promise<string> => {
id: offerId, const offers = await readOffers();
autoscaleMaxThroughput: undefined, const offer = offers.find(offer => offer.resource === collectionResourceId);
manualThroughput: resource.throughput, return offer?.id;
minimumThroughput,
};
}
return undefined;
}; };
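Both columns of readCollectionOffer converge on the same final read; a sketch with stand-ins for the SDK pieces, assuming HttpHeaders.populateCollectionThroughputInfo wraps the x-ms-documentdb-populatecollectionthroughputinfo header:

// Stand-ins for the Cosmos SDK surface used here (illustration only).
interface OfferReadResponseSketch {
  resource: { id: string };
  headers: Record<string, unknown>;
}
interface CosmosClientSketch {
  offer(id: string): {
    read(options: { initialHeaders: Record<string, boolean> }): Promise<OfferReadResponseSketch>;
  };
}

// The extra header asks the service to annotate the offer with minimum-
// throughput info; the response headers ride along on the returned object.
const readOfferWithHeaders = async (client: CosmosClientSketch, offerId: string) => {
  const response = await client.offer(offerId).read({
    initialHeaders: { "x-ms-documentdb-populatecollectionthroughputinfo": true },
  });
  return response && { ...response.resource, headers: response.headers };
};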

View File

@@ -12,9 +12,9 @@ describe("readCollections", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -32,12 +32,12 @@ describe("readCollections", () => {
containers: { containers: {
readAll: () => { readAll: () => {
return { return {
fetchAll: (): unknown => [], fetchAll: (): unknown => []
}; };
}, }
}, }
}; };
}, }
}); });
await readCollections("database"); await readCollections("database");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -23,7 +23,10 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
return await readCollectionsWithARM(databaseId); return await readCollectionsWithARM(databaseId);
} }
const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll(); const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
return sdkResponse.resources as DataModels.Collection[]; return sdkResponse.resources as DataModels.Collection[];
} catch (error) { } catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`); handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
@@ -60,5 +63,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
throw new Error(`Unsupported default experience type: ${defaultExperience}`); throw new Error(`Unsupported default experience type: ${defaultExperience}`);
} }
return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection); return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
} }
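The optional-chained map on the line above recurs across readDatabases, readStoredProcedures, readTriggers and readUserDefinedFunctions; a generic sketch of the pattern:

// RP list responses nest each model under properties.resource; optional
// chaining keeps a missing list (undefined) distinct from an empty one ([]).
interface RpListResponseSketch<T> {
  value?: { properties?: { resource?: T } }[];
}

const unwrapRpList = <T>(response: RpListResponseSketch<T>): T[] | undefined =>
  response?.value?.map(item => item.properties?.resource as T);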

View File

@@ -1,28 +1,51 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType"; import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType"; import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadDatabaseOfferParams } from "../../Contracts/DataModels"; import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK"; import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext"; import { userContext } from "../../UserContext";
export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promise<Offer> => { export const readDatabaseOffer = async (
params: DataModels.ReadDatabaseOfferParams
): Promise<DataModels.OfferWithHeaders> => {
const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`); const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`);
let offerId = params.offerId;
try { if (!offerId) {
if ( offerId = await (window.authType === AuthType.AAD &&
window.authType === AuthType.AAD &&
!userContext.useSDKOperations && !userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table userContext.defaultExperience !== DefaultAccountExperienceType.Table
) { ? getDatabaseOfferIdWithARM(params.databaseId)
return await readDatabaseOfferWithARM(params.databaseId); : getDatabaseOfferIdWithSDK(params.databaseResourceId));
if (!offerId) {
clearMessage();
return undefined;
}
} }
return await readOfferWithSDK(params.offerId, params.databaseResourceId); const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
};
try {
const response = await client()
.offer(offerId)
.read(options);
return (
response && {
...response.resource,
headers: response.headers
}
);
} catch (error) { } catch (error) {
handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`); handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
throw error; throw error;
@@ -31,13 +54,13 @@ export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promis
} }
}; };
const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => { const getDatabaseOfferIdWithARM = async (databaseId: string): Promise<string> => {
let rpResponse;
const subscriptionId = userContext.subscriptionId; const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup; const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name; const accountName = userContext.databaseAccount.name;
const defaultExperience = userContext.defaultExperience; const defaultExperience = userContext.defaultExperience;
let rpResponse;
try { try {
switch (defaultExperience) { switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB: case DefaultAccountExperienceType.DocumentDB:
@@ -55,39 +78,18 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
default: default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`); throw new Error(`Unsupported default experience type: ${defaultExperience}`);
} }
return rpResponse?.name;
} catch (error) { } catch (error) {
if (error.code !== "NotFound") { if (error.code !== "NotFound") {
throw error; throw error;
} }
return undefined; return undefined;
} }
const resource = rpResponse?.properties?.resource;
if (resource) {
const offerId: string = rpResponse.name;
const minimumThroughput: number =
typeof resource.minimumThroughput === "string"
? parseInt(resource.minimumThroughput)
: resource.minimumThroughput;
const autoscaleSettings = resource.autoscaleSettings;
if (autoscaleSettings) {
return {
id: offerId,
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
}; };
}
return { const getDatabaseOfferIdWithSDK = async (databaseResourceId: string): Promise<string> => {
id: offerId, const offers = await readOffers();
autoscaleMaxThroughput: undefined, const offer = offers.find(offer => offer.resource === databaseResourceId);
manualThroughput: resource.throughput, return offer?.id;
minimumThroughput,
};
}
return undefined;
}; };
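The SDK fallback shared by getCollectionOfferIdWithSDK and getDatabaseOfferIdWithSDK is a single lookup; sketched here with the offer list shape inlined:

// An offer's `resource` field holds the self-link of the collection or
// database it provisions; matching on it recovers the offer id without ARM.
interface OfferLikeSketch {
  id: string;
  resource: string;
}

const findOfferId = (offers: OfferLikeSketch[], resourceSelfLink: string): string | undefined =>
  offers.find(offer => offer.resource === resourceSelfLink)?.id;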

View File

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -30,10 +30,10 @@ describe("readDatabases", () => {
databases: { databases: {
readAll: () => { readAll: () => {
return { return {
fetchAll: (): unknown => [], fetchAll: (): unknown => []
}; };
}, }
}, }
}); });
await readDatabases(); await readDatabases();
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -21,7 +21,9 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
) { ) {
databases = await readDatabasesWithARM(); databases = await readDatabasesWithARM();
} else { } else {
const sdkResponse = await client().databases.readAll().fetchAll(); const sdkResponse = await client()
.databases.readAll()
.fetchAll();
databases = sdkResponse.resources as DataModels.Database[]; databases = sdkResponse.resources as DataModels.Database[];
} }
} catch (error) { } catch (error) {
@@ -56,5 +58,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
throw new Error(`Unsupported default experience type: ${defaultExperience}`); throw new Error(`Unsupported default experience type: ${defaultExperience}`);
} }
return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database); return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
} }

View File

@@ -1,27 +0,0 @@
import { HttpHeaders } from "../Constants";
import { Offer } from "../../Contracts/DataModels";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readOffers } from "./readOffers";
export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
if (!offerId) {
const offers = await readOffers();
const offer = offers.find((offer) => offer.resource === resourceId);
if (!offer) {
return undefined;
}
offerId = offer.id;
}
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true,
},
};
const response = await client().offer(offerId).read(options);
return parseSDKOfferResponse(response);
};

View File

@@ -1,13 +1,15 @@
import { SDKOfferDefinition } from "../../Contracts/DataModels"; import { Offer } from "../../Contracts/DataModels";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { handleError, getErrorMessage } from "../ErrorHandlingUtils"; import { handleError, getErrorMessage } from "../ErrorHandlingUtils";
export const readOffers = async (): Promise<SDKOfferDefinition[]> => { export const readOffers = async (): Promise<Offer[]> => {
const clearMessage = logConsoleProgress(`Querying offers`); const clearMessage = logConsoleProgress(`Querying offers`);
try { try {
const response = await client().offers.readAll().fetchAll(); const response = await client()
.offers.readAll()
.fetchAll();
return response?.resources; return response?.resources;
} catch (error) { } catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too. // This should be removed when we can correctly identify if an account is serverless when connected using connection string too.

View File

@@ -25,7 +25,7 @@ export async function readStoredProcedures(
databaseId, databaseId,
collectionId collectionId
); );
return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource); return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
} }
const response = await client() const response = await client()

View File

@@ -25,10 +25,14 @@ export async function readTriggers(
databaseId, databaseId,
collectionId collectionId
); );
return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource); return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
} }
const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll(); const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.readAll()
.fetchAll();
return response?.resources; return response?.resources;
} catch (error) { } catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`); handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);

View File

@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
databaseId, databaseId,
collectionId collectionId
); );
return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource); return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
} }
const response = await client() const response = await client()

View File

@@ -8,22 +8,22 @@ import {
MongoDBCollectionCreateUpdateParameters, MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource, MongoDBCollectionResource,
SqlContainerCreateUpdateParameters, SqlContainerCreateUpdateParameters,
SqlContainerResource, SqlContainerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { RequestOptions } from "@azure/cosmos/dist-esm"; import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { import {
createUpdateCassandraTable, createUpdateCassandraTable,
getCassandraTable, getCassandraTable
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { import {
createUpdateMongoDBCollection, createUpdateMongoDBCollection,
getMongoDBCollection, getMongoDBCollection
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { import {
createUpdateGremlinGraph, createUpdateGremlinGraph,
getGremlinGraph, getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources"; import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
const updateParams: MongoDBCollectionCreateUpdateParameters = { const updateParams: MongoDBCollectionCreateUpdateParameters = {
properties: { properties: {
resource: newCollection, resource: newCollection,
options: updateOptions, options: updateOptions
}, }
}; };
const updateResponse = await createUpdateMongoDBCollection( const updateResponse = await createUpdateMongoDBCollection(

View File

@@ -1,14 +1,13 @@
import { AuthType } from "../../AuthType"; import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType"; import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { HttpHeaders } from "../Constants"; import { HttpHeaders } from "../Constants";
import { Offer, SDKOfferDefinition, UpdateOfferParams } from "../../Contracts/DataModels"; import { Offer, UpdateOfferParams } from "../../Contracts/DataModels";
import { OfferDefinition } from "@azure/cosmos"; import { OfferDefinition } from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm"; import { RequestOptions } from "@azure/cosmos/dist-esm";
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types"; import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readCollectionOffer } from "./readCollectionOffer"; import { readCollectionOffer } from "./readCollectionOffer";
import { readDatabaseOffer } from "./readDatabaseOffer"; import { readDatabaseOffer } from "./readDatabaseOffer";
import { import {
@@ -17,7 +16,7 @@ import {
migrateSqlDatabaseToManualThroughput, migrateSqlDatabaseToManualThroughput,
migrateSqlContainerToAutoscale, migrateSqlContainerToAutoscale,
migrateSqlContainerToManualThroughput, migrateSqlContainerToManualThroughput,
updateSqlContainerThroughput, updateSqlContainerThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { import {
updateCassandraKeyspaceThroughput, updateCassandraKeyspaceThroughput,
@@ -25,7 +24,7 @@ import {
migrateCassandraKeyspaceToManualThroughput, migrateCassandraKeyspaceToManualThroughput,
migrateCassandraTableToAutoscale, migrateCassandraTableToAutoscale,
migrateCassandraTableToManualThroughput, migrateCassandraTableToManualThroughput,
updateCassandraTableThroughput, updateCassandraTableThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { import {
updateMongoDBDatabaseThroughput, updateMongoDBDatabaseThroughput,
@@ -33,7 +32,7 @@ import {
migrateMongoDBDatabaseToManualThroughput, migrateMongoDBDatabaseToManualThroughput,
migrateMongoDBCollectionToAutoscale, migrateMongoDBCollectionToAutoscale,
migrateMongoDBCollectionToManualThroughput, migrateMongoDBCollectionToManualThroughput,
updateMongoDBCollectionThroughput, updateMongoDBCollectionThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { import {
updateGremlinDatabaseThroughput, updateGremlinDatabaseThroughput,
@@ -41,13 +40,13 @@ import {
migrateGremlinDatabaseToManualThroughput, migrateGremlinDatabaseToManualThroughput,
migrateGremlinGraphToAutoscale, migrateGremlinGraphToAutoscale,
migrateGremlinGraphToManualThroughput, migrateGremlinGraphToManualThroughput,
updateGremlinGraphThroughput, updateGremlinGraphThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { userContext } from "../../UserContext"; import { userContext } from "../../UserContext";
import { import {
migrateTableToAutoscale, migrateTableToAutoscale,
migrateTableToManualThroughput, migrateTableToManualThroughput,
updateTableThroughput, updateTableThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources"; } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => { export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
@@ -110,7 +109,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
return await readCollectionOffer({ return await readCollectionOffer({
collectionId: params.collectionId, collectionId: params.collectionId,
databaseId: params.databaseId, databaseId: params.databaseId,
offerId: params.currentOffer.id, offerId: params.currentOffer.id
}); });
}; };
@@ -140,7 +139,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
return await readDatabaseOffer({ return await readDatabaseOffer({
databaseId: params.databaseId, databaseId: params.databaseId,
offerId: params.currentOffer.id, offerId: params.currentOffer.id
}); });
}; };
@@ -358,13 +357,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => { const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
const body: ThroughputSettingsUpdateParameters = { const body: ThroughputSettingsUpdateParameters = {
properties: { properties: {
resource: {}, resource: {}
}, }
}; };
if (params.autopilotThroughput) { if (params.autopilotThroughput) {
body.properties.resource.autoscaleSettings = { body.properties.resource.autoscaleSettings = {
maxThroughput: params.autopilotThroughput, maxThroughput: params.autopilotThroughput
}; };
} else { } else {
body.properties.resource.throughput = params.manualThroughput; body.properties.resource.throughput = params.manualThroughput;
@@ -374,26 +373,26 @@ const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpd
}; };
const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => { const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => {
const sdkOfferDefinition = params.currentOffer.offerDefinition; const currentOffer = params.currentOffer;
const newOffer: SDKOfferDefinition = { const newOffer: Offer = {
content: { content: {
offerThroughput: undefined, offerThroughput: undefined,
offerIsRUPerMinuteThroughputEnabled: false, offerIsRUPerMinuteThroughputEnabled: false
}, },
_etag: undefined, _etag: undefined,
_ts: undefined, _ts: undefined,
_rid: sdkOfferDefinition._rid, _rid: currentOffer._rid,
_self: sdkOfferDefinition._self, _self: currentOffer._self,
id: sdkOfferDefinition.id, id: currentOffer.id,
offerResourceId: sdkOfferDefinition.offerResourceId, offerResourceId: currentOffer.offerResourceId,
offerVersion: sdkOfferDefinition.offerVersion, offerVersion: currentOffer.offerVersion,
offerType: sdkOfferDefinition.offerType, offerType: currentOffer.offerType,
resource: sdkOfferDefinition.resource, resource: currentOffer.resource
}; };
if (params.autopilotThroughput) { if (params.autopilotThroughput) {
newOffer.content.offerAutopilotSettings = { newOffer.content.offerAutopilotSettings = {
maxThroughput: params.autopilotThroughput, maxThroughput: params.autopilotThroughput
}; };
} else { } else {
newOffer.content.offerThroughput = params.manualThroughput; newOffer.content.offerThroughput = params.manualThroughput;
@@ -402,12 +401,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const options: RequestOptions = {}; const options: RequestOptions = {};
if (params.migrateToAutoPilot) { if (params.migrateToAutoPilot) {
options.initialHeaders = { options.initialHeaders = {
[HttpHeaders.migrateOfferToAutopilot]: "true", [HttpHeaders.migrateOfferToAutopilot]: "true"
}; };
delete newOffer.content.offerAutopilotSettings; delete newOffer.content.offerAutopilotSettings;
} else if (params.migrateToManual) { } else if (params.migrateToManual) {
options.initialHeaders = { options.initialHeaders = {
[HttpHeaders.migrateOfferToManualThroughput]: "true", [HttpHeaders.migrateOfferToManualThroughput]: "true"
}; };
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 }; newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
} }
@@ -416,6 +415,5 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
.offer(params.currentOffer.id) .offer(params.currentOffer.id)
// TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660) // TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
.replace((newOffer as unknown) as OfferDefinition, options); .replace((newOffer as unknown) as OfferDefinition, options);
return sdkResponse?.resource;
return parseSDKOfferResponse(sdkResponse);
}; };
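updateOfferWithSDK's three modes differ only in request headers and the autopilot settings on the replacement body; a sketch, assuming the x-ms-cosmos-* names behind HttpHeaders.migrateOfferToAutopilot / migrateOfferToManualThroughput:

// Header/body pairing for the three update modes seen above.
const migrationRequestSketch = (opts: { migrateToAutoPilot?: boolean; migrateToManual?: boolean }) => {
  if (opts.migrateToAutoPilot) {
    // Migrating to autoscale: header only; the body must NOT carry
    // offerAutopilotSettings (the code above deletes them).
    return {
      headers: { "x-ms-cosmos-migrate-offer-to-autopilot": "true" },
      autopilotSettings: undefined,
    };
  }
  if (opts.migrateToManual) {
    // Migrating to manual: header plus a zeroed maxThroughput in the body.
    return {
      headers: { "x-ms-cosmos-migrate-offer-to-manual-throughput": "true" },
      autopilotSettings: { maxThroughput: 0 },
    };
  }
  return { headers: {}, autopilotSettings: undefined }; // plain throughput update
};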

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos"; import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import { import {
SqlStoredProcedureCreateUpdateParameters, SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource, SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { import {
createUpdateSqlStoredProcedure, createUpdateSqlStoredProcedure,
getSqlStoredProcedure, getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = { const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: { properties: {
resource: storedProcedure as SqlStoredProcedureResource, resource: storedProcedure as SqlStoredProcedureResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlStoredProcedure( const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType"; import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { import {
SqlTriggerCreateUpdateParameters, SqlTriggerCreateUpdateParameters,
SqlTriggerResource, SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { TriggerDefinition } from "@azure/cosmos"; import { TriggerDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
@@ -36,8 +36,8 @@ export async function updateTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = { const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: { properties: {
resource: trigger as SqlTriggerResource, resource: trigger as SqlTriggerResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlTrigger( const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos"; import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import { import {
SqlUserDefinedFunctionCreateUpdateParameters, SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource, SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { import {
createUpdateSqlUserDefinedFunction, createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction, getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = { const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: { properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource, resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlUserDefinedFunction( const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -1,7 +1,7 @@
 export enum Platform {
   Portal = "Portal",
   Hosted = "Hosted",
-  Emulator = "Emulator",
+  Emulator = "Emulator"
 }
 interface ConfigContext {
@@ -37,7 +37,7 @@ let configContext: Readonly<ConfigContext> = {
   `^https:\\/\\/[\\.\\w]*portal\\.microsoftazure.de$`,
   `^https:\\/\\/[\\.\\w]*ext\\.azure\\.(com|cn|us)$`,
   `^https:\\/\\/[\\.\\w]*\\.ext\\.microsoftazure\\.de$`,
-  `^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`,
+  `^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`
 ],
 // Webpack injects this at build time
 gitSha: process.env.GIT_SHA,
@@ -52,7 +52,7 @@ let configContext: Readonly<ConfigContext> = {
 ARCADIA_LIVY_ENDPOINT_DNS_ZONE: "dev.azuresynapse.net",
 GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/settings/applications/1189306
 JUNO_ENDPOINT: "https://tools.cosmos.azure.com",
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
 };
 export function resetConfigContext(): void {
@@ -73,7 +73,7 @@ if (process.env.NODE_ENV === "development") {
   BACKEND_ENDPOINT: "https://localhost:" + port,
   MONGO_BACKEND_ENDPOINT: "https://localhost:" + port,
   PROXY_PATH: "/proxy",
-  EMULATOR_ENDPOINT: "https://localhost:8081",
+  EMULATOR_ENDPOINT: "https://localhost:8081"
 });
 }
@@ -86,7 +86,7 @@ export async function initializeConfiguration(): Promise<ConfigContext> {
 Object.assign(configContext, externalConfig);
 if (allowedParentFrameOrigins && allowedParentFrameOrigins.length > 0) {
   updateConfigContext({
-    allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins],
+    allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins]
   });
 }
 } catch (error) {
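
The last hunk shows the config-merge pattern: an external config is Object.assign-ed into configContext, and extra allowed parent-frame origins are appended through updateConfigContext. A minimal sketch of that pattern, assuming updateConfigContext is itself a shallow merge (the diff does not show its body):

// Minimal sketch of the config-merge pattern in the hunk above; the
// ConfigContext fields are trimmed to the ones this diff shows.
interface ConfigContext {
  allowedParentFrameOrigins: string[];
  BACKEND_ENDPOINT?: string;
}

let configContext: Readonly<ConfigContext> = {
  allowedParentFrameOrigins: [`^https:\\/\\/[\\.\\w]*ext\\.azure\\.(com|cn|us)$`]
};

// Assumed to mirror the repo's updateConfigContext: shallow-merge new values in.
function updateConfigContext(newContext: Partial<ConfigContext>): void {
  configContext = Object.assign({}, configContext, newContext);
}

// Appending externally supplied origins, as in initializeConfiguration above:
const allowedParentFrameOrigins = ["^https://example\\.contoso\\.com$"]; // hypothetical value
if (allowedParentFrameOrigins.length > 0) {
  updateConfigContext({
    allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins]
  });
}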

View File

@@ -7,7 +7,7 @@ export enum TabKind {
   TableEntities,
   Graph,
   SQLQuery,
-  ScaleSettings,
+  ScaleSettings
 }
 /**
@@ -20,7 +20,7 @@ export enum PaneKind {
   DeleteDatabase,
   GlobalSettings,
   AdHocAccess,
-  SwitchDirectory,
+  SwitchDirectory
 }
 /**
@@ -79,5 +79,5 @@ export enum ActionType {
   OpenCollectionTab,
   OpenPane,
   TransmitCachedData,
-  OpenSampleNotebook,
+  OpenSampleNotebook
 }
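
Most of the churn across these files is one mechanical edit: the last enum member loses its trailing comma. That matches a Prettier trailingComma change; a quick way to reproduce the effect, assuming the setting is "none" (v2-style synchronous API):

// Reformatting a snippet with Prettier's API. trailingComma: "none" is an
// assumption inferred from the pattern of this diff, not a confirmed setting.
import * as prettier from "prettier";

const source = `export enum TabKind {\n  SQLQuery,\n  ScaleSettings,\n}\n`;
const formatted = prettier.format(source, {
  parser: "typescript",
  trailingComma: "none"
});
console.log(formatted); // last enum member printed without a trailing comma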

View File

@@ -56,7 +56,7 @@ export enum ApiKind {
   Table,
   Cassandra,
   Graph,
-  MongoDBCompute,
+  MongoDBCompute
 }
 export interface GenerateTokenResponse {
@@ -88,38 +88,6 @@ export interface Resource {
   id: string;
 }
-export interface IType {
-  name: string;
-  code: number;
-}
-export interface IDataField {
-  dataType: IType;
-  hasNulls: boolean;
-  isArray: boolean;
-  schemaType: IType;
-  name: string;
-  path: string;
-  maxRepetitionLevel: number;
-  maxDefinitionLevel: number;
-}
-export interface ISchema {
-  id: string;
-  accountName: string;
-  resource: string;
-  fields: IDataField[];
-}
-export interface ISchemaRequest {
-  id: string;
-  subscriptionId: string;
-  resourceGroup: string;
-  accountName: string;
-  resource: string;
-  status: string;
-}
 export interface Collection extends Resource {
   defaultTtl?: number;
   indexingPolicy?: IndexingPolicy;
@@ -130,8 +98,6 @@ export interface Collection extends Resource {
   changeFeedPolicy?: ChangeFeedPolicy;
   analyticalStorageTtl?: number;
   geospatialConfig?: GeospatialConfig;
-  schema?: ISchema;
-  requestSchema?: () => void;
 }
 export interface Database extends Resource {
@@ -208,21 +174,12 @@ export interface QueryMetrics {
   vmExecutionTime: any;
 }
-export interface Offer {
-  id: string;
-  autoscaleMaxThroughput: number;
-  manualThroughput: number;
-  minimumThroughput: number;
-  offerDefinition?: SDKOfferDefinition;
-  headers?: any;
-}
-export interface SDKOfferDefinition extends Resource {
+export interface Offer extends Resource {
   offerVersion?: string;
   offerType?: string;
   content?: {
     offerThroughput: number;
-    offerIsRUPerMinuteThroughputEnabled?: boolean;
+    offerIsRUPerMinuteThroughputEnabled: boolean;
     collectionThroughputInfo?: OfferThroughputInfo;
     offerAutopilotSettings?: AutoPilotOfferSettings;
   };
@@ -230,6 +187,10 @@ export interface SDKOfferDefinition extends Resource {
   offerResourceId?: string;
 }
+export interface OfferWithHeaders extends Offer {
+  headers: any;
+}
 export interface OfferThroughputInfo {
   minimumRUForCollection: number;
   numPhysicalPartitions: number;
@@ -248,6 +209,7 @@ export interface CreateDatabaseAndCollectionRequest {
   collectionId: string;
   offerThroughput: number;
   databaseLevelThroughput: boolean;
+  rupmEnabled?: boolean;
   partitionKey?: PartitionKey;
   indexingPolicy?: IndexingPolicy;
   uniqueKeyPolicy?: UniqueKeyPolicy;
@@ -334,7 +296,7 @@ export interface Notification {
 export enum ConflictResolutionMode {
   Custom = "Custom",
-  LastWriterWins = "LastWriterWins",
+  LastWriterWins = "LastWriterWins"
 }
 /**
@@ -472,7 +434,7 @@ export interface SparkClusterEndpoint {
 export enum SparkClusterEndpointKind {
   SparkUI = "SparkUI",
   HistoryServerUI = "HistoryServerUI",
-  Livy = "Livy",
+  Livy = "Livy"
 }
 export interface RpParameters {
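
The Offer reshaping is the most consequential change in this file: one side models Offer as a flat view model (autoscaleMaxThroughput, manualThroughput, minimumThroughput) wrapping an SDKOfferDefinition, while the other has Offer extend Resource directly and adds OfferWithHeaders for response headers. A sketch of how a consumer can absorb either shape; the field lists are trimmed to what the hunk shows, and the type names here are hypothetical labels:

// The two Offer shapes visible in the hunk above, reduced to the fields the
// diff actually shows. "ViewModel" / "Raw" are hypothetical names.
interface OfferViewModel {
  id: string;
  autoscaleMaxThroughput: number;
  manualThroughput: number;
  minimumThroughput: number;
  headers?: unknown;
}

interface OfferRaw {
  offerVersion?: string;
  offerType?: string;
  content?: { offerThroughput: number };
}

// A consumer that only needs a throughput number can normalize either shape:
function currentThroughput(offer: OfferViewModel | OfferRaw): number | undefined {
  if ("manualThroughput" in offer) {
    return offer.manualThroughput;
  }
  return offer.content?.offerThroughput;
}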

View File

@@ -21,7 +21,7 @@ export enum LogEntryLevel {
   /**
    * Error level.
    */
-  Error = 2,
+  Error = 2
 }
 /**
  * Schema of a log entry.

View File

@@ -32,8 +32,7 @@ export enum MessageTypes {
   GetArcadiaToken,
   CreateWorkspace,
   CreateSparkPool,
-  RefreshDatabaseAccount,
-  InitTestExplorer,
+  RefreshDatabaseAccount
 }
 export { Versions, ActionContracts, Diagnostics };
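
MessageTypes backs the postMessage contract between the Data Explorer iframe and its host, which is why members like InitTestExplorer can only be dropped when both sides move together. A hypothetical sender sketch; the { signature, data } envelope and the use of document.referrer as the target origin are assumptions, not taken from the repo:

// Hypothetical sender: signals the host to refresh the database account.
enum MessageTypes {
  GetArcadiaToken,
  CreateWorkspace,
  CreateSparkPool,
  RefreshDatabaseAccount
}

function sendMessageToHost(type: MessageTypes, data?: unknown): void {
  window.parent.postMessage(
    { signature: "pcIframe", data: { type, payload: data } }, // assumed envelope
    window.document.referrer // assumed target origin; host-side validation assumed
  );
}

sendMessageToHost(MessageTypes.RefreshDatabaseAccount);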

View File

@@ -1,7 +0,0 @@
-export enum SubscriptionType {
-  Benefits,
-  EA,
-  Free,
-  Internal,
-  PAYG,
-}

Some files were not shown because too many files have changed in this diff.