Compare commits


2 Commits

Author          SHA1        Message               Date
Steve Faulkner  627c346559  More cleanup          2020-11-11 15:45:32 -06:00
Steve Faulkner  415ebc505b  Remove RU Max Limits  2020-11-11 13:50:17 -06:00
537 changed files with 53179 additions and 47997 deletions

View File

@@ -3,11 +3,7 @@ PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION= PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP= PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT= PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_CONNECTION_STRING= PORTAL_RUNNER_CONNECTION_STRING=
NOTEBOOKS_TEST_RUNNER_TENANT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
CASSANDRA_CONNECTION_STRING= CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING= MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING= TABLES_CONNECTION_STRING=

View File

@@ -202,6 +202,8 @@ src/Explorer/Tabs/QueryTab.test.ts
src/Explorer/Tabs/QueryTab.ts src/Explorer/Tabs/QueryTab.ts
src/Explorer/Tabs/QueryTablesTab.ts src/Explorer/Tabs/QueryTablesTab.ts
src/Explorer/Tabs/ScriptTabBase.ts src/Explorer/Tabs/ScriptTabBase.ts
src/Explorer/Tabs/SettingsTab.test.ts
src/Explorer/Tabs/SettingsTab.ts
src/Explorer/Tabs/SparkMasterTab.ts src/Explorer/Tabs/SparkMasterTab.ts
src/Explorer/Tabs/StoredProcedureTab.ts src/Explorer/Tabs/StoredProcedureTab.ts
src/Explorer/Tabs/TabComponents.ts src/Explorer/Tabs/TabComponents.ts
@@ -288,6 +290,8 @@ src/Utils/DatabaseAccountUtils.ts
src/Utils/JunoUtils.ts src/Utils/JunoUtils.ts
src/Utils/MessageValidation.ts src/Utils/MessageValidation.ts
src/Utils/NotebookConfigurationUtils.ts src/Utils/NotebookConfigurationUtils.ts
src/Utils/OfferUtils.test.ts
src/Utils/OfferUtils.ts
src/Utils/PricingUtils.test.ts src/Utils/PricingUtils.test.ts
src/Utils/QueryUtils.test.ts src/Utils/QueryUtils.test.ts
src/Utils/QueryUtils.ts src/Utils/QueryUtils.ts
@@ -392,5 +396,19 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts __mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx

View File

@@ -1,39 +1,39 @@
module.exports = { module.exports = {
env: { env: {
browser: true, browser: true,
es6: true, es6: true
}, },
plugins: ["@typescript-eslint", "no-null", "prefer-arrow"], plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
globals: { globals: {
Atomics: "readonly", Atomics: "readonly",
SharedArrayBuffer: "readonly", SharedArrayBuffer: "readonly"
}, },
parser: "@typescript-eslint/parser", parser: "@typescript-eslint/parser",
parserOptions: { parserOptions: {
ecmaFeatures: { ecmaFeatures: {
jsx: true, jsx: true
}, },
ecmaVersion: 2018, ecmaVersion: 2018,
sourceType: "module", sourceType: "module"
}, },
overrides: [ overrides: [
{ {
files: ["**/*.tsx"], files: ["**/*.tsx"],
env: { env: {
jest: true, jest: true
}, },
extends: ["plugin:react/recommended"], extends: ["plugin:react/recommended"],
plugins: ["react"], plugins: ["react"]
}, },
{ {
files: ["**/*.{test,spec}.{ts,tsx}"], files: ["**/*.{test,spec}.{ts,tsx}"],
env: { env: {
jest: true, jest: true
}, },
extends: ["plugin:jest/recommended"], extends: ["plugin:jest/recommended"],
plugins: ["jest"], plugins: ["jest"]
}, }
], ],
rules: { rules: {
curly: "error", curly: "error",
@@ -47,8 +47,8 @@ module.exports = {
"error", "error",
{ {
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]", selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'", message: "Do not use JSON.stringify(error). It will print '{}'"
}, }
], ]
}, }
}; };

View File

@@ -79,31 +79,32 @@ jobs:
name: dist name: dist
path: dist/ path: dist/
endtoendemulator: endtoendemulator:
name: "End To End Emulator Tests" name: "End To End Tests | Emulator | SQL"
needs: [lint, format, compile, unittest] needs: [lint, format, compile, unittest]
runs-on: windows-latest runs-on: windows-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: southpolesteve/cosmos-emulator-github-action@v1
- name: Use Node.js 12.x - name: Use Node.js 12.x
uses: actions/setup-node@v1 uses: actions/setup-node@v1
with: with:
node-version: 12.x node-version: 12.x
- uses: southpolesteve/cosmos-emulator-github-action@v1 - name: Restore Cypress Binary Cache
uses: actions/cache@v2
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-binary-cache
- name: End to End Tests - name: End to End Tests
run: | run: |
npm ci npm ci
npm start & npm start &
npm run wait-for-server npm ci --prefix ./cypress
npx jest -c ./jest.config.e2e.js --detectOpenHandles sql npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
shell: bash shell: bash
env: env:
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator" EMULATOR_ENDPOINT: https://0.0.0.0:8081/
PLATFORM: "Emulator"
NODE_TLS_REJECT_UNAUTHORIZED: 0 NODE_TLS_REJECT_UNAUTHORIZED: 0
- uses: actions/upload-artifact@v2 CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
with:
name: screenshots
path: failed-*
accessibility: accessibility:
name: "Accessibility | Hosted" name: "Accessibility | Hosted"
needs: [lint, format, compile, unittest] needs: [lint, format, compile, unittest]
@@ -122,13 +123,13 @@ jobs:
sudo sysctl -p sudo sysctl -p
npm ci npm ci
npm start & npm start &
npx wait-on -i 5000 https-get://0.0.0.0:1234/ npx wait-on -i 5000 https-get://0.0.0.0:1234/
node utils/accesibilityCheck.js node utils/accesibilityCheck.js
shell: bash shell: bash
env: env:
NODE_TLS_REJECT_UNAUTHORIZED: 0 NODE_TLS_REJECT_UNAUTHORIZED: 0
endtoendhosted: endtoendpuppeteer:
name: "End to End Hosted Tests" name: "End to end puppeteer tests"
needs: [lint, format, compile, unittest] needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@@ -137,7 +138,7 @@ jobs:
uses: actions/setup-node@v1 uses: actions/setup-node@v1
with: with:
node-version: 12.x node-version: 12.x
- name: End to End Hosted Tests - name: End to End Puppeteer Tests
run: | run: |
npm ci npm ci
npm start & npm start &
@@ -146,13 +147,6 @@ jobs:
shell: bash shell: bash
env: env:
NODE_TLS_REJECT_UNAUTHORIZED: 0 NODE_TLS_REJECT_UNAUTHORIZED: 0
PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }} PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }} MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }} CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
@@ -165,7 +159,7 @@ jobs:
nuget: nuget:
name: Publish Nuget name: Publish Nuget
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/') if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted] needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }} NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -189,7 +183,7 @@ jobs:
nugetmpac: nugetmpac:
name: Publish Nuget MPAC name: Publish Nuget MPAC
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/') if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted] needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }} NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}

25
.github/workflows/runners.yml vendored Normal file
View File

@@ -0,0 +1,25 @@
name: Runners
on:
schedule:
- cron: "0 * 1 * *"
jobs:
sqlcreatecollection:
runs-on: ubuntu-latest
name: "SQL | Create Collection"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- run: npm ci
- run: npm run test:e2e
env:
PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
PORTAL_RUNNER_RESOURCE_GROUP: runners
PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failure.png

3
.gitignore vendored
View File

@@ -9,6 +9,9 @@ pkg/DataExplorer/*
test/out/* test/out/*
workers/**/*.js workers/**/*.js
*.trx *.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/* notebookapp/*
Contracts/* Contracts/*
.DS_Store .DS_Store

View File

@@ -76,7 +76,17 @@ Unit tests are located adjacent to the code under test and run with [Jest](https
#### End to End CI Tests #### End to End CI Tests
Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally: [Cypress](https://www.cypress.io/) is used for end to end tests, which live in `cypress/`. Currently, it operates as a sub-project with its own TypeScript config and dependencies, and it only runs against the emulator. To run the Cypress tests:
1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run Cypress headless (`npm run test`) or in interactive mode (`npm run test:debug`), as sketched below
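Put together, a minimal local run of the Cypress CI tests might look like this (a sketch assuming the emulator is already running on its default port):

```bash
# Terminal 1: serve cosmos explorer against the emulator
PLATFORM=Emulator npm run watch

# Terminal 2: install the Cypress sub-project dependencies and run the specs
cd cypress
npm install
npm run test        # headless run
npm run test:debug  # or open the interactive Cypress UI instead
```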
#### End to End Production Tests
Jest and Puppeteer are used for the end to end production runners, which are contained in `test/`. To run these tests locally:
1. Copy .env.example to .env 1. Copy .env.example to .env
2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values) 2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)

View File

@@ -1,3 +1,3 @@
module.exports = { module.exports = {
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"], presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
}; };

4
cypress/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos

51
cypress/cleanup.js Normal file
View File

@@ -0,0 +1,51 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");
// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");
async function cleanup() {
const connectionString = process.env.CYPRESS_CONNECTION_STRING;
if (!connectionString) {
throw new Error("Connection string not provided");
}
let client;
switch (true) {
case connectionString.includes("mongodb://"): {
const [, key, accountName] = connectionString.match(mongoRegex);
client = new CosmosClient({
key,
endpoint: `https://${accountName}.documents.azure.com:443/`
});
break;
}
// TODO: Add support for other API connection strings
default:
client = new CosmosClient(connectionString);
break;
}
const response = await client.databases.readAll().fetchAll();
return Promise.all(
response.resources.map(async db => {
const dbTimestamp = new Date(db._ts * 1000);
const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
if (dbTimestamp < twentyMinutesAgo) {
await client.database(db.id).delete();
console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
} else {
console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
}
})
);
}
cleanup()
.then(() => {
process.exit(0);
})
.catch(error => {
console.error(error);
process.exit(1);
});
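A hedged note on invoking this script locally: it only reads `CYPRESS_CONNECTION_STRING` from the environment, so a one-off run from the repository root could look like the sketch below (the connection string is a placeholder, and exactly how CI calls the script is an assumption).

```bash
# Hypothetical manual invocation of the cleanup script; the variable name
# matches what cleanup.js reads, the value is a placeholder.
CYPRESS_CONNECTION_STRING="AccountEndpoint=https://<account>.documents.azure.com:443/;AccountKey=<key>;" \
  node cypress/cleanup.js
```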

15
cypress/cypress.json Normal file
View File

@@ -0,0 +1,15 @@
{
"integrationFolder": "./integration",
"pluginsFile": false,
"fixturesFolder": false,
"supportFile": "./support/index.js",
"defaultCommandTimeout": 90000,
"chromeWebSecurity": false,
"reporter": "mochawesome",
"reporterOptions": {
"reportDir": "cypress/report",
"json": true,
"overwrite": false,
"html": false
}
}

View File

@@ -0,0 +1,66 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Cassandra API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
});
it("Create a new table in Cassandra API", () => {
const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
const tableId = `TableId112`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[id="keyspace-id"]')
.should("be.visible")
.type(keyspaceId);
cy.wrap($body)
.find('input[class="textfontclr"]')
.type(tableId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", tableId);
});
});
});

View File

@@ -0,0 +1,81 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Graph API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.graph);
});
it("Create a new graph in Graph API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Graph"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.should("be.visible")
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(graphId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(partitionKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", graphId);
});
});
});

View File

@@ -0,0 +1,80 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,96 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in Mongo API - Autopilot", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('div[class="throughputModeContainer"]')
.should("be.visible")
.and(input => {
expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
expect(input.get(1).textContent, "second item").contains("Manual");
});
cy.wrap($body)
.find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
.check();
cy.wrap($body)
.find('select[name="autoPilotTiers"]')
// .eq(1).should('contain', '4,000 RU/s');
// // .select('4,000 RU/s').should('have.value', '1');
.find('option[value="2"]')
.then($element => $element.get(1).setAttribute("selected", "selected"));
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,67 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in existing database in Mongo API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('span[class="nodeLabel"]')
.should("be.visible")
.then($span => {
const dbId1 = $span.text();
cy.log("DBBB", dbId1);
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.type(dbId1);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.click()
.should("contain", collectionId);
});
});
});
});

View File

@@ -0,0 +1,203 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context.skip("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API - Provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab2"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab1"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,79 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("SQL API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new container in SQL API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
connectionString.loginUsingConnectionString();
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Container"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId);
});
});
});

View File

@@ -0,0 +1,60 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Table API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.table);
});
it("Create a new table in Table API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,55 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 6. Create a database WITH "Provision throughput" checked.
// 7. Enter a minimum throughput value of 400.
// 8. Enter the container id into the container id text box.
// 9. Enter the partition key into the partition key text box.
// 10. Click "OK" to create a new container.
// 11. Verify the new container is created along with the database id and appears in the Data Explorer list on the left side of the screen.
let crypt = require("crypto");
context("Emulator - createDatabase", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;
it("Create a new collection", () => {
cy.contains("New Container").click();
// cy.contains(collectionIdTitle);
cy.get(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.get('input[data-test="addCollection-createNewDatabase"]').check();
cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);
cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);
cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");
cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
cy.get('input[data-test="addCollection-createCollection"]').click();
cy.get('div[data-test="resourceTreeId"]').should("exist");
cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);
cy.get('div[data-test="databaseList"]').should("contain", collectionId);
});
});

View File

@@ -0,0 +1,65 @@
// 1. Click on "New Database" on the command bar
// 2. A pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. When the checkbox is marked, a new input with a throughput control lets you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have a list item called "Scale" which, once clicked, shows the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permitted range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab, reopen it, and verify that the input contains the last saved value.
const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;
context("Emulator - Create database -> container -> item", () => {
beforeEach(async () => {
const { resources } = await client.databases.readAll().fetchAll();
for (const database of resources) {
await client.database(database.id).delete();
}
});
it("creates a new database", () => {
cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
cy.contains("New Container").click();
cy.get("[data-test=addCollection-newDatabaseId]").click();
cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
cy.get("[data-test=addCollection-collectionId]").click();
cy.get("[data-test=addCollection-collectionId]").type(collectionId);
cy.get("[data-test=addCollection-partitionKeyValue]").click();
cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
cy.get('input[name="createCollection"]').click();
cy.get(".dataResourceTree").should("contain", databaseId);
cy.get(".dataResourceTree")
.contains(databaseId)
.click();
cy.get(".dataResourceTree").should("contain", collectionId);
cy.get(".dataResourceTree")
.contains(collectionId)
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("New Item")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("Save")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
});
});

View File

@@ -0,0 +1,46 @@
// 1. Click the last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select the "Delete Container" option in the dropdown
// 5. On selection, the Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click OK
// 7. Now the resource tree refreshes and the deleted collection should not appear under the database
let crypt = require("crypto");
context("Emulator - deleteCollection", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
it("Delete a collection", () => {
cy.get(".databaseId")
.last()
.click();
cy.get(".collectionList")
.last()
.then($id => {
const collectionId = $id.text();
cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");
cy.get('span[data-test="collectionEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="collectionContextMenu"]')
.contains("Delete Container")
.click({ force: true });
cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());
cy.get('input[data-test="deleteCollection"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);
cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
});
});
});

View File

@@ -0,0 +1,83 @@
// 1. Click the last database in the resource tree
// 2. Select the context menu within the database
// 3. Select the "Delete Database" option in the dropdown
// 4. On selection, the Delete Database pane opens on the right side
// 5. Enter the same database id that is to be deleted and click OK
// 6. Now the resource tree refreshes and the deleted database should not appear in the resource tree
let crypt = require("crypto");
context("Emulator - deleteDatabase", () => {
beforeEach(() => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
let db_rid = "";
const date = new Date().toUTCString();
let authToken = "";
cy.visit("http://localhost:1234/explorer.html");
// Creating auth token for collection creation
cy.request({
method: "GET",
url: "https://localhost:8081/_explorer/authorization/post/dbs/",
headers: {
"x-ms-date": date,
authorization: "-"
}
})
.then(response => {
authToken = response.body.Token; // Getting auth token for collection creation
return new Cypress.Promise((resolve, reject) => {
return resolve();
});
})
.then(() => {
cy.request({
method: "POST",
url: "https://localhost:8081/dbs",
headers: {
"x-ms-date": date,
authorization: authToken,
"x-ms-version": "2018-12-31"
},
body: {
id: dbId
}
}).then(response => {
cy.log("Response", response);
db_rid = response.body._rid;
return new Cypress.Promise((resolve, reject) => {
cy.log("Rid", db_rid);
return resolve();
});
});
});
});
it("Delete a database", () => {
cy.get('span[data-test="refreshTree"]').click();
cy.get(".databaseId")
.last()
.then($id => {
const dbId = $id.text();
cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");
cy.get('span[data-test="databaseEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="databaseContextMenu"]')
.contains("Delete Database")
.click({ force: true });
cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());
cy.get('input[data-test="deleteDatabase"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
});
});
});

View File

@@ -0,0 +1,35 @@
# Notebook end-to-end tests
This describes how to run the tests locally
## Stand up a local notebook container instance:
Instructions on how to build and run the container [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)
## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).
Make sure you can run Data Explorer locally from the web browser.
## Run cypress tests
1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```
To run in Debug mode:
```bash
npm run test:debug
```
This opens the Cypress UI.
## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).
## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)

View File

@@ -0,0 +1,93 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create a new notebook and run some code", () => {
// Create new notebook
cy.contains("New Notebook").click();
// Check tab name
cy.get("li.tabList .tabNavText").should($span => {
const text = $span.text();
expect(text).to.match(/^Untitled.*\.ipynb$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.click();
// Type in some code
cy.get("@cellContainer").type("2+4");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "6");
});
// Restart kernel
cy.get('[data-test="Run"] button')
.eq(-1)
.click();
cy.get("li")
.contains("Restart Kernel")
.click();
// Wait for python3 | restarting status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*restarting$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.find(".input")
.as("codeInput")
.click();
// Type in some code
cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "9");
});
});
});

View File

@@ -0,0 +1,172 @@
context("Resource tree notebook file manipulation", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
const clickContextMenuAndSelectOption = (nodeLabel, option) => {
cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains(option)
.click();
};
const createFolder = folder => {
clickContextMenuAndSelectOption("My Notebooks/", "New Directory");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]').type(folder);
cy.get("form").submit();
});
};
const deleteItem = nodeName => {
clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create and remove a directory", () => {
const folder = "e2etest_folder1";
createFolder(folder);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
deleteItem(`${folder}/`);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
});
it("Create and rename a directory", () => {
const folder = "e2etest_folder2";
const renamedFolder = "e2etest_folder2_renamed";
createFolder(folder);
// Rename
clickContextMenuAndSelectOption(`${folder}/`, "Rename");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedFolder);
cy.get("form").submit();
});
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
deleteItem(`${renamedFolder}/`);
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
});
it("Create a notebook inside a directory", () => {
const folder = "e2etest_folder3";
const newNotebookName = "Untitled.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Verify tab is open
cy.get(".tabList")
.contains(newNotebookName)
.should("exist");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
// When running from command line, closing the tab is too fast
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
deleteItem(`${folder}/`);
});
it("Create and rename a notebook inside a directory", () => {
const folder = "e2etest_folder4";
const newNotebookName = "Untitled.ipynb";
const renamedNotebookName = "mynotebook.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Rename notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Rename")
.click();
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedNotebookName);
cy.get("form").submit();
});
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
// Give it time to settle
cy.wait(1000);
deleteItem(`${folder}/`);
});
});

3066
cypress/package-lock.json generated Normal file

File diff suppressed because it is too large

25
cypress/package.json Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "cosmos-explorer-cypress",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "cypress run",
"wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
"test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
"test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
"test:debug": "cypress open"
},
"devDependencies": {
"cypress": "^4.8.0",
"mocha": "^7.0.1",
"mochawesome": "^4.1.0",
"mochawesome-merge": "^4.0.1",
"mochawesome-report-generator": "^4.1.0",
"typescript": "3.4.3",
"wait-on": "^4.0.2"
},
"dependencies": {
"@microsoft/applicationinsights-web": "^2.5.2"
}
}

23
cypress/support/index.js Normal file
View File

@@ -0,0 +1,23 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");
const appInsights = new appInsightsLib.ApplicationInsights({
config: {
instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
/* ...Other Configuration Options... */
}
});
appInsights.loadAppInsights();
Cypress.on("fail", (error, runnable) => {
// App Insights will record the failed tests for Create Collection
let message = JSON.stringify(runnable.title);
appInsights.trackTrace({
message: `${message}`,
properties: {
passed: false,
error: error
}
});
throw error; // throw error to have test still fail
});

11
cypress/tsconfig.json Normal file
View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"strict": true,
"noEmit": true,
"module": "commonjs",
"target": "es5",
"lib": ["es5", "dom", "es6"],
"types": ["cypress", "node"]
},
"include": ["**/*.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,41 @@
module.exports = {
loginUsingConnectionString: function() {
const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
const timeout = 15000;
cy.visit(prodUrl);
cy.get('iframe[id="explorerMenu"]').should("be.visible");
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find("#connectExplorer")
.should("exist")
.find("div[class='connectExplorer']")
.should("exist")
.find("p[class='welcomeText']")
.should("exist");
cy.wrap($body.find("div > p.switchConnectTypeText"))
.should("exist")
.last()
.click({ force: true });
const secret = Cypress.env("CONNECTION_STRING");
cy.wrap($body)
.find("input[class='inputToken']")
.should("exist")
.type(secret, {
force: true
});
cy.wrap($body.find("input[value='Connect']"), { timeout })
.first()
.click({ force: true });
cy.wait(15000);
});
}
};
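For reference, `Cypress.env("CONNECTION_STRING")` and `Cypress.env("TEST_ENDPOINT")` can be supplied either through a git-ignored `cypress.env.json` or through `CYPRESS_`-prefixed environment variables (Cypress strips the prefix). A sketch of the env-var route, with placeholder values only:

```bash
# Placeholder values; Cypress exposes these as
# Cypress.env("CONNECTION_STRING") and Cypress.env("TEST_ENDPOINT").
export CYPRESS_CONNECTION_STRING="AccountEndpoint=https://<account>.documents.azure.com:443/;AccountKey=<key>;"
export CYPRESS_TEST_ENDPOINT="https://localhost:1234/hostedExplorer.html"
npm run test:sql   # run from the cypress/ folder
```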

View File

@@ -0,0 +1,6 @@
const { CosmosClient } = require("@azure/cosmos");
module.exports = new CosmosClient({
endpoint: "https://0.0.0.0:8081",
key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});

View File

@@ -6,6 +6,6 @@ module.exports = {
slowMo: 55, slowMo: 55,
defaultViewport: null, defaultViewport: null,
ignoreHTTPSErrors: true, ignoreHTTPSErrors: true,
args: ["--disable-web-security"], args: ["--disable-web-security"]
}, }
}; };

View File

@@ -1,5 +1,5 @@
module.exports = { module.exports = {
preset: "jest-puppeteer", preset: "jest-puppeteer",
testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"], testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
setupFiles: ["dotenv/config"], setupFiles: ["dotenv/config"]
}; };

View File

@@ -42,8 +42,8 @@ module.exports = {
branches: 20, branches: 20,
functions: 24, functions: 24,
lines: 30, lines: 30,
statements: 29.0, statements: 29.0
}, }
}, },
// Make calling deprecated APIs throw helpful error messages // Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes "office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs", "^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs", "^react-dnd$": "react-dnd/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs", "^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
}, },
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers // A map from regular expressions to paths to transformers
transform: { transform: {
"^.+\\.html?$": "html-loader-jest", "^.+\\.html?$": "html-loader-jest",
"^.+\\.[t|j]sx?$": "babel-jest", "^.+\\.[t|j]sx?$": "babel-jest"
}, },
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/", "/externals/"], transformIgnorePatterns: ["/node_modules/", "/externals/"]
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined, // unmockedModulePathPatterns: undefined,

3668
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -4,10 +4,8 @@
"description": "Cosmos Explorer", "description": "Cosmos Explorer",
"main": "index.js", "main": "index.js",
"dependencies": { "dependencies": {
"@azure/arm-cosmosdb": "9.1.0",
"@azure/cosmos": "3.9.0", "@azure/cosmos": "3.9.0",
"@azure/identity": "1.1.0", "@azure/cosmos-language-service": "0.0.4",
"@azure/cosmos-language-service": "0.0.5",
"@jupyterlab/services": "6.0.0-rc.2", "@jupyterlab/services": "6.0.0-rc.2",
"@jupyterlab/terminal": "3.0.0-rc.2", "@jupyterlab/terminal": "3.0.0-rc.2",
"@microsoft/applicationinsights-web": "2.5.9", "@microsoft/applicationinsights-web": "2.5.9",
@@ -68,7 +66,7 @@
"jquery-ui-dist": "1.12.1", "jquery-ui-dist": "1.12.1",
"knockout": "3.5.1", "knockout": "3.5.1",
"mkdirp": "1.0.4", "mkdirp": "1.0.4",
"monaco-editor": "0.18.1", "monaco-editor": "0.15.6",
"object.entries": "1.1.0", "object.entries": "1.1.0",
"office-ui-fabric-react": "7.134.1", "office-ui-fabric-react": "7.134.1",
"p-retry": "4.2.0", "p-retry": "4.2.0",
@@ -117,7 +115,7 @@
"@types/prop-types": "15.5.8", "@types/prop-types": "15.5.8",
"@types/puppeteer": "3.0.1", "@types/puppeteer": "3.0.1",
"@types/q": "1.5.1", "@types/q": "1.5.1",
"@types/react": "16.9.56", "@types/react": "16.9.49",
"@types/react-dom": "16.0.7", "@types/react-dom": "16.0.7",
"@types/react-notification-system": "0.2.39", "@types/react-notification-system": "0.2.39",
"@types/react-redux": "7.1.7", "@types/react-redux": "7.1.7",
@@ -160,7 +158,7 @@
"mini-css-extract-plugin": "0.4.3", "mini-css-extract-plugin": "0.4.3",
"monaco-editor-webpack-plugin": "1.7.0", "monaco-editor-webpack-plugin": "1.7.0",
"node-fetch": "2.6.1", "node-fetch": "2.6.1",
"prettier": "2.2.1", "prettier": "1.19.1",
"puppeteer": "4.0.0", "puppeteer": "4.0.0",
"raw-loader": "0.5.1", "raw-loader": "0.5.1",
"rimraf": "3.0.0", "rimraf": "3.0.0",
@@ -170,7 +168,7 @@
"ts-loader": "6.2.2", "ts-loader": "6.2.2",
"tslint": "5.11.0", "tslint": "5.11.0",
"tslint-microsoft-contrib": "6.0.0", "tslint-microsoft-contrib": "6.0.0",
"typescript": "4.1.2", "typescript": "4.0.2",
"url-loader": "1.1.1", "url-loader": "1.1.1",
"wait-on": "4.0.2", "wait-on": "4.0.2",
"webpack": "4.43.0", "webpack": "4.43.0",
@@ -196,8 +194,8 @@
"compile": "tsc", "compile": "tsc",
"compile:contracts": "tsc -p ./tsconfig.contracts.json", "compile:contracts": "tsc -p ./tsconfig.contracts.json",
"compile:strict": "tsc -p ./tsconfig.strict.json", "compile:strict": "tsc -p ./tsconfig.strict.json",
"format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"", "format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"", "format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"", "lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
"build:contracts": "npm run compile:contracts", "build:contracts": "npm run compile:contracts",
"strictEligibleFiles": "node ./strict-migration-tools/index.js", "strictEligibleFiles": "node ./strict-migration-tools/index.js",

View File

@@ -3,6 +3,7 @@
"offerThroughput": 400, "offerThroughput": 400,
"databaseLevelThroughput": false, "databaseLevelThroughput": false,
"collectionId": "Persons", "collectionId": "Persons",
"rupmEnabled": false,
"partitionKey": { "kind": "Hash", "paths": ["/name"] }, "partitionKey": { "kind": "Hash", "paths": ["/name"] },
"data": [ "data": [
"g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)", "g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)",
@@ -12,4 +13,4 @@
"g.V('1').addE('knows').to(g.V('2')).outV().addE('knows').to(g.V('3'))", "g.V('1').addE('knows').to(g.V('2')).outV().addE('knows').to(g.V('3'))",
"g.V('3').addE('knows').to(g.V('4'))" "g.V('3').addE('knows').to(g.V('4'))"
] ]
} }

View File

@@ -1,6 +1,6 @@
export enum AuthType { export enum AuthType {
AAD = "aad", AAD = "aad",
EncryptedToken = "encryptedtoken", EncryptedToken = "encryptedtoken",
MasterKey = "masterkey", MasterKey = "masterkey",
ResourceToken = "resourcetoken", ResourceToken = "resourcetoken"
} }

View File

@@ -1,21 +1,21 @@
import * as ko from "knockout"; import * as ko from "knockout";
import * as ReactBindingHandler from "./ReactBindingHandler"; import * as ReactBindingHandler from "./ReactBindingHandler";
export class BindingHandlersRegisterer { export class BindingHandlersRegisterer {
public static registerBindingHandlers() { public static registerBindingHandlers() {
ko.bindingHandlers.setTemplateReady = { ko.bindingHandlers.setTemplateReady = {
init( init(
element: any, element: any,
wrappedValueAccessor: () => any, wrappedValueAccessor: () => any,
allBindings?: ko.AllBindings, allBindings?: ko.AllBindings,
viewModel?: any, viewModel?: any,
bindingContext?: ko.BindingContext bindingContext?: ko.BindingContext
) { ) {
const value = ko.unwrap(wrappedValueAccessor()); const value = ko.unwrap(wrappedValueAccessor());
bindingContext?.$data.isTemplateReady(value); bindingContext?.$data.isTemplateReady(value);
}, }
} as ko.BindingHandler; } as ko.BindingHandler;
ReactBindingHandler.Registerer.register(); ReactBindingHandler.Registerer.register();
} }
} }

View File

@@ -42,7 +42,7 @@ export class Registerer {
// Initial rendering at mount point // Initial rendering at mount point
ReactDOM.render(adapter.renderComponent(), element); ReactDOM.render(adapter.renderComponent(), element);
}, }
} as ko.BindingHandler; } as ko.BindingHandler;
} }
} }

View File

@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) { public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key); const values = this.store.get(key);
if (values) { if (values) {
values.forEach((value) => iteratorFct(value)); values.forEach(value => iteratorFct(value));
} }
} }

View File

@@ -1,432 +1,439 @@
import { HashMap } from "./HashMap"; import { HashMap } from "./HashMap";
export class AuthorizationEndpoints { export class AuthorizationEndpoints {
public static arm: string = "https://management.core.windows.net/"; public static arm: string = "https://management.core.windows.net/";
public static common: string = "https://login.windows.net/"; public static common: string = "https://login.windows.net/";
} }
export class CodeOfConductEndpoints { export class CodeOfConductEndpoints {
public static privacyStatement: string = "https://aka.ms/ms-privacy-policy"; public static privacyStatement: string = "https://aka.ms/ms-privacy-policy";
public static codeOfConduct: string = "https://aka.ms/cosmos-code-of-conduct"; public static codeOfConduct: string = "https://aka.ms/cosmos-code-of-conduct";
public static termsOfUse: string = "https://aka.ms/ms-terms-of-use"; public static termsOfUse: string = "https://aka.ms/ms-terms-of-use";
} }
export class EndpointsRegex { export class EndpointsRegex {
public static readonly cassandra = [ public static readonly cassandra = [
"AccountEndpoint=(.*).cassandra.cosmosdb.azure.com", "AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com", "HostName=(.*).cassandra.cosmos.azure.com"
]; ];
public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com"; public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com"; public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
public static readonly sql = "AccountEndpoint=https://(.*).documents.azure.com"; public static readonly sql = "AccountEndpoint=https://(.*).documents.azure.com";
public static readonly table = "TableEndpoint=https://(.*).table.cosmosdb.azure.com"; public static readonly table = "TableEndpoint=https://(.*).table.cosmosdb.azure.com";
} }
export class ApiEndpoints { export class ApiEndpoints {
public static runtimeProxy: string = "/api/RuntimeProxy"; public static runtimeProxy: string = "/api/RuntimeProxy";
public static guestRuntimeProxy: string = "/api/guest/RuntimeProxy"; public static guestRuntimeProxy: string = "/api/guest/RuntimeProxy";
} }
export class ServerIds { export class ServerIds {
public static localhost: string = "localhost"; public static localhost: string = "localhost";
public static blackforest: string = "blackforest"; public static blackforest: string = "blackforest";
public static fairfax: string = "fairfax"; public static fairfax: string = "fairfax";
public static mooncake: string = "mooncake"; public static mooncake: string = "mooncake";
public static productionPortal: string = "prod"; public static productionPortal: string = "prod";
public static dev: string = "dev"; public static dev: string = "dev";
} }
export class ArmApiVersions { export class ArmApiVersions {
public static readonly documentDB: string = "2015-11-06"; public static readonly documentDB: string = "2015-11-06";
public static readonly arcadia: string = "2019-06-01-preview"; public static readonly arcadia: string = "2019-06-01-preview";
public static readonly arcadiaLivy: string = "2019-11-01-preview"; public static readonly arcadiaLivy: string = "2019-11-01-preview";
public static readonly arm: string = "2015-11-01"; public static readonly arm: string = "2015-11-01";
public static readonly armFeatures: string = "2014-08-01-preview"; public static readonly armFeatures: string = "2014-08-01-preview";
public static readonly publicVersion = "2020-04-01"; public static readonly publicVersion = "2020-04-01";
} }
export class ArmResourceTypes { export class ArmResourceTypes {
public static readonly notebookWorkspaces = "Microsoft.DocumentDB/databaseAccounts/notebookWorkspaces"; public static readonly notebookWorkspaces = "Microsoft.DocumentDB/databaseAccounts/notebookWorkspaces";
public static readonly synapseWorkspaces = "Microsoft.Synapse/workspaces"; public static readonly synapseWorkspaces = "Microsoft.Synapse/workspaces";
} }
export class BackendDefaults { export class BackendDefaults {
public static partitionKeyKind: string = "Hash"; public static partitionKeyKind: string = "Hash";
public static singlePartitionStorageInGb: string = "10"; public static singlePartitionStorageInGb: string = "10";
public static multiPartitionStorageInGb: string = "100"; public static multiPartitionStorageInGb: string = "100";
public static maxChangeFeedRetentionDuration: number = 10; public static maxChangeFeedRetentionDuration: number = 10;
public static partitionKeyVersion = 2; public static partitionKeyVersion = 2;
} }
export class ClientDefaults { export class ClientDefaults {
public static requestTimeoutMs: number = 60000; public static requestTimeoutMs: number = 60000;
public static portalCacheTimeoutMs: number = 10000; public static portalCacheTimeoutMs: number = 10000;
public static errorNotificationTimeoutMs: number = 5000; public static errorNotificationTimeoutMs: number = 5000;
public static copyHelperTimeoutMs: number = 2000; public static copyHelperTimeoutMs: number = 2000;
public static waitForDOMElementMs: number = 500; public static waitForDOMElementMs: number = 500;
public static cacheBustingTimeoutMs: number = public static cacheBustingTimeoutMs: number =
10 /** minutes **/ * 60 /** to seconds **/ * 1000 /** to milliseconds **/; 10 /** minutes **/ * 60 /** to seconds **/ * 1000 /** to milliseconds **/;
public static databaseThroughputIncreaseFactor: number = 100; public static databaseThroughputIncreaseFactor: number = 100;
public static readonly arcadiaTokenRefreshInterval: number = public static readonly arcadiaTokenRefreshInterval: number =
20 /** minutes **/ * 60 /** to seconds **/ * 1000 /** to milliseconds **/; 20 /** minutes **/ * 60 /** to seconds **/ * 1000 /** to milliseconds **/;
public static readonly arcadiaTokenRefreshIntervalPaddingMs: number = 2000; public static readonly arcadiaTokenRefreshIntervalPaddingMs: number = 2000;
} }
export class AccountKind { export class AccountKind {
public static DocumentDB: string = "DocumentDB"; public static DocumentDB: string = "DocumentDB";
public static MongoDB: string = "MongoDB"; public static MongoDB: string = "MongoDB";
public static Parse: string = "Parse"; public static Parse: string = "Parse";
public static GlobalDocumentDB: string = "GlobalDocumentDB"; public static GlobalDocumentDB: string = "GlobalDocumentDB";
public static Default: string = AccountKind.DocumentDB; public static Default: string = AccountKind.DocumentDB;
} }
export class CorrelationBackend { export class CorrelationBackend {
public static Url: string = "https://aka.ms/cosmosdbanalytics"; public static Url: string = "https://aka.ms/cosmosdbanalytics";
} }
export class DefaultAccountExperience { export class DefaultAccountExperience {
public static DocumentDB: string = "DocumentDB"; public static DocumentDB: string = "DocumentDB";
public static Graph: string = "Graph"; public static Graph: string = "Graph";
public static MongoDB: string = "MongoDB"; public static MongoDB: string = "MongoDB";
public static ApiForMongoDB: string = "Azure Cosmos DB for MongoDB API"; public static ApiForMongoDB: string = "Azure Cosmos DB for MongoDB API";
public static Table: string = "Table"; public static Table: string = "Table";
public static Cassandra: string = "Cassandra"; public static Cassandra: string = "Cassandra";
public static Default: string = DefaultAccountExperience.DocumentDB; public static Default: string = DefaultAccountExperience.DocumentDB;
} }
export class CapabilityNames { export class CapabilityNames {
public static EnableTable: string = "EnableTable"; public static EnableTable: string = "EnableTable";
public static EnableGremlin: string = "EnableGremlin"; public static EnableGremlin: string = "EnableGremlin";
public static EnableCassandra: string = "EnableCassandra"; public static EnableCassandra: string = "EnableCassandra";
public static EnableAutoScale: string = "EnableAutoScale"; public static EnableAutoScale: string = "EnableAutoScale";
public static readonly EnableNotebooks: string = "EnableNotebooks"; public static readonly EnableNotebooks: string = "EnableNotebooks";
public static readonly EnableStorageAnalytics: string = "EnableStorageAnalytics"; public static readonly EnableStorageAnalytics: string = "EnableStorageAnalytics";
public static readonly EnableMongo: string = "EnableMongo"; public static readonly EnableMongo: string = "EnableMongo";
public static readonly EnableServerless: string = "EnableServerless"; public static readonly EnableServerless: string = "EnableServerless";
} }
export class Features { export class Features {
public static readonly cosmosdb = "cosmosdb"; public static readonly cosmosdb = "cosmosdb";
public static readonly enableChangeFeedPolicy = "enablechangefeedpolicy"; public static readonly enableChangeFeedPolicy = "enablechangefeedpolicy";
public static readonly executeSproc = "dataexplorerexecutesproc"; public static readonly enableRupm = "enablerupm";
public static readonly hostedDataExplorer = "hosteddataexplorerenabled"; public static readonly executeSproc = "dataexplorerexecutesproc";
public static readonly enableTtl = "enablettl"; public static readonly hostedDataExplorer = "hosteddataexplorerenabled";
public static readonly enableNotebooks = "enablenotebooks"; public static readonly enableTtl = "enablettl";
public static readonly enableGalleryPublish = "enablegallerypublish"; public static readonly enableNotebooks = "enablenotebooks";
public static readonly enableCodeOfConduct = "enablecodeofconduct"; public static readonly enableGalleryPublish = "enablegallerypublish";
public static readonly enableLinkInjection = "enablelinkinjection"; public static readonly enableCodeOfConduct = "enablecodeofconduct";
public static readonly enableSpark = "enablespark"; public static readonly enableLinkInjection = "enablelinkinjection";
public static readonly livyEndpoint = "livyendpoint"; public static readonly enableSpark = "enablespark";
public static readonly notebookServerUrl = "notebookserverurl"; public static readonly livyEndpoint = "livyendpoint";
public static readonly notebookServerToken = "notebookservertoken"; public static readonly notebookServerUrl = "notebookserverurl";
public static readonly notebookBasePath = "notebookbasepath"; public static readonly notebookServerToken = "notebookservertoken";
public static readonly canExceedMaximumValue = "canexceedmaximumvalue"; public static readonly notebookBasePath = "notebookbasepath";
public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput"; public static readonly canExceedMaximumValue = "canexceedmaximumvalue";
public static readonly ttl90Days = "ttl90days"; public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
public static readonly enableRightPanelV2 = "enablerightpanelv2"; public static readonly ttl90Days = "ttl90days";
public static readonly enableSchema = "enableschema"; public static readonly enableRightPanelV2 = "enablerightpanelv2";
public static readonly enableSDKoperations = "enablesdkoperations"; public static readonly enableSDKoperations = "enablesdkoperations";
public static readonly showMinRUSurvey = "showminrusurvey"; }
}
// flight names returned from the portal are always lowercase
// flight names returned from the portal are always lowercase export class Flights {
export class Flights { public static readonly SettingsV2 = "settingsv2";
public static readonly SettingsV2 = "settingsv2"; public static readonly MongoIndexEditor = "mongoindexeditor";
public static readonly MongoIndexEditor = "mongoindexeditor"; }
}
export class AfecFeatures {
export class AfecFeatures { public static readonly Spark = "spark-public-preview";
public static readonly Spark = "spark-public-preview"; public static readonly Notebooks = "sparknotebooks-public-preview";
public static readonly Notebooks = "sparknotebooks-public-preview"; public static readonly StorageAnalytics = "storageanalytics-public-preview";
public static readonly StorageAnalytics = "storageanalytics-public-preview"; }
}
export class Spark {
export class Spark { public static readonly MaxWorkerCount = 10;
public static readonly MaxWorkerCount = 10; public static readonly SKUs: HashMap<string> = new HashMap({
public static readonly SKUs: HashMap<string> = new HashMap({ "Cosmos.Spark.D1s": "D1s / 1 core / 4GB RAM",
"Cosmos.Spark.D1s": "D1s / 1 core / 4GB RAM", "Cosmos.Spark.D2s": "D2s / 2 cores / 8GB RAM",
"Cosmos.Spark.D2s": "D2s / 2 cores / 8GB RAM", "Cosmos.Spark.D4s": "D4s / 4 cores / 16GB RAM",
"Cosmos.Spark.D4s": "D4s / 4 cores / 16GB RAM", "Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
"Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM", "Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
"Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM", "Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
"Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM", "Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
"Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM", });
}); }
}
export class TagNames {
export class TagNames { public static defaultExperience: string = "defaultExperience";
public static defaultExperience: string = "defaultExperience"; }
}
export class MongoDBAccounts {
export class MongoDBAccounts { public static protocol: string = "https";
public static protocol: string = "https"; public static defaultPort: string = "10255";
public static defaultPort: string = "10255"; }
}
export enum MongoBackendEndpointType {
export enum MongoBackendEndpointType { local,
local, remote
remote, }
}
// TODO: 435619 Add default endpoints per cloud and use regional only when available
// TODO: 435619 Add default endpoints per cloud and use regional only when available export class CassandraBackend {
export class CassandraBackend { public static readonly createOrDeleteApi: string = "api/cassandra/createordelete";
public static readonly createOrDeleteApi: string = "api/cassandra/createordelete"; public static readonly guestCreateOrDeleteApi: string = "api/guest/cassandra/createordelete";
public static readonly guestCreateOrDeleteApi: string = "api/guest/cassandra/createordelete"; public static readonly queryApi: string = "api/cassandra";
public static readonly queryApi: string = "api/cassandra"; public static readonly guestQueryApi: string = "api/guest/cassandra";
public static readonly guestQueryApi: string = "api/guest/cassandra"; public static readonly keysApi: string = "api/cassandra/keys";
public static readonly keysApi: string = "api/cassandra/keys"; public static readonly guestKeysApi: string = "api/guest/cassandra/keys";
public static readonly guestKeysApi: string = "api/guest/cassandra/keys"; public static readonly schemaApi: string = "api/cassandra/schema";
public static readonly schemaApi: string = "api/cassandra/schema"; public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
public static readonly guestSchemaApi: string = "api/guest/cassandra/schema"; }
}
export class RUPMStates {
export class Queries { public static on: string = "on";
public static CustomPageOption: string = "custom"; public static off: string = "off";
public static UnlimitedPageOption: string = "unlimited"; }
public static itemsPerPage: number = 100;
public static unlimitedItemsPerPage: number = 100; // TODO: Figure out appropriate value so it works for accounts with a large number of partitions export class Queries {
public static CustomPageOption: string = "custom";
public static QueryEditorMinHeightRatio: number = 0.1; public static UnlimitedPageOption: string = "unlimited";
public static QueryEditorMaxHeightRatio: number = 0.4; public static itemsPerPage: number = 100;
public static readonly DefaultMaxDegreeOfParallelism = 6; public static unlimitedItemsPerPage: number = 100; // TODO: Figure out appropriate value so it works for accounts with a large number of partitions
}
public static QueryEditorMinHeightRatio: number = 0.1;
export class SavedQueries { public static QueryEditorMaxHeightRatio: number = 0.4;
public static readonly CollectionName: string = "___Query"; public static readonly DefaultMaxDegreeOfParallelism = 6;
public static readonly DatabaseName: string = "___Cosmos"; }
public static readonly OfferThroughput: number = 400;
public static readonly PartitionKeyProperty: string = "id"; export class SavedQueries {
} public static readonly CollectionName: string = "___Query";
public static readonly DatabaseName: string = "___Cosmos";
export class DocumentsGridMetrics { public static readonly OfferThroughput: number = 400;
public static DocumentsPerPage: number = 100; public static readonly PartitionKeyProperty: string = "id";
public static IndividualRowHeight: number = 34; }
public static BufferHeight: number = 28;
public static SplitterMinWidth: number = 200; export class DocumentsGridMetrics {
public static SplitterMaxWidth: number = 360; public static DocumentsPerPage: number = 100;
public static IndividualRowHeight: number = 34;
public static DocumentEditorMinWidthRatio: number = 0.2; public static BufferHeight: number = 28;
public static DocumentEditorMaxWidthRatio: number = 0.4; public static SplitterMinWidth: number = 200;
} public static SplitterMaxWidth: number = 360;
export class ExplorerMetrics { public static DocumentEditorMinWidthRatio: number = 0.2;
public static SplitterMinWidth: number = 240; public static DocumentEditorMaxWidthRatio: number = 0.4;
public static SplitterMaxWidth: number = 400; }
public static CollapsedResourceTreeWidth: number = 36;
} export class ExplorerMetrics {
public static SplitterMinWidth: number = 240;
export class SplitterMetrics { public static SplitterMaxWidth: number = 400;
public static CollapsedPositionLeft: number = ExplorerMetrics.CollapsedResourceTreeWidth; public static CollapsedResourceTreeWidth: number = 36;
} }
export class Areas { export class SplitterMetrics {
public static ResourceTree: string = "Resource Tree"; public static CollapsedPositionLeft: number = ExplorerMetrics.CollapsedResourceTreeWidth;
public static ContextualPane: string = "Contextual Pane"; }
public static Tab: string = "Tab";
public static ShareDialog: string = "Share Access Dialog"; export class Areas {
public static Notebook: string = "Notebook"; public static ResourceTree: string = "Resource Tree";
} public static ContextualPane: string = "Contextual Pane";
public static Tab: string = "Tab";
export class HttpHeaders { public static ShareDialog: string = "Share Access Dialog";
public static activityId: string = "x-ms-activity-id"; public static Notebook: string = "Notebook";
public static apiType: string = "x-ms-cosmos-apitype"; }
public static authorization: string = "authorization";
public static collectionIndexTransformationProgress: string = export class HttpHeaders {
"x-ms-documentdb-collection-index-transformation-progress"; public static activityId: string = "x-ms-activity-id";
public static continuation: string = "x-ms-continuation"; public static apiType: string = "x-ms-cosmos-apitype";
public static correlationRequestId: string = "x-ms-correlation-request-id"; public static authorization: string = "authorization";
public static enableScriptLogging: string = "x-ms-documentdb-script-enable-logging"; public static collectionIndexTransformationProgress: string =
public static guestAccessToken: string = "x-ms-encrypted-auth-token"; "x-ms-documentdb-collection-index-transformation-progress";
public static getReadOnlyKey: string = "x-ms-get-read-only-key"; public static continuation: string = "x-ms-continuation";
public static connectionString: string = "x-ms-connection-string"; public static correlationRequestId: string = "x-ms-correlation-request-id";
public static msDate: string = "x-ms-date"; public static enableScriptLogging: string = "x-ms-documentdb-script-enable-logging";
public static location: string = "Location"; public static guestAccessToken: string = "x-ms-encrypted-auth-token";
public static contentType: string = "Content-Type"; public static getReadOnlyKey: string = "x-ms-get-read-only-key";
public static offerReplacePending: string = "x-ms-offer-replace-pending"; public static connectionString: string = "x-ms-connection-string";
public static user: string = "x-ms-user"; public static msDate: string = "x-ms-date";
public static populatePartitionStatistics: string = "x-ms-documentdb-populatepartitionstatistics"; public static location: string = "Location";
public static queryMetrics: string = "x-ms-documentdb-query-metrics"; public static contentType: string = "Content-Type";
public static requestCharge: string = "x-ms-request-charge"; public static offerReplacePending: string = "x-ms-offer-replace-pending";
public static resourceQuota: string = "x-ms-resource-quota"; public static user: string = "x-ms-user";
public static resourceUsage: string = "x-ms-resource-usage"; public static populatePartitionStatistics: string = "x-ms-documentdb-populatepartitionstatistics";
public static retryAfterMs: string = "x-ms-retry-after-ms"; public static queryMetrics: string = "x-ms-documentdb-query-metrics";
public static scriptLogResults: string = "x-ms-documentdb-script-log-results"; public static requestCharge: string = "x-ms-request-charge";
public static populateCollectionThroughputInfo = "x-ms-documentdb-populatecollectionthroughputinfo"; public static resourceQuota: string = "x-ms-resource-quota";
public static supportSpatialLegacyCoordinates = "x-ms-documentdb-supportspatiallegacycoordinates"; public static resourceUsage: string = "x-ms-resource-usage";
public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere"; public static retryAfterMs: string = "x-ms-retry-after-ms";
public static autoPilotThroughput = "autoscaleSettings"; public static scriptLogResults: string = "x-ms-documentdb-script-log-results";
public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings"; public static populateCollectionThroughputInfo = "x-ms-documentdb-populatecollectionthroughputinfo";
public static partitionKey: string = "x-ms-documentdb-partitionkey"; public static supportSpatialLegacyCoordinates = "x-ms-documentdb-supportspatiallegacycoordinates";
public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput"; public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere";
public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot"; public static autoPilotThroughput = "autoscaleSettings";
} public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings";
public static partitionKey: string = "x-ms-documentdb-partitionkey";
export class ApiType { public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput";
// Mapped to hexadecimal values in the backend public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot";
public static readonly MongoDB: number = 1; }
public static readonly Gremlin: number = 2;
public static readonly Cassandra: number = 4; export class ApiType {
public static readonly Table: number = 8; // Mapped to hexadecimal values in the backend
public static readonly SQL: number = 16; public static readonly MongoDB: number = 1;
} public static readonly Gremlin: number = 2;
public static readonly Cassandra: number = 4;
export class HttpStatusCodes { public static readonly Table: number = 8;
public static readonly OK: number = 200; public static readonly SQL: number = 16;
public static readonly Created: number = 201; }
public static readonly Accepted: number = 202;
public static readonly NoContent: number = 204; export class HttpStatusCodes {
public static readonly NotModified: number = 304; public static readonly OK: number = 200;
public static readonly Unauthorized: number = 401; public static readonly Created: number = 201;
public static readonly Forbidden: number = 403; public static readonly Accepted: number = 202;
public static readonly NotFound: number = 404; public static readonly NoContent: number = 204;
public static readonly TooManyRequests: number = 429; public static readonly NotModified: number = 304;
public static readonly Conflict: number = 409; public static readonly Unauthorized: number = 401;
public static readonly Forbidden: number = 403;
public static readonly InternalServerError: number = 500; public static readonly NotFound: number = 404;
public static readonly BadGateway: number = 502; public static readonly TooManyRequests: number = 429;
public static readonly ServiceUnavailable: number = 503; public static readonly Conflict: number = 409;
public static readonly GatewayTimeout: number = 504;
public static readonly InternalServerError: number = 500;
public static readonly RetryableStatusCodes: number[] = [ public static readonly BadGateway: number = 502;
HttpStatusCodes.TooManyRequests, public static readonly ServiceUnavailable: number = 503;
HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list public static readonly GatewayTimeout: number = 504;
HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable, public static readonly RetryableStatusCodes: number[] = [
HttpStatusCodes.GatewayTimeout, HttpStatusCodes.TooManyRequests,
]; HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
} HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable,
export class Urls { HttpStatusCodes.GatewayTimeout
public static feedbackEmail = "https://aka.ms/cosmosdbfeedback?subject=Cosmos%20DB%20Data%20Explorer%20Feedback"; ];
public static autoscaleMigration = "https://aka.ms/cosmos-autoscale-migration"; }
public static freeTierInformation = "https://aka.ms/cosmos-free-tier";
public static cosmosPricing = "https://aka.ms/azure-cosmos-db-pricing"; export class Urls {
} public static feedbackEmail = "https://aka.ms/cosmosdbfeedback?subject=Cosmos%20DB%20Data%20Explorer%20Feedback";
public static autoscaleMigration = "https://aka.ms/cosmos-autoscale-migration";
export class HashRoutePrefixes { public static freeTierInformation = "https://aka.ms/cosmos-free-tier";
public static databases: string = "/dbs/{db_id}"; public static cosmosPricing = "https://aka.ms/azure-cosmos-db-pricing";
public static collections: string = "/dbs/{db_id}/colls/{coll_id}"; }
public static sprocHash: string = "/sprocs/";
public static sprocs: string = HashRoutePrefixes.collections + HashRoutePrefixes.sprocHash + "{sproc_id}"; export class HashRoutePrefixes {
public static docs: string = HashRoutePrefixes.collections + "/docs/{doc_id}/"; public static databases: string = "/dbs/{db_id}";
public static conflicts: string = HashRoutePrefixes.collections + "/conflicts"; public static collections: string = "/dbs/{db_id}/colls/{coll_id}";
public static sprocHash: string = "/sprocs/";
public static databasesWithId(databaseId: string): string { public static sprocs: string = HashRoutePrefixes.collections + HashRoutePrefixes.sprocHash + "{sproc_id}";
return this.databases.replace("{db_id}", databaseId).replace("/", ""); // strip the first slash since hasher adds it public static docs: string = HashRoutePrefixes.collections + "/docs/{doc_id}/";
} public static conflicts: string = HashRoutePrefixes.collections + "/conflicts";
public static collectionsWithIds(databaseId: string, collectionId: string): string { public static databasesWithId(databaseId: string): string {
const transformedDatabasePrefix: string = this.collections.replace("{db_id}", databaseId); return this.databases.replace("{db_id}", databaseId).replace("/", ""); // strip the first slash since hasher adds it
}
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("/", ""); // strip the first slash since hasher adds it
} public static collectionsWithIds(databaseId: string, collectionId: string): string {
const transformedDatabasePrefix: string = this.collections.replace("{db_id}", databaseId);
public static sprocWithIds(
databaseId: string, return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("/", ""); // strip the first slash since hasher adds it
collectionId: string, }
sprocId: string,
stripFirstSlash: boolean = true public static sprocWithIds(
): string { databaseId: string,
const transformedDatabasePrefix: string = this.sprocs.replace("{db_id}", databaseId); collectionId: string,
sprocId: string,
const transformedSprocRoute: string = transformedDatabasePrefix stripFirstSlash: boolean = true
.replace("{coll_id}", collectionId) ): string {
.replace("{sproc_id}", sprocId); const transformedDatabasePrefix: string = this.sprocs.replace("{db_id}", databaseId);
if (!!stripFirstSlash) {
return transformedSprocRoute.replace("/", ""); // strip the first slash since hasher adds it const transformedSprocRoute: string = transformedDatabasePrefix
} .replace("{coll_id}", collectionId)
.replace("{sproc_id}", sprocId);
return transformedSprocRoute; if (!!stripFirstSlash) {
} return transformedSprocRoute.replace("/", ""); // strip the first slash since hasher adds it
}
public static conflictsWithIds(databaseId: string, collectionId: string) {
const transformedDatabasePrefix: string = this.conflicts.replace("{db_id}", databaseId); return transformedSprocRoute;
}
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("/", ""); // strip the first slash since hasher adds it;
} public static conflictsWithIds(databaseId: string, collectionId: string) {
const transformedDatabasePrefix: string = this.conflicts.replace("{db_id}", databaseId);
public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId); return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("/", ""); // strip the first slash since hasher adds it;
}
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
} public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
} const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
export class ConfigurationOverridesValues { return transformedDatabasePrefix
public static IsBsonSchemaV2: string = "true"; .replace("{coll_id}", collectionId)
} .replace("{doc_id}", docId)
.replace("/", ""); // strip the first slash since hasher adds it
export class KeyCodes { }
public static Space: number = 32; }
public static Enter: number = 13;
public static Escape: number = 27; export class ConfigurationOverridesValues {
public static UpArrow: number = 38; public static IsBsonSchemaV2: string = "true";
public static DownArrow: number = 40; }
public static LeftArrow: number = 37;
public static RightArrow: number = 39; export class KeyCodes {
public static Tab: number = 9; public static Space: number = 32;
} public static Enter: number = 13;
public static Escape: number = 27;
// Normalized per: https://www.w3.org/TR/uievents-key/#named-key-attribute-values public static UpArrow: number = 38;
export class NormalizedEventKey { public static DownArrow: number = 40;
public static readonly Space = " "; public static LeftArrow: number = 37;
public static readonly Enter = "Enter"; public static RightArrow: number = 39;
public static readonly Escape = "Escape"; public static Tab: number = 9;
public static readonly UpArrow = "ArrowUp"; }
public static readonly DownArrow = "ArrowDown";
public static readonly LeftArrow = "ArrowLeft"; // Normalized per: https://www.w3.org/TR/uievents-key/#named-key-attribute-values
public static readonly RightArrow = "ArrowRight"; export class NormalizedEventKey {
} public static readonly Space = " ";
public static readonly Enter = "Enter";
export class TryCosmosExperience { public static readonly Escape = "Escape";
public static extendUrl: string = "https://trycosmosdb.azure.com/api/resource/extendportal?userId={0}"; public static readonly UpArrow = "ArrowUp";
public static deleteUrl: string = "https://trycosmosdb.azure.com/api/resource/deleteportal?userId={0}"; public static readonly DownArrow = "ArrowDown";
public static collectionsPerAccount: number = 3; public static readonly LeftArrow = "ArrowLeft";
public static maxRU: number = 5000; public static readonly RightArrow = "ArrowRight";
public static defaultRU: number = 3000; }
}
export class TryCosmosExperience {
export class OfferVersions { public static extendUrl: string = "https://trycosmosdb.azure.com/api/resource/extendportal?userId={0}";
public static V1: string = "V1"; public static deleteUrl: string = "https://trycosmosdb.azure.com/api/resource/deleteportal?userId={0}";
public static V2: string = "V2"; public static collectionsPerAccount: number = 3;
} public static maxRU: number = 5000;
public static defaultRU: number = 3000;
export enum ConflictOperationType { }
Replace = "replace",
Create = "create", export class OfferVersions {
Delete = "delete", public static V1: string = "V1";
} public static V2: string = "V2";
}
export const EmulatorMasterKey =
//[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")] export enum ConflictOperationType {
"C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="; Replace = "replace",
Create = "create",
// A variable @MyVariable defined in Constants.less is accessible as StyleConstants.MyVariable Delete = "delete"
export const StyleConstants = require("less-vars-loader!../../less/Common/Constants.less"); }
export class Notebook { export const EmulatorMasterKey =
public static readonly defaultBasePath = "./notebooks"; //[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")]
public static readonly heartbeatDelayMs = 5000; "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";
public static readonly kernelRestartInitialDelayMs = 1000;
public static readonly kernelRestartMaxDelayMs = 20000; // A variable @MyVariable defined in Constants.less is accessible as StyleConstants.MyVariable
public static readonly autoSaveIntervalMs = 120000; export const StyleConstants = require("less-vars-loader!../../less/Common/Constants.less");
}
export class Notebook {
export class SparkLibrary { public static readonly defaultBasePath = "./notebooks";
public static readonly nameMinLength = 3; public static readonly heartbeatDelayMs = 5000;
public static readonly nameMaxLength = 63; public static readonly kernelRestartInitialDelayMs = 1000;
} public static readonly kernelRestartMaxDelayMs = 20000;
public static readonly autoSaveIntervalMs = 120000;
export class AnalyticalStorageTtl { }
public static readonly Days90: number = 7776000;
public static readonly Infinite: number = -1; export class SparkLibrary {
public static readonly Disabled: number = 0; public static readonly nameMinLength = 3;
} public static readonly nameMaxLength = 63;
}
export class TerminalQueryParams {
public static readonly Terminal = "terminal"; export class AnalyticalStorageTtl {
public static readonly Server = "server"; public static readonly Days90: number = 7776000;
public static readonly Token = "token"; public static readonly Infinite: number = -1;
public static readonly SubscriptionId = "subscriptionId"; public static readonly Disabled: number = 0;
public static readonly TerminalEndpoint = "terminalEndpoint"; }
}
export class TerminalQueryParams {
public static readonly Terminal = "terminal";
public static readonly Server = "server";
public static readonly Token = "token";
public static readonly SubscriptionId = "subscriptionId";
public static readonly TerminalEndpoint = "terminalEndpoint";
}
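
As a small illustration of the route helpers defined in this file: the HashRoutePrefixes methods substitute the ids into the hash templates and strip the leading slash. The ids below are illustrative and the relative import path is an assumption.

// Sketch only: ids are illustrative; the import path assumes a sibling module.
import { HashRoutePrefixes } from "./Constants";

const databaseRoute = HashRoutePrefixes.databasesWithId("graphdb");                  // "dbs/graphdb"
const collectionRoute = HashRoutePrefixes.collectionsWithIds("graphdb", "Persons");  // "dbs/graphdb/colls/Persons"

// The leading slash is stripped because the hash router adds its own.
console.log(databaseRoute, collectionRoute);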

View File

@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
resourceId: "", resourceId: "",
resourceType: "dbs" as ResourceType, resourceType: "dbs" as ResourceType,
headers: {}, headers: {},
getAuthorizationTokenUsingMasterKey: () => "", getAuthorizationTokenUsingMasterKey: () => ""
}; };
beforeEach(() => { beforeEach(() => {
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
window.fetch = jest.fn().mockImplementation(() => { window.fetch = jest.fn().mockImplementation(() => {
return { return {
json: () => "{}", json: () => "{}",
headers: new Map(), headers: new Map()
}; };
}); });
}); });
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
it("does not call the auth service if a master key is set", async () => { it("does not call the auth service if a master key is set", async () => {
updateUserContext({ updateUserContext({
masterKey: "foo", masterKey: "foo"
}); });
await tokenProvider(options); await tokenProvider(options);
expect((window.fetch as any).mock.calls.length).toBe(0); expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
window.fetch = jest.fn().mockImplementation(() => { window.fetch = jest.fn().mockImplementation(() => {
return { return {
json: () => "{}", json: () => "{}",
headers: new Map(), headers: new Map()
}; };
}); });
}); });
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in production", () => { it("builds the correct URL in production", () => {
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
getTokenFromAuthService("GET", "dbs", "foo"); getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith( expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in dev", () => { it("builds the correct URL in dev", () => {
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://localhost:1234", BACKEND_ENDPOINT: "https://localhost:1234"
}); });
getTokenFromAuthService("GET", "dbs", "foo"); getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith( expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
documentEndpoint: "bar", documentEndpoint: "bar",
gremlinEndpoint: "foo", gremlinEndpoint: "foo",
tableEndpoint: "foo", tableEndpoint: "foo",
cassandraEndpoint: "foo", cassandraEndpoint: "foo"
}, }
}, }
}); });
expect(endpoint()).toEqual("bar"); expect(endpoint()).toEqual("bar");
}); });
it("uses _endpoint if set", () => { it("uses _endpoint if set", () => {
updateUserContext({ updateUserContext({
endpoint: "baz", endpoint: "baz"
}); });
expect(endpoint()).toEqual("baz"); expect(endpoint()).toEqual("baz");
}); });
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
updateConfigContext({ updateConfigContext({
platform: Platform.Hosted, platform: Platform.Hosted,
BACKEND_ENDPOINT: "https://localhost:1234", BACKEND_ENDPOINT: "https://localhost:1234",
PROXY_PATH: "/proxy", PROXY_PATH: "/proxy"
}); });
const headers = {}; const headers = {};
const endpoint = "https://docs.azure.com"; const endpoint = "https://docs.azure.com";

View File

@@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
method: "POST", method: "POST",
headers: { headers: {
"content-type": "application/json", "content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken, "x-ms-encrypted-auth-token": userContext.accessToken
}, },
body: JSON.stringify({ body: JSON.stringify({
verb, verb,
resourceType, resourceType,
resourceId, resourceId
}), })
}); });
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json() //TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json()); const result = JSON.parse(await response.json());
@@ -81,9 +81,9 @@ export function client(): Cosmos.CosmosClient {
key: userContext.masterKey, key: userContext.masterKey,
tokenProvider, tokenProvider,
connectionPolicy: { connectionPolicy: {
enableEndpointDiscovery: false, enableEndpointDiscovery: false
}, },
userAgentSuffix: "Azure Portal", userAgentSuffix: "Azure Portal"
}; };
if (configContext.PROXY_PATH !== undefined) { if (configContext.PROXY_PATH !== undefined) {

View File

@@ -1,13 +1,13 @@
import { getCommonQueryOptions } from "./DataAccessUtilityBase"; import { getCommonQueryOptions } from "./DataAccessUtilityBase";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility"; import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
describe("getCommonQueryOptions", () => { describe("getCommonQueryOptions", () => {
it("builds the correct default options objects", () => { it("builds the correct default options objects", () => {
expect(getCommonQueryOptions({})).toMatchSnapshot(); expect(getCommonQueryOptions({})).toMatchSnapshot();
}); });
it("reads from localStorage", () => { it("reads from localStorage", () => {
LocalStorageUtility.setEntryNumber(StorageKey.ActualItemPerPage, 37); LocalStorageUtility.setEntryNumber(StorageKey.ActualItemPerPage, 37);
LocalStorageUtility.setEntryNumber(StorageKey.MaxDegreeOfParellism, 17); LocalStorageUtility.setEntryNumber(StorageKey.MaxDegreeOfParellism, 17);
expect(getCommonQueryOptions({})).toMatchSnapshot(); expect(getCommonQueryOptions({})).toMatchSnapshot();
}); });
}); });

View File

@@ -1,155 +1,182 @@
import { ConflictDefinition, FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos"; import {
import Q from "q"; ConflictDefinition,
import * as DataModels from "../Contracts/DataModels"; FeedOptions,
import * as ViewModels from "../Contracts/ViewModels"; ItemDefinition,
import ConflictId from "../Explorer/Tree/ConflictId"; OfferDefinition,
import DocumentId from "../Explorer/Tree/DocumentId"; QueryIterator,
import StoredProcedure from "../Explorer/Tree/StoredProcedure"; Resource
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility"; } from "@azure/cosmos";
import * as Constants from "./Constants"; import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "./CosmosClient"; import Q from "q";
import { configContext, Platform } from "../ConfigContext";
export function getCommonQueryOptions(options: FeedOptions): any { import * as DataModels from "../Contracts/DataModels";
const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage); import { MessageTypes } from "../Contracts/ExplorerContracts";
options = options || {}; import * as ViewModels from "../Contracts/ViewModels";
options.populateQueryMetrics = true; import ConflictId from "../Explorer/Tree/ConflictId";
options.enableScanInQuery = options.enableScanInQuery || true; import DocumentId from "../Explorer/Tree/DocumentId";
if (!options.partitionKey) { import StoredProcedure from "../Explorer/Tree/StoredProcedure";
options.forceQueryPlan = true; import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
} import { OfferUtils } from "../Utils/OfferUtils";
options.maxItemCount = import * as Constants from "./Constants";
options.maxItemCount || import { client } from "./CosmosClient";
(storedItemPerPageSetting !== undefined && storedItemPerPageSetting) || import * as HeadersUtility from "./HeadersUtility";
Constants.Queries.itemsPerPage; import { sendCachedDataMessage } from "./MessageHandler";
options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
export function getCommonQueryOptions(options: FeedOptions): any {
return options; const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
} options = options || {};
options.populateQueryMetrics = true;
export function queryDocuments( options.enableScanInQuery = options.enableScanInQuery || true;
databaseId: string, if (!options.partitionKey) {
containerId: string, options.forceQueryPlan = true;
query: string, }
options: any options.maxItemCount =
): Q.Promise<QueryIterator<ItemDefinition & Resource>> { options.maxItemCount ||
options = getCommonQueryOptions(options); (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
const documentsIterator = client().database(databaseId).container(containerId).items.query(query, options); Constants.Queries.itemsPerPage;
return Q(documentsIterator); options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
}
return options;
export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object { }
const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
const partitionKeyValue: any = conflictId.partitionKeyValue; export function queryDocuments(
databaseId: string,
return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue); containerId: string,
} query: string,
options: any
export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object { ): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
if (!partitionKeyDefinition) { options = getCommonQueryOptions(options);
return undefined; const documentsIterator = client()
} .database(databaseId)
.container(containerId)
if (partitionKeyValue === undefined) { .items.query(query, options);
return [{}]; return Q(documentsIterator);
} }
return [partitionKeyValue]; export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
} const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
const partitionKeyValue: any = conflictId.partitionKeyValue;
export function updateDocument(
collection: ViewModels.CollectionBase, return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
documentId: DocumentId, }
newDocument: any
): Q.Promise<any> { export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
const partitionKey = documentId.partitionKeyValue; if (!partitionKeyDefinition) {
return undefined;
return Q( }
client()
.database(collection.databaseId) if (partitionKeyValue === undefined) {
.container(collection.id()) return [{}];
.item(documentId.id(), partitionKey) }
.replace(newDocument)
.then((response) => response.resource) return [partitionKeyValue];
); }
}
export function updateDocument(
export function executeStoredProcedure( collection: ViewModels.CollectionBase,
collection: ViewModels.Collection, documentId: DocumentId,
storedProcedure: StoredProcedure, newDocument: any
partitionKeyValue: any, ): Q.Promise<any> {
params: any[] const partitionKey = documentId.partitionKeyValue;
): Q.Promise<any> {
// TODO remove this deferred. Kept it because of timeout code at bottom of function return Q(
const deferred = Q.defer<any>(); client()
.database(collection.databaseId)
client() .container(collection.id())
.database(collection.databaseId) .item(documentId.id(), partitionKey)
.container(collection.id()) .replace(newDocument)
.scripts.storedProcedure(storedProcedure.id()) .then(response => response.resource)
.execute(partitionKeyValue, params, { enableScriptLogging: true }) );
.then((response) => }
deferred.resolve({
result: response.resource, export function executeStoredProcedure(
scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults], collection: ViewModels.Collection,
}) storedProcedure: StoredProcedure,
) partitionKeyValue: any,
.catch((error) => deferred.reject(error)); params: any[]
): Q.Promise<any> {
return deferred.promise.timeout( // TODO remove this deferred. Kept it because of timeout code at bottom of function
Constants.ClientDefaults.requestTimeoutMs, const deferred = Q.defer<any>();
`Request timed out while executing stored procedure ${storedProcedure.id()}`
); client()
} .database(collection.databaseId)
.container(collection.id())
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> { .scripts.storedProcedure(storedProcedure.id())
return Q( .execute(partitionKeyValue, params, { enableScriptLogging: true })
client() .then(response =>
.database(collection.databaseId) deferred.resolve({
.container(collection.id()) result: response.resource,
.items.create(newDocument) scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
.then((response) => response.resource) })
); )
} .catch(error => deferred.reject(error));
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> { return deferred.promise.timeout(
const partitionKey = documentId.partitionKeyValue; Constants.ClientDefaults.requestTimeoutMs,
`Request timed out while executing stored procedure ${storedProcedure.id()}`
return Q( );
client() }
.database(collection.databaseId)
.container(collection.id()) export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
.item(documentId.id(), partitionKey) return Q(
.read() client()
.then((response) => response.resource) .database(collection.databaseId)
); .container(collection.id())
} .items.create(newDocument)
.then(response => response.resource)
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> { );
const partitionKey = documentId.partitionKeyValue; }
return Q( export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
client().database(collection.databaseId).container(collection.id()).item(documentId.id(), partitionKey).delete() const partitionKey = documentId.partitionKeyValue;
);
} return Q(
client()
export function deleteConflict( .database(collection.databaseId)
collection: ViewModels.CollectionBase, .container(collection.id())
conflictId: ConflictId, .item(documentId.id(), partitionKey)
options: any = {} .read()
): Q.Promise<any> { .then(response => response.resource)
options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId); );
}
return Q(
client().database(collection.databaseId).container(collection.id()).conflict(conflictId.id()).delete(options) export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
); const partitionKey = documentId.partitionKeyValue;
}
return Q(
export function queryConflicts( client()
databaseId: string, .database(collection.databaseId)
containerId: string, .container(collection.id())
query: string, .item(documentId.id(), partitionKey)
options: any .delete()
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> { );
const documentsIterator = client().database(databaseId).container(containerId).conflicts.query(query, options); }
return Q(documentsIterator);
} export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options: any = {}
): Q.Promise<any> {
options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options)
);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return Q(documentsIterator);
}
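For reference, a minimal usage sketch of the helpers above, assuming the module name used by its callers elsewhere in this change; the partition key fixture, query text, and options object are illustrative only and not part of the diff:

import * as DataModels from "../Contracts/DataModels";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";

// getPartitionKeyHeader: no definition -> undefined, no value -> [{}], otherwise the value wrapped in an array.
const header = DataAccessUtilityBase.getPartitionKeyHeader(
  { paths: ["/pk"], kind: "Hash" } as DataModels.PartitionKey, // hypothetical definition
  "user-42"
);
// header === ["user-42"]

// queryConflicts returns a Q-wrapped QueryIterator; fetchAll() drains the conflict feed.
async function countConflicts(databaseId: string, containerId: string): Promise<number> {
  const iterator = await DataAccessUtilityBase.queryConflicts(databaseId, containerId, "SELECT * FROM c", {});
  const { resources } = await iterator.fetchAll();
  return resources.length;
}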

View File

@@ -1,217 +1,217 @@
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos"; import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import Q from "q"; import Q from "q";
import * as ViewModels from "../Contracts/ViewModels"; import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId"; import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId"; import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure"; import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils"; import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
import * as Constants from "./Constants"; import * as Constants from "./Constants";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase"; import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities"; import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
import { handleError } from "./ErrorHandlingUtils"; import { handleError } from "./ErrorHandlingUtils";
// TODO: Log all promise resolutions and errors with verbosity levels // TODO: Log all promise resolutions and errors with verbosity levels
export function queryDocuments( export function queryDocuments(
databaseId: string, databaseId: string,
containerId: string, containerId: string,
query: string, query: string,
options: any options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> { ): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options); return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
} }
export function queryConflicts( export function queryConflicts(
databaseId: string, databaseId: string,
containerId: string, containerId: string,
query: string, query: string,
options: any options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> { ): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options); return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
} }
export function getEntityName() { export function getEntityName() {
const defaultExperience = const defaultExperience =
window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience(); window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) { if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
return "document"; return "document";
} }
return "item"; return "item";
} }
export function executeStoredProcedure( export function executeStoredProcedure(
collection: ViewModels.Collection, collection: ViewModels.Collection,
storedProcedure: StoredProcedure, storedProcedure: StoredProcedure,
partitionKeyValue: any, partitionKeyValue: any,
params: any[] params: any[]
): Q.Promise<any> { ): Q.Promise<any> {
var deferred = Q.defer<any>(); var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`); const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params) DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
.then( .then(
(response: any) => { (response: any) => {
deferred.resolve(response); deferred.resolve(response);
logConsoleInfo( logConsoleInfo(
`Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}` `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
); );
}, },
(error: any) => { (error: any) => {
handleError( handleError(
error, error,
"ExecuteStoredProcedure", "ExecuteStoredProcedure",
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}` `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
); );
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }
export function queryDocumentsPage( export function queryDocumentsPage(
resourceName: string, resourceName: string,
documentsIterator: MinimalQueryIterator, documentsIterator: MinimalQueryIterator,
firstItemIndex: number, firstItemIndex: number,
options: any options: any
): Q.Promise<ViewModels.QueryResults> { ): Q.Promise<ViewModels.QueryResults> {
var deferred = Q.defer<ViewModels.QueryResults>(); var deferred = Q.defer<ViewModels.QueryResults>();
const entityName = getEntityName(); const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`); const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
Q(nextPage(documentsIterator, firstItemIndex)) Q(nextPage(documentsIterator, firstItemIndex))
.then( .then(
(result: ViewModels.QueryResults) => { (result: ViewModels.QueryResults) => {
const itemCount = (result.documents && result.documents.length) || 0; const itemCount = (result.documents && result.documents.length) || 0;
logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`); logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
deferred.resolve(result); deferred.resolve(result);
}, },
(error: any) => { (error: any) => {
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`); handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> { export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>(); var deferred = Q.defer<any>();
const entityName = getEntityName(); const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`); const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.readDocument(collection, documentId) DataAccessUtilityBase.readDocument(collection, documentId)
.then( .then(
(document: any) => { (document: any) => {
deferred.resolve(document); deferred.resolve(document);
}, },
(error: any) => { (error: any) => {
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`); handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }
export function updateDocument( export function updateDocument(
collection: ViewModels.CollectionBase, collection: ViewModels.CollectionBase,
documentId: DocumentId, documentId: DocumentId,
newDocument: any newDocument: any
): Q.Promise<any> { ): Q.Promise<any> {
var deferred = Q.defer<any>(); var deferred = Q.defer<any>();
const entityName = getEntityName(); const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`); const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.updateDocument(collection, documentId, newDocument) DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
.then( .then(
(updatedDocument: any) => { (updatedDocument: any) => {
logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`); logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
deferred.resolve(updatedDocument); deferred.resolve(updatedDocument);
}, },
(error: any) => { (error: any) => {
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`); handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> { export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
var deferred = Q.defer<any>(); var deferred = Q.defer<any>();
const entityName = getEntityName(); const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`); const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
DataAccessUtilityBase.createDocument(collection, newDocument) DataAccessUtilityBase.createDocument(collection, newDocument)
.then( .then(
(savedDocument: any) => { (savedDocument: any) => {
logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`); logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
deferred.resolve(savedDocument); deferred.resolve(savedDocument);
}, },
(error: any) => { (error: any) => {
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`); handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> { export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>(); var deferred = Q.defer<any>();
const entityName = getEntityName(); const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`); const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.deleteDocument(collection, documentId) DataAccessUtilityBase.deleteDocument(collection, documentId)
.then( .then(
(response: any) => { (response: any) => {
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`); logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
deferred.resolve(response); deferred.resolve(response);
}, },
(error: any) => { (error: any) => {
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`); handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }
export function deleteConflict( export function deleteConflict(
collection: ViewModels.CollectionBase, collection: ViewModels.CollectionBase,
conflictId: ConflictId, conflictId: ConflictId,
options?: any options?: any
): Q.Promise<any> { ): Q.Promise<any> {
var deferred = Q.defer<any>(); var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`); const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
DataAccessUtilityBase.deleteConflict(collection, conflictId, options) DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
.then( .then(
(response: any) => { (response: any) => {
logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`); logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
deferred.resolve(response); deferred.resolve(response);
}, },
(error: any) => { (error: any) => {
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`); handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
deferred.reject(error); deferred.reject(error);
} }
) )
.finally(() => { .finally(() => {
clearMessage(); clearMessage();
}); });
return deferred.promise; return deferred.promise;
} }

View File

@@ -1,94 +1,94 @@
import * as ko from "knockout"; import * as ko from "knockout";
import * as ViewModels from "../Contracts/ViewModels"; import * as ViewModels from "../Contracts/ViewModels";
export default class EditableUtility { export default class EditableUtility {
public static observable<T>(initialValue?: T): ViewModels.Editable<T> { public static observable<T>(initialValue?: T): ViewModels.Editable<T> {
var observable: ViewModels.Editable<T> = <ViewModels.Editable<T>>ko.observable<T>(initialValue); var observable: ViewModels.Editable<T> = <ViewModels.Editable<T>>ko.observable<T>(initialValue);
observable.edits = ko.observableArray<T>([initialValue]); observable.edits = ko.observableArray<T>([initialValue]);
observable.validations = ko.observableArray<(value: T) => boolean>([]); observable.validations = ko.observableArray<(value: T) => boolean>([]);
observable.setBaseline = (baseline: T) => { observable.setBaseline = (baseline: T) => {
observable(baseline); observable(baseline);
observable.edits([baseline]); observable.edits([baseline]);
}; };
observable.getEditableCurrentValue = ko.computed<T>(() => { observable.getEditableCurrentValue = ko.computed<T>(() => {
const edits = (observable.edits && observable.edits()) || []; const edits = (observable.edits && observable.edits()) || [];
if (edits.length === 0) { if (edits.length === 0) {
return undefined; return undefined;
} }
return edits[edits.length - 1]; return edits[edits.length - 1];
}); });
observable.getEditableOriginalValue = ko.computed<T>(() => { observable.getEditableOriginalValue = ko.computed<T>(() => {
const edits = (observable.edits && observable.edits()) || []; const edits = (observable.edits && observable.edits()) || [];
if (edits.length === 0) { if (edits.length === 0) {
return undefined; return undefined;
} }
return edits[0]; return edits[0];
}); });
observable.editableIsDirty = ko.computed<boolean>(() => { observable.editableIsDirty = ko.computed<boolean>(() => {
const edits = (observable.edits && observable.edits()) || []; const edits = (observable.edits && observable.edits()) || [];
if (edits.length <= 1) { if (edits.length <= 1) {
return false; return false;
} }
let current: any = observable.getEditableCurrentValue(); let current: any = observable.getEditableCurrentValue();
let original: any = observable.getEditableOriginalValue(); let original: any = observable.getEditableOriginalValue();
switch (typeof current) { switch (typeof current) {
case "string": case "string":
case "undefined": case "undefined":
case "number": case "number":
case "boolean": case "boolean":
current = current && current.toString(); current = current && current.toString();
break; break;
default: default:
current = JSON.stringify(current); current = JSON.stringify(current);
break; break;
} }
switch (typeof original) { switch (typeof original) {
case "string": case "string":
case "undefined": case "undefined":
case "number": case "number":
case "boolean": case "boolean":
original = original && original.toString(); original = original && original.toString();
break; break;
default: default:
original = JSON.stringify(original); original = JSON.stringify(original);
break; break;
} }
if (current !== original) { if (current !== original) {
return true; return true;
} }
return false; return false;
}); });
observable.subscribe((edit) => { observable.subscribe(edit => {
var edits = observable.edits && observable.edits(); var edits = observable.edits && observable.edits();
if (!edits) { if (!edits) {
return; return;
} }
edits.push(edit); edits.push(edit);
observable.edits(edits); observable.edits(edits);
}); });
observable.editableIsValid = ko.observable<boolean>(true); observable.editableIsValid = ko.observable<boolean>(true);
observable.subscribe((value) => { observable.subscribe(value => {
const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || []; const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
const isValid = validations.every((validate) => validate(value)); const isValid = validations.every(validate => validate(value));
observable.editableIsValid(isValid); observable.editableIsValid(isValid);
}); });
return observable; return observable;
} }
} }
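A small usage sketch for the editable observable above; the import path is assumed and the throughput numbers are arbitrary:

import EditableUtility from "./EditableUtility";

// Track edits to a throughput value and detect dirtiness against the baseline.
const throughput = EditableUtility.observable<number>(400);
throughput.validations.push((value) => value >= 400); // simple validation hook
throughput(1000);             // records an edit
throughput.editableIsDirty(); // true: current (1000) differs from original (400)
throughput.editableIsValid(); // true: passes the validation above
throughput.setBaseline(1000); // accept the new value as the baseline
throughput.editableIsDirty(); // false again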

View File

@@ -1,8 +1,8 @@
export default class EnvironmentUtility { export default class EnvironmentUtility {
public static normalizeArmEndpointUri(uri: string): string { public static normalizeArmEndpointUri(uri: string): string {
if (uri && uri.slice(-1) !== "/") { if (uri && uri.slice(-1) !== "/") {
return `${uri}/`; return `${uri}/`;
} }
return uri; return uri;
} }
} }
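For completeness, the behaviour in two lines (module path assumed):

import EnvironmentUtility from "./EnvironmentUtility";

EnvironmentUtility.normalizeArmEndpointUri("https://management.azure.com");  // -> "https://management.azure.com/"
EnvironmentUtility.normalizeArmEndpointUri("https://management.azure.com/"); // already normalized, returned unchanged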

View File

@@ -1,7 +1,7 @@
import { ARMError } from "../Utils/arm/request"; import { ARMError } from "../Utils/arm/request";
import { HttpStatusCodes } from "./Constants"; import { HttpStatusCodes } from "./Constants";
import { MessageTypes } from "../Contracts/ExplorerContracts"; import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/SubscriptionType"; import { SubscriptionType } from "../Contracts/ViewModels";
import { logConsoleError } from "../Utils/NotificationConsoleUtils"; import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { logError } from "./Logger"; import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler"; import { sendMessage } from "./MessageHandler";
@@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
} }
sendMessage({ sendMessage({
type: MessageTypes.ForbiddenError, type: MessageTypes.ForbiddenError,
reason: errorMessage, reason: errorMessage
}); });
} }
}; };

View File

@@ -1,25 +1,25 @@
import * as HeadersUtility from "./HeadersUtility"; import * as HeadersUtility from "./HeadersUtility";
import { ExplorerSettings } from "../Shared/ExplorerSettings"; import { ExplorerSettings } from "../Shared/ExplorerSettings";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility"; import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
describe("Headers Utility", () => { describe("Headers Utility", () => {
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => { describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
beforeEach(() => { beforeEach(() => {
ExplorerSettings.createDefaultSettings(); ExplorerSettings.createDefaultSettings();
}); });
it("should return true by default", () => { it("should return true by default", () => {
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true); expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true);
}); });
it("should return false if the enable cross partition key feed option is false", () => { it("should return false if the enable cross partition key feed option is false", () => {
LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "false"); LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "false");
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(false); expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(false);
}); });
it("should return true if the enable cross partition key feed option is true", () => { it("should return true if the enable cross partition key feed option is true", () => {
LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "true"); LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "true");
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true); expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true);
}); });
}); });
}); });

View File

@@ -1,28 +1,28 @@
import * as Constants from "./Constants"; import * as Constants from "./Constants";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility"; import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
// x-ms-resource-quota: databases = 100; collections = 5000; users = 500000; permissions = 2000000; // x-ms-resource-quota: databases = 100; collections = 5000; users = 500000; permissions = 2000000;
export function getQuota(responseHeaders: any): any { export function getQuota(responseHeaders: any): any {
return responseHeaders && responseHeaders[Constants.HttpHeaders.resourceQuota] return responseHeaders && responseHeaders[Constants.HttpHeaders.resourceQuota]
? parseStringIntoObject(responseHeaders[Constants.HttpHeaders.resourceQuota]) ? parseStringIntoObject(responseHeaders[Constants.HttpHeaders.resourceQuota])
: null; : null;
} }
export function shouldEnableCrossPartitionKey(): boolean { export function shouldEnableCrossPartitionKey(): boolean {
return LocalStorageUtility.getEntryString(StorageKey.IsCrossPartitionQueryEnabled) === "true"; return LocalStorageUtility.getEntryString(StorageKey.IsCrossPartitionQueryEnabled) === "true";
} }
function parseStringIntoObject(resourceString: string) { function parseStringIntoObject(resourceString: string) {
var entityObject: any = {}; var entityObject: any = {};
if (resourceString) { if (resourceString) {
var entitiesArray: string[] = resourceString.split(";"); var entitiesArray: string[] = resourceString.split(";");
for (var i: any = 0; i < entitiesArray.length; i++) { for (var i: any = 0; i < entitiesArray.length; i++) {
var entity: string[] = entitiesArray[i].split("="); var entity: string[] = entitiesArray[i].split("=");
entityObject[entity[0]] = entity[1]; entityObject[entity[0]] = entity[1];
} }
} }
return entityObject; return entityObject;
} }
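A short sketch of the quota-header parsing above, assuming Constants.HttpHeaders.resourceQuota resolves to the x-ms-resource-quota header named in the comment; the header literal is illustrative:

import * as HeadersUtility from "./HeadersUtility";

// The quota header is a ";"-separated list of "name=value" pairs.
const quota = HeadersUtility.getQuota({
  "x-ms-resource-quota": "databases=100;collections=5000;users=500000;permissions=2000000",
});
// quota.collections === "5000"  (values stay strings; callers convert as needed)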

View File

@@ -11,8 +11,8 @@ describe("nextPage", () => {
queryMetrics: {}, queryMetrics: {},
requestCharge: 1, requestCharge: 1,
headers: {}, headers: {},
activityId: "foo", activityId: "foo"
}), })
}; };
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot(); expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();

View File

@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
// Pick<QueryIterator<any>, "fetchNext">; // Pick<QueryIterator<any>, "fetchNext">;
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> { export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
return documentsIterator.fetchNext().then((response) => { return documentsIterator.fetchNext().then(response => {
const documents = response.resources; const documents = response.resources;
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
const itemCount = (documents && documents.length) || 0; const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
lastItemIndex: Number(firstItemIndex) + Number(itemCount), lastItemIndex: Number(firstItemIndex) + Number(itemCount),
headers, headers,
activityId: response.activityId, activityId: response.activityId,
requestCharge: response.requestCharge, requestCharge: response.requestCharge
}; };
}); });
} }
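The unit test above already exercises this path; restated as a standalone sketch with arbitrary fixture values:

import { nextPage } from "./IteratorUtilities";

// Any object exposing fetchNext() satisfies MinimalQueryIterator.
const fakeIterator = {
  fetchNext: async () => ({
    resources: [{ id: "1" }, { id: "2" }],
    hasMoreResults: false,
    queryMetrics: {},
    requestCharge: 2.5,
    headers: {},
    activityId: "abc",
  }),
};

async function demo(): Promise<void> {
  const page = await nextPage(fakeIterator, 0); // firstItemIndex = 0
  console.log(page.lastItemIndex, page.requestCharge); // 2, 2.5
}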

View File

@@ -1,26 +1,26 @@
jest.mock("./MessageHandler"); jest.mock("./MessageHandler");
import { LogEntryLevel } from "../Contracts/Diagnostics"; import { LogEntryLevel } from "../Contracts/Diagnostics";
import * as Logger from "./Logger"; import * as Logger from "./Logger";
import { MessageTypes } from "../Contracts/ExplorerContracts"; import { MessageTypes } from "../Contracts/ExplorerContracts";
import { sendMessage } from "./MessageHandler"; import { sendMessage } from "./MessageHandler";
describe("Logger", () => { describe("Logger", () => {
beforeEach(() => { beforeEach(() => {
jest.resetAllMocks(); jest.resetAllMocks();
}); });
it("should log info messages", () => { it("should log info messages", () => {
Logger.logInfo("Test info", "DocDB"); Logger.logInfo("Test info", "DocDB");
expect(sendMessage).toBeCalled(); expect(sendMessage).toBeCalled();
}); });
it("should log error messages", () => { it("should log error messages", () => {
Logger.logError("Test error", "DocDB"); Logger.logError("Test error", "DocDB");
expect(sendMessage).toBeCalled(); expect(sendMessage).toBeCalled();
}); });
it("should log warnings", () => { it("should log warnings", () => {
Logger.logWarning("Test warning", "DocDB"); Logger.logWarning("Test warning", "DocDB");
expect(sendMessage).toBeCalled(); expect(sendMessage).toBeCalled();
}); });
}); });

View File

@@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
function _logEntry(entry: Diagnostics.LogEntry): void { function _logEntry(entry: Diagnostics.LogEntry): void {
sendMessage({ sendMessage({
type: MessageTypes.LogInfo, type: MessageTypes.LogInfo,
data: JSON.stringify(entry), data: JSON.stringify(entry)
}); });
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => { const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -60,6 +60,6 @@ function _generateLogEntry(
level, level,
message, message,
area, area,
code, code
}; };
} }
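Usage matches the test above; each call builds a log entry, posts it with MessageTypes.LogInfo, and maps the level to an Application Insights severity. The area strings below are illustrative:

import * as Logger from "./Logger";

Logger.logInfo("Collection created", "Explorer/AddCollection");
Logger.logWarning("Throughput is close to the configured maximum", "Explorer/Settings");
Logger.logError("Failed to create collection", "Explorer/AddCollection", 500);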

View File

@@ -1,28 +1,28 @@
import Q from "q"; import Q from "q";
import * as MessageHandler from "./MessageHandler"; import * as MessageHandler from "./MessageHandler";
describe("Message Handler", () => { describe("Message Handler", () => {
it("should handle cached message", async () => { it("should handle cached message", async () => {
let mockPromise = { let mockPromise = {
id: "123", id: "123",
startTime: new Date(), startTime: new Date(),
deferred: Q.defer<any>(), deferred: Q.defer<any>()
}; };
let mockMessage = { message: { id: "123", data: "{}" } }; let mockMessage = { message: { id: "123", data: "{}" } };
MessageHandler.RequestMap[mockPromise.id] = mockPromise; MessageHandler.RequestMap[mockPromise.id] = mockPromise;
MessageHandler.handleCachedDataMessage(mockMessage); MessageHandler.handleCachedDataMessage(mockMessage);
expect(mockPromise.deferred.promise.isFulfilled()).toBe(true); expect(mockPromise.deferred.promise.isFulfilled()).toBe(true);
}); });
it("should delete fulfilled promises on running the garbage collector", async () => { it("should delete fulfilled promises on running the garbage collector", async () => {
let message = { let message = {
id: "123", id: "123",
startTime: new Date(), startTime: new Date(),
deferred: Q.defer<any>(), deferred: Q.defer<any>()
}; };
MessageHandler.handleCachedDataMessage(message); MessageHandler.handleCachedDataMessage(message);
MessageHandler.runGarbageCollector(); MessageHandler.runGarbageCollector();
expect(MessageHandler.RequestMap["123"]).toBeUndefined(); expect(MessageHandler.RequestMap["123"]).toBeUndefined();
}); });
}); });

View File

@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = { let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
deferred: Q.defer<TResponseDataModel>(), deferred: Q.defer<TResponseDataModel>(),
startTime: new Date(), startTime: new Date(),
id: _.uniqueId(), id: _.uniqueId()
}; };
RequestMap[cachedDataPromise.id] = cachedDataPromise; RequestMap[cachedDataPromise.id] = cachedDataPromise;
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id }); sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
portalChildWindow.parent.postMessage( portalChildWindow.parent.postMessage(
{ {
signature: "pcIframe", signature: "pcIframe",
data: data, data: data
}, },
portalChildWindow.document.referrer portalChildWindow.document.referrer
); );
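A minimal sketch of the fire-and-forget path shown above; the payload shape is illustrative rather than a documented contract:

import { MessageTypes } from "../Contracts/ExplorerContracts";
import { sendMessage } from "./MessageHandler";

// Posts { signature: "pcIframe", data } to the hosting portal frame.
sendMessage({ type: MessageTypes.LogInfo, data: JSON.stringify({ message: "Explorer ready" }) });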

View File

@@ -14,7 +14,7 @@ const fetchMock = () => {
ok: true, ok: true,
text: () => "{}", text: () => "{}",
json: () => "{}", json: () => "{}",
headers: new Map(), headers: new Map()
}); });
}; };
@@ -27,8 +27,8 @@ const collection = {
partitionKey: { partitionKey: {
paths: ["/pk"], paths: ["/pk"],
kind: "Hash", kind: "Hash",
version: 1, version: 1
}, }
} as Collection; } as Collection;
const documentId = ({ const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
partitionKey: { partitionKey: {
paths: ["/pk"], paths: ["/pk"],
kind: "Hash", kind: "Hash",
version: 1, version: 1
}, }
} as unknown) as DocumentId; } as unknown) as DocumentId;
const databaseAccount = { const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
documentEndpoint: "bar", documentEndpoint: "bar",
gremlinEndpoint: "foo", gremlinEndpoint: "foo",
tableEndpoint: "foo", tableEndpoint: "foo",
cassandraEndpoint: "foo", cassandraEndpoint: "foo"
}, }
} as DatabaseAccount; } as DatabaseAccount;
describe("MongoProxyClient", () => { describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount, databaseAccount
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount, databaseAccount
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount, databaseAccount
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount, databaseAccount
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount, databaseAccount
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
resetConfigContext(); resetConfigContext();
delete window.authType; delete window.authType;
updateUserContext({ updateUserContext({
databaseAccount, databaseAccount
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com", BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
}); });
}); });

View File

@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
const defaultHeaders = { const defaultHeaders = {
[HttpHeaders.apiType]: ApiType.MongoDB.toString(), [HttpHeaders.apiType]: ApiType.MongoDB.toString(),
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100", [CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15", [CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
}; };
function authHeaders() { function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
let continuationToken: string; let continuationToken: string;
return { return {
fetchNext: () => { fetchNext: () => {
return queryDocuments(databaseId, collection, false, query).then((response) => { return queryDocuments(databaseId, collection, false, query).then(response => {
continuationToken = response.continuationToken; continuationToken = response.continuationToken;
const headers: { [key: string]: string | number } = {}; const headers: { [key: string]: string | number } = {};
response.headers.forEach((value, key) => { response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
headers, headers,
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]), requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]), activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
hasMoreResults: !!continuationToken, hasMoreResults: !!continuationToken
}; };
}); });
}, }
}; };
} }
@@ -74,9 +74,7 @@ export function queryDocuments(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
? collection.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint() || ""; const endpoint = getEndpoint() || "";
@@ -89,7 +87,7 @@ export function queryDocuments(
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true", [CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true", [CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true", [CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json", [HttpHeaders.contentType]: "application/query+json"
}; };
if (continuationToken) { if (continuationToken) {
@@ -102,14 +100,14 @@ export function queryDocuments(
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, { .fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST", method: "POST",
body: JSON.stringify({ query }), body: JSON.stringify({ query }),
headers, headers
}) })
.then(async (response) => { .then(async response => {
if (response.ok) { if (response.ok) {
return { return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation), continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[], documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers, headers: response.headers
}; };
} }
errorHandling(response, "querying documents", params); errorHandling(response, "querying documents", params);
@@ -137,9 +135,7 @@ export function readDocument(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
? documentId.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -151,10 +147,10 @@ export function readDocument(
...authHeaders(), ...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent( [CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader()) JSON.stringify(documentId.partitionKeyHeader())
), )
}, }
}) })
.then((response) => { .then(response => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
@@ -179,7 +175,7 @@ export function createDocument(
sid: userContext.subscriptionId, sid: userContext.subscriptionId,
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "", pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -190,10 +186,10 @@ export function createDocument(
body: JSON.stringify(documentContent), body: JSON.stringify(documentContent),
headers: { headers: {
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders()
}, }
}) })
.then((response) => { .then(response => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
@@ -222,9 +218,7 @@ export function updateDocument(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
? documentId.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -236,10 +230,10 @@ export function updateDocument(
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders(),
[HttpHeaders.contentType]: "application/json", [HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()), [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}, }
}) })
.then((response) => { .then(response => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
@@ -263,9 +257,7 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
? documentId.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -276,10 +268,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders(),
[HttpHeaders.contentType]: "application/json", [HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()), [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}, }
}) })
.then((response) => { .then(response => {
if (response.ok) { if (response.ok) {
return undefined; return undefined;
} }
@@ -307,7 +299,7 @@ export function createMongoCollectionWithProxy(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput, isAutoPilot: !!params.autoPilotMaxThroughput,
autoPilotThroughput: params.autoPilotMaxThroughput?.toString(), autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -322,11 +314,11 @@ export function createMongoCollectionWithProxy(
headers: { headers: {
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders(),
[HttpHeaders.contentType]: "application/json", [HttpHeaders.contentType]: "application/json"
}, }
} }
) )
.then((response) => { .then(response => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
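As a rough consumer-side sketch: the collection view model type and database id are placeholders, and the empty filter string mirrors the Mongo query used elsewhere in the codebase:

import * as ViewModels from "../Contracts/ViewModels";
import { queryIterator } from "./MongoProxyClient";

// Page through a Mongo collection via the proxy; the returned object honours
// the same fetchNext() contract as the SQL-side MinimalQueryIterator.
async function firstPageCharge(databaseId: string, collection: ViewModels.Collection): Promise<number> {
  const iterator = queryIterator(databaseId, collection, "{}");
  const page = await iterator.fetchNext();
  return page.requestCharge;
}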

View File

@@ -1,168 +1,168 @@
/* Copyright 2013 10gen Inc. /* Copyright 2013 10gen Inc.
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
export default class MongoUtility { export default class MongoUtility {
public static tojson = function (x: any, indent: string, nolint: boolean) { public static tojson = function(x: any, indent: string, nolint: boolean) {
if (x === null || x === undefined) { if (x === null || x === undefined) {
return String(x); return String(x);
} }
indent = indent || ""; indent = indent || "";
switch (typeof x) { switch (typeof x) {
case "string": case "string":
var out = new Array(x.length + 1); var out = new Array(x.length + 1);
out[0] = '"'; out[0] = '"';
for (var i = 0; i < x.length; i++) { for (var i = 0; i < x.length; i++) {
if (x[i] === '"') { if (x[i] === '"') {
out[out.length] = '\\"'; out[out.length] = '\\"';
} else if (x[i] === "\\") { } else if (x[i] === "\\") {
out[out.length] = "\\\\"; out[out.length] = "\\\\";
} else if (x[i] === "\b") { } else if (x[i] === "\b") {
out[out.length] = "\\b"; out[out.length] = "\\b";
} else if (x[i] === "\f") { } else if (x[i] === "\f") {
out[out.length] = "\\f"; out[out.length] = "\\f";
} else if (x[i] === "\n") { } else if (x[i] === "\n") {
out[out.length] = "\\n"; out[out.length] = "\\n";
} else if (x[i] === "\r") { } else if (x[i] === "\r") {
out[out.length] = "\\r"; out[out.length] = "\\r";
} else if (x[i] === "\t") { } else if (x[i] === "\t") {
out[out.length] = "\\t"; out[out.length] = "\\t";
} else { } else {
var code = x.charCodeAt(i); var code = x.charCodeAt(i);
if (code < 0x20) { if (code < 0x20) {
out[out.length] = (code < 0x10 ? "\\u000" : "\\u00") + code.toString(16); out[out.length] = (code < 0x10 ? "\\u000" : "\\u00") + code.toString(16);
} else { } else {
out[out.length] = x[i]; out[out.length] = x[i];
} }
} }
} }
return out.join("") + '"'; return out.join("") + '"';
case "number": case "number":
/* falls through */ /* falls through */
case "boolean": case "boolean":
return "" + x; return "" + x;
case "object": case "object":
var func = $.isArray(x) ? MongoUtility.tojsonArray : MongoUtility.tojsonObject; var func = $.isArray(x) ? MongoUtility.tojsonArray : MongoUtility.tojsonObject;
var s = func(x, indent, nolint); var s = func(x, indent, nolint);
if ( if (
(nolint === null || nolint === undefined || nolint === true) && (nolint === null || nolint === undefined || nolint === true) &&
s.length < 80 && s.length < 80 &&
(indent === null || indent.length === 0) (indent === null || indent.length === 0)
) { ) {
s = s.replace(/[\t\r\n]+/gm, " "); s = s.replace(/[\t\r\n]+/gm, " ");
} }
return s; return s;
case "function": case "function":
return x.toString(); return x.toString();
default: default:
throw new Error("tojson can't handle type " + typeof x); throw new Error("tojson can't handle type " + typeof x);
} }
}; };
private static tojsonObject = function (x: any, indent: string, nolint: boolean) { private static tojsonObject = function(x: any, indent: string, nolint: boolean) {
var lineEnding = nolint ? " " : "\n"; var lineEnding = nolint ? " " : "\n";
var tabSpace = nolint ? "" : "\t"; var tabSpace = nolint ? "" : "\t";
indent = indent || ""; indent = indent || "";
if (typeof x.tojson === "function" && x.tojson !== MongoUtility.tojson) { if (typeof x.tojson === "function" && x.tojson !== MongoUtility.tojson) {
return x.tojson(indent, nolint); return x.tojson(indent, nolint);
} }
if (x.constructor && typeof x.constructor.tojson === "function" && x.constructor.tojson !== MongoUtility.tojson) { if (x.constructor && typeof x.constructor.tojson === "function" && x.constructor.tojson !== MongoUtility.tojson) {
return x.constructor.tojson(x, indent, nolint); return x.constructor.tojson(x, indent, nolint);
} }
if (MongoUtility.hasDefinedProperty(x, "toString") && !$.isArray(x)) { if (MongoUtility.hasDefinedProperty(x, "toString") && !$.isArray(x)) {
return x.toString(); return x.toString();
} }
if (x instanceof Error) { if (x instanceof Error) {
return x.toString(); return x.toString();
} }
if (MongoUtility.isObjectId(x)) { if (MongoUtility.isObjectId(x)) {
return 'ObjectId("' + x.$oid + '")'; return 'ObjectId("' + x.$oid + '")';
} }
// push one level of indent // push one level of indent
indent += tabSpace; indent += tabSpace;
var s = "{"; var s = "{";
var pairs = []; var pairs = [];
for (var k in x) { for (var k in x) {
if (x.hasOwnProperty(k)) { if (x.hasOwnProperty(k)) {
var val = x[k]; var val = x[k];
var pair = '"' + k + '" : ' + MongoUtility.tojson(val, indent, nolint); var pair = '"' + k + '" : ' + MongoUtility.tojson(val, indent, nolint);
if (k === "_id") { if (k === "_id") {
pairs.unshift(pair); pairs.unshift(pair);
} else { } else {
pairs.push(pair); pairs.push(pair);
} }
} }
} }
// Add proper line endings, indents, and commas to each line // Add proper line endings, indents, and commas to each line
s += $.map(pairs, function (pair) { s += $.map(pairs, function(pair) {
return lineEnding + indent + pair; return lineEnding + indent + pair;
}).join(","); }).join(",");
s += lineEnding; s += lineEnding;
// pop one level of indent // pop one level of indent
indent = indent.substring(1); indent = indent.substring(1);
return s + indent + "}"; return s + indent + "}";
}; };
private static tojsonArray = function (a: any, indent: string, nolint: boolean) { private static tojsonArray = function(a: any, indent: string, nolint: boolean) {
if (a.length === 0) { if (a.length === 0) {
return "[ ]"; return "[ ]";
} }
var lineEnding = nolint ? " " : "\n"; var lineEnding = nolint ? " " : "\n";
if (!indent || nolint) { if (!indent || nolint) {
indent = ""; indent = "";
} }
var s = "[" + lineEnding; var s = "[" + lineEnding;
indent += "\t"; indent += "\t";
for (var i = 0; i < a.length; i++) { for (var i = 0; i < a.length; i++) {
s += indent + MongoUtility.tojson(a[i], indent, nolint); s += indent + MongoUtility.tojson(a[i], indent, nolint);
if (i < a.length - 1) { if (i < a.length - 1) {
s += "," + lineEnding; s += "," + lineEnding;
} }
} }
if (a.length === 0) { if (a.length === 0) {
s += indent; s += indent;
} }
indent = indent.substring(1); indent = indent.substring(1);
s += lineEnding + indent + "]"; s += lineEnding + indent + "]";
return s; return s;
}; };
private static hasDefinedProperty = function (obj: any, prop: string): boolean { private static hasDefinedProperty = function(obj: any, prop: string): boolean {
if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) { if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) {
return false; return false;
} else if (obj.hasOwnProperty(prop)) { } else if (obj.hasOwnProperty(prop)) {
return true; return true;
} else { } else {
return MongoUtility.hasDefinedProperty(Object.getPrototypeOf(obj), prop); return MongoUtility.hasDefinedProperty(Object.getPrototypeOf(obj), prop);
} }
}; };
private static isObjectId(obj: any): boolean { private static isObjectId(obj: any): boolean {
var keys = Object.keys(obj); var keys = Object.keys(obj);
return keys.length === 1 && keys[0] === "$oid" && typeof obj.$oid === "string" && /^[0-9a-f]{24}$/.test(obj.$oid); return keys.length === 1 && keys[0] === "$oid" && typeof obj.$oid === "string" && /^[0-9a-f]{24}$/.test(obj.$oid);
} }
} }
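A quick sketch of the formatter above (module path assumed); note that it relies on the global jQuery ($) helpers already referenced in the file:

import MongoUtility from "./MongoUtility";

// nolint=true keeps the output on a single line; _id is hoisted to the front
// and {$oid: "..."} values render as ObjectId("...").
const text = MongoUtility.tojson({ name: "query", _id: { $oid: "507f1f77bcf86cd799439011" } }, "", true);
// -> '{ "_id" : ObjectId("507f1f77bcf86cd799439011"), "name" : "query" }'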

View File

@@ -1,62 +0,0 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
describe("parseSDKOfferResponse", () => {
it("manual throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: 500,
autoscaleMaxThroughput: undefined,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
it("autoscale throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
maxThroughput: 5000,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: undefined,
autoscaleMaxThroughput: 5000,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
});

View File

@@ -1,33 +0,0 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
const offerContent = offerDefinition.content;
if (!offerContent) {
return undefined;
}
const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
const autopilotSettings = offerContent.offerAutopilotSettings;
if (autopilotSettings) {
return {
id: offerDefinition.id,
autoscaleMaxThroughput: autopilotSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
headers: offerResponse.headers,
};
}
return {
id: offerDefinition.id,
autoscaleMaxThroughput: undefined,
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
headers: offerResponse.headers,
};
};

View File

@@ -16,7 +16,7 @@ const notificationsPath = () => {
}; };
export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => { export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) { if (configContext.platform === Platform.Emulator) {
return []; return [];
} }
@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
const headers = { [authorizationHeader.header]: authorizationHeader.token }; const headers = { [authorizationHeader.header]: authorizationHeader.token };
const response = await window.fetch(url, { const response = await window.fetch(url, {
headers, headers
}); });
if (!response.ok) { if (!response.ok) {

View File

@@ -1,253 +1,253 @@
import { ItemDefinition, QueryIterator, Resource } from "@azure/cosmos"; import { ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import * as _ from "underscore"; import * as _ from "underscore";
import * as DataModels from "../Contracts/DataModels"; import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels"; import * as ViewModels from "../Contracts/ViewModels";
import Explorer from "../Explorer/Explorer"; import Explorer from "../Explorer/Explorer";
import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent"; import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
import DocumentsTab from "../Explorer/Tabs/DocumentsTab"; import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
import DocumentId from "../Explorer/Tree/DocumentId"; import DocumentId from "../Explorer/Tree/DocumentId";
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils"; import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
import { QueryUtils } from "../Utils/QueryUtils"; import { QueryUtils } from "../Utils/QueryUtils";
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants"; import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext"; import { userContext } from "../UserContext";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase"; import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
import { createCollection } from "./dataAccess/createCollection"; import { createCollection } from "./dataAccess/createCollection";
import { handleError } from "./ErrorHandlingUtils"; import { handleError } from "./ErrorHandlingUtils";
export class QueriesClient { export class QueriesClient {
private static readonly PartitionKey: DataModels.PartitionKey = { private static readonly PartitionKey: DataModels.PartitionKey = {
paths: [`/${SavedQueries.PartitionKeyProperty}`], paths: [`/${SavedQueries.PartitionKeyProperty}`],
kind: BackendDefaults.partitionKeyKind, kind: BackendDefaults.partitionKeyKind,
version: BackendDefaults.partitionKeyVersion, version: BackendDefaults.partitionKeyVersion
}; };
private static readonly FetchQuery: string = "SELECT * FROM c"; private static readonly FetchQuery: string = "SELECT * FROM c";
private static readonly FetchMongoQuery: string = "{}"; private static readonly FetchMongoQuery: string = "{}";
public constructor(private container: Explorer) {} public constructor(private container: Explorer) {}
public async setupQueriesCollection(): Promise<DataModels.Collection> { public async setupQueriesCollection(): Promise<DataModels.Collection> {
const queriesCollection: ViewModels.Collection = this.findQueriesCollection(); const queriesCollection: ViewModels.Collection = this.findQueriesCollection();
if (queriesCollection) { if (queriesCollection) {
return Promise.resolve(queriesCollection.rawDataModel); return Promise.resolve(queriesCollection.rawDataModel);
} }
const id = NotificationConsoleUtils.logConsoleMessage( const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress, ConsoleDataType.InProgress,
"Setting up account for saving queries" "Setting up account for saving queries"
); );
return createCollection({ return createCollection({
collectionId: SavedQueries.CollectionName, collectionId: SavedQueries.CollectionName,
createNewDatabase: true, createNewDatabase: true,
databaseId: SavedQueries.DatabaseName, databaseId: SavedQueries.DatabaseName,
partitionKey: QueriesClient.PartitionKey, partitionKey: QueriesClient.PartitionKey,
offerThroughput: SavedQueries.OfferThroughput, offerThroughput: SavedQueries.OfferThroughput,
databaseLevelThroughput: false, databaseLevelThroughput: false
}) })
.then( .then(
(collection: DataModels.Collection) => { (collection: DataModels.Collection) => {
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info, ConsoleDataType.Info,
"Successfully set up account for saving queries" "Successfully set up account for saving queries"
); );
return Promise.resolve(collection); return Promise.resolve(collection);
}, },
(error: any) => { (error: any) => {
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries"); handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
return Promise.reject(error); return Promise.reject(error);
} }
) )
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
} }
public async saveQuery(query: DataModels.Query): Promise<void> { public async saveQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection(); const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) { if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations"; const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error, ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}` `Failed to save query ${query.queryName}: ${errorMessage}`
); );
return Promise.reject(errorMessage); return Promise.reject(errorMessage);
} }
try { try {
this.validateQuery(query); this.validateQuery(query);
} catch (error) { } catch (error) {
const errorMessage: string = "Invalid query specified"; const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error, ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}` `Failed to save query ${query.queryName}: ${errorMessage}`
); );
return Promise.reject(errorMessage); return Promise.reject(errorMessage);
} }
const id = NotificationConsoleUtils.logConsoleMessage( const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress, ConsoleDataType.InProgress,
`Saving query ${query.queryName}` `Saving query ${query.queryName}`
); );
query.id = query.queryName; query.id = query.queryName;
return createDocument(queriesCollection, query) return createDocument(queriesCollection, query)
.then( .then(
(savedQuery: DataModels.Query) => { (savedQuery: DataModels.Query) => {
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info, ConsoleDataType.Info,
`Successfully saved query ${query.queryName}` `Successfully saved query ${query.queryName}`
); );
return Promise.resolve(); return Promise.resolve();
}, },
(error: any) => { (error: any) => {
if (error.code === HttpStatusCodes.Conflict.toString()) { if (error.code === HttpStatusCodes.Conflict.toString()) {
error = `Query ${query.queryName} already exists`; error = `Query ${query.queryName} already exists`;
} }
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`); handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
return Promise.reject(error); return Promise.reject(error);
} }
) )
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
} }
public async getQueries(): Promise<DataModels.Query[]> { public async getQueries(): Promise<DataModels.Query[]> {
const queriesCollection = this.findQueriesCollection(); const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) { if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations"; const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error, ConsoleDataType.Error,
`Failed to fetch saved queries: ${errorMessage}` `Failed to fetch saved queries: ${errorMessage}`
); );
return Promise.reject(errorMessage); return Promise.reject(errorMessage);
} }
const options: any = { enableCrossPartitionQuery: true }; const options: any = { enableCrossPartitionQuery: true };
const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries"); const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries");
return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options) return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options)
.then( .then(
(queryIterator: QueryIterator<ItemDefinition & Resource>) => { (queryIterator: QueryIterator<ItemDefinition & Resource>) => {
const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> => const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> =>
queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options); queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options);
return QueryUtils.queryAllPages(fetchQueries).then( return QueryUtils.queryAllPages(fetchQueries).then(
(results: ViewModels.QueryResults) => { (results: ViewModels.QueryResults) => {
let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => { let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
if (!document) { if (!document) {
return undefined; return undefined;
} }
const { id, resourceId, query, queryName } = document; const { id, resourceId, query, queryName } = document;
const parsedQuery: DataModels.Query = { const parsedQuery: DataModels.Query = {
resourceId: resourceId, resourceId: resourceId,
queryName: queryName, queryName: queryName,
query: query, query: query,
id: id, id: id
}; };
try { try {
this.validateQuery(parsedQuery); this.validateQuery(parsedQuery);
return parsedQuery; return parsedQuery;
} catch (error) { } catch (error) {
return undefined; return undefined;
} }
}); });
queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery); queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries"); NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries");
return Promise.resolve(queries); return Promise.resolve(queries);
}, },
(error: any) => { (error: any) => {
handleError(error, "getSavedQueries", "Failed to fetch saved queries"); handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error); return Promise.reject(error);
} }
); );
}, },
(error: any) => { (error: any) => {
// should never get into this state but we handle this regardless // should never get into this state but we handle this regardless
handleError(error, "getSavedQueries", "Failed to fetch saved queries"); handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error); return Promise.reject(error);
} }
) )
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
} }
public async deleteQuery(query: DataModels.Query): Promise<void> { public async deleteQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection(); const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) { if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations"; const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error, ConsoleDataType.Error,
`Failed to fetch saved queries: ${errorMessage}` `Failed to fetch saved queries: ${errorMessage}`
); );
return Promise.reject(errorMessage); return Promise.reject(errorMessage);
} }
try { try {
this.validateQuery(query); this.validateQuery(query);
} catch (error) { } catch (error) {
const errorMessage: string = "Invalid query specified"; const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error, ConsoleDataType.Error,
`Failed to delete query ${query.queryName}: ${errorMessage}` `Failed to delete query ${query.queryName}: ${errorMessage}`
); );
} }
const id = NotificationConsoleUtils.logConsoleMessage( const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress, ConsoleDataType.InProgress,
`Deleting query ${query.queryName}` `Deleting query ${query.queryName}`
); );
query.id = query.queryName; query.id = query.queryName;
const documentId = new DocumentId( const documentId = new DocumentId(
{ {
partitionKey: QueriesClient.PartitionKey, partitionKey: QueriesClient.PartitionKey,
partitionKeyProperty: "id", partitionKeyProperty: "id"
} as DocumentsTab, } as DocumentsTab,
query, query,
query.queryName query.queryName
); // TODO: Remove DocumentId's dependency on DocumentsTab ); // TODO: Remove DocumentId's dependency on DocumentsTab
const options: any = { partitionKey: query.resourceId }; const options: any = { partitionKey: query.resourceId };
return deleteDocument(queriesCollection, documentId) return deleteDocument(queriesCollection, documentId)
.then( .then(
() => { () => {
NotificationConsoleUtils.logConsoleMessage( NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info, ConsoleDataType.Info,
`Successfully deleted query ${query.queryName}` `Successfully deleted query ${query.queryName}`
); );
return Promise.resolve(); return Promise.resolve();
}, },
(error: any) => { (error: any) => {
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`); handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
return Promise.reject(error); return Promise.reject(error);
} }
) )
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
} }
public getResourceId(): string { public getResourceId(): string {
const databaseAccount = userContext.databaseAccount; const databaseAccount = userContext.databaseAccount;
const databaseAccountName = (databaseAccount && databaseAccount.name) || ""; const databaseAccountName = (databaseAccount && databaseAccount.name) || "";
const subscriptionId = userContext.subscriptionId || ""; const subscriptionId = userContext.subscriptionId || "";
const resourceGroup = userContext.resourceGroup || ""; const resourceGroup = userContext.resourceGroup || "";
return `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDb/databaseAccounts/${databaseAccountName}`; return `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDb/databaseAccounts/${databaseAccountName}`;
} }
private findQueriesCollection(): ViewModels.Collection { private findQueriesCollection(): ViewModels.Collection {
const queriesDatabase: ViewModels.Database = _.find( const queriesDatabase: ViewModels.Database = _.find(
this.container.databases(), this.container.databases(),
(database: ViewModels.Database) => database.id() === SavedQueries.DatabaseName (database: ViewModels.Database) => database.id() === SavedQueries.DatabaseName
); );
if (!queriesDatabase) { if (!queriesDatabase) {
return undefined; return undefined;
} }
return _.find( return _.find(
queriesDatabase.collections(), queriesDatabase.collections(),
(collection: ViewModels.Collection) => collection.id() === SavedQueries.CollectionName (collection: ViewModels.Collection) => collection.id() === SavedQueries.CollectionName
); );
} }
private validateQuery(query: DataModels.Query): void { private validateQuery(query: DataModels.Query): void {
if (!query || query.queryName == null || query.query == null || query.resourceId == null) { if (!query || query.queryName == null || query.query == null || query.resourceId == null) {
throw new Error("Invalid query specified"); throw new Error("Invalid query specified");
} }
} }
private fetchQueriesQuery(): string { private fetchQueriesQuery(): string {
if (this.container.isPreferredApiMongoDB()) { if (this.container.isPreferredApiMongoDB()) {
return QueriesClient.FetchMongoQuery; return QueriesClient.FetchMongoQuery;
} }
return QueriesClient.FetchQuery; return QueriesClient.FetchQuery;
} }
} }
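
A rough usage sketch for the QueriesClient shown above — assuming it is imported from src/Common/QueriesClient and that an Explorer instance is at hand; the query values are illustrative:

import * as DataModels from "../Contracts/DataModels";
import Explorer from "../Explorer/Explorer";
import { QueriesClient } from "./QueriesClient";

async function demoSavedQueries(container: Explorer): Promise<void> {
  const queriesClient = new QueriesClient(container);

  // Creates the SavedQueries database/collection pair if it does not exist yet.
  await queriesClient.setupQueriesCollection();

  // saveQuery overwrites id with queryName, so query names must be unique (409 Conflict otherwise).
  const query: DataModels.Query = {
    id: "Top 10 items",
    queryName: "Top 10 items",
    query: "SELECT TOP 10 * FROM c",
    resourceId: queriesClient.getResourceId(),
  };
  await queriesClient.saveQuery(query);

  const savedQueries = await queriesClient.getQueries();
  console.log(`Fetched ${savedQueries.length} saved queries`);
}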

View File

@@ -1,106 +1,108 @@
import * as ko from "knockout"; import * as ko from "knockout";
import { SplitterMetrics } from "./Constants"; import { SplitterMetrics } from "./Constants";
export enum SplitterDirection { export enum SplitterDirection {
Horizontal = "horizontal", Horizontal = "horizontal",
Vertical = "vertical", Vertical = "vertical"
} }
export interface SplitterBounds { export interface SplitterBounds {
max: number; max: number;
min: number; min: number;
} }
export interface SplitterOptions { export interface SplitterOptions {
splitterId: string; splitterId: string;
leftId: string; leftId: string;
bounds: SplitterBounds; bounds: SplitterBounds;
direction: SplitterDirection; direction: SplitterDirection;
} }
export class Splitter { export class Splitter {
public splitterId: string; public splitterId: string;
public leftSideId: string; public leftSideId: string;
public splitter: HTMLElement; public splitter: HTMLElement;
public leftSide: HTMLElement; public leftSide: HTMLElement;
public lastX: number; public lastX: number;
public lastWidth: number; public lastWidth: number;
private isCollapsed: ko.Observable<boolean>; private isCollapsed: ko.Observable<boolean>;
private bounds: SplitterBounds; private bounds: SplitterBounds;
private direction: SplitterDirection; private direction: SplitterDirection;
constructor(options: SplitterOptions) { constructor(options: SplitterOptions) {
this.splitterId = options.splitterId; this.splitterId = options.splitterId;
this.leftSideId = options.leftId; this.leftSideId = options.leftId;
this.isCollapsed = ko.observable<boolean>(false); this.isCollapsed = ko.observable<boolean>(false);
this.bounds = options.bounds; this.bounds = options.bounds;
this.direction = options.direction; this.direction = options.direction;
this.initialize(); this.initialize();
} }
public initialize() { public initialize() {
this.splitter = document.getElementById(this.splitterId); this.splitter = document.getElementById(this.splitterId);
this.leftSide = document.getElementById(this.leftSideId); this.leftSide = document.getElementById(this.leftSideId);
const isVerticalSplitter: boolean = this.direction === SplitterDirection.Vertical; const isVerticalSplitter: boolean = this.direction === SplitterDirection.Vertical;
const splitterOptions: JQueryUI.ResizableOptions = { const splitterOptions: JQueryUI.ResizableOptions = {
animate: true, animate: true,
animateDuration: "fast", animateDuration: "fast",
start: this.onResizeStart, start: this.onResizeStart,
stop: this.onResizeStop, stop: this.onResizeStop
}; };
if (isVerticalSplitter) { if (isVerticalSplitter) {
$(this.leftSide).css("width", this.bounds.min); $(this.leftSide).css("width", this.bounds.min);
$(this.splitter).css("height", "100%"); $(this.splitter).css("height", "100%");
splitterOptions.maxWidth = this.bounds.max; splitterOptions.maxWidth = this.bounds.max;
splitterOptions.minWidth = this.bounds.min; splitterOptions.minWidth = this.bounds.min;
splitterOptions.handles = { e: "#" + this.splitterId }; splitterOptions.handles = { e: "#" + this.splitterId };
} else { } else {
$(this.leftSide).css("height", this.bounds.min); $(this.leftSide).css("height", this.bounds.min);
$(this.splitter).css("width", "100%"); $(this.splitter).css("width", "100%");
splitterOptions.maxHeight = this.bounds.max; splitterOptions.maxHeight = this.bounds.max;
splitterOptions.minHeight = this.bounds.min; splitterOptions.minHeight = this.bounds.min;
splitterOptions.handles = { s: "#" + this.splitterId }; splitterOptions.handles = { s: "#" + this.splitterId };
} }
$(this.leftSide).resizable(splitterOptions); $(this.leftSide).resizable(splitterOptions);
} }
private onResizeStart: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => { private onResizeStart: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => {
if (this.direction === SplitterDirection.Vertical) { if (this.direction === SplitterDirection.Vertical) {
$(".ui-resizable-helper").height("100%"); $(".ui-resizable-helper").height("100%");
} else { } else {
$(".ui-resizable-helper").width("100%"); $(".ui-resizable-helper").width("100%");
} }
$("iframe").css("pointer-events", "none"); $("iframe").css("pointer-events", "none");
}; };
private onResizeStop: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => { private onResizeStop: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => {
$("iframe").css("pointer-events", "auto"); $("iframe").css("pointer-events", "auto");
}; };
public collapseLeft() { public collapseLeft() {
this.lastX = $(this.splitter).position().left; this.lastX = $(this.splitter).position().left;
this.lastWidth = $(this.leftSide).width(); this.lastWidth = $(this.leftSide).width();
$(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft); $(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
$(this.leftSide).css("width", ""); $(this.leftSide).css("width", "");
$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible $(this.leftSide)
$(this.splitter).removeClass("ui-resizable-e"); .resizable("option", "disabled", true)
this.isCollapsed(true); .removeClass("ui-resizable-disabled"); // remove class so splitter is visible
} $(this.splitter).removeClass("ui-resizable-e");
this.isCollapsed(true);
public expandLeft() { }
$(this.splitter).addClass("ui-resizable-e");
$(this.leftSide).css("width", this.lastWidth); public expandLeft() {
$(this.splitter).css("left", this.lastX); $(this.splitter).addClass("ui-resizable-e");
$(this.splitter).css("left", ""); // this ensures the splitter's position is not fixed and enables movement during resizing $(this.leftSide).css("width", this.lastWidth);
$(this.leftSide).resizable("enable"); $(this.splitter).css("left", this.lastX);
this.isCollapsed(false); $(this.splitter).css("left", ""); // this ensures the splitter's position is not fixed and enables movement during resizing
} $(this.leftSide).resizable("enable");
} this.isCollapsed(false);
}
}
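
A minimal construction sketch for the Splitter above — element ids and pixel bounds are placeholders, and the import path assumes the class sits next to its Constants import:

import { Splitter, SplitterDirection } from "./Splitter";

// Assumes <div id="main-splitter"> sits between <div id="resource-tree"> and the content pane.
const treeSplitter = new Splitter({
  splitterId: "main-splitter",
  leftId: "resource-tree",
  bounds: { min: 240, max: 600 }, // forwarded to jQuery UI as minWidth/maxWidth for a vertical splitter
  direction: SplitterDirection.Vertical,
});

// collapseLeft() remembers the last position/width so expandLeft() can restore it.
treeSplitter.collapseLeft();
treeSplitter.expandLeft();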

View File

@@ -32,8 +32,8 @@ export default class UrlUtility {
type: type, type: type,
objectBody: { objectBody: {
id: id, id: id,
self: resourcePath, self: resourcePath
}, }
}; };
return result; return result;

View File

@@ -15,15 +15,15 @@ describe("createCollection", () => {
collectionId: "testContainer", collectionId: "testContainer",
databaseId: "testDatabase", databaseId: "testDatabase",
databaseLevelThroughput: true, databaseLevelThroughput: true,
offerThroughput: 400, offerThroughput: 400
}; };
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -41,12 +41,12 @@ describe("createCollection", () => {
return { return {
database: { database: {
containers: { containers: {
create: () => ({}), create: () => ({})
}, }
}, }
}; };
}, }
}, }
}); });
await createCollection(createCollectionParams); await createCollection(createCollectionParams);
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();
@@ -60,7 +60,7 @@ describe("createCollection", () => {
collectionId: "testContainer", collectionId: "testContainer",
databaseId: "testDatabase", databaseId: "testDatabase",
databaseLevelThroughput: false, databaseLevelThroughput: false,
offerThroughput: 400, offerThroughput: 400
}; };
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 }); expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -70,12 +70,12 @@ describe("createCollection", () => {
databaseId: "testDatabase", databaseId: "testDatabase",
databaseLevelThroughput: false, databaseLevelThroughput: false,
offerThroughput: 400, offerThroughput: 400,
autoPilotMaxThroughput: 4000, autoPilotMaxThroughput: 4000
}; };
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({ expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
autoscaleSettings: { autoscaleSettings: {
maxThroughput: 4000, maxThroughput: 4000
}, }
}); });
}); });
}); });

View File

@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { import {
createUpdateCassandraTable, createUpdateCassandraTable,
getCassandraTable, getCassandraTable
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { import {
createUpdateMongoDBCollection, createUpdateMongoDBCollection,
getMongoDBCollection, getMongoDBCollection
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { import {
createUpdateGremlinGraph, createUpdateGremlinGraph,
getGremlinGraph, getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources"; import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
autoPilotMaxThroughput: params.autoPilotMaxThroughput, autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId, databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput, databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput, offerThroughput: params.offerThroughput
}; };
await createDatabase(createDatabaseParams); await createDatabase(createDatabaseParams);
} }
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = { const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.analyticalStorageTtl) { if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl; resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = { const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateSqlContainer( const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = { const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.analyticalStorageTtl) { if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl; resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = { const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateMongoDBCollection( const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) { if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, { TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields.", message: "Mongo Collection created with wildcard index on all fields."
}); });
} }
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = { const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.analyticalStorageTtl) { if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl; resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = { const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateCassandraTable( const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = { const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId, id: params.collectionId
}; };
if (params.indexingPolicy) { if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = { const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateGremlinGraph( const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params); const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = { const resource: ARMTypes.TableResource = {
id: params.collectionId, id: params.collectionId
}; };
const rpPayload: ARMTypes.TableCreateUpdateParameters = { const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: { properties: {
resource, resource,
options, options
}, }
}; };
const createResponse = await createUpdateTable( const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) { if (params.autoPilotMaxThroughput) {
return { return {
autoscaleSettings: { autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput, maxThroughput: params.autoPilotMaxThroughput
}, }
}; };
} }
return { return {
throughput: params.offerThroughput, throughput: params.offerThroughput
}; };
}; };
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined, partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined, indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined, uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl, analyticalStorageTtl: params.analyticalStorageTtl
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed } as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {}; const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId }; const createDatabaseBody: DatabaseRequest = { id: params.databaseId };
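
A hedged caller sketch for createCollection — field values are illustrative, the partition key literal assumes the Hash/v2 defaults used elsewhere in the codebase, and the import paths mirror the ones in the diff above:

import * as DataModels from "../../Contracts/DataModels";
import { createCollection } from "./createCollection";

async function createItemsContainer(): Promise<void> {
  const params: DataModels.CreateCollectionParams = {
    createNewDatabase: false,
    collectionId: "items",
    databaseId: "storeDatabase",
    databaseLevelThroughput: false,
    // Provide either offerThroughput (manual RU/s) or autoPilotMaxThroughput (autoscale);
    // constructRpOptions maps them to { throughput } or { autoscaleSettings: { maxThroughput } }.
    offerThroughput: 400,
    partitionKey: { paths: ["/pk"], kind: "Hash", version: 2 },
  };

  await createCollection(params);
  console.log("Container items created");
}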

View File

@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters, GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters, MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters, SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions, CreateUpdateOptions
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { import {
createUpdateCassandraKeyspace, createUpdateCassandraKeyspace,
getCassandraKeyspace, getCassandraKeyspace
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources"; } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { import {
createUpdateMongoDBDatabase, createUpdateMongoDBDatabase,
getMongoDBDatabase, getMongoDBDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { import {
createUpdateGremlinDatabase, createUpdateGremlinDatabase,
getGremlinDatabase, getGremlinDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = { const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
}, },
options, options
}, }
}; };
const createResponse = await createUpdateSqlDatabase( const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId, userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = { const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
}, },
options, options
}, }
}; };
const createResponse = await createUpdateMongoDBDatabase( const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId, userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = { const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
}, },
options, options
}, }
}; };
const createResponse = await createUpdateCassandraKeyspace( const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId, userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = { const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: { properties: {
resource: { resource: {
id: params.databaseId, id: params.databaseId
}, },
options, options
}, }
}; };
const createResponse = await createUpdateGremlinDatabase( const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId, userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) { if (params.autoPilotMaxThroughput) {
return { return {
autoscaleSettings: { autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput, maxThroughput: params.autoPilotMaxThroughput
}, }
}; };
} }
return { return {
throughput: params.offerThroughput, throughput: params.offerThroughput
}; };
} }
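
The database path mirrors the container path: constructRpOptions emits autoscaleSettings when autoPilotMaxThroughput is set and a flat throughput otherwise. A short sketch of a shared-throughput database call — assuming the module exports createDatabase the same way createCollection consumes it above, with illustrative values:

import * as DataModels from "../../Contracts/DataModels";
import { createDatabase } from "./createDatabase";

async function createSharedThroughputDatabase(): Promise<void> {
  const params: DataModels.CreateDatabaseParams = {
    databaseId: "storeDatabase",
    databaseLevelThroughput: true,
    // Autoscale: becomes { autoscaleSettings: { maxThroughput: 4000 } } in the RP payload.
    autoPilotMaxThroughput: 4000,
  };
  await createDatabase(params);
}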

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos"; import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import { import {
SqlStoredProcedureCreateUpdateParameters, SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource, SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { import {
createUpdateSqlStoredProcedure, createUpdateSqlStoredProcedure,
getSqlStoredProcedure, getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = { const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: { properties: {
resource: storedProcedure as SqlStoredProcedureResource, resource: storedProcedure as SqlStoredProcedureResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlStoredProcedure( const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos"; import { Resource, TriggerDefinition } from "@azure/cosmos";
import { import {
SqlTriggerCreateUpdateParameters, SqlTriggerCreateUpdateParameters,
SqlTriggerResource, SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
@@ -44,8 +44,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = { const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: { properties: {
resource: trigger as SqlTriggerResource, resource: trigger as SqlTriggerResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlTrigger( const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId, userContext.subscriptionId,
@@ -59,7 +59,10 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource); return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
} }
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger); const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
return response.resource; return response.resource;
} catch (error) { } catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`); handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos"; import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import { import {
SqlUserDefinedFunctionCreateUpdateParameters, SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource, SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types"; } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient"; import { client } from "../CosmosClient";
import { import {
createUpdateSqlUserDefinedFunction, createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction, getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = { const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: { properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource, resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}, options: {}
}, }
}; };
const rpResponse = await createUpdateSqlUserDefinedFunction( const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId, userContext.subscriptionId,

View File

@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
return { return {
container: () => { container: () => {
return { return {
delete: (): unknown => undefined, delete: (): unknown => undefined
}; };
}, }
}; };
}, }
}); });
await deleteCollection("database", "collection"); await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -16,7 +16,10 @@ export async function deleteCollection(databaseId: string, collectionId: string)
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) { if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId); await deleteCollectionWithARM(databaseId, collectionId);
} else { } else {
await client().database(databaseId).container(collectionId).delete(); await client()
.database(databaseId)
.container(collectionId)
.delete();
} }
logConsoleInfo(`Successfully deleted container ${collectionId}`); logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) { } catch (error) {

View File

@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
(client as jest.Mock).mockReturnValue({ (client as jest.Mock).mockReturnValue({
database: () => { database: () => {
return { return {
delete: (): unknown => undefined, delete: (): unknown => undefined
}; };
}, }
}); });
await deleteDatabase("database"); await deleteDatabase("database");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -19,7 +19,9 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) { if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId); await deleteDatabaseWithARM(databaseId);
} else { } else {
await client().database(databaseId).delete(); await client()
.database(databaseId)
.delete();
} }
logConsoleInfo(`Successfully deleted database ${databaseId}`); logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) { } catch (error) {
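
deleteCollection and deleteDatabase above share the same gate: AAD auth without the userContext.useSDKOperations override goes through ARM, everything else through the SDK chain. A small combined usage sketch (ids are placeholders; sibling import paths assumed):

import { deleteCollection } from "./deleteCollection";
import { deleteDatabase } from "./deleteDatabase";

// Both helpers decide internally between deleteXxxWithARM and the JS SDK
// (client().database(id).container(id).delete()), so callers stay transport-agnostic.
async function dropContainerThenDatabase(databaseId: string, collectionId: string): Promise<void> {
  await deleteCollection(databaseId, collectionId);
  await deleteDatabase(databaseId);
}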

View File

@@ -27,7 +27,11 @@ export async function deleteStoredProcedure(
storedProcedureId storedProcedureId
); );
} else { } else {
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete(); await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
} }
} catch (error) { } catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`); handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);

View File

@@ -23,7 +23,11 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
triggerId triggerId
); );
} else { } else {
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete(); await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
} }
} catch (error) { } catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`); handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);

View File

@@ -23,7 +23,11 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
id id
); );
} else { } else {
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete(); await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
} }
} catch (error) { } catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`); handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);

View File

@@ -1,87 +0,0 @@
import { armRequest } from "../../Utils/arm/request";
import { configContext } from "../../ConfigContext";
import { handleError } from "../ErrorHandlingUtils";
import { userContext } from "../../UserContext";
interface TimeSeriesData {
data: {
timeStamp: string;
total: number;
}[];
metadatavalues: {
name: {
localizedValue: string;
value: string;
};
value: string;
};
}
interface MetricsData {
displayDescription: string;
errorCode: string;
id: string;
name: {
value: string;
localizedValue: string;
};
timeseries: TimeSeriesData[];
type: string;
unit: string;
}
interface MetricsResponse {
cost: number;
interval: string;
namespace: string;
resourceregion: string;
timespan: string;
value: MetricsData[];
}
export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;
const filter = `DatabaseName eq '${databaseName}' and CollectionName eq '${containerName}'`;
const metricNames = "DataUsage,IndexUsage";
const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/providers/microsoft.insights/metrics`;
try {
const metricsResponse: MetricsResponse = await armRequest({
host: configContext.ARM_ENDPOINT,
path,
method: "GET",
apiVersion: "2018-01-01",
queryParams: {
filter,
metricNames,
},
});
if (metricsResponse?.value?.length !== 2) {
return undefined;
}
const dataUsageData: MetricsData = metricsResponse.value[0];
const indexUsagedata: MetricsData = metricsResponse.value[1];
const dataUsageSizeInKb: number = getUsageSizeInKb(dataUsageData);
const indexUsageSizeInKb: number = getUsageSizeInKb(indexUsagedata);
return dataUsageSizeInKb + indexUsageSizeInKb;
} catch (error) {
handleError(error, "getCollectionUsageSize");
throw error;
}
};
const getUsageSizeInKb = (metricsData: MetricsData): number => {
if (metricsData?.errorCode !== "Success") {
throw Error(`Get collection usage size failed: ${metricsData.errorCode}`);
}
const timeSeriesData: TimeSeriesData = metricsData?.timeseries?.[0];
const usageSizeInBytes: number = timeSeriesData?.data?.[0]?.total;
return usageSizeInBytes ? usageSizeInBytes / 1024 : 0;
};
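
The metrics helper above requests the DataUsage and IndexUsage series from the microsoft.insights metrics endpoint and returns their sum in KB. A usage sketch — the module path and the database/container names are assumptions, not shown in this compare:

import { getCollectionUsageSizeInKB } from "./getCollectionDataUsageSize";

async function logContainerUsage(): Promise<void> {
  // Returns undefined when the response does not contain exactly the two expected metric series.
  const usageInKB = await getCollectionUsageSizeInKB("storeDatabase", "items");
  if (usageInKB !== undefined) {
    console.log(`Data + index usage: ${(usageInKB / 1024).toFixed(1)} MB`);
  }
}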

View File

@@ -11,7 +11,10 @@ export async function getIndexTransformationProgress(databaseId: string, collect
let indexTransformationPercentage: number; let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`); const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try { try {
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true }); const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt( indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string

View File

@@ -10,9 +10,9 @@ describe("readCollection", () => {
beforeAll(() => { beforeAll(() => {
updateUserContext({ updateUserContext({
databaseAccount: { databaseAccount: {
name: "test", name: "test"
} as DatabaseAccount, } as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB, defaultExperience: DefaultAccountExperienceType.DocumentDB
}); });
}); });
@@ -23,11 +23,11 @@ describe("readCollection", () => {
return { return {
container: () => { container: () => {
return { return {
read: (): unknown => ({}), read: (): unknown => ({})
}; };
}, }
}; };
}, }
}); });
await readCollection("database", "collection"); await readCollection("database", "collection");
expect(client).toHaveBeenCalled(); expect(client).toHaveBeenCalled();

View File

@@ -7,7 +7,10 @@ export async function readCollection(databaseId: string, collectionId: string):
let collection: DataModels.Collection; let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`); const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
try { try {
const response = await client().database(databaseId).container(collectionId).read(); const response = await client()
.database(databaseId)
.container(collectionId)
.read();
collection = response.resource as DataModels.Collection; collection = response.resource as DataModels.Collection;
} catch (error) { } catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`); handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);

View File

@@ -1,6 +1,9 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType"; import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType"; import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadCollectionOfferParams } from "../../Contracts/DataModels"; import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils"; import { handleError } from "../ErrorHandlingUtils";
import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources"; import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources"; import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
@@ -8,22 +11,50 @@ import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/20
import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources"; import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources"; import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils"; import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK"; import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext"; import { userContext } from "../../UserContext";
export const readCollectionOffer = async (params: ReadCollectionOfferParams): Promise<Offer> => { export const readCollectionOffer = async (
params: DataModels.ReadCollectionOfferParams
): Promise<DataModels.OfferWithHeaders> => {
const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`); const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`);
let offerId = params.offerId;
if (!offerId) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
try {
offerId = await getCollectionOfferIdWithARM(params.databaseId, params.collectionId);
} catch (error) {
clearMessage();
if (error.code !== "NotFound") {
throw error;
}
return undefined;
}
} else {
offerId = await getCollectionOfferIdWithSDK(params.collectionResourceId);
if (!offerId) {
clearMessage();
return undefined;
}
}
}
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
};
try { try {
if ( const response = await client()
window.authType === AuthType.AAD && .offer(offerId)
!userContext.useSDKOperations && .read(options);
userContext.defaultExperience !== DefaultAccountExperienceType.Table return (
) { response && {
return await readCollectionOfferWithARM(params.databaseId, params.collectionId); ...response.resource,
} headers: response.headers
}
return await readOfferWithSDK(params.offerId, params.collectionResourceId); );
} catch (error) { } catch (error) {
handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`); handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
throw error; throw error;
@@ -32,90 +63,61 @@ export const readCollectionOffer = async (params: ReadCollectionOfferParams): Pr
   }
 };
 
-const readCollectionOfferWithARM = async (databaseId: string, collectionId: string): Promise<Offer> => {
+const getCollectionOfferIdWithARM = async (databaseId: string, collectionId: string): Promise<string> => {
+  let rpResponse;
   const subscriptionId = userContext.subscriptionId;
   const resourceGroup = userContext.resourceGroup;
   const accountName = userContext.databaseAccount.name;
   const defaultExperience = userContext.defaultExperience;
-
-  let rpResponse;
-  try {
-    switch (defaultExperience) {
-      case DefaultAccountExperienceType.DocumentDB:
-        rpResponse = await getSqlContainerThroughput(
-          subscriptionId,
-          resourceGroup,
-          accountName,
-          databaseId,
-          collectionId
-        );
-        break;
-      case DefaultAccountExperienceType.MongoDB:
-        rpResponse = await getMongoDBCollectionThroughput(
-          subscriptionId,
-          resourceGroup,
-          accountName,
-          databaseId,
-          collectionId
-        );
-        break;
-      case DefaultAccountExperienceType.Cassandra:
-        rpResponse = await getCassandraTableThroughput(
-          subscriptionId,
-          resourceGroup,
-          accountName,
-          databaseId,
-          collectionId
-        );
-        break;
-      case DefaultAccountExperienceType.Graph:
-        rpResponse = await getGremlinGraphThroughput(
-          subscriptionId,
-          resourceGroup,
-          accountName,
-          databaseId,
-          collectionId
-        );
-        break;
-      case DefaultAccountExperienceType.Table:
-        rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
-        break;
-      default:
-        throw new Error(`Unsupported default experience type: ${defaultExperience}`);
-    }
-  } catch (error) {
-    if (error.code !== "NotFound") {
-      throw error;
-    }
-    return undefined;
-  }
-
-  const resource = rpResponse?.properties?.resource;
-  if (resource) {
-    const offerId: string = rpResponse.name;
-    const minimumThroughput: number =
-      typeof resource.minimumThroughput === "string"
-        ? parseInt(resource.minimumThroughput)
-        : resource.minimumThroughput;
-    const autoscaleSettings = resource.autoscaleSettings;
-    if (autoscaleSettings) {
-      return {
-        id: offerId,
-        autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
-        manualThroughput: undefined,
-        minimumThroughput,
-      };
-    }
-    return {
-      id: offerId,
-      autoscaleMaxThroughput: undefined,
-      manualThroughput: resource.throughput,
-      minimumThroughput,
-    };
-  }
-  return undefined;
+  switch (defaultExperience) {
+    case DefaultAccountExperienceType.DocumentDB:
+      rpResponse = await getSqlContainerThroughput(
+        subscriptionId,
+        resourceGroup,
+        accountName,
+        databaseId,
+        collectionId
+      );
+      break;
+    case DefaultAccountExperienceType.MongoDB:
+      rpResponse = await getMongoDBCollectionThroughput(
+        subscriptionId,
+        resourceGroup,
+        accountName,
+        databaseId,
+        collectionId
+      );
+      break;
+    case DefaultAccountExperienceType.Cassandra:
+      rpResponse = await getCassandraTableThroughput(
+        subscriptionId,
+        resourceGroup,
+        accountName,
+        databaseId,
+        collectionId
+      );
+      break;
+    case DefaultAccountExperienceType.Graph:
+      rpResponse = await getGremlinGraphThroughput(
+        subscriptionId,
+        resourceGroup,
+        accountName,
+        databaseId,
+        collectionId
+      );
+      break;
+    case DefaultAccountExperienceType.Table:
+      rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
+      break;
+    default:
+      throw new Error(`Unsupported default experience type: ${defaultExperience}`);
+  }
+  return rpResponse?.name;
+};
+
+const getCollectionOfferIdWithSDK = async (collectionResourceId: string): Promise<string> => {
+  const offers = await readOffers();
+  const offer = offers.find(offer => offer.resource === collectionResourceId);
+  return offer?.id;
 };
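
Read together, the right-hand column of this file replaces the ARM response parsing with a two step flow: resolve the offer id, then read the offer itself through the SDK. A condensed sketch of that flow; every identifier comes from the hunks above, the imports are assumed to match this file's, and error handling is omitted:

// Sketch only: condenses the right-hand readCollectionOffer flow shown above.
const readCollectionOfferSketch = async (databaseId: string, collectionId: string, collectionResourceId: string) => {
  // 1. Resolve the offer id: ARM throughput GET (rpResponse?.name) under AAD,
  //    otherwise scan the SDK offers list for a matching resource id.
  const offerId =
    window.authType === AuthType.AAD && !userContext.useSDKOperations
      ? await getCollectionOfferIdWithARM(databaseId, collectionId)
      : await getCollectionOfferIdWithSDK(collectionResourceId);
  if (!offerId) {
    return undefined;
  }
  // 2. Read the offer through the SDK, asking the backend to populate
  //    collection throughput info via a request header.
  const options: RequestOptions = {
    initialHeaders: { [HttpHeaders.populateCollectionThroughputInfo]: true }
  };
  const response = await client().offer(offerId).read(options);
  return response && { ...response.resource, headers: response.headers };
};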

View File

@@ -12,9 +12,9 @@ describe("readCollections", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test",
+        name: "test"
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB,
+      defaultExperience: DefaultAccountExperienceType.DocumentDB
     });
   });
@@ -32,12 +32,12 @@ describe("readCollections", () => {
           containers: {
             readAll: () => {
               return {
-                fetchAll: (): unknown => [],
+                fetchAll: (): unknown => []
               };
-            },
-          },
+            }
+          }
         };
-      },
+      }
     });
     await readCollections("database");
     expect(client).toHaveBeenCalled();
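
Aside from dropping trailing commas, these hunks also document the mock shape the chained SDK call needs. A minimal jest sketch of the same idea, meant to live inside an async it(...) block with client already mocked as in this spec:

// Minimal mock matching client().database(id).containers.readAll().fetchAll()
(client as jest.Mock).mockReturnValue({
  database: () => ({
    containers: {
      readAll: () => ({ fetchAll: (): unknown => [] })
    }
  })
});
await readCollections("database");
expect(client).toHaveBeenCalled();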

View File

@@ -23,7 +23,10 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
       return await readCollectionsWithARM(databaseId);
     }
 
-    const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
+    const sdkResponse = await client()
+      .database(databaseId)
+      .containers.readAll()
+      .fetchAll();
     return sdkResponse.resources as DataModels.Collection[];
   } catch (error) {
     handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
@@ -60,5 +63,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
       throw new Error(`Unsupported default experience type: ${defaultExperience}`);
   }
-  return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
+  return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
 }

View File

@@ -1,28 +1,51 @@
+import * as DataModels from "../../Contracts/DataModels";
 import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
-import { Offer, ReadDatabaseOfferParams } from "../../Contracts/DataModels";
+import { HttpHeaders } from "../Constants";
+import { RequestOptions } from "@azure/cosmos/dist-esm";
+import { client } from "../CosmosClient";
 import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import { readOfferWithSDK } from "./readOfferWithSDK";
+import { readOffers } from "./readOffers";
 import { userContext } from "../../UserContext";
 
-export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promise<Offer> => {
+export const readDatabaseOffer = async (
+  params: DataModels.ReadDatabaseOfferParams
+): Promise<DataModels.OfferWithHeaders> => {
   const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`);
+  let offerId = params.offerId;
+  if (!offerId) {
+    offerId = await (window.authType === AuthType.AAD &&
+    !userContext.useSDKOperations &&
+    userContext.defaultExperience !== DefaultAccountExperienceType.Table
+      ? getDatabaseOfferIdWithARM(params.databaseId)
+      : getDatabaseOfferIdWithSDK(params.databaseResourceId));
+    if (!offerId) {
+      clearMessage();
+      return undefined;
+    }
+  }
+
+  const options: RequestOptions = {
+    initialHeaders: {
+      [HttpHeaders.populateCollectionThroughputInfo]: true
+    }
+  };
 
   try {
-    if (
-      window.authType === AuthType.AAD &&
-      !userContext.useSDKOperations &&
-      userContext.defaultExperience !== DefaultAccountExperienceType.Table
-    ) {
-      return await readDatabaseOfferWithARM(params.databaseId);
-    }
-
-    return await readOfferWithSDK(params.offerId, params.databaseResourceId);
+    const response = await client()
+      .offer(offerId)
+      .read(options);
+    return (
+      response && {
+        ...response.resource,
+        headers: response.headers
+      }
+    );
   } catch (error) {
     handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
     throw error;
@@ -31,13 +54,13 @@ export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promis
   }
 };
 
-const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
+const getDatabaseOfferIdWithARM = async (databaseId: string): Promise<string> => {
+  let rpResponse;
   const subscriptionId = userContext.subscriptionId;
   const resourceGroup = userContext.resourceGroup;
   const accountName = userContext.databaseAccount.name;
   const defaultExperience = userContext.defaultExperience;
 
-  let rpResponse;
   try {
     switch (defaultExperience) {
       case DefaultAccountExperienceType.DocumentDB:
@@ -55,39 +78,18 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
       default:
         throw new Error(`Unsupported default experience type: ${defaultExperience}`);
     }
+    return rpResponse?.name;
   } catch (error) {
     if (error.code !== "NotFound") {
       throw error;
     }
     return undefined;
   }
-
-  const resource = rpResponse?.properties?.resource;
-  if (resource) {
-    const offerId: string = rpResponse.name;
-    const minimumThroughput: number =
-      typeof resource.minimumThroughput === "string"
-        ? parseInt(resource.minimumThroughput)
-        : resource.minimumThroughput;
-    const autoscaleSettings = resource.autoscaleSettings;
-    if (autoscaleSettings) {
-      return {
-        id: offerId,
-        autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
-        manualThroughput: undefined,
-        minimumThroughput,
-      };
-    }
-    return {
-      id: offerId,
-      autoscaleMaxThroughput: undefined,
-      manualThroughput: resource.throughput,
-      minimumThroughput,
-    };
-  }
-  return undefined;
+};
+
+const getDatabaseOfferIdWithSDK = async (databaseResourceId: string): Promise<string> => {
+  const offers = await readOffers();
+  const offer = offers.find(offer => offer.resource === databaseResourceId);
+  return offer?.id;
 };
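
For orientation, a hypothetical caller of the right-hand readDatabaseOffer: the OfferWithHeaders it returns is the raw SDK offer resource plus the response headers, so throughput appears to live under offer.content, the same fields updateOfferWithSDK touches later in this diff. The ids and logging below are invented:

// Hypothetical usage sketch; database id and resource id are made up.
const offer = await readDatabaseOffer({
  databaseId: "db1",
  databaseResourceId: "dbs/db1/"
});
if (offer?.content?.offerAutopilotSettings) {
  console.log(`Autoscale max throughput: ${offer.content.offerAutopilotSettings.maxThroughput} RU/s`);
} else {
  console.log(`Manual throughput: ${offer?.content?.offerThroughput} RU/s`);
}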

View File

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test",
+        name: "test"
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB,
+      defaultExperience: DefaultAccountExperienceType.DocumentDB
     });
   });
@@ -30,10 +30,10 @@ describe("readDatabases", () => {
       databases: {
         readAll: () => {
           return {
-            fetchAll: (): unknown => [],
+            fetchAll: (): unknown => []
           };
-        },
-      },
+        }
+      }
     });
     await readDatabases();
     expect(client).toHaveBeenCalled();

View File

@@ -21,7 +21,9 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
     ) {
       databases = await readDatabasesWithARM();
     } else {
-      const sdkResponse = await client().databases.readAll().fetchAll();
+      const sdkResponse = await client()
+        .databases.readAll()
+        .fetchAll();
       databases = sdkResponse.resources as DataModels.Database[];
     }
   } catch (error) {
@@ -56,5 +58,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
       throw new Error(`Unsupported default experience type: ${defaultExperience}`);
   }
-  return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
+  return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
 }

View File

@@ -1,27 +0,0 @@
-import { HttpHeaders } from "../Constants";
-import { Offer } from "../../Contracts/DataModels";
-import { RequestOptions } from "@azure/cosmos/dist-esm";
-import { client } from "../CosmosClient";
-import { parseSDKOfferResponse } from "../OfferUtility";
-import { readOffers } from "./readOffers";
-
-export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
-  if (!offerId) {
-    const offers = await readOffers();
-    const offer = offers.find((offer) => offer.resource === resourceId);
-    if (!offer) {
-      return undefined;
-    }
-    offerId = offer.id;
-  }
-
-  const options: RequestOptions = {
-    initialHeaders: {
-      [HttpHeaders.populateCollectionThroughputInfo]: true,
-    },
-  };
-
-  const response = await client().offer(offerId).read(options);
-  return parseSDKOfferResponse(response);
-};
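
This helper is deleted on the right-hand side; its two responsibilities survive separately there: the header-bearing read moves inline into readCollectionOffer and readDatabaseOffer, and the lookup by resource id becomes getCollectionOfferIdWithSDK / getDatabaseOfferIdWithSDK. The lookup half, restated as a stand-alone sketch:

// Sketch: the resource-id lookup kept on the right-hand side of this compare.
const findOfferIdByResourceId = async (resourceId: string): Promise<string> => {
  const offers = await readOffers();
  return offers.find(offer => offer.resource === resourceId)?.id;
};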

View File

@@ -1,13 +1,15 @@
-import { SDKOfferDefinition } from "../../Contracts/DataModels";
+import { Offer } from "../../Contracts/DataModels";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { client } from "../CosmosClient";
 import { handleError, getErrorMessage } from "../ErrorHandlingUtils";
 
-export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
+export const readOffers = async (): Promise<Offer[]> => {
   const clearMessage = logConsoleProgress(`Querying offers`);
 
   try {
-    const response = await client().offers.readAll().fetchAll();
+    const response = await client()
+      .offers.readAll()
+      .fetchAll();
     return response?.resources;
   } catch (error) {
     // This should be removed when we can correctly identify if an account is serverless when connected using connection string too.

View File

@@ -25,7 +25,7 @@ export async function readStoredProcedures(
         databaseId,
         collectionId
       );
-      return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
+      return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
     }
 
     const response = await client()

View File

@@ -25,10 +25,14 @@ export async function readTriggers(
         databaseId,
         collectionId
       );
-      return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
+      return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
     }
 
-    const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
+    const response = await client()
+      .database(databaseId)
+      .container(collectionId)
+      .scripts.triggers.readAll()
+      .fetchAll();
     return response?.resources;
   } catch (error) {
     handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);

View File

@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
         databaseId,
         collectionId
       );
-      return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
+      return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
     }
 
     const response = await client()

View File

@@ -8,22 +8,22 @@ import {
   MongoDBCollectionCreateUpdateParameters,
   MongoDBCollectionResource,
   SqlContainerCreateUpdateParameters,
-  SqlContainerResource,
+  SqlContainerResource
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { RequestOptions } from "@azure/cosmos/dist-esm";
 import { client } from "../CosmosClient";
 import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import {
   createUpdateCassandraTable,
-  getCassandraTable,
+  getCassandraTable
 } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import {
   createUpdateMongoDBCollection,
-  getMongoDBCollection,
+  getMongoDBCollection
 } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import {
   createUpdateGremlinGraph,
-  getGremlinGraph,
+  getGremlinGraph
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
 import { handleError } from "../ErrorHandlingUtils";
@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
   const updateParams: MongoDBCollectionCreateUpdateParameters = {
     properties: {
       resource: newCollection,
-      options: updateOptions,
-    },
+      options: updateOptions
+    }
   };
 
   const updateResponse = await createUpdateMongoDBCollection(

View File

@@ -1,14 +1,13 @@
 import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import { HttpHeaders } from "../Constants";
-import { Offer, SDKOfferDefinition, UpdateOfferParams } from "../../Contracts/DataModels";
+import { Offer, UpdateOfferParams } from "../../Contracts/DataModels";
 import { OfferDefinition } from "@azure/cosmos";
 import { RequestOptions } from "@azure/cosmos/dist-esm";
 import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import { parseSDKOfferResponse } from "../OfferUtility";
 import { readCollectionOffer } from "./readCollectionOffer";
 import { readDatabaseOffer } from "./readDatabaseOffer";
 import {
@@ -17,7 +16,7 @@ import {
   migrateSqlDatabaseToManualThroughput,
   migrateSqlContainerToAutoscale,
   migrateSqlContainerToManualThroughput,
-  updateSqlContainerThroughput,
+  updateSqlContainerThroughput
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import {
   updateCassandraKeyspaceThroughput,
@@ -25,7 +24,7 @@ import {
   migrateCassandraKeyspaceToManualThroughput,
   migrateCassandraTableToAutoscale,
   migrateCassandraTableToManualThroughput,
-  updateCassandraTableThroughput,
+  updateCassandraTableThroughput
 } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import {
   updateMongoDBDatabaseThroughput,
@@ -33,7 +32,7 @@ import {
   migrateMongoDBDatabaseToManualThroughput,
   migrateMongoDBCollectionToAutoscale,
   migrateMongoDBCollectionToManualThroughput,
-  updateMongoDBCollectionThroughput,
+  updateMongoDBCollectionThroughput
 } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import {
   updateGremlinDatabaseThroughput,
@@ -41,13 +40,13 @@ import {
   migrateGremlinDatabaseToManualThroughput,
   migrateGremlinGraphToAutoscale,
   migrateGremlinGraphToManualThroughput,
-  updateGremlinGraphThroughput,
+  updateGremlinGraphThroughput
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { userContext } from "../../UserContext";
 import {
   migrateTableToAutoscale,
   migrateTableToManualThroughput,
-  updateTableThroughput,
+  updateTableThroughput
 } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
 
 export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
@@ -110,7 +109,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
   return await readCollectionOffer({
     collectionId: params.collectionId,
     databaseId: params.databaseId,
-    offerId: params.currentOffer.id,
+    offerId: params.currentOffer.id
   });
 };
@@ -140,7 +139,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
   return await readDatabaseOffer({
     databaseId: params.databaseId,
-    offerId: params.currentOffer.id,
+    offerId: params.currentOffer.id
   });
 };
@@ -358,13 +357,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
 const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
   const body: ThroughputSettingsUpdateParameters = {
     properties: {
-      resource: {},
-    },
+      resource: {}
+    }
   };
 
   if (params.autopilotThroughput) {
     body.properties.resource.autoscaleSettings = {
-      maxThroughput: params.autopilotThroughput,
+      maxThroughput: params.autopilotThroughput
     };
   } else {
     body.properties.resource.throughput = params.manualThroughput;
@@ -374,26 +373,26 @@ const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpd
 };
 
 const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => {
-  const sdkOfferDefinition = params.currentOffer.offerDefinition;
-  const newOffer: SDKOfferDefinition = {
+  const currentOffer = params.currentOffer;
+  const newOffer: Offer = {
     content: {
       offerThroughput: undefined,
-      offerIsRUPerMinuteThroughputEnabled: false,
+      offerIsRUPerMinuteThroughputEnabled: false
     },
     _etag: undefined,
     _ts: undefined,
-    _rid: sdkOfferDefinition._rid,
-    _self: sdkOfferDefinition._self,
-    id: sdkOfferDefinition.id,
-    offerResourceId: sdkOfferDefinition.offerResourceId,
-    offerVersion: sdkOfferDefinition.offerVersion,
-    offerType: sdkOfferDefinition.offerType,
-    resource: sdkOfferDefinition.resource,
+    _rid: currentOffer._rid,
+    _self: currentOffer._self,
+    id: currentOffer.id,
+    offerResourceId: currentOffer.offerResourceId,
+    offerVersion: currentOffer.offerVersion,
+    offerType: currentOffer.offerType,
+    resource: currentOffer.resource
   };
 
   if (params.autopilotThroughput) {
     newOffer.content.offerAutopilotSettings = {
-      maxThroughput: params.autopilotThroughput,
+      maxThroughput: params.autopilotThroughput
     };
   } else {
     newOffer.content.offerThroughput = params.manualThroughput;
@@ -402,12 +401,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
   const options: RequestOptions = {};
   if (params.migrateToAutoPilot) {
     options.initialHeaders = {
-      [HttpHeaders.migrateOfferToAutopilot]: "true",
+      [HttpHeaders.migrateOfferToAutopilot]: "true"
     };
     delete newOffer.content.offerAutopilotSettings;
   } else if (params.migrateToManual) {
     options.initialHeaders = {
-      [HttpHeaders.migrateOfferToManualThroughput]: "true",
+      [HttpHeaders.migrateOfferToManualThroughput]: "true"
     };
     newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
   }
@@ -416,6 +415,5 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
     .offer(params.currentOffer.id)
     // TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
     .replace((newOffer as unknown) as OfferDefinition, options);
-
-  return parseSDKOfferResponse(sdkResponse);
+  return sdkResponse?.resource;
 };
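
For reference, createUpdateOfferBody (changed above only in trailing commas) emits one of two ARM bodies depending on which throughput field the caller sets. A sketch of both shapes with made-up RU/s values; the exact optionality of the generated ThroughputSettingsUpdateParameters fields is assumed:

// Sketch: the two request bodies createUpdateOfferBody can produce.
// The 4000 / 400 RU/s figures are illustrative only.
const autoscaleBody: ThroughputSettingsUpdateParameters = {
  properties: { resource: { autoscaleSettings: { maxThroughput: 4000 } } }
};
const manualBody: ThroughputSettingsUpdateParameters = {
  properties: { resource: { throughput: 400 } }
};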

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
 import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
 import {
   SqlStoredProcedureCreateUpdateParameters,
-  SqlStoredProcedureResource,
+  SqlStoredProcedureResource
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import {
   createUpdateSqlStoredProcedure,
-  getSqlStoredProcedure,
+  getSqlStoredProcedure
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
   const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
     properties: {
       resource: storedProcedure as SqlStoredProcedureResource,
-      options: {},
-    },
+      options: {}
+    }
   };
   const rpResponse = await createUpdateSqlStoredProcedure(
     userContext.subscriptionId,

View File

@@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import {
   SqlTriggerCreateUpdateParameters,
-  SqlTriggerResource,
+  SqlTriggerResource
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { TriggerDefinition } from "@azure/cosmos";
 import { client } from "../CosmosClient";
@@ -36,8 +36,8 @@ export async function updateTrigger(
   const createTriggerParams: SqlTriggerCreateUpdateParameters = {
     properties: {
       resource: trigger as SqlTriggerResource,
-      options: {},
-    },
+      options: {}
+    }
   };
   const rpResponse = await createUpdateSqlTrigger(
     userContext.subscriptionId,

Some files were not shown because too many files have changed in this diff.