Compare commits

..

2 Commits

Author | SHA1 | Message | Date
Steve Faulkner | 123ec2e45c | Fix commas | 2020-10-29 19:20:30 -05:00
Steve Faulkner | 2f4abfa796 | Remove rupm | 2020-10-29 19:15:08 -05:00
546 changed files with 53894 additions and 48542 deletions

View File

@@ -1,14 +1,7 @@
# These options are only needed when running end to end tests locally
PORTAL_RUNNER_USERNAME=
PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_CONNECTION_STRING=
NOTEBOOKS_TEST_RUNNER_TENANT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING=
DATA_EXPLORER_ENDPOINT=https://localhost:1234/hostedExplorer.html

View File

@@ -15,6 +15,8 @@ src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/ErrorParserUtility.test.ts
src/Common/ErrorParserUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
@@ -202,6 +204,8 @@ src/Explorer/Tabs/QueryTab.test.ts
src/Explorer/Tabs/QueryTab.ts
src/Explorer/Tabs/QueryTablesTab.ts
src/Explorer/Tabs/ScriptTabBase.ts
src/Explorer/Tabs/SettingsTab.test.ts
src/Explorer/Tabs/SettingsTab.ts
src/Explorer/Tabs/SparkMasterTab.ts
src/Explorer/Tabs/StoredProcedureTab.ts
src/Explorer/Tabs/TabComponents.ts
@@ -288,6 +292,8 @@ src/Utils/DatabaseAccountUtils.ts
src/Utils/JunoUtils.ts
src/Utils/MessageValidation.ts
src/Utils/NotebookConfigurationUtils.ts
src/Utils/OfferUtils.test.ts
src/Utils/OfferUtils.ts
src/Utils/PricingUtils.test.ts
src/Utils/QueryUtils.test.ts
src/Utils/QueryUtils.ts
@@ -392,5 +398,19 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx

View File

@@ -1,39 +1,39 @@
module.exports = {
env: {
browser: true,
es6: true,
es6: true
},
plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
globals: {
Atomics: "readonly",
SharedArrayBuffer: "readonly",
SharedArrayBuffer: "readonly"
},
parser: "@typescript-eslint/parser",
parserOptions: {
ecmaFeatures: {
jsx: true,
jsx: true
},
ecmaVersion: 2018,
sourceType: "module",
sourceType: "module"
},
overrides: [
{
files: ["**/*.tsx"],
env: {
jest: true,
jest: true
},
extends: ["plugin:react/recommended"],
plugins: ["react"],
plugins: ["react"]
},
{
files: ["**/*.{test,spec}.{ts,tsx}"],
env: {
jest: true,
jest: true
},
extends: ["plugin:jest/recommended"],
plugins: ["jest"],
},
plugins: ["jest"]
}
],
rules: {
curly: "error",
@@ -47,8 +47,8 @@ module.exports = {
"error",
{
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'",
},
],
},
message: "Do not use JSON.stringify(error). It will print '{}'"
}
]
}
};

View File

@@ -79,31 +79,32 @@ jobs:
name: dist
path: dist/
endtoendemulator:
name: "End To End Emulator Tests"
name: "End To End Tests | Emulator | SQL"
needs: [lint, format, compile, unittest]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- uses: southpolesteve/cosmos-emulator-github-action@v1
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- uses: southpolesteve/cosmos-emulator-github-action@v1
- name: Restore Cypress Binary Cache
uses: actions/cache@v2
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-binary-cache
- name: End to End Tests
run: |
npm ci
npm start &
npm run wait-for-server
npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
npm ci --prefix ./cypress
npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
shell: bash
env:
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
PLATFORM: "Emulator"
EMULATOR_ENDPOINT: https://0.0.0.0:8081/
NODE_TLS_REJECT_UNAUTHORIZED: 0
- uses: actions/upload-artifact@v2
with:
name: screenshots
path: failed-*
CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
accessibility:
name: "Accessibility | Hosted"
needs: [lint, format, compile, unittest]
@@ -122,13 +123,13 @@ jobs:
sudo sysctl -p
npm ci
npm start &
npx wait-on -i 5000 https-get://0.0.0.0:1234/
npx wait-on -i 5000 https-get://0.0.0.0:1234/
node utils/accesibilityCheck.js
shell: bash
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0
endtoendhosted:
name: "End to End Hosted Tests"
endtoendpuppeteer:
name: "End to end puppeteer tests"
needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest
steps:
@@ -137,7 +138,7 @@ jobs:
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: End to End Hosted Tests
- name: End to End Puppeteer Tests
run: |
npm ci
npm start &
@@ -146,26 +147,13 @@ jobs:
shell: bash
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0
PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
- uses: actions/upload-artifact@v2
with:
name: screenshots
path: failed-*
nuget:
name: Publish Nuget
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -189,7 +177,7 @@ jobs:
nugetmpac:
name: Publish Nuget MPAC
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}

25 .github/workflows/runners.yml vendored Normal file
View File

@@ -0,0 +1,25 @@
name: Runners
on:
schedule:
- cron: "0 * 1 * *"
jobs:
sqlcreatecollection:
runs-on: ubuntu-latest
name: "SQL | Create Collection"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- run: npm ci
- run: npm run test:e2e
env:
PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
PORTAL_RUNNER_RESOURCE_GROUP: runners
PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failure.png
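Note the schedule above: in crontab terms, `"0 * 1 * *"` fires at minute 0 of every hour, but only on the first day of each month. If an hourly run year-round was intended, the usual spelling would be:

```yaml
on:
  schedule:
    - cron: "0 * * * *" # minute 0 of every hour, every day
```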

3 .gitignore vendored
View File

@@ -9,6 +9,9 @@ pkg/DataExplorer/*
test/out/*
workers/**/*.js
*.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/*
Contracts/*
.DS_Store

View File

@@ -33,7 +33,7 @@ To run pure hosted mode, in `webpack.config.js` change index HtmlWebpackPlugin t
### Emulator Development
In a Windows environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-Windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.
`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
@@ -60,7 +60,7 @@ The Cosmos Portal that consumes this repo is not currently open source. If you h
You can however load a local running instance of data explorer in the production portal.
1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Allowlist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
2. Whitelist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
@@ -76,17 +76,24 @@ Unit tests are located adjacent to the code under test and run with [Jest](https
#### End to End CI Tests
Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:
[Cypress](https://www.cypress.io/) is used for end to end tests, which are contained in `cypress/`. Currently, it operates as a sub-project with its own TypeScript config and dependencies. It also only operates against the emulator. To run Cypress tests:
1. Copy .env.example to .env
2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
3. Make sure all packages are installed `npm install`
4. Run the server `npm run start` and wait for it to start
5. Run `npm run test:e2e`
1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run Cypress headless (`npm run test`) or in interactive mode (`npm run test:debug`); a condensed sketch follows
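Taken together (a sketch, assuming the emulator is already running on its default port):

```bash
# from the repo root: serve cosmos explorer against the emulator
PLATFORM=Emulator npm run watch &

# run the Cypress sub-project
cd cypress
npm install
npm run test        # headless
npm run test:debug  # interactive
```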
#### End to End Production Runners
Jest and Puppeteer are used for end to end production runners and are contained in `test/`. To run these tests locally:
1. Copy .env.example to .env and fill in all variables
2. Run `npm run test:e2e`
### Releasing
We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
# Contributing

View File

@@ -1,3 +1,3 @@
module.exports = {
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
};

4 cypress/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos

51 cypress/cleanup.js Normal file
View File

@@ -0,0 +1,51 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");
// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");
async function cleanup() {
const connectionString = process.env.CYPRESS_CONNECTION_STRING;
if (!connectionString) {
throw new Error("Connection string not provided");
}
let client;
switch (true) {
case connectionString.includes("mongodb://"): {
const [, key, accountName] = connectionString.match(mongoRegex);
client = new CosmosClient({
key,
endpoint: `https://${accountName}.documents.azure.com:443/`
});
break;
}
// TODO: Add support for other API connection strings
default:
client = new CosmosClient(connectionString);
break;
}
const response = await client.databases.readAll().fetchAll();
return Promise.all(
response.resources.map(async db => {
const dbTimestamp = new Date(db._ts * 1000);
const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
if (dbTimestamp < twentyMinutesAgo) {
await client.database(db.id).delete();
console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
} else {
console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
}
})
);
}
cleanup()
.then(() => {
process.exit(0);
})
.catch(error => {
console.error(error);
process.exit(1);
});
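A usage sketch for the script above (the invocation itself is not part of this diff); it reads the connection string from the `CYPRESS_CONNECTION_STRING` environment variable:

```bash
# hypothetical placeholder values; supply a real connection string
CYPRESS_CONNECTION_STRING="AccountEndpoint=https://<account>.documents.azure.com:443/;AccountKey=<key>;" \
  node cypress/cleanup.js
```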

15 cypress/cypress.json Normal file
View File

@@ -0,0 +1,15 @@
{
"integrationFolder": "./integration",
"pluginsFile": false,
"fixturesFolder": false,
"supportFile": "./support/index.js",
"defaultCommandTimeout": 90000,
"chromeWebSecurity": false,
"reporter": "mochawesome",
"reporterOptions": {
"reportDir": "cypress/report",
"json": true,
"overwrite": false,
"html": false
}
}
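With `"json": true` and `"overwrite": false`, mochawesome writes one JSON report per spec into `cypress/report`. The sub-project's devDependencies include `mochawesome-merge` and `mochawesome-report-generator`, which suggests a post-run merge step along these lines (a sketch; no such script appears in this diff):

```bash
# merge per-spec JSON reports, then render one HTML report with marge
npx mochawesome-merge cypress/report/*.json > mochawesome.json
npx marge mochawesome.json
```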

View File

@@ -0,0 +1,66 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Cassandra API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
});
it("Create a new table in Cassandra API", () => {
const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
const tableId = `TableId112`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[id="keyspace-id"]')
.should("be.visible")
.type(keyspaceId);
cy.wrap($body)
.find('input[class="textfontclr"]')
.type(tableId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", tableId);
});
});
});

View File

@@ -0,0 +1,81 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Graph API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.graph);
});
it("Create a new graph in Graph API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Graph"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.should("be.visible")
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(graphId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(partitionKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", graphId);
});
});
});

View File

@@ -0,0 +1,80 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,96 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in Mongo API - Autopilot", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('div[class="throughputModeContainer"]')
.should("be.visible")
.and(input => {
expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
expect(input.get(1).textContent, "second item").contains("Manual");
});
cy.wrap($body)
.find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
.check();
cy.wrap($body)
.find('select[name="autoPilotTiers"]')
// .eq(1).should('contain', '4,000 RU/s');
// // .select('4,000 RU/s').should('have.value', '1');
.find('option[value="2"]')
.then($element => $element.get(1).setAttribute("selected", "selected"));
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,67 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in existing database in Mongo API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('span[class="nodeLabel"]')
.should("be.visible")
.then($span => {
const dbId1 = $span.text();
cy.log("DBBB", dbId1);
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.type(dbId1);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.click()
.should("contain", collectionId);
});
});
});
});

View File

@@ -0,0 +1,203 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context.skip("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API - Provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab2"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab1"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,79 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("SQL API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new container in SQL API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
connectionString.loginUsingConnectionString();
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Container"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId);
});
});
});

View File

@@ -0,0 +1,60 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Table API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.table);
});
it("Create a new table in Table API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,55 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.
let crypt = require("crypto");
context("Emulator - createDatabase", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;
it("Create a new collection", () => {
cy.contains("New Container").click();
// cy.contains(collectionIdTitle);
cy.get(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.get('input[data-test="addCollection-createNewDatabase"]').check();
cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);
cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);
cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");
cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
cy.get('input[data-test="addCollection-createCollection"]').click();
cy.get('div[data-test="resourceTreeId"]').should("exist");
cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);
cy.get('div[data-test="databaseList"]').should("contain", collectionId);
});
});

View File

@@ -0,0 +1,65 @@
// 1. Click on "New Database" on the command bar
// 2. A pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. When the checkbox is marked, a new input with a throughput control lets you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have a list item called "Scale" which, once clicked, shows the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permitted range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab and reopen it and verify that the input contains the last saved value.
const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;
context("Emulator - Create database -> container -> item", () => {
beforeEach(async () => {
const { resources } = await client.databases.readAll().fetchAll();
for (const database of resources) {
await client.database(database.id).delete();
}
});
it("creates a new database", () => {
cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
cy.contains("New Container").click();
cy.get("[data-test=addCollection-newDatabaseId]").click();
cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
cy.get("[data-test=addCollection-collectionId]").click();
cy.get("[data-test=addCollection-collectionId]").type(collectionId);
cy.get("[data-test=addCollection-partitionKeyValue]").click();
cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
cy.get('input[name="createCollection"]').click();
cy.get(".dataResourceTree").should("contain", databaseId);
cy.get(".dataResourceTree")
.contains(databaseId)
.click();
cy.get(".dataResourceTree").should("contain", collectionId);
cy.get(".dataResourceTree")
.contains(collectionId)
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("New Item")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("Save")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
});
});

View File

@@ -0,0 +1,46 @@
// 1. Click last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select "Delete Container" option in the dropdown
// 5. On Selection, Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted collection should not appear under the database
let crypt = require("crypto");
context("Emulator - deleteCollection", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
it("Delete a collection", () => {
cy.get(".databaseId")
.last()
.click();
cy.get(".collectionList")
.last()
.then($id => {
const collectionId = $id.text();
cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");
cy.get('span[data-test="collectionEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="collectionContextMenu"]')
.contains("Delete Container")
.click({ force: true });
cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());
cy.get('input[data-test="deleteCollection"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);
cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
});
});
});

View File

@@ -0,0 +1,83 @@
// 1. Click last database in the resource tree
// 2. Select the context menu within the database
// 4. Select "Delete Database" option in the dropdown
// 5. On Selection, Delete Database pane opens on the right side
// 6. Enter the same database id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted database should not appear in the resource tree
let crypt = require("crypto");
context("Emulator - deleteDatabase", () => {
beforeEach(() => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
let db_rid = "";
const date = new Date().toUTCString();
let authToken = "";
cy.visit("http://localhost:1234/explorer.html");
// Creating auth token for collection creation
cy.request({
method: "GET",
url: "https://localhost:8081/_explorer/authorization/post/dbs/",
headers: {
"x-ms-date": date,
authorization: "-"
}
})
.then(response => {
authToken = response.body.Token; // Getting auth token for collection creation
return new Cypress.Promise((resolve, reject) => {
return resolve();
});
})
.then(() => {
cy.request({
method: "POST",
url: "https://localhost:8081/dbs",
headers: {
"x-ms-date": date,
authorization: authToken,
"x-ms-version": "2018-12-31"
},
body: {
id: dbId
}
}).then(response => {
cy.log("Response", response);
db_rid = response.body._rid;
return new Cypress.Promise((resolve, reject) => {
cy.log("Rid", db_rid);
return resolve();
});
});
});
});
it("Delete a database", () => {
cy.get('span[data-test="refreshTree"]').click();
cy.get(".databaseId")
.last()
.then($id => {
const dbId = $id.text();
cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");
cy.get('span[data-test="databaseEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="databaseContextMenu"]')
.contains("Delete Database")
.click({ force: true });
cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());
cy.get('input[data-test="deleteDatabase"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
});
});
});

View File

@@ -0,0 +1,35 @@
# Notebook end-to-end tests
This describes how to run the tests locally
## Stand up a local notebook container instance:
Instructions on how to build and run the container [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)
## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).
Make sure you can run Data Explorer locally from the web browser.
## Run cypress tests
1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```
To run in Debug mode:
```
npm run test:debug
```
This opens the Cypress UI
## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).
## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)

View File

@@ -0,0 +1,93 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create a new notebook and run some code", () => {
// Create new notebook
cy.contains("New Notebook").click();
// Check tab name
cy.get("li.tabList .tabNavText").should($span => {
const text = $span.text();
expect(text).to.match(/^Untitled.*\.ipynb$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.click();
// Type in some code
cy.get("@cellContainer").type("2+4");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "6");
});
// Restart kernel
cy.get('[data-test="Run"] button')
.eq(-1)
.click();
cy.get("li")
.contains("Restart Kernel")
.click();
// Wait for python3 | restarting status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*restarting$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.find(".input")
.as("codeInput")
.click();
// Type in some code
cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "9");
});
});
});

View File

@@ -0,0 +1,172 @@
context("Resource tree notebook file manipulation", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
const clickContextMenuAndSelectOption = (nodeLabel, option) => {
cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains(option)
.click();
};
const createFolder = folder => {
clickContextMenuAndSelectOption("My Notebooks/", "New Directory");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]').type(folder);
cy.get("form").submit();
});
};
const deleteItem = nodeName => {
clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create and remove a directory", () => {
const folder = "e2etest_folder1";
createFolder(folder);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
deleteItem(`${folder}/`);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
});
it("Create and rename a directory", () => {
const folder = "e2etest_folder2";
const renamedFolder = "e2etest_folder2_renamed";
createFolder(folder);
// Rename
clickContextMenuAndSelectOption(`${folder}/`, "Rename");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedFolder);
cy.get("form").submit();
});
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
deleteItem(`${renamedFolder}/`);
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
});
it("Create a notebook inside a directory", () => {
const folder = "e2etest_folder3";
const newNotebookName = "Untitled.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Verify tab is open
cy.get(".tabList")
.contains(newNotebookName)
.should("exist");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
// When running from command line, closing the tab is too fast
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
deleteItem(`${folder}/`);
});
it("Create and rename a notebook inside a directory", () => {
const folder = "e2etest_folder4";
const newNotebookName = "Untitled.ipynb";
const renamedNotebookName = "mynotebook.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Rename notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Rename")
.click();
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedNotebookName);
cy.get("form").submit();
});
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
// Give it time to settle
cy.wait(1000);
deleteItem(`${folder}/`);
});
});

3066 cypress/package-lock.json generated Normal file

File diff suppressed because it is too large

25 cypress/package.json Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "cosmos-explorer-cypress",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "cypress run",
"wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
"test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
"test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
"test:debug": "cypress open"
},
"devDependencies": {
"cypress": "^4.8.0",
"mocha": "^7.0.1",
"mochawesome": "^4.1.0",
"mochawesome-merge": "^4.0.1",
"mochawesome-report-generator": "^4.1.0",
"typescript": "3.4.3",
"wait-on": "^4.0.2"
},
"dependencies": {
"@microsoft/applicationinsights-web": "^2.5.2"
}
}
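For reference, the emulator CI job earlier in this diff drives these scripts from the repo root via `--prefix`:

```bash
npm ci --prefix ./cypress
npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
```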

23 cypress/support/index.js Normal file
View File

@@ -0,0 +1,23 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");
const appInsights = new appInsightsLib.ApplicationInsights({
config: {
instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
/* ...Other Configuration Options... */
}
});
appInsights.loadAppInsights();
Cypress.on("fail", (error, runnable) => {
// App Insights will record the fail tests for Create Collection
let message = JSON.stringify(runnable.title);
appInsights.trackTrace({
message: `${message}`,
properties: {
passed: false,
error: error
}
});
throw error; // throw error to have test still fail
});

11 cypress/tsconfig.json Normal file
View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"strict": true,
"noEmit": true,
"module": "commonjs",
"target": "es5",
"lib": ["es5", "dom", "es6"],
"types": ["cypress", "node"]
},
"include": ["**/*.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,41 @@
module.exports = {
loginUsingConnectionString: function() {
const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
const timeout = 15000;
cy.visit(prodUrl);
cy.get('iframe[id="explorerMenu"]').should("be.visible");
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find("#connectExplorer")
.should("exist")
.find("div[class='connectExplorer']")
.should("exist")
.find("p[class='welcomeText']")
.should("exist");
cy.wrap($body.find("div > p.switchConnectTypeText"))
.should("exist")
.last()
.click({ force: true });
const secret = Cypress.env("CONNECTION_STRING");
cy.wrap($body)
.find("input[class='inputToken']")
.should("exist")
.type(secret, {
force: true
});
cy.wrap($body.find("input[value='Connect']"), { timeout })
.first()
.click({ force: true });
cy.wait(15000);
});
}
};
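Cypress surfaces environment variables prefixed with `CYPRESS_` through `Cypress.env()`, so the `CONNECTION_STRING` and `TEST_ENDPOINT` values read above would be supplied like this (placeholder values, a sketch):

```bash
export CYPRESS_CONNECTION_STRING="AccountEndpoint=https://<account>.documents.azure.com:443/;AccountKey=<key>;"
export CYPRESS_TEST_ENDPOINT="https://localhost:1234/hostedExplorer.html"
npm run test:sql
```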

View File

@@ -0,0 +1,6 @@
const { CosmosClient } = require("@azure/cosmos");
module.exports = new CosmosClient({
endpoint: "https://0.0.0.0:8081",
key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});
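The endpoint and key above are the Cosmos DB emulator's published local defaults, not secrets. A minimal usage sketch, assuming the emulator is running and TLS validation is relaxed (for example `NODE_TLS_REJECT_UNAUTHORIZED=0`, as the CI jobs in this diff do):

```js
// sketch: list database ids using the shared emulator client
const client = require("./cosmosClient");

client.databases
  .readAll()
  .fetchAll()
  .then(({ resources }) => resources.forEach(db => console.log(db.id)))
  .catch(console.error);
```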

View File

@@ -6,6 +6,6 @@ module.exports = {
slowMo: 55,
defaultViewport: null,
ignoreHTTPSErrors: true,
args: ["--disable-web-security"],
},
args: ["--disable-web-security"]
}
};

View File

@@ -1,5 +1,5 @@
module.exports = {
preset: "jest-puppeteer",
testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
setupFiles: ["dotenv/config"],
setupFiles: ["dotenv/config"]
};

View File

@@ -42,8 +42,8 @@ module.exports = {
branches: 20,
functions: 24,
lines: 30,
statements: 29.0,
},
statements: 29.0
}
},
// Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.html?$": "html-loader-jest",
"^.+\\.[t|j]sx?$": "babel-jest",
"^.+\\.[t|j]sx?$": "babel-jest"
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/", "/externals/"],
transformIgnorePatterns: ["/node_modules/", "/externals/"]
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,

package-lock.json generated

File diff suppressed because it is too large

@@ -4,10 +4,8 @@
"description": "Cosmos Explorer",
"main": "index.js",
"dependencies": {
"@azure/arm-cosmosdb": "9.1.0",
"@azure/cosmos": "3.9.0",
"@azure/identity": "1.1.0",
"@azure/cosmos-language-service": "0.0.5",
"@azure/cosmos-language-service": "0.0.4",
"@jupyterlab/services": "6.0.0-rc.2",
"@jupyterlab/terminal": "3.0.0-rc.2",
"@microsoft/applicationinsights-web": "2.5.9",
@@ -23,7 +21,7 @@
"@nteract/jupyter-widgets": "2.0.0",
"@nteract/logos": "1.0.0",
"@nteract/markdown": "4.4.0",
"@nteract/monaco-editor": "3.2.2",
"@nteract/monaco-editor": "3.2.0",
"@nteract/octicons": "2.0.0",
"@nteract/outputs": "3.0.9",
"@nteract/presentational-components": "3.0.7",
@@ -68,7 +66,7 @@
"jquery-ui-dist": "1.12.1",
"knockout": "3.5.1",
"mkdirp": "1.0.4",
"monaco-editor": "0.18.1",
"monaco-editor": "0.15.6",
"object.entries": "1.1.0",
"office-ui-fabric-react": "7.134.1",
"p-retry": "4.2.0",
@@ -117,7 +115,7 @@
"@types/prop-types": "15.5.8",
"@types/puppeteer": "3.0.1",
"@types/q": "1.5.1",
"@types/react": "16.9.56",
"@types/react": "16.9.49",
"@types/react-dom": "16.0.7",
"@types/react-notification-system": "0.2.39",
"@types/react-redux": "7.1.7",
@@ -160,7 +158,7 @@
"mini-css-extract-plugin": "0.4.3",
"monaco-editor-webpack-plugin": "1.7.0",
"node-fetch": "2.6.1",
"prettier": "2.2.1",
"prettier": "1.19.1",
"puppeteer": "4.0.0",
"raw-loader": "0.5.1",
"rimraf": "3.0.0",
@@ -170,7 +168,7 @@
"ts-loader": "6.2.2",
"tslint": "5.11.0",
"tslint-microsoft-contrib": "6.0.0",
"typescript": "4.1.2",
"typescript": "4.0.2",
"url-loader": "1.1.1",
"wait-on": "4.0.2",
"webpack": "4.43.0",
@@ -196,8 +194,8 @@
"compile": "tsc",
"compile:contracts": "tsc -p ./tsconfig.contracts.json",
"compile:strict": "tsc -p ./tsconfig.strict.json",
"format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
"build:contracts": "npm run compile:contracts",
"strictEligibleFiles": "node ./strict-migration-tools/index.js",


@@ -2,5 +2,5 @@ export enum AuthType {
AAD = "aad",
EncryptedToken = "encryptedtoken",
MasterKey = "masterkey",
ResourceToken = "resourcetoken",
ResourceToken = "resourcetoken"
}


@@ -13,7 +13,7 @@ export class BindingHandlersRegisterer {
) {
const value = ko.unwrap(wrappedValueAccessor());
bindingContext?.$data.isTemplateReady(value);
},
}
} as ko.BindingHandler;
ReactBindingHandler.Registerer.register();


@@ -42,7 +42,7 @@ export class Registerer {
// Initial rendering at mount point
ReactDOM.render(adapter.renderComponent(), element);
},
}
} as ko.BindingHandler;
}
}


@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key);
if (values) {
values.forEach((value) => iteratorFct(value));
values.forEach(value => iteratorFct(value));
}
}


@@ -14,7 +14,7 @@ export class CodeOfConductEndpoints {
export class EndpointsRegex {
public static readonly cassandra = [
"AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com"
];
public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -124,9 +124,7 @@ export class Features {
public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
public static readonly ttl90Days = "ttl90days";
public static readonly enableRightPanelV2 = "enablerightpanelv2";
public static readonly enableSchema = "enableschema";
public static readonly enableSDKoperations = "enablesdkoperations";
public static readonly showMinRUSurvey = "showminrusurvey";
}
// flight names returned from the portal are always lowercase
@@ -150,7 +148,7 @@ export class Spark {
"Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
"Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
"Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
"Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM",
"Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
});
}
@@ -165,7 +163,7 @@ export class MongoDBAccounts {
export enum MongoBackendEndpointType {
local,
remote,
remote
}
// TODO: 435619 Add default endpoints per cloud and use regional only when available
@@ -179,7 +177,6 @@ export class CassandraBackend {
public static readonly schemaApi: string = "api/cassandra/schema";
public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
}
export class Queries {
public static CustomPageOption: string = "custom";
public static UnlimitedPageOption: string = "unlimited";
@@ -292,7 +289,7 @@ export class HttpStatusCodes {
HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable,
HttpStatusCodes.GatewayTimeout,
HttpStatusCodes.GatewayTimeout
];
}
@@ -348,7 +345,10 @@ export class HashRoutePrefixes {
public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
return transformedDatabasePrefix
.replace("{coll_id}", collectionId)
.replace("{doc_id}", docId)
.replace("/", ""); // strip the first slash since hasher adds it
}
}
@@ -394,7 +394,7 @@ export class OfferVersions {
export enum ConflictOperationType {
Replace = "replace",
Create = "create",
Delete = "delete",
Delete = "delete"
}
export const EmulatorMasterKey =


@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
resourceId: "",
resourceType: "dbs" as ResourceType,
headers: {},
getAuthorizationTokenUsingMasterKey: () => "",
getAuthorizationTokenUsingMasterKey: () => ""
};
beforeEach(() => {
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
headers: new Map(),
headers: new Map()
};
});
});
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
it("does not call the auth service if a master key is set", async () => {
updateUserContext({
masterKey: "foo",
masterKey: "foo"
});
await tokenProvider(options);
expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
headers: new Map(),
headers: new Map()
};
});
});
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in production", () => {
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in dev", () => {
updateConfigContext({
BACKEND_ENDPOINT: "https://localhost:1234",
BACKEND_ENDPOINT: "https://localhost:1234"
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo",
},
},
cassandraEndpoint: "foo"
}
}
});
expect(endpoint()).toEqual("bar");
});
it("uses _endpoint if set", () => {
updateUserContext({
endpoint: "baz",
endpoint: "baz"
});
expect(endpoint()).toEqual("baz");
});
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
updateConfigContext({
platform: Platform.Hosted,
BACKEND_ENDPOINT: "https://localhost:1234",
PROXY_PATH: "/proxy",
PROXY_PATH: "/proxy"
});
const headers = {};
const endpoint = "https://docs.azure.com";


@@ -1,7 +1,6 @@
import * as Cosmos from "@azure/cosmos";
import { RequestInfo, setAuthorizationTokenHeaderUsingMasterKey } from "@azure/cosmos";
import { configContext, Platform } from "../ConfigContext";
import { getErrorMessage } from "./ErrorHandlingUtils";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { EmulatorMasterKey, HttpHeaders } from "./Constants";
import { userContext } from "../UserContext";
@@ -58,19 +57,19 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
method: "POST",
headers: {
"content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken,
"x-ms-encrypted-auth-token": userContext.accessToken
},
body: JSON.stringify({
verb,
resourceType,
resourceId,
}),
resourceId
})
});
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json());
return result;
} catch (error) {
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${error.message}`);
return Promise.reject(error);
}
}
@@ -81,9 +80,9 @@ export function client(): Cosmos.CosmosClient {
key: userContext.masterKey,
tokenProvider,
connectionPolicy: {
enableEndpointDiscovery: false,
enableEndpointDiscovery: false
},
userAgentSuffix: "Azure Portal",
userAgentSuffix: "Azure Portal"
};
if (configContext.PROXY_PATH !== undefined) {


@@ -1,13 +1,26 @@
import { ConflictDefinition, FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import {
ConflictDefinition,
FeedOptions,
ItemDefinition,
OfferDefinition,
QueryIterator,
Resource
} from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import Q from "q";
import { configContext, Platform } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import { OfferUtils } from "../Utils/OfferUtils";
import * as Constants from "./Constants";
import { client } from "./CosmosClient";
import * as HeadersUtility from "./HeadersUtility";
import { sendCachedDataMessage } from "./MessageHandler";
export function getCommonQueryOptions(options: FeedOptions): any {
const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
@@ -33,7 +46,10 @@ export function queryDocuments(
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
options = getCommonQueryOptions(options);
const documentsIterator = client().database(databaseId).container(containerId).items.query(query, options);
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.items.query(query, options);
return Q(documentsIterator);
}
@@ -69,7 +85,7 @@ export function updateDocument(
.container(collection.id())
.item(documentId.id(), partitionKey)
.replace(newDocument)
.then((response) => response.resource)
.then(response => response.resource)
);
}
@@ -87,13 +103,13 @@ export function executeStoredProcedure(
.container(collection.id())
.scripts.storedProcedure(storedProcedure.id())
.execute(partitionKeyValue, params, { enableScriptLogging: true })
.then((response) =>
.then(response =>
deferred.resolve({
result: response.resource,
scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults],
scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
})
)
.catch((error) => deferred.reject(error));
.catch(error => deferred.reject(error));
return deferred.promise.timeout(
Constants.ClientDefaults.requestTimeoutMs,
@@ -107,7 +123,7 @@ export function createDocument(collection: ViewModels.CollectionBase, newDocumen
.database(collection.databaseId)
.container(collection.id())
.items.create(newDocument)
.then((response) => response.resource)
.then(response => response.resource)
);
}
@@ -120,7 +136,7 @@ export function readDocument(collection: ViewModels.CollectionBase, documentId:
.container(collection.id())
.item(documentId.id(), partitionKey)
.read()
.then((response) => response.resource)
.then(response => response.resource)
);
}
@@ -128,7 +144,11 @@ export function deleteDocument(collection: ViewModels.CollectionBase, documentId
const partitionKey = documentId.partitionKeyValue;
return Q(
client().database(collection.databaseId).container(collection.id()).item(documentId.id(), partitionKey).delete()
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.delete()
);
}
@@ -140,7 +160,11 @@ export function deleteConflict(
options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
return Q(
client().database(collection.databaseId).container(collection.id()).conflict(conflictId.id()).delete(options)
client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options)
);
}
@@ -150,6 +174,9 @@ export function queryConflicts(
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
const documentsIterator = client().database(databaseId).container(containerId).conflicts.query(query, options);
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return Q(documentsIterator);
}


@@ -58,8 +58,8 @@ export function executeStoredProcedure(
(error: any) => {
handleError(
error,
"ExecuteStoredProcedure",
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`,
"ExecuteStoredProcedure"
);
deferred.reject(error);
}
@@ -88,7 +88,7 @@ export function queryDocumentsPage(
deferred.resolve(result);
},
(error: any) => {
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
handleError(error, `Failed to query ${entityName} for container ${resourceName}`, "QueryDocumentsPage");
deferred.reject(error);
}
)
@@ -109,7 +109,7 @@ export function readDocument(collection: ViewModels.CollectionBase, documentId:
deferred.resolve(document);
},
(error: any) => {
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
handleError(error, `Failed to read ${entityName} ${documentId.id()}`, "ReadDocument");
deferred.reject(error);
}
)
@@ -135,7 +135,7 @@ export function updateDocument(
deferred.resolve(updatedDocument);
},
(error: any) => {
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
handleError(error, `Failed to update ${entityName} ${documentId.id()}`, "UpdateDocument");
deferred.reject(error);
}
)
@@ -157,7 +157,7 @@ export function createDocument(collection: ViewModels.CollectionBase, newDocumen
deferred.resolve(savedDocument);
},
(error: any) => {
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
handleError(error, `Error while creating new ${entityName} for container ${collection.id()}`, "CreateDocument");
deferred.reject(error);
}
)
@@ -179,7 +179,7 @@ export function deleteDocument(collection: ViewModels.CollectionBase, documentId
deferred.resolve(response);
},
(error: any) => {
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
handleError(error, `Error while deleting ${entityName} ${documentId.id()}`, "DeleteDocument");
deferred.reject(error);
}
)
@@ -205,7 +205,7 @@ export function deleteConflict(
deferred.resolve(response);
},
(error: any) => {
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
handleError(error, `Error while deleting conflict ${conflictId.id()}`, "DeleteConflict");
deferred.reject(error);
}
)


@@ -73,7 +73,7 @@ export default class EditableUtility {
return false;
});
observable.subscribe((edit) => {
observable.subscribe(edit => {
var edits = observable.edits && observable.edits();
if (!edits) {
return;
@@ -83,9 +83,9 @@ export default class EditableUtility {
});
observable.editableIsValid = ko.observable<boolean>(true);
observable.subscribe((value) => {
observable.subscribe(value => {
const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
const isValid = validations.every((validate) => validate(value));
const isValid = validations.every(validate => validate(value));
observable.editableIsValid(isValid);
});


@@ -1,56 +1,11 @@
import { ARMError } from "../Utils/arm/request";
import { HttpStatusCodes } from "./Constants";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/SubscriptionType";
import { CosmosError, sendNotificationForError } from "./dataAccess/sendNotificationForError";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler";
import { replaceKnownError } from "./ErrorParserUtility";
export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
const errorMessage = getErrorMessage(error);
const errorCode = error instanceof ARMError ? error.code : undefined;
// logs the error to the Data Explorer console
const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
logConsoleError(consoleErrorMessage);
// logs the error to both App Insights and Kusto
logError(errorMessage, area, errorCode);
// checks for errors caused by the firewall and sends them to the portal to handle
sendNotificationForError(errorMessage, errorCode);
};
export const getErrorMessage = (error: string | Error): string => {
const errorMessage = typeof error === "string" ? error : error.message;
return replaceKnownError(errorMessage);
};
export const getErrorStack = (error: string | Error): string => {
return typeof error === "string" ? undefined : error.stack;
};
const sendNotificationForError = (errorMessage: string, errorCode: number | string): void => {
if (errorCode === HttpStatusCodes.Forbidden) {
if (errorMessage?.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
return;
}
sendMessage({
type: MessageTypes.ForbiddenError,
reason: errorMessage,
});
}
};
const replaceKnownError = (errorMessage: string): string => {
if (
window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0
) {
return "Database throughput is not supported for internal subscriptions.";
} else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) {
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
}
return errorMessage;
export const handleError = (error: CosmosError, consoleErrorPrefix: string, area: string): void => {
const sanitizedErrorMsg = replaceKnownError(error.message);
logConsoleError(`${consoleErrorPrefix}:\n ${sanitizedErrorMsg}`);
logError(sanitizedErrorMsg, area, error.code);
sendNotificationForError(error);
};


@@ -0,0 +1,24 @@
import * as ErrorParserUtility from "./ErrorParserUtility";
describe("Error Parser Utility", () => {
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
it("should parse a backend error correctly", () => {
// A fake error matching what is thrown by the SDK on a bad collection create request
const innerMessage =
"The partition key component definition path '/asdwqr31 @#$#$WRadf' could not be accepted, failed near position '10'. Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
const message = `Message: {\"Errors\":[\"${innerMessage}\"]}\r\nActivityId: 97b2e684-7505-4921-85f6-2513b9b28220, Request URI: /apps/89fdcf25-2a0b-4d2a-aab6-e161e565b26f/services/54911149-7bb1-4e7d-a1fa-22c8b36a4bb9/partitions/cc2a7a04-5f5a-4709-bcf7-8509b264963f/replicas/132304018743619218p, RequestStats: , SDK: Microsoft.Azure.Documents.Common/2.10.0`;
const err = new Error(message) as any;
err.code = 400;
err.body = {
code: "BadRequest",
message
};
err.headers = {};
err.activityId = "97b2e684-7505-4921-85f6-2513b9b28220";
const parsedError = ErrorParserUtility.parse(err);
expect(parsedError.length).toBe(1);
expect(parsedError[0].message).toBe(innerMessage);
});
});
});


@@ -0,0 +1,69 @@
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
export function replaceKnownError(err: string): string {
if (
window.dataExplorer.subscriptionType() === ViewModels.SubscriptionType.Internal &&
err.indexOf("SharedOffer is Disabled for your account") >= 0
) {
return "Database throughput is not supported for internal subscriptions.";
} else if (err.indexOf("Partition key paths must contain only valid") >= 0) {
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
}
return err;
}
export function parse(err: any): DataModels.ErrorDataModel[] {
try {
return _parse(err);
} catch (e) {
return [<DataModels.ErrorDataModel>{ message: JSON.stringify(err) }];
}
}
function _parse(err: any): DataModels.ErrorDataModel[] {
var normalizedErrors: DataModels.ErrorDataModel[] = [];
if (err.message && !err.code) {
normalizedErrors.push(err);
} else {
const innerErrors: any[] = _getInnerErrors(err.message);
normalizedErrors = innerErrors.map(innerError =>
typeof innerError === "string" ? { message: innerError } : innerError
);
}
return normalizedErrors;
}
function _getInnerErrors(message: string): any[] {
/*
The backend error message has an inner-message which is a stringified object.
For SQL errors, the "errors" property is an array of SqlErrorDataModel.
Example:
"Message: {"errors":[{"severity":"Error","location":{"start":7,"end":8},"code":"SC1001","message":"Syntax error, incorrect syntax near '.'."}]}\r\nActivityId: d3300016d4084e310a, Request URI: /apps/12401f9e1df77/services/dc100232b1f44545/partitions/f86f3bc0001a2f78/replicas/13085003638s"
For non-SQL errors the "Errors" property is an array of strings.
Example:
"Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p"
*/
let innerMessage: any = null;
const singleLineMessage = message.replace(/[\r\n]|\r|\n/g, "");
try {
// Multi-Partition error flavor
const regExp = /^(.*)ActivityId: (.*)/g;
const regString = regExp.exec(singleLineMessage);
const innerMessageString = regString[1];
innerMessage = JSON.parse(innerMessageString);
} catch (e) {
// Single-partition error flavor
const regExp = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g;
const regString = regExp.exec(singleLineMessage);
const innerMessageString = regString[1];
innerMessage = JSON.parse(innerMessageString);
}
return innerMessage.errors ? innerMessage.errors : innerMessage.Errors;
}
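
To make the two regex branches concrete, here is roughly what parse returns for the single-partition flavor (a sketch mirroring the unit test above):

// Illustrative: a 409 whose message carries a stringified {"Errors":[...]} payload.
const err: any = new Error(
  'Message: {"Errors":["Resource with specified id or name already exists"]}\r\n' +
    "ActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/..."
);
err.code = 409;
const parsed = parse(err); // the second regex strips "Message: " and JSON-parses the rest
parsed[0].message; // "Resource with specified id or name already exists"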


@@ -11,8 +11,8 @@ describe("nextPage", () => {
queryMetrics: {},
requestCharge: 1,
headers: {},
activityId: "foo",
}),
activityId: "foo"
})
};
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();


@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
// Pick<QueryIterator<any>, "fetchNext">;
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
return documentsIterator.fetchNext().then((response) => {
return documentsIterator.fetchNext().then(response => {
const documents = response.resources;
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
lastItemIndex: Number(firstItemIndex) + Number(itemCount),
headers,
activityId: response.activityId,
requestCharge: response.requestCharge,
requestCharge: response.requestCharge
};
});
}
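
For reference, this wrapper plugs directly into the SDK's query iterator (a sketch; database and container names are placeholders):

// Illustrative: page through results 10 at a time using the shared client().
const iterator = client()
  .database("testDatabase")
  .container("testContainer")
  .items.query("SELECT * FROM c", { maxItemCount: 10 });
let firstItemIndex = 0;
const page = await nextPage(iterator, firstItemIndex);
firstItemIndex = page.lastItemIndex; // feed back in to fetch the next page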


@@ -21,15 +21,21 @@ export function logWarning(message: string, area: string, code?: number): void {
return _logEntry(entry);
}
export function logError(errorMessage: string, area: string, code?: number | string): void {
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, errorMessage, area, code);
export function logError(message: string | Error, area: string, code?: number): void {
let logMessage: string;
if (typeof message === "string") {
logMessage = message;
} else {
logMessage = JSON.stringify(message, Object.getOwnPropertyNames(message));
}
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, logMessage, area, code);
return _logEntry(entry);
}
function _logEntry(entry: Diagnostics.LogEntry): void {
sendMessage({
type: MessageTypes.LogInfo,
data: JSON.stringify(entry),
data: JSON.stringify(entry)
});
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -53,13 +59,13 @@ function _generateLogEntry(
level: Diagnostics.LogEntryLevel,
message: string,
area: string,
code?: number | string
code?: number
): Diagnostics.LogEntry {
return {
timestamp: new Date().getUTCSeconds(),
level,
message,
area,
code,
code
};
}
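
One detail worth noting in the logError change above: an Error's message and stack are non-enumerable, so a plain JSON.stringify drops them. A minimal illustration:

// Why the Object.getOwnPropertyNames replacer matters:
const err = new Error("boom");
JSON.stringify(err); // "{}" — message and stack are non-enumerable
JSON.stringify(err, Object.getOwnPropertyNames(err)); // includes "message" and "stack"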


@@ -6,7 +6,7 @@ describe("Message Handler", () => {
let mockPromise = {
id: "123",
startTime: new Date(),
deferred: Q.defer<any>(),
deferred: Q.defer<any>()
};
let mockMessage = { message: { id: "123", data: "{}" } };
MessageHandler.RequestMap[mockPromise.id] = mockPromise;
@@ -18,7 +18,7 @@ describe("Message Handler", () => {
let message = {
id: "123",
startTime: new Date(),
deferred: Q.defer<any>(),
deferred: Q.defer<any>()
};
MessageHandler.handleCachedDataMessage(message);


@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
deferred: Q.defer<TResponseDataModel>(),
startTime: new Date(),
id: _.uniqueId(),
id: _.uniqueId()
};
RequestMap[cachedDataPromise.id] = cachedDataPromise;
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
portalChildWindow.parent.postMessage(
{
signature: "pcIframe",
data: data,
data: data
},
portalChildWindow.document.referrer
);


@@ -14,7 +14,7 @@ const fetchMock = () => {
ok: true,
text: () => "{}",
json: () => "{}",
headers: new Map(),
headers: new Map()
});
};
@@ -27,8 +27,8 @@ const collection = {
partitionKey: {
paths: ["/pk"],
kind: "Hash",
version: 1,
},
version: 1
}
} as Collection;
const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
partitionKey: {
paths: ["/pk"],
kind: "Hash",
version: 1,
},
version: 1
}
} as unknown) as DocumentId;
const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo",
},
cassandraEndpoint: "foo"
}
} as DatabaseAccount;
describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
resetConfigContext();
delete window.authType;
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
});


@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
const defaultHeaders = {
[HttpHeaders.apiType]: ApiType.MongoDB.toString(),
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
};
function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
let continuationToken: string;
return {
fetchNext: () => {
return queryDocuments(databaseId, collection, false, query).then((response) => {
return queryDocuments(databaseId, collection, false, query).then(response => {
continuationToken = response.continuationToken;
const headers: { [key: string]: string | number } = {};
response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
headers,
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
hasMoreResults: !!continuationToken,
hasMoreResults: !!continuationToken
};
});
},
}
};
}
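
The iterator above deliberately mirrors the SDK shape, so callers can treat Mongo and SQL results alike (a sketch; the collection object is a placeholder):

// Illustrative: consume the proxy-backed iterator like the SDK's.
const iter = queryIterator("testDatabase", collection, "{}");
const page = await iter.fetchNext();
page.hasMoreResults; // true while a continuation token came back
page.requestCharge; // parsed from the request-charge header
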
@@ -74,9 +74,7 @@ export function queryDocuments(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperty
: "",
collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
};
const endpoint = getEndpoint() || "";
@@ -89,7 +87,7 @@ export function queryDocuments(
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json",
[HttpHeaders.contentType]: "application/query+json"
};
if (continuationToken) {
@@ -102,14 +100,14 @@ export function queryDocuments(
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify({ query }),
headers,
headers
})
.then(async (response) => {
.then(async response => {
if (response.ok) {
return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers,
headers: response.headers
};
}
errorHandling(response, "querying documents", params);
@@ -137,9 +135,7 @@ export function readDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -151,10 +147,10 @@ export function readDocument(
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader())
),
},
)
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -179,7 +175,7 @@ export function createDocument(
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -190,10 +186,10 @@ export function createDocument(
body: JSON.stringify(documentContent),
headers: {
...defaultHeaders,
...authHeaders(),
},
...authHeaders()
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -222,9 +218,7 @@ export function updateDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -236,10 +230,10 @@ export function updateDocument(
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -263,9 +257,7 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -276,10 +268,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return undefined;
}
@@ -307,7 +299,7 @@ export function createMongoCollectionWithProxy(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput,
autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
};
const endpoint = getEndpoint();
@@ -322,11 +314,11 @@ export function createMongoCollectionWithProxy(
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
},
[HttpHeaders.contentType]: "application/json"
}
}
)
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}


@@ -14,7 +14,7 @@
*/
export default class MongoUtility {
public static tojson = function (x: any, indent: string, nolint: boolean) {
public static tojson = function(x: any, indent: string, nolint: boolean) {
if (x === null || x === undefined) {
return String(x);
}
@@ -71,7 +71,7 @@ export default class MongoUtility {
}
};
private static tojsonObject = function (x: any, indent: string, nolint: boolean) {
private static tojsonObject = function(x: any, indent: string, nolint: boolean) {
var lineEnding = nolint ? " " : "\n";
var tabSpace = nolint ? "" : "\t";
indent = indent || "";
@@ -114,7 +114,7 @@ export default class MongoUtility {
}
}
// Add proper line endings, indents, and commas to each line
s += $.map(pairs, function (pair) {
s += $.map(pairs, function(pair) {
return lineEnding + indent + pair;
}).join(",");
s += lineEnding;
@@ -124,7 +124,7 @@ export default class MongoUtility {
return s + indent + "}";
};
private static tojsonArray = function (a: any, indent: string, nolint: boolean) {
private static tojsonArray = function(a: any, indent: string, nolint: boolean) {
if (a.length === 0) {
return "[ ]";
}
@@ -151,7 +151,7 @@ export default class MongoUtility {
return s;
};
private static hasDefinedProperty = function (obj: any, prop: string): boolean {
private static hasDefinedProperty = function(obj: any, prop: string): boolean {
if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) {
return false;
} else if (obj.hasOwnProperty(prop)) {


@@ -1,62 +0,0 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
describe("parseSDKOfferResponse", () => {
it("manual throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: 500,
autoscaleMaxThroughput: undefined,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
it("autoscale throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
maxThroughput: 5000,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: undefined,
autoscaleMaxThroughput: 5000,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
});


@@ -1,33 +0,0 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
const offerContent = offerDefinition.content;
if (!offerContent) {
return undefined;
}
const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
const autopilotSettings = offerContent.offerAutopilotSettings;
if (autopilotSettings) {
return {
id: offerDefinition.id,
autoscaleMaxThroughput: autopilotSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
headers: offerResponse.headers,
};
}
return {
id: offerDefinition.id,
autoscaleMaxThroughput: undefined,
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
headers: offerResponse.headers,
};
};


@@ -16,7 +16,7 @@ const notificationsPath = () => {
};
export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
if (configContext.platform === Platform.Emulator) {
return [];
}
@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
const headers = { [authorizationHeader.header]: authorizationHeader.token };
const response = await window.fetch(url, {
headers,
headers
});
if (!response.ok) {


@@ -12,13 +12,14 @@ import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
import { createCollection } from "./dataAccess/createCollection";
import { handleError } from "./ErrorHandlingUtils";
import * as ErrorParserUtility from "./ErrorParserUtility";
import * as Logger from "./Logger";
export class QueriesClient {
private static readonly PartitionKey: DataModels.PartitionKey = {
paths: [`/${SavedQueries.PartitionKeyProperty}`],
kind: BackendDefaults.partitionKeyKind,
version: BackendDefaults.partitionKeyVersion,
version: BackendDefaults.partitionKeyVersion
};
private static readonly FetchQuery: string = "SELECT * FROM c";
private static readonly FetchMongoQuery: string = "{}";
@@ -41,7 +42,7 @@ export class QueriesClient {
databaseId: SavedQueries.DatabaseName,
partitionKey: QueriesClient.PartitionKey,
offerThroughput: SavedQueries.OfferThroughput,
databaseLevelThroughput: false,
databaseLevelThroughput: false
})
.then(
(collection: DataModels.Collection) => {
@@ -52,8 +53,13 @@ export class QueriesClient {
return Promise.resolve(collection);
},
(error: any) => {
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to set up account for saving queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "setupQueriesCollection");
return Promise.reject(stringifiedError);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -96,11 +102,19 @@ export class QueriesClient {
return Promise.resolve();
},
(error: any) => {
if (error.code === HttpStatusCodes.Conflict.toString()) {
error = `Query ${query.queryName} already exists`;
let errorMessage: string;
const parsedError: DataModels.ErrorDataModel = ErrorParserUtility.parse(error)[0];
if (parsedError.code === HttpStatusCodes.Conflict.toString()) {
errorMessage = `Query ${query.queryName} already exists`;
} else {
errorMessage = parsedError.message;
}
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
return Promise.reject(error);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
Logger.logError(JSON.stringify(parsedError), "saveQuery");
return Promise.reject(errorMessage);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -135,7 +149,7 @@ export class QueriesClient {
resourceId: resourceId,
queryName: queryName,
query: query,
id: id,
id: id
};
try {
this.validateQuery(parsedQuery);
@@ -149,15 +163,25 @@ export class QueriesClient {
return Promise.resolve(queries);
},
(error: any) => {
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "getSavedQueries");
return Promise.reject(stringifiedError);
}
);
},
(error: any) => {
// should never get into this state but we handle this regardless
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "getSavedQueries");
return Promise.reject(stringifiedError);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -192,7 +216,7 @@ export class QueriesClient {
const documentId = new DocumentId(
{
partitionKey: QueriesClient.PartitionKey,
partitionKeyProperty: "id",
partitionKeyProperty: "id"
} as DocumentsTab,
query,
query.queryName
@@ -208,8 +232,13 @@ export class QueriesClient {
return Promise.resolve();
},
(error: any) => {
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to delete query ${query.queryName}: ${stringifiedError}`
);
Logger.logError(stringifiedError, "deleteQuery");
return Promise.reject(stringifiedError);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));


@@ -4,7 +4,7 @@ import { SplitterMetrics } from "./Constants";
export enum SplitterDirection {
Horizontal = "horizontal",
Vertical = "vertical",
Vertical = "vertical"
}
export interface SplitterBounds {
@@ -50,7 +50,7 @@ export class Splitter {
animate: true,
animateDuration: "fast",
start: this.onResizeStart,
stop: this.onResizeStop,
stop: this.onResizeStop
};
if (isVerticalSplitter) {
@@ -90,7 +90,9 @@ export class Splitter {
this.lastWidth = $(this.leftSide).width();
$(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
$(this.leftSide).css("width", "");
$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.leftSide)
.resizable("option", "disabled", true)
.removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.splitter).removeClass("ui-resizable-e");
this.isCollapsed(true);
}


@@ -32,8 +32,8 @@ export default class UrlUtility {
type: type,
objectBody: {
id: id,
self: resourcePath,
},
self: resourcePath
}
};
return result;


@@ -15,15 +15,15 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: true,
offerThroughput: 400,
offerThroughput: 400
};
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
@@ -41,12 +41,12 @@ describe("createCollection", () => {
return {
database: {
containers: {
create: () => ({}),
},
},
create: () => ({})
}
}
};
},
},
}
}
});
await createCollection(createCollectionParams);
expect(client).toHaveBeenCalled();
@@ -60,7 +60,7 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
offerThroughput: 400
};
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -70,12 +70,12 @@ describe("createCollection", () => {
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
autoPilotMaxThroughput: 4000,
autoPilotMaxThroughput: 4000
};
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
autoscaleSettings: {
maxThroughput: 4000,
},
maxThroughput: 4000
}
});
});
});


@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable,
getCassandraTable
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection,
getMongoDBCollection
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph,
getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput,
offerThroughput: params.offerThroughput
};
await createDatabase(createDatabaseParams);
}
@@ -55,7 +55,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
logConsoleInfo(`Successfully created container ${params.collectionId}`);
return collection;
} catch (error) {
handleError(error, "CreateCollection", `Error while creating container ${params.collectionId}`);
handleError(error, `Error while creating container ${params.collectionId}`, "CreateCollection");
throw error;
} finally {
clearMessage();
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields.",
message: "Mongo Collection created with wildcard index on all fields."
});
}
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = {
id: params.collectionId,
id: params.collectionId
};
const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput,
},
maxThroughput: params.autoPilotMaxThroughput
}
};
}
return {
throughput: params.offerThroughput,
throughput: params.offerThroughput
};
};
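
The createCollection tests earlier in this compare pin down the two shapes this helper returns:

// constructRpOptions({ ..., offerThroughput: 400 })
//   => { throughput: 400 }
// constructRpOptions({ ..., autoPilotMaxThroughput: 4000 })
//   => { autoscaleSettings: { maxThroughput: 4000 } }
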
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl,
analyticalStorageTtl: params.analyticalStorageTtl
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId };


@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions,
CreateUpdateOptions
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraKeyspace,
getCassandraKeyspace,
getCassandraKeyspace
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBDatabase,
getMongoDBDatabase,
getMongoDBDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinDatabase,
getGremlinDatabase,
getGremlinDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export async function createDatabase(params: DataModels.CreateDatabaseParams): P
logConsoleInfo(`Successfully created database ${params.databaseId}`);
return database;
} catch (error) {
handleError(error, "CreateDatabase", `Error while creating database ${params.databaseId}`);
handleError(error, `Error while creating database ${params.databaseId}`, "CreateDatabase");
throw error;
} finally {
clearMessage();
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
id: params.databaseId
},
options,
},
options
}
};
const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
id: params.databaseId
},
options,
},
options
}
};
const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
id: params.databaseId
},
options,
},
options
}
};
const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
id: params.databaseId
},
options,
},
options
}
};
const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput,
},
maxThroughput: params.autoPilotMaxThroughput
}
};
}
return {
throughput: params.offerThroughput,
throughput: params.offerThroughput
};
}
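
Nearly every file in this compare flips the last two arguments of handleError, moving the user-facing message to the second position and the telemetry area string to the end. A hedged sketch of the signature the new call sites imply; the real declaration lives in Common/ErrorHandlingUtils and is not shown in this diff:

// Hypothetical handleError shape inferred from the call sites in this compare.
function handleError(error: unknown, consoleMessage: string, area: string): void {
  const detail = error instanceof Error ? error.message : String(error);
  // The sketch only logs; the real helper presumably also raises console
  // notifications and telemetry keyed on `area`.
  console.error(`[${area}] ${consoleMessage}: ${detail}`);
}

// Call sites then read, e.g.:
// handleError(error, `Error while creating database ${params.databaseId}`, "CreateDatabase");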

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource,
SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure,
getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,
@@ -70,7 +70,7 @@ export async function createStoredProcedure(
.scripts.storedProcedures.create(storedProcedure);
return response?.resource;
} catch (error) {
handleError(error, "CreateStoredProcedure", `Error while creating stored procedure ${storedProcedure.id}`);
handleError(error, `Error while creating stored procedure ${storedProcedure.id}`, "CreateStoredProcedure");
throw error;
} finally {
clearMessage();

View File

@@ -3,12 +3,13 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource,
SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";
export async function createTrigger(
@@ -44,8 +45,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,
@@ -59,10 +60,15 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
}
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
return response.resource;
} catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
logConsoleError(`Error while creating trigger ${trigger.id}:\n ${error.message}`);
logError(error.message, "CreateTrigger", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource,
SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction,
getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,
@@ -72,8 +72,8 @@ export async function createUserDefinedFunction(
} catch (error) {
handleError(
error,
"CreateUserupdateUserDefinedFunction",
`Error while creating user defined function ${userDefinedFunction.id}`
`Error while creating user defined function ${userDefinedFunction.id}`,
"CreateUserupdateUserDefinedFunction"
);
throw error;
} finally {

View File

@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
return {
container: () => {
return {
delete: (): unknown => undefined,
delete: (): unknown => undefined
};
},
}
};
},
}
});
await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -16,11 +16,14 @@ export async function deleteCollection(databaseId: string, collectionId: string)
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId);
} else {
await client().database(databaseId).container(collectionId).delete();
await client()
.database(databaseId)
.container(collectionId)
.delete();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) {
handleError(error, "DeleteCollection", `Error while deleting container ${collectionId}`);
handleError(error, `Error while deleting container ${collectionId}`, "DeleteCollection");
throw error;
} finally {
clearMessage();

View File

@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
(client as jest.Mock).mockReturnValue({
database: () => {
return {
delete: (): unknown => undefined,
delete: (): unknown => undefined
};
},
}
});
await deleteDatabase("database");
expect(client).toHaveBeenCalled();

View File

@@ -19,11 +19,13 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId);
} else {
await client().database(databaseId).delete();
await client()
.database(databaseId)
.delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {
handleError(error, "DeleteDatabase", `Error while deleting database ${databaseId}`);
handleError(error, `Error while deleting database ${databaseId}`, "DeleteDatabase");
throw error;
} finally {
clearMessage();

View File

@@ -27,10 +27,14 @@ export async function deleteStoredProcedure(
storedProcedureId
);
} else {
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
}
} catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
handleError(error, `Error while deleting stored procedure ${storedProcedureId}`, "DeleteStoredProcedure");
throw error;
} finally {
clearMessage();

View File

@@ -23,10 +23,14 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
triggerId
);
} else {
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
}
} catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
handleError(error, `Error while deleting trigger ${triggerId}`, "DeleteTrigger");
throw error;
} finally {
clearMessage();

View File

@@ -23,10 +23,14 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
id
);
} else {
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
}
} catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
handleError(error, `Error while deleting user defined function ${id}`, "DeleteUserDefinedFunction");
throw error;
} finally {
clearMessage();

View File

@@ -1,87 +0,0 @@
import { armRequest } from "../../Utils/arm/request";
import { configContext } from "../../ConfigContext";
import { handleError } from "../ErrorHandlingUtils";
import { userContext } from "../../UserContext";
interface TimeSeriesData {
data: {
timeStamp: string;
total: number;
}[];
metadatavalues: {
name: {
localizedValue: string;
value: string;
};
value: string;
};
}
interface MetricsData {
displayDescription: string;
errorCode: string;
id: string;
name: {
value: string;
localizedValue: string;
};
timeseries: TimeSeriesData[];
type: string;
unit: string;
}
interface MetricsResponse {
cost: number;
interval: string;
namespace: string;
resourceregion: string;
timespan: string;
value: MetricsData[];
}
export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;
const filter = `DatabaseName eq '${databaseName}' and CollectionName eq '${containerName}'`;
const metricNames = "DataUsage,IndexUsage";
const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/providers/microsoft.insights/metrics`;
try {
const metricsResponse: MetricsResponse = await armRequest({
host: configContext.ARM_ENDPOINT,
path,
method: "GET",
apiVersion: "2018-01-01",
queryParams: {
filter,
metricNames,
},
});
if (metricsResponse?.value?.length !== 2) {
return undefined;
}
const dataUsageData: MetricsData = metricsResponse.value[0];
const indexUsagedata: MetricsData = metricsResponse.value[1];
const dataUsageSizeInKb: number = getUsageSizeInKb(dataUsageData);
const indexUsageSizeInKb: number = getUsageSizeInKb(indexUsagedata);
return dataUsageSizeInKb + indexUsageSizeInKb;
} catch (error) {
handleError(error, "getCollectionUsageSize");
throw error;
}
};
const getUsageSizeInKb = (metricsData: MetricsData): number => {
if (metricsData?.errorCode !== "Success") {
throw Error(`Get collection usage size failed: ${metricsData.errorCode}`);
}
const timeSeriesData: TimeSeriesData = metricsData?.timeseries?.[0];
const usageSizeInBytes: number = timeSeriesData?.data?.[0]?.total;
return usageSizeInBytes ? usageSizeInBytes / 1024 : 0;
};
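
The file above (removed in this compare) summed the DataUsage and IndexUsage insights metrics for a container. A small worked example of the byte-to-KB fold it performed, with fabricated numbers purely for illustration:

// Fabricated MetricsResponse-shaped value; only the fields the helper read.
const sampleMetric = {
  errorCode: "Success",
  timeseries: [{ data: [{ timeStamp: "2020-10-29T00:00:00Z", total: 204800 }] }],
};
const usageSizeInKb = sampleMetric.timeseries[0].data[0].total / 1024; // 200 KB
// getCollectionUsageSizeInKB then returned dataUsageKb + indexUsageKb.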

View File

@@ -1,25 +0,0 @@
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { AuthType } from "../../AuthType";
export async function getIndexTransformationProgress(databaseId: string, collectionId: string): Promise<number> {
if (window.authType !== AuthType.AAD) {
return undefined;
}
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
);
} catch (error) {
handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
throw error;
}
clearMessage();
return indexTransformationPercentage;
}

View File

@@ -10,9 +10,9 @@ describe("readCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
@@ -23,11 +23,11 @@ describe("readCollection", () => {
return {
container: () => {
return {
read: (): unknown => ({}),
read: (): unknown => ({})
};
},
}
};
},
}
});
await readCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -7,10 +7,13 @@ export async function readCollection(databaseId: string, collectionId: string):
let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
try {
const response = await client().database(databaseId).container(collectionId).read();
const response = await client()
.database(databaseId)
.container(collectionId)
.read();
collection = response.resource as DataModels.Collection;
} catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
handleError(error, `Error while querying container ${collectionId}`, "ReadCollection");
throw error;
}
clearMessage();

View File

@@ -1,6 +1,9 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadCollectionOfferParams } from "../../Contracts/DataModels";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
@@ -8,114 +11,113 @@ import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/20
import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK";
import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext";
export const readCollectionOffer = async (params: ReadCollectionOfferParams): Promise<Offer> => {
export const readCollectionOffer = async (
params: DataModels.ReadCollectionOfferParams
): Promise<DataModels.OfferWithHeaders> => {
const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`);
let offerId = params.offerId;
if (!offerId) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
try {
offerId = await getCollectionOfferIdWithARM(params.databaseId, params.collectionId);
} catch (error) {
clearMessage();
if (error.code !== "NotFound") {
throw error;
}
return undefined;
}
} else {
offerId = await getCollectionOfferIdWithSDK(params.collectionResourceId);
if (!offerId) {
clearMessage();
return undefined;
}
}
}
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
};
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
) {
return await readCollectionOfferWithARM(params.databaseId, params.collectionId);
}
return await readOfferWithSDK(params.offerId, params.collectionResourceId);
const response = await client()
.offer(offerId)
.read(options);
return (
response && {
...response.resource,
headers: response.headers
}
);
} catch (error) {
handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
handleError(error, `Error while querying offer for collection ${params.collectionId}`, "ReadCollectionOffer");
throw error;
} finally {
clearMessage();
}
};
const readCollectionOfferWithARM = async (databaseId: string, collectionId: string): Promise<Offer> => {
const getCollectionOfferIdWithARM = async (databaseId: string, collectionId: string): Promise<string> => {
let rpResponse;
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;
const defaultExperience = userContext.defaultExperience;
let rpResponse;
try {
switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB:
rpResponse = await getSqlContainerThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.MongoDB:
rpResponse = await getMongoDBCollectionThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.Cassandra:
rpResponse = await getCassandraTableThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.Graph:
rpResponse = await getGremlinGraphThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.Table:
rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
break;
default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
return undefined;
switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB:
rpResponse = await getSqlContainerThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.MongoDB:
rpResponse = await getMongoDBCollectionThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.Cassandra:
rpResponse = await getCassandraTableThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.Graph:
rpResponse = await getGremlinGraphThroughput(
subscriptionId,
resourceGroup,
accountName,
databaseId,
collectionId
);
break;
case DefaultAccountExperienceType.Table:
rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
break;
default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
const resource = rpResponse?.properties?.resource;
if (resource) {
const offerId: string = rpResponse.name;
const minimumThroughput: number =
typeof resource.minimumThroughput === "string"
? parseInt(resource.minimumThroughput)
: resource.minimumThroughput;
const autoscaleSettings = resource.autoscaleSettings;
if (autoscaleSettings) {
return {
id: offerId,
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
};
}
return {
id: offerId,
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
};
}
return undefined;
return rpResponse?.name;
};
const getCollectionOfferIdWithSDK = async (collectionResourceId: string): Promise<string> => {
const offers = await readOffers();
const offer = offers.find(offer => offer.resource === collectionResourceId);
return offer?.id;
};
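
The rewrite above splits offer lookup into two steps: resolve an offer id (the ARM throughput resource name on the AAD path, or a scan of readOffers() on the SDK path), then read the offer through the SDK with the populateCollectionThroughputInfo header so the response headers carry throughput details. A hedged usage sketch with placeholder ids:

// Placeholder ids; offerId is resolved internally when omitted.
const offerWithHeaders = await readCollectionOffer({
  databaseId: "db1",
  collectionId: "coll1",
  collectionResourceId: "coll1ResourceId", // consulted on the SDK fallback path
  offerId: undefined,
});
// Result: the SDK offer resource spread together with response.headers.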

View File

@@ -0,0 +1,45 @@
import * as DataModels from "../../Contracts/DataModels";
import * as HeadersUtility from "../HeadersUtility";
import * as ViewModels from "../../Contracts/ViewModels";
import { ContainerDefinition, Resource } from "@azure/cosmos";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
interface ResourceWithStatistics {
statistics: DataModels.Statistic[];
}
export const readCollectionQuotaInfo = async (
collection: ViewModels.Collection
): Promise<DataModels.CollectionQuotaInfo> => {
const clearMessage = logConsoleProgress(`Querying containers for database ${collection.id}`);
const options: RequestOptions = {};
options.populateQuotaInfo = true;
options.initialHeaders = options.initialHeaders || {};
options.initialHeaders[HttpHeaders.populatePartitionStatistics] = true;
try {
const response = await client()
.database(collection.databaseId)
.container(collection.id())
.read(options);
const quota: DataModels.CollectionQuotaInfo = HeadersUtility.getQuota(response.headers);
const resource = response.resource as ContainerDefinition & Resource & ResourceWithStatistics;
quota["usageSizeInKB"] = resource.statistics.reduce(
(previousValue: number, currentValue: DataModels.Statistic) => previousValue + currentValue.sizeInKB,
0
);
quota["numPartitions"] = resource.statistics.length;
quota["uniqueKeyPolicy"] = collection.uniqueKeyPolicy; // TODO: Remove after refactoring (#119617)
return quota;
} catch (error) {
handleError(error, `Error while querying quota info for container ${collection.id}`, "ReadCollectionQuotaInfo");
throw error;
} finally {
clearMessage();
}
};
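
readCollectionQuotaInfo aggregates the per-partition statistics returned when populatePartitionStatistics is set. A tiny worked example of the fold, with fabricated partition sizes:

// Fabricated statistics array shaped like DataModels.Statistic[].
const statistics = [{ sizeInKB: 120 }, { sizeInKB: 80 }, { sizeInKB: 55 }];
const usageSizeInKB = statistics.reduce((sum, s) => sum + s.sizeInKB, 0); // 255
const numPartitions = statistics.length; // 3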

View File

@@ -12,9 +12,9 @@ describe("readCollections", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
@@ -32,12 +32,12 @@ describe("readCollections", () => {
containers: {
readAll: () => {
return {
fetchAll: (): unknown => [],
fetchAll: (): unknown => []
};
},
},
}
}
};
},
}
});
await readCollections("database");
expect(client).toHaveBeenCalled();

View File

@@ -23,10 +23,13 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
return await readCollectionsWithARM(databaseId);
}
const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
handleError(error, `Error while querying containers for database ${databaseId}`, "ReadCollections");
throw error;
} finally {
clearMessage();
@@ -60,5 +63,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
}

View File

@@ -1,43 +1,66 @@
import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Offer, ReadDatabaseOfferParams } from "../../Contracts/DataModels";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOfferWithSDK } from "./readOfferWithSDK";
import { readOffers } from "./readOffers";
import { userContext } from "../../UserContext";
export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promise<Offer> => {
export const readDatabaseOffer = async (
params: DataModels.ReadDatabaseOfferParams
): Promise<DataModels.OfferWithHeaders> => {
const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`);
let offerId = params.offerId;
if (!offerId) {
offerId = await (window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
? getDatabaseOfferIdWithARM(params.databaseId)
: getDatabaseOfferIdWithSDK(params.databaseResourceId));
if (!offerId) {
clearMessage();
return undefined;
}
}
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
};
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
) {
return await readDatabaseOfferWithARM(params.databaseId);
}
return await readOfferWithSDK(params.offerId, params.databaseResourceId);
const response = await client()
.offer(offerId)
.read(options);
return (
response && {
...response.resource,
headers: response.headers
}
);
} catch (error) {
handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
handleError(error, `Error while querying offer for database ${params.databaseId}`, "ReadDatabaseOffer");
throw error;
} finally {
clearMessage();
}
};
const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
const getDatabaseOfferIdWithARM = async (databaseId: string): Promise<string> => {
let rpResponse;
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;
const defaultExperience = userContext.defaultExperience;
let rpResponse;
try {
switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB:
@@ -55,39 +78,18 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.name;
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
return undefined;
}
const resource = rpResponse?.properties?.resource;
if (resource) {
const offerId: string = rpResponse.name;
const minimumThroughput: number =
typeof resource.minimumThroughput === "string"
? parseInt(resource.minimumThroughput)
: resource.minimumThroughput;
const autoscaleSettings = resource.autoscaleSettings;
if (autoscaleSettings) {
return {
id: offerId,
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
};
}
return {
id: offerId,
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
};
}
return undefined;
};
const getDatabaseOfferIdWithSDK = async (databaseResourceId: string): Promise<string> => {
const offers = await readOffers();
const offer = offers.find(offer => offer.resource === databaseResourceId);
return offer?.id;
};

View File

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
@@ -30,10 +30,10 @@ describe("readDatabases", () => {
databases: {
readAll: () => {
return {
fetchAll: (): unknown => [],
fetchAll: (): unknown => []
};
},
},
}
}
});
await readDatabases();
expect(client).toHaveBeenCalled();

View File

@@ -21,11 +21,13 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
) {
databases = await readDatabasesWithARM();
} else {
const sdkResponse = await client().databases.readAll().fetchAll();
const sdkResponse = await client()
.databases.readAll()
.fetchAll();
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {
handleError(error, "ReadDatabases", `Error while querying databases`);
handleError(error, `Error while querying databases`, "ReadDatabases");
throw error;
}
clearMessage();
@@ -56,5 +58,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
}

View File

@@ -2,6 +2,8 @@ import { userContext } from "../../UserContext";
import { getMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { MongoDBCollectionResource } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { AuthType } from "../../AuthType";
@@ -22,9 +24,35 @@ export async function readMongoDBCollectionThroughRP(
const response = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
collection = response.properties.resource;
} catch (error) {
handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
handleError(error, `Error while reading container ${collectionId}`, "ReadMongoDBCollection");
throw error;
}
clearMessage();
return collection;
}
export async function getMongoDBCollectionIndexTransformationProgress(
databaseId: string,
collectionId: string
): Promise<number> {
if (window.authType !== AuthType.AAD) {
return undefined;
}
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
);
} catch (error) {
handleError(error, `Error while reading container ${collectionId}`, "ReadMongoDBCollection");
throw error;
}
clearMessage();
return indexTransformationPercentage;
}
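
The new getMongoDBCollectionIndexTransformationProgress reads the index transformation percentage off the response headers of a quota-populating container read. A sketch of that parse, assuming the conventional Cosmos header name behind Constants.HttpHeaders.collectionIndexTransformationProgress:

// Assumed header name; the constant itself is defined outside this diff.
const headers: Record<string, unknown> = {
  "x-ms-documentdb-collection-index-transformation-progress": "42",
};
const progress = parseInt(
  headers["x-ms-documentdb-collection-index-transformation-progress"] as string
); // 42, i.e. reindexing is 42% complete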

View File

@@ -1,27 +0,0 @@
import { HttpHeaders } from "../Constants";
import { Offer } from "../../Contracts/DataModels";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readOffers } from "./readOffers";
export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
if (!offerId) {
const offers = await readOffers();
const offer = offers.find((offer) => offer.resource === resourceId);
if (!offer) {
return undefined;
}
offerId = offer.id;
}
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true,
},
};
const response = await client().offer(offerId).read(options);
return parseSDKOfferResponse(response);
};

View File

@@ -1,21 +1,23 @@
import { SDKOfferDefinition } from "../../Contracts/DataModels";
import { Offer } from "../../Contracts/DataModels";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { client } from "../CosmosClient";
import { handleError, getErrorMessage } from "../ErrorHandlingUtils";
import { handleError } from "../ErrorHandlingUtils";
export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
export const readOffers = async (): Promise<Offer[]> => {
const clearMessage = logConsoleProgress(`Querying offers`);
try {
const response = await client().offers.readAll().fetchAll();
const response = await client()
.offers.readAll()
.fetchAll();
return response?.resources;
} catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too.
if (getErrorMessage(error)?.includes("Reading or replacing offers is not supported for serverless accounts")) {
if (error.message.includes("Reading or replacing offers is not supported for serverless accounts")) {
return [];
}
handleError(error, "ReadOffers", `Error while querying offers`);
handleError(error, `Error while querying offers`, "ReadOffers");
throw error;
} finally {
clearMessage();
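
The serverless check above now dereferences error.message directly, which assumes an Error-shaped value was thrown. A more defensive variant, close in spirit to the getErrorMessage helper on the other side of this compare:

// Hedged sketch: tolerate non-Error throwables before matching the message.
function isServerlessOffersError(error: unknown): boolean {
  const message = error instanceof Error ? error.message : String(error ?? "");
  return message.includes("Reading or replacing offers is not supported for serverless accounts");
}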

View File

@@ -25,7 +25,7 @@ export async function readStoredProcedures(
databaseId,
collectionId
);
return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
}
const response = await client()
@@ -35,7 +35,7 @@ export async function readStoredProcedures(
.fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadStoredProcedures", `Failed to query stored procedures for container ${collectionId}`);
handleError(error, `Failed to query stored procedures for container ${collectionId}`, "ReadStoredProcedures");
throw error;
} finally {
clearMessage();

View File

@@ -25,13 +25,17 @@ export async function readTriggers(
databaseId,
collectionId
);
return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
}
const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.readAll()
.fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
handleError(error, `Failed to query triggers for container ${collectionId}`, "ReadTriggers");
throw error;
} finally {
clearMessage();

View File

@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
databaseId,
collectionId
);
return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
}
const response = await client()
@@ -37,8 +37,8 @@ export async function readUserDefinedFunctions(
} catch (error) {
handleError(
error,
"ReadUserDefinedFunctions",
`Failed to query user defined functions for container ${collectionId}`
`Failed to query user defined functions for container ${collectionId}`,
"ReadUserDefinedFunctions"
);
throw error;
} finally {

View File

@@ -0,0 +1,20 @@
import * as Constants from "../Constants";
import { sendMessage } from "../MessageHandler";
import { MessageTypes } from "../../Contracts/ExplorerContracts";
export interface CosmosError {
code: number;
message?: string;
}
export function sendNotificationForError(error: CosmosError): void {
if (error && error.code === Constants.HttpStatusCodes.Forbidden) {
if (error.message && error.message.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
return;
}
sendMessage({
type: MessageTypes.ForbiddenError,
reason: error && error.message ? error.message : error
});
}
}
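
A usage sketch for the helper above; 403 is the value behind Constants.HttpStatusCodes.Forbidden. Note that the indexOf(...) > 0 guard skips a match at position 0, so the "sharedoffer is disabled" suppression relies on the phrase never starting the message:

// Placeholder errors; only Forbidden (403) errors produce a message.
sendNotificationForError({ code: 403, message: "Forbidden: the sharedoffer is disabled for your account" });
// -> suppressed, because the phrase occurs at index > 0.
sendNotificationForError({ code: 403, message: "Forbidden: key regenerated" });
// -> sendMessage({ type: MessageTypes.ForbiddenError, reason: ... }) is posted.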

View File

@@ -8,22 +8,22 @@ import {
MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource,
SqlContainerCreateUpdateParameters,
SqlContainerResource,
SqlContainerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable,
getCassandraTable
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection,
getMongoDBCollection
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph,
getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils";
@@ -59,7 +59,7 @@ export async function updateCollection(
logConsoleInfo(`Successfully updated container ${collectionId}`);
return collection;
} catch (error) {
handleError(error, "UpdateCollection", `Failed to update container ${collectionId}`);
handleError(error, `Failed to update container ${collectionId}`, "UpdateCollection");
throw error;
} finally {
clearMessage();
@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
const updateParams: MongoDBCollectionCreateUpdateParameters = {
properties: {
resource: newCollection,
options: updateOptions,
},
options: updateOptions
}
};
const updateResponse = await createUpdateMongoDBCollection(

View File

@@ -1,14 +1,13 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { HttpHeaders } from "../Constants";
import { Offer, SDKOfferDefinition, UpdateOfferParams } from "../../Contracts/DataModels";
import { Offer, UpdateOfferParams } from "../../Contracts/DataModels";
import { OfferDefinition } from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readCollectionOffer } from "./readCollectionOffer";
import { readDatabaseOffer } from "./readDatabaseOffer";
import {
@@ -17,7 +16,7 @@ import {
migrateSqlDatabaseToManualThroughput,
migrateSqlContainerToAutoscale,
migrateSqlContainerToManualThroughput,
updateSqlContainerThroughput,
updateSqlContainerThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
updateCassandraKeyspaceThroughput,
@@ -25,7 +24,7 @@ import {
migrateCassandraKeyspaceToManualThroughput,
migrateCassandraTableToAutoscale,
migrateCassandraTableToManualThroughput,
updateCassandraTableThroughput,
updateCassandraTableThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
updateMongoDBDatabaseThroughput,
@@ -33,7 +32,7 @@ import {
migrateMongoDBDatabaseToManualThroughput,
migrateMongoDBCollectionToAutoscale,
migrateMongoDBCollectionToManualThroughput,
updateMongoDBCollectionThroughput,
updateMongoDBCollectionThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
updateGremlinDatabaseThroughput,
@@ -41,13 +40,13 @@ import {
migrateGremlinDatabaseToManualThroughput,
migrateGremlinGraphToAutoscale,
migrateGremlinGraphToManualThroughput,
updateGremlinGraphThroughput,
updateGremlinGraphThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { userContext } from "../../UserContext";
import {
migrateTableToAutoscale,
migrateTableToManualThroughput,
updateTableThroughput,
updateTableThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
@@ -73,7 +72,7 @@ export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> =>
logConsoleInfo(`Successfully updated offer for ${offerResourceText}`);
return updatedOffer;
} catch (error) {
handleError(error, "UpdateCollection", `Error updating offer for ${offerResourceText}`);
handleError(error, `Error updating offer for ${offerResourceText}`, "UpdateCollection");
throw error;
} finally {
clearMessage();
@@ -110,7 +109,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
return await readCollectionOffer({
collectionId: params.collectionId,
databaseId: params.databaseId,
offerId: params.currentOffer.id,
offerId: params.currentOffer.id
});
};
@@ -140,7 +139,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
return await readDatabaseOffer({
databaseId: params.databaseId,
offerId: params.currentOffer.id,
offerId: params.currentOffer.id
});
};
@@ -358,13 +357,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
const body: ThroughputSettingsUpdateParameters = {
properties: {
resource: {},
},
resource: {}
}
};
if (params.autopilotThroughput) {
body.properties.resource.autoscaleSettings = {
maxThroughput: params.autopilotThroughput,
maxThroughput: params.autopilotThroughput
};
} else {
body.properties.resource.throughput = params.manualThroughput;
@@ -374,26 +373,25 @@ const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpd
};
const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => {
const sdkOfferDefinition = params.currentOffer.offerDefinition;
const newOffer: SDKOfferDefinition = {
const currentOffer = params.currentOffer;
const newOffer: Offer = {
content: {
offerThroughput: undefined,
offerIsRUPerMinuteThroughputEnabled: false,
offerThroughput: undefined
},
_etag: undefined,
_ts: undefined,
_rid: sdkOfferDefinition._rid,
_self: sdkOfferDefinition._self,
id: sdkOfferDefinition.id,
offerResourceId: sdkOfferDefinition.offerResourceId,
offerVersion: sdkOfferDefinition.offerVersion,
offerType: sdkOfferDefinition.offerType,
resource: sdkOfferDefinition.resource,
_rid: currentOffer._rid,
_self: currentOffer._self,
id: currentOffer.id,
offerResourceId: currentOffer.offerResourceId,
offerVersion: currentOffer.offerVersion,
offerType: currentOffer.offerType,
resource: currentOffer.resource
};
if (params.autopilotThroughput) {
newOffer.content.offerAutopilotSettings = {
maxThroughput: params.autopilotThroughput,
maxThroughput: params.autopilotThroughput
};
} else {
newOffer.content.offerThroughput = params.manualThroughput;
@@ -402,12 +400,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const options: RequestOptions = {};
if (params.migrateToAutoPilot) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToAutopilot]: "true",
[HttpHeaders.migrateOfferToAutopilot]: "true"
};
delete newOffer.content.offerAutopilotSettings;
} else if (params.migrateToManual) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToManualThroughput]: "true",
[HttpHeaders.migrateOfferToManualThroughput]: "true"
};
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
}
@@ -416,6 +414,5 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
.offer(params.currentOffer.id)
// TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
.replace((newOffer as unknown) as OfferDefinition, options);
return parseSDKOfferResponse(sdkResponse);
return sdkResponse?.resource;
};
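
On the ARM path, createUpdateOfferBody reduces both throughput modes to a single ThroughputSettingsUpdateParameters payload. Illustrative bodies with fabricated RU/s values:

// Autoscale update: only the ceiling is sent.
const autoscaleBody = {
  properties: { resource: { autoscaleSettings: { maxThroughput: 4000 } } },
};
// Manual update: a fixed provisioned throughput.
const manualBody = {
  properties: { resource: { throughput: 500 } },
};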

View File

@@ -0,0 +1,24 @@
import { updateOfferThroughputBeyondLimit } from "./updateOfferThroughputBeyondLimit";
describe("updateOfferThroughputBeyondLimit", () => {
it("should call fetch", async () => {
window.fetch = jest.fn(() => {
return {
ok: true
};
});
window.dataExplorer = {
logConsoleData: jest.fn(),
deleteInProgressConsoleDataWithId: jest.fn()
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any;
await updateOfferThroughputBeyondLimit({
subscriptionId: "foo",
resourceGroup: "foo",
databaseAccountName: "foo",
databaseName: "foo",
throughput: 1000000000
});
expect(window.fetch).toHaveBeenCalled();
});
});

View File

@@ -0,0 +1,50 @@
import { Platform, configContext } from "../../ConfigContext";
import { getAuthorizationHeader } from "../../Utils/AuthorizationUtils";
import { AutoPilotOfferSettings } from "../../Contracts/DataModels";
import { logConsoleProgress, logConsoleInfo, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { HttpHeaders } from "../Constants";
interface UpdateOfferThroughputRequest {
subscriptionId: string;
resourceGroup: string;
databaseAccountName: string;
databaseName: string;
collectionName?: string;
throughput: number;
offerAutopilotSettings?: AutoPilotOfferSettings;
}
export async function updateOfferThroughputBeyondLimit(request: UpdateOfferThroughputRequest): Promise<void> {
if (configContext.platform !== Platform.Portal) {
throw new Error("Updating throughput beyond specified limit is not supported on this platform");
}
const resourceDescriptionInfo = request.collectionName
? `database ${request.databaseName} and container ${request.collectionName}`
: `database ${request.databaseName}`;
const clearMessage = logConsoleProgress(
`Requesting increase in throughput to ${request.throughput} for ${resourceDescriptionInfo}`
);
const url = `${configContext.BACKEND_ENDPOINT}/api/offerthroughputrequest/updatebeyondspecifiedlimit`;
const authorizationHeader = getAuthorizationHeader();
const response = await fetch(url, {
method: "POST",
body: JSON.stringify(request),
headers: { [authorizationHeader.header]: authorizationHeader.token, [HttpHeaders.contentType]: "application/json" }
});
if (response.ok) {
logConsoleInfo(
`Successfully requested an increase in throughput to ${request.throughput} for ${resourceDescriptionInfo}`
);
clearMessage();
return undefined;
}
const error = await response.json();
logConsoleError(`Failed to request an increase in throughput for ${request.throughput}: ${error.message}`);
clearMessage();
throw new Error(error.message);
}
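
A hedged usage sketch of the portal-only helper above; outside Platform.Portal it throws before issuing the request, and all identifiers here are placeholders:

await updateOfferThroughputBeyondLimit({
  subscriptionId: "00000000-0000-0000-0000-000000000000",
  resourceGroup: "my-resource-group",
  databaseAccountName: "my-account",
  databaseName: "db1",
  collectionName: "coll1", // omit for a database-level offer
  throughput: 1000000,
});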

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource,
SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure,
getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,
@@ -64,7 +64,7 @@ export async function updateStoredProcedure(
.replace(storedProcedure);
return response?.resource;
} catch (error) {
handleError(error, "UpdateStoredProcedure", `Error while updating stored procedure ${storedProcedure.id}`);
handleError(error, `Error while updating stored procedure ${storedProcedure.id}`, "UpdateStoredProcedure");
throw error;
} finally {
clearMessage();

Some files were not shown because too many files have changed in this diff