Mirror of https://github.com/Azure/cosmos-explorer.git (synced 2025-12-26 04:11:24 +00:00)

Compare commits: remove-red ... users/lang (67 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 572d573fdd |  |
|  | 37c64c4a4d |  |
|  | fc5ffeb7ca |  |
|  | f39b6accb1 |  |
|  | 64601693b7 |  |
|  | 0c80c45e22 |  |
|  | 84b6075ee8 |  |
|  | d880723be9 |  |
|  | 4ce9dcc024 |  |
|  | addcfedd5e |  |
|  | a133134b8b |  |
|  | 79dec6a8a8 |  |
|  | 53a8cea95e |  |
|  | 5f1f7a8266 |  |
|  | a009a8ba5f |  |
|  | 3e782527d0 |  |
|  | e6ca1d25c9 |  |
|  | 473f722dcc |  |
|  | 5741802c25 |  |
|  | e2e58f73b1 |  |
|  | 79769e9689 |  |
|  | 542abf4d3a |  |
|  | 4bee46ce55 |  |
|  | fe58722002 |  |
|  | 94ff6b3e81 |  |
|  | b4219e2994 |  |
|  | 294270b6aa |  |
|  | e4bab1de4b |  |
|  | 703ceacd3f |  |
|  | 734df3dd18 |  |
|  | 1e19f02fd7 |  |
|  | 24b5b754ca |  |
|  | e09730d782 |  |
|  | 09a95fded4 |  |
|  | 7ffa18a190 |  |
|  | 30353c26f3 |  |
|  | 34d8704071 |  |
|  | 23714831bd |  |
|  | 9a5d46b6e0 |  |
|  | b9245101bc |  |
|  | 274deb13be |  |
|  | 9d50577800 |  |
|  | bd00e5eb9b |  |
|  | 821f665e78 |  |
|  | 39f7ef331a |  |
|  | 9933a4988a |  |
|  | d525afa142 |  |
|  | cfb9a0b321 |  |
|  | 3b64d75322 |  |
|  | daba1c4ed4 |  |
|  | a698e08638 |  |
|  | 88d630fef4 |  |
|  | 5ffa746adb |  |
|  | a9a57f4ba9 |  |
|  | 47cc6fd7a8 |  |
|  | 14cdf19efb |  |
|  | 5191ae3f3a |  |
|  | ba862a8106 |  |
|  | fe085b3e5a |  |
|  | 8028734cb0 |  |
|  | 444f663733 |  |
|  | 8c1ca35420 |  |
|  | b69174788d |  |
|  | ff03c79399 |  |
|  | 0382628249 |  |
|  | d346ebe054 |  |
|  | f5ecb8a04f |  |
@@ -1,7 +1,10 @@
# These options are only needed when if running end to end tests locally
PORTAL_RUNNER_USERNAME=
PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_CONNECTION_STRING=
PORTAL_RUNNER_CONNECTION_STRING=
CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING=
DATA_EXPLORER_ENDPOINT=https://localhost:1234/hostedExplorer.html
@@ -15,8 +15,6 @@ src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/ErrorParserUtility.test.ts
src/Common/ErrorParserUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
@@ -398,19 +396,5 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx
@@ -42,6 +42,13 @@ module.exports = {
"no-null/no-null": "error",
"@typescript-eslint/no-explicit-any": "error",
"prefer-arrow/prefer-arrow-functions": ["error", { allowStandaloneDeclarations: true }],
eqeqeq: "error"
eqeqeq: "error",
"no-restricted-syntax": [
"error",
{
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'"
}
]
}
};
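The `no-restricted-syntax` entry added above flags `JSON.stringify(error)` because `Error` properties are non-enumerable and serialize to an empty object; a minimal illustration (not taken from the repository):

```js
// Why the rule exists (illustration only, not code from the repository):
// Error fields such as `message` and `stack` are non-enumerable, so
// JSON.stringify drops them and logs an empty object.
const error = new Error("database not found");

console.log(JSON.stringify(error)); // prints "{}"

// Serialize the fields you actually want instead:
console.log(JSON.stringify({ name: error.name, message: error.message, stack: error.stack }));
```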
29 .github/workflows/automerge.yml vendored
@@ -1,29 +0,0 @@
name: automerge
on:
  pull_request:
    types:
      - labeled
      - unlabeled
      - synchronize
      - opened
      - edited
      - ready_for_review
      - reopened
      - unlocked
  pull_request_review:
    types:
      - submitted
  check_suite:
    types:
      - completed
  status: {}
jobs:
  automerge:
    runs-on: ubuntu-latest
    steps:
      - name: automerge
        uses: "pascalgn/automerge-action@v0.11.0"
        env:
          GITHUB_TOKEN: "${{ secrets.AUTOMERGE_GITHUB_PAT }}"
          MERGE_METHOD: "squash"
          MERGE_COMMIT_MESSAGE: "pull-request-title"
107 .github/workflows/ci.yml vendored
@@ -79,100 +79,31 @@ jobs:
name: dist
path: dist/
endtoendemulator:
name: "End To End Tests | Emulator | SQL"
name: "End To End Emulator Tests"
needs: [lint, format, compile, unittest]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- uses: southpolesteve/cosmos-emulator-github-action@v1
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: Restore Cypress Binary Cache
uses: actions/cache@v2
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-binary-cache
- name: End to End Tests
run: |
npm ci
npm start &
npm ci --prefix ./cypress
npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
shell: bash
env:
EMULATOR_ENDPOINT: https://0.0.0.0:8081/
NODE_TLS_REJECT_UNAUTHORIZED: 0
CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
endtoendsql:
name: "End To End Tests | SQL"
needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: Restore Cypress Binary Cache
uses: actions/cache@v2
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-binary-cache
- run: npm ci
- name: End to End Tests
run: |
npm start &
cd cypress
npm ci
node cleanup.js
npm run wait-for-server
npx cypress run --browser chrome --headless --spec "./integration/dataexplorer/SQL/*"
npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
shell: bash
env:
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
PLATFORM: "Emulator"
NODE_TLS_REJECT_UNAUTHORIZED: 0
CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
CYPRESS_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
- uses: actions/upload-artifact@v2
name: videos
if: ${{ failure() }}
with:
path: "**/*.mp4"
endtoendmongo:
name: "End To End Tests | Mongo"
needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: Restore Cypress Binary Cache
uses: actions/cache@v2
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-binary-cache
- name: End to End Tests
run: |
npm ci
npm start &
cd cypress
npm ci
node cleanup.js
npm run wait-for-server
npx cypress run --browser chrome --headless --spec "./integration/dataexplorer/MONGO/*"
shell: bash
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0
CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
CYPRESS_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
- uses: actions/upload-artifact@v2
if: ${{ failure() }}
name: videos
with:
path: "**/*.mp4"
name: screenshots
path: failed-*
accessibility:
name: "Accessibility | Hosted"
needs: [lint, format, compile, unittest]
@@ -191,13 +122,13 @@ jobs:
sudo sysctl -p
npm ci
npm start &
npx wait-on -i 5000 https-get://0.0.0.0:1234/
npx wait-on -i 5000 https-get://0.0.0.0:1234/
node utils/accesibilityCheck.js
shell: bash
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0
endtoendpuppeteer:
name: "End to end puppeteer tests"
endtoendhosted:
name: "End to End Hosted Tests"
needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest
steps:
@@ -206,7 +137,7 @@ jobs:
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: End to End Puppeteer Tests
- name: End to End Hosted Tests
run: |
npm ci
npm start &
@@ -218,10 +149,16 @@ jobs:
PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
- uses: actions/upload-artifact@v2
with:
name: screenshots
path: failed-*
nuget:
name: Publish Nuget
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendsql, endtoendmongo]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -245,7 +182,7 @@ jobs:
nugetmpac:
name: Publish Nuget MPAC
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendsql, endtoendmongo]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
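The SQL and Mongo jobs above start the dev server in the background and then gate the tests on `npm run wait-for-server`; that script is not shown in this diff, but a sketch of such a gate using the `wait-on` package (the same tool the accessibility job calls directly) might look like the following — the URL and timing values here are assumptions:

```js
// Hypothetical sketch of the "wait-for-server" gate the jobs above rely on;
// the real script is not part of this diff. It uses the wait-on package,
// which the accessibility job also invokes via `npx wait-on`.
const waitOn = require("wait-on");

waitOn({
  resources: ["https-get://localhost:1234/"], // assumed dev-server URL, matching DATA_EXPLORER_ENDPOINT
  timeout: 120000, // give webpack-dev-server time to compile
  interval: 5000 // same polling interval as the `-i 5000` flag used above
}).catch(error => {
  // NODE_TLS_REJECT_UNAUTHORIZED=0 is set in the workflow so the self-signed cert is accepted
  console.error(error);
  process.exit(1);
});
```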
3 .gitignore vendored
@@ -9,9 +9,6 @@ pkg/DataExplorer/*
test/out/*
workers/**/*.js
*.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/*
Contracts/*
.DS_Store
27 README.md
@@ -1,4 +1,4 @@
# CosmosDB Explorer
# Cosmos DB Explorer

UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), https://cosmos.azure.com/, and the [Cosmos DB Emulator](https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator)

@@ -33,7 +33,7 @@ To run pure hosted mode, in `webpack.config.js` change index HtmlWebpackPlugin t

### Emulator Development

In a window environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-windows enironment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.
In a window environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.

`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`

@@ -60,7 +60,7 @@ The Cosmos Portal that consumes this repo is not currently open source. If you h
You can however load a local running instance of data explorer in the production portal.

1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Whitelist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
2. Allowlist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html

@@ -76,24 +76,17 @@ Unit tests are located adjacent to the code under test and run with [Jest](https

#### End to End CI Tests

[Cypress](https://www.cypress.io/) is used for end to end tests and are contained in `cypress/`. Currently, it operates as sub project with its own typescript config and dependencies. It also only operates against the emulator. To run cypress tests:
Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:

1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run cypress headless(`npm run test`) or in interactive mode(`npm run test:debug`)

#### End to End Production Runners

Jest and Puppeteer are used for end to end production runners and are contained in `test/`. To run these tests locally:

1. Copy .env.example to .env and fill in all variables
2. Run `npm run test:e2e`
1. Copy .env.example to .env
2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
3. Make sure all packages are installed `npm install`
4. Run the server `npm run start` and wait for it to start
5. Run `npm run test:e2e`

### Releasing

We generally adhear to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.

# Contributing
@@ -7,7 +7,7 @@ trigger:
- master

pool:
vmImage: 'ubuntu-latest'
vmImage: "ubuntu-latest"

steps:
- task: ComponentGovernanceComponentDetection@0
- task: ComponentGovernanceComponentDetection@0
4 cypress/.gitignore vendored
@@ -1,4 +0,0 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos
@@ -1,51 +0,0 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");

// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");

async function cleanup() {
  const connectionString = process.env.CYPRESS_CONNECTION_STRING;
  if (!connectionString) {
    throw new Error("Connection string not provided");
  }

  let client;
  switch (true) {
    case connectionString.includes("mongodb://"): {
      const [, key, accountName] = connectionString.match(mongoRegex);
      client = new CosmosClient({
        key,
        endpoint: `https://${accountName}.documents.azure.com:443/`
      });
      break;
    }
    // TODO: Add support for other API connection strings
    default:
      client = new CosmosClient(connectionString);
      break;
  }

  const response = await client.databases.readAll().fetchAll();
  return Promise.all(
    response.resources.map(async db => {
      const dbTimestamp = new Date(db._ts * 1000);
      const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
      if (dbTimestamp < twentyMinutesAgo) {
        await client.database(db.id).delete();
        console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
      } else {
        console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
      }
    })
  );
}

cleanup()
  .then(() => {
    process.exit(0);
  })
  .catch(error => {
    console.error(error);
    process.exit(1);
  });
@@ -1,15 +0,0 @@
{
  "integrationFolder": "./integration",
  "pluginsFile": false,
  "fixturesFolder": false,
  "supportFile": "./support/index.js",
  "defaultCommandTimeout": 90000,
  "chromeWebSecurity": false,
  "reporter": "mochawesome",
  "reporterOptions": {
    "reportDir": "cypress/report",
    "json": true,
    "overwrite": false,
    "html": false
  }
}
@@ -1,66 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Cassandra API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
  });

  it("Create a new table in Cassandra API", () => {
    const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
    const tableId = `TableId112`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Table"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[id="keyspace-id"]')
        .should("be.visible")
        .type(keyspaceId);

      cy.wrap($body)
        .find('input[class="textfontclr"]')
        .type(tableId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('data-test="addCollection-createCollection"')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", tableId);
    });
  });
});
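The deleted spec above, like the ones that follow, repeatedly unwraps the explorer iframe by hand (`cy.get("iframe").then(...)` plus `cy.wrap($body)`); a common Cypress way to factor that out into a helper is sketched below (not code from the repository):

```js
// Hypothetical helper; the deleted specs inline this pattern instead of sharing it.
// Yields the <body> of the embedded Data Explorer iframe as a Cypress chainable.
const getIframeBody = () => {
  return cy
    .get("iframe")
    .its("0.contentDocument.body")
    .should("not.be.empty")
    .then(cy.wrap);
};

// Example usage inside a test:
// getIframeBody().find('button[data-test="New Table"]').should("be.visible").click();
```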
@@ -1,81 +0,0 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Graph API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.graph);
  });

  it("Create a new graph in Graph API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
    const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Graph"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .should("be.visible")
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(graphId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(partitionKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", graphId);
    });
  });
});
@@ -1,80 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// // 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find("#submitBtnAddCollection")
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
@@ -1,96 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in Mongo API - Autopilot", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('div[class="throughputModeContainer"]')
        .should("be.visible")
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
          expect(input.get(1).textContent, "second item").contains("Manual");
        });

      cy.wrap($body)
        .find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
        .check();

      cy.wrap($body)
        .find('select[name="autoPilotTiers"]')
        // .eq(1).should('contain', '4,000 RU/s');
        // // .select('4,000 RU/s').should('have.value', '1');

        .find('option[value="2"]')
        .then($element => $element.get(1).setAttribute("selected", "selected"));

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
@@ -1,67 +0,0 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in existing database in Mongo API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find('span[class="nodeLabel"]')
        .should("be.visible")
        .then($span => {
          const dbId1 = $span.text();
          cy.log("DBBB", dbId1);

          cy.wrap($body)
            .find('div[class="commandBarContainer"]')
            .should("be.visible")
            .find('button[data-test="New Collection"]')
            .should("be.visible")
            .click();

          cy.wrap($body)
            .find('div[class="contextual-pane-in"]')
            .should("be.visible")
            .find('span[id="containerTitle"]');

          cy.wrap($body)
            .find('input[data-test="addCollection-existingDatabase"]')
            .check();

          cy.wrap($body)
            .find('input[data-test="addCollection-existingDatabase"]')
            .type(dbId1);

          cy.wrap($body)
            .find('input[data-test="addCollection-collectionId"]')
            .type(collectionId);

          cy.wrap($body)
            .find('input[data-test="addCollection-partitionKeyValue"]')
            .type(sharedKey);

          cy.wrap($body)
            .find('input[data-test="addCollection-createCollection"]')
            .click();

          cy.wait(10000);

          cy.wrap($body)
            .find('div[data-test="resourceTreeId"]')
            .should("exist")
            .find('div[class="treeComponent dataResourceTree"]')
            .click()
            .should("contain", collectionId);
        });
    });
  });
});
@@ -1,203 +0,0 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context.skip("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API - Provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find(".createNewDatabaseOrUseExisting")
        .should("have.length", 2)
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Create new");
          expect(input.get(1).textContent, "second item").contains("Use existing");
        });

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const collectionIdTitle = `Add Collection`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .uncheck();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[id="tab2"]')
        .check({ force: true });

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .uncheck();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[id="tab1"]')
        .check({ force: true });

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
@@ -1,79 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("SQL API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new container in SQL API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
    connectionString.loginUsingConnectionString();

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Container"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find("#submitBtnAddCollection")
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId);
    });
  });
});
@@ -1,60 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Table API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.table);
  });

  it("Create a new table in Table API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Table"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", collectionId);
    });
  });
});
@@ -1,55 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.

let crypt = require("crypto");

context("Emulator - createDatabase", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
  const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
  const collectionIdTitle = `Add Collection`;
  const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;

  it("Create a new collection", () => {
    cy.contains("New Container").click();

    // cy.contains(collectionIdTitle);

    cy.get(".createNewDatabaseOrUseExisting")
      .should("have.length", 2)
      .and(input => {
        expect(input.get(0).textContent, "first item").contains("Create new");
        expect(input.get(1).textContent, "second item").contains("Use existing");
      });

    cy.get('input[data-test="addCollection-createNewDatabase"]').check();

    cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);

    cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);

    cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");

    cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);

    cy.get('input[data-test="addCollection-createCollection"]').click();

    cy.get('div[data-test="resourceTreeId"]').should("exist");

    cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);

    cy.get('div[data-test="databaseList"]').should("contain", collectionId);
  });
});
@@ -1,65 +0,0 @@
// 1. Click on "New Database" on the command bar
// 2. a Pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. Whe the checkbox is marked, a new input with a throughput control let's you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have the list item called "Scale", that once clicked, it should show the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permited range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab and reopen it and verify that the input contains the last saved value.%

const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;

context("Emulator - Create database -> container -> item", () => {
  beforeEach(async () => {
    const { resources } = await client.databases.readAll().fetchAll();
    for (const database of resources) {
      await client.database(database.id).delete();
    }
  });

  it("creates a new database", () => {
    cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
    cy.contains("New Container").click();
    cy.get("[data-test=addCollection-newDatabaseId]").click();
    cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
    cy.get("[data-test=addCollection-collectionId]").click();
    cy.get("[data-test=addCollection-collectionId]").type(collectionId);
    cy.get("[data-test=addCollection-partitionKeyValue]").click();
    cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
    cy.get('input[name="createCollection"]').click();
    cy.get(".dataResourceTree").should("contain", databaseId);
    cy.get(".dataResourceTree")
      .contains(databaseId)
      .click();
    cy.get(".dataResourceTree").should("contain", collectionId);
    cy.get(".dataResourceTree")
      .contains(collectionId)
      .click();
    cy.get(".dataResourceTree")
      .contains("Items")
      .click();
    cy.get(".dataResourceTree")
      .contains("Items")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer")
      .contains("New Item")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer")
      .contains("Save")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
  });
});
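The deleted spec above cleans up emulator state through a `utilities/cosmosClient` module that is not part of this diff; a plausible sketch of such a module, with the emulator endpoint and key supplied via environment variables (assumed names), is:

```js
// Hypothetical sketch of the utilities/cosmosClient module required above;
// the real file is not included in this diff, and the env var names are assumptions.
const { CosmosClient } = require("@azure/cosmos");

module.exports = new CosmosClient({
  endpoint: process.env.EMULATOR_ENDPOINT || "https://localhost:8081/",
  key: process.env.EMULATOR_KEY // e.g. the emulator's well-known local key
});
```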
@@ -1,46 +0,0 @@
// 1. Click last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select "Delete Container" option in the dropdown
// 5. On Selection, Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted collection should not appear under the database

let crypt = require("crypto");

context("Emulator - deleteCollection", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  it("Delete a collection", () => {
    cy.get(".databaseId")
      .last()
      .click();

    cy.get(".collectionList")
      .last()
      .then($id => {
        const collectionId = $id.text();

        cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");

        cy.get('span[data-test="collectionEllipsisMenu"]')
          .invoke("show")
          .last()
          .click();

        cy.get('div[data-test="collectionContextMenu"]')
          .contains("Delete Container")
          .click({ force: true });

        cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());

        cy.get('input[data-test="deleteCollection"]').click();

        cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);

        cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
      });
  });
});
@@ -1,83 +0,0 @@
// 1. Click last database in the resource tree
// 2. Select the context menu within the database
// 4. Select "Delete Database" option in the dropdown
// 5. On Selection, Delete Database pane opens on the right side
// 6. Enter the same database id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted database should not appear in the resource tree

let crypt = require("crypto");

context("Emulator - deleteDatabase", () => {
  beforeEach(() => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    let db_rid = "";
    const date = new Date().toUTCString();
    let authToken = "";
    cy.visit("http://localhost:1234/explorer.html");

    // Creating auth token for collection creation
    cy.request({
      method: "GET",
      url: "https://localhost:8081/_explorer/authorization/post/dbs/",
      headers: {
        "x-ms-date": date,
        authorization: "-"
      }
    })
      .then(response => {
        authToken = response.body.Token; // Getting auth token for collection creation
        return new Cypress.Promise((resolve, reject) => {
          return resolve();
        });
      })
      .then(() => {
        cy.request({
          method: "POST",
          url: "https://localhost:8081/dbs",
          headers: {
            "x-ms-date": date,
            authorization: authToken,
            "x-ms-version": "2018-12-31"
          },
          body: {
            id: dbId
          }
        }).then(response => {
          cy.log("Response", response);
          db_rid = response.body._rid;
          return new Cypress.Promise((resolve, reject) => {
            cy.log("Rid", db_rid);
            return resolve();
          });
        });
      });
  });

  it("Delete a database", () => {
    cy.get('span[data-test="refreshTree"]').click();

    cy.get(".databaseId")
      .last()
      .then($id => {
        const dbId = $id.text();

        cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");

        cy.get('span[data-test="databaseEllipsisMenu"]')
          .invoke("show")
          .last()
          .click();

        cy.get('div[data-test="databaseContextMenu"]')
          .contains("Delete Database")
          .click({ force: true });

        cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());

        cy.get('input[data-test="deleteDatabase"]').click();

        cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
      });
  });
});
@@ -1,35 +0,0 @@
# Notebook end-to-end tests
This describes how to run the tests locally

## Stand up a local notebook container instance:
Instructions on how to build and run the container [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)

## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).

Make sure you can run Data Explorer locally from the web browser.

## Run cypress tests
1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```

To run in Debug mode:
```
npm run test:debug
```
This opens Cypress UI

## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).

## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)
@@ -1,93 +0,0 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create a new notebook and run some code", () => {
    // Create new notebook
    cy.contains("New Notebook").click();

    // Check tab name
    cy.get("li.tabList .tabNavText").should($span => {
      const text = $span.text();
      expect(text).to.match(/^Untitled.*\.ipynb$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container")
      .as("cellContainer")
      .click();

    // Type in some code
    cy.get("@cellContainer").type("2+4");

    // Execute
    cy.get('[data-test="Run"]')
      .first()
      .click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "6");
    });

    // Restart kernel
    cy.get('[data-test="Run"] button')
      .eq(-1)
      .click();
    cy.get("li")
      .contains("Restart Kernel")
      .click();

    // Wait for python3 | restarting status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*restarting$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container")
      .as("cellContainer")
      .find(".input")
      .as("codeInput")
      .click();

    // Type in some code
    cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");

    // Execute
    cy.get('[data-test="Run"]')
      .first()
      .click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "9");
    });
  });
});
@@ -1,172 +0,0 @@
|
||||
context("Resource tree notebook file manipulation", () => {
|
||||
const timeout = 15000; // in ms
|
||||
const explorerUrl =
|
||||
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
|
||||
|
||||
/**
|
||||
* Wait for UI to be ready
|
||||
*/
|
||||
const waitForReady = () => {
|
||||
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
|
||||
};
|
||||
|
||||
const clickContextMenuAndSelectOption = (nodeLabel, option) => {
|
||||
cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
|
||||
.find("button.treeMenuEllipsis")
|
||||
.click();
|
||||
cy.get('[data-test="treeComponentMenuItemContainer"]')
|
||||
.contains(option)
|
||||
.click();
|
||||
};
|
||||
|
||||
const createFolder = folder => {
|
||||
clickContextMenuAndSelectOption("My Notebooks/", "New Directory");
|
||||
|
||||
cy.get("#stringInputPane").within(() => {
|
||||
cy.get('input[name="collectionIdConfirmation"]').type(folder);
|
||||
cy.get("form").submit();
|
||||
});
|
||||
};
|
||||
|
||||
const deleteItem = nodeName => {
|
||||
clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
|
||||
cy.get(".ms-Dialog-main")
|
||||
.contains("Delete")
|
||||
.click();
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
cy.visit(explorerUrl);
|
||||
waitForReady();
|
||||
});
|
||||
|
||||
it("Create and remove a directory", () => {
|
||||
const folder = "e2etest_folder1";
|
||||
createFolder(folder);
|
||||
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
|
||||
deleteItem(`${folder}/`);
|
||||
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
|
||||
});
|
||||
|
||||
it("Create and rename a directory", () => {
|
||||
const folder = "e2etest_folder2";
|
||||
const renamedFolder = "e2etest_folder2_renamed";
|
||||
createFolder(folder);
|
||||
|
||||
// Rename
|
||||
clickContextMenuAndSelectOption(`${folder}/`, "Rename");
|
||||
cy.get("#stringInputPane").within(() => {
|
||||
cy.get('input[name="collectionIdConfirmation"]')
|
||||
.clear()
|
||||
.type(renamedFolder);
|
||||
cy.get("form").submit();
|
||||
});
|
||||
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
|
||||
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
|
||||
|
||||
deleteItem(`${renamedFolder}/`);
|
||||
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
|
||||
});
|
||||
|
||||
it("Create a notebook inside a directory", () => {
|
||||
const folder = "e2etest_folder3";
|
||||
const newNotebookName = "Untitled.ipynb";
|
||||
createFolder(folder);
|
||||
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
|
||||
|
||||
// Verify tab is open
|
||||
cy.get(".tabList")
|
||||
.contains(newNotebookName)
|
||||
.should("exist");
|
||||
|
||||
// Close tab
|
||||
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
|
||||
.find(".cancelButton")
|
||||
.click();
|
||||
// When running from command line, closing the tab is too fast
|
||||
cy.get("body").then($body => {
|
||||
if ($body.find(".ms-Dialog-main").length) {
|
||||
// For some reason, this does not work
|
||||
// cy.get(".ms-Dialog-main").contains("Close").click();
|
||||
cy.get(".ms-Dialog-main .ms-Button--primary").click();
|
||||
}
|
||||
});
|
||||
|
||||
// Expand folder node
|
||||
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
|
||||
|
||||
// Delete notebook
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
|
||||
.find("button.treeMenuEllipsis")
|
||||
.click();
|
||||
cy.get('[data-test="treeComponentMenuItemContainer"]')
|
||||
.contains("Delete")
|
||||
.click();
|
||||
|
||||
// Confirm
|
||||
cy.get(".ms-Dialog-main")
|
||||
.contains("Delete")
|
||||
.click();
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
|
||||
|
||||
deleteItem(`${folder}/`);
|
||||
});
|
||||
|
||||
it("Create and rename a notebook inside a directory", () => {
|
||||
const folder = "e2etest_folder4";
|
||||
const newNotebookName = "Untitled.ipynb";
|
||||
const renamedNotebookName = "mynotebook.ipynb";
|
||||
createFolder(folder);
|
||||
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
|
||||
|
||||
// Close tab
|
||||
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
|
||||
.find(".cancelButton")
|
||||
.click();
|
||||
cy.get("body").then($body => {
|
||||
if ($body.find(".ms-Dialog-main").length) {
|
||||
// For some reason, this does not work
|
||||
// cy.get(".ms-Dialog-main").contains("Close").click();
|
||||
cy.get(".ms-Dialog-main .ms-Button--primary").click();
|
||||
}
|
||||
});
|
||||
|
||||
// Expand folder node
|
||||
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
|
||||
|
||||
// Rename notebook
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
|
||||
.find("button.treeMenuEllipsis")
|
||||
.click();
|
||||
cy.get('[data-test="treeComponentMenuItemContainer"]')
|
||||
.contains("Rename")
|
||||
.click();
|
||||
|
||||
cy.get("#stringInputPane").within(() => {
|
||||
cy.get('input[name="collectionIdConfirmation"]')
|
||||
.clear()
|
||||
.type(renamedNotebookName);
|
||||
cy.get("form").submit();
|
||||
});
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");
|
||||
|
||||
// Delete notebook
|
||||
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
|
||||
.find("button.treeMenuEllipsis")
|
||||
.click();
|
||||
cy.get('[data-test="treeComponentMenuItemContainer"]')
|
||||
.contains("Delete")
|
||||
.click();
|
||||
|
||||
// Confirm
|
||||
cy.get(".ms-Dialog-main")
|
||||
.contains("Delete")
|
||||
.click();
|
||||
// Give it time to settle
|
||||
cy.wait(1000);
|
||||
deleteItem(`${folder}/`);
|
||||
});
|
||||
});
|
||||
3066 cypress/package-lock.json (generated): diff suppressed because it is too large
@@ -1,25 +0,0 @@
{
  "name": "cosmos-explorer-cypress",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "cypress run",
    "wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
    "test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
    "test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
    "test:debug": "cypress open"
  },
  "devDependencies": {
    "cypress": "^4.8.0",
    "mocha": "^7.0.1",
    "mochawesome": "^4.1.0",
    "mochawesome-merge": "^4.0.1",
    "mochawesome-report-generator": "^4.1.0",
    "typescript": "3.4.3",
    "wait-on": "^4.0.2"
  },
  "dependencies": {
    "@microsoft/applicationinsights-web": "^2.5.2"
  }
}
@@ -1,23 +0,0 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");

const appInsights = new appInsightsLib.ApplicationInsights({
  config: {
    instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
    /* ...Other Configuration Options... */
  }
});

appInsights.loadAppInsights();

Cypress.on("fail", (error, runnable) => {
  // App Insights will record the failed tests for Create Collection
  let message = JSON.stringify(runnable.title);
  appInsights.trackTrace({
    message: `${message}`,
    properties: {
      passed: false,
      error: error
    }
  });
  throw error; // rethrow so the test still fails
});
@@ -1,11 +0,0 @@
{
  "compilerOptions": {
    "strict": true,
    "noEmit": true,
    "module": "commonjs",
    "target": "es5",
    "lib": ["es5", "dom", "es6"],
    "types": ["cypress", "node"]
  },
  "include": ["**/*.ts", "**/*.spec.ts"]
}
@@ -1,41 +0,0 @@
module.exports = {
  loginUsingConnectionString: function() {
    const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
    const timeout = 15000;

    cy.visit(prodUrl);
    cy.get('iframe[id="explorerMenu"]').should("be.visible");

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find("#connectExplorer")
        .should("exist")
        .find("div[class='connectExplorer']")
        .should("exist")
        .find("p[class='welcomeText']")
        .should("exist");

      cy.wrap($body.find("div > p.switchConnectTypeText"))
        .should("exist")
        .last()
        .click({ force: true });

      const secret = Cypress.env("CONNECTION_STRING");

      cy.wrap($body)
        .find("input[class='inputToken']")
        .should("exist")
        .type(secret, {
          force: true
        });

      cy.wrap($body.find("input[value='Connect']"), { timeout })
        .first()
        .click({ force: true });

      cy.wait(15000);
    });
  }
};
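For context, a hosted-explorer spec would typically call this helper before exercising the UI. A minimal usage sketch (the require path and the spec body are assumptions, not part of this change):

```ts
// Hypothetical spec consuming the helper above.
const { loginUsingConnectionString } = require("../utilities/connectionString"); // path is an assumption

context("Hosted explorer smoke test", () => {
  beforeEach(() => {
    // Reads CONNECTION_STRING (and optionally TEST_ENDPOINT) from the Cypress env.
    loginUsingConnectionString();
  });

  it("shows the explorer frame after connecting", () => {
    cy.get('iframe[id="explorerMenu"]').should("be.visible");
  });
});
```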
@@ -1,6 +0,0 @@
const { CosmosClient } = require("@azure/cosmos");

module.exports = new CosmosClient({
  endpoint: "https://0.0.0.0:8081",
  key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});
177 externals/jquery.contextMenu.css (vendored)
@@ -1,177 +0,0 @@
|
||||
/*!
|
||||
* jQuery contextMenu - Plugin for simple contextMenu handling
|
||||
*
|
||||
* Version: 1.6.6
|
||||
*
|
||||
* Authors: Rodney Rehm, Addy Osmani (patches for FF)
|
||||
* Web: http://medialize.github.com/jQuery-contextMenu/
|
||||
*
|
||||
* Licensed under
|
||||
* MIT License http://www.opensource.org/licenses/mit-license
|
||||
* GPL v3 http://opensource.org/licenses/GPL-3.0
|
||||
*
|
||||
*/
|
||||
|
||||
.context-menu-list {
|
||||
z-index: 1001;
|
||||
position: fixed;
|
||||
background: white;
|
||||
border: solid 1px gainsboro;
|
||||
box-shadow: 4px 4px 4px -2px #888888;
|
||||
padding: 8px 0px 8px 0px;
|
||||
line-height: 25px;
|
||||
width: 254px;
|
||||
list-style: none;
|
||||
margin-left: -10px;
|
||||
outline: 0px #fff;
|
||||
}
|
||||
|
||||
.context-menu-item {
|
||||
padding: 2px 2px 2px 31px;
|
||||
background-color: #fff;
|
||||
position: relative;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: -moz-none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.context-menu-separator {
|
||||
padding-bottom: 0;
|
||||
border-bottom: 1px solid #DDD;
|
||||
}
|
||||
|
||||
.context-menu-item>label>input,
|
||||
.context-menu-item>label>textarea {
|
||||
-webkit-user-select: text;
|
||||
-moz-user-select: text;
|
||||
-ms-user-select: text;
|
||||
user-select: text;
|
||||
margin-left: -10px;
|
||||
}
|
||||
|
||||
.context-menu-item:hover {
|
||||
cursor: pointer;
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
|
||||
.context-menu-item.disabled {
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.context-menu-input.hover,
|
||||
.context-menu-item.disabled.hover {
|
||||
cursor: default;
|
||||
background-color: #EEE;
|
||||
}
|
||||
|
||||
.context-menu-submenu:after {
|
||||
content: ">";
|
||||
color: #666;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 3px;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
|
||||
/* icons
|
||||
#protip:
|
||||
In case you want to use sprites for icons (which I would suggest you do) have a look at
|
||||
http://css-tricks.com/13224-pseudo-spriting/ to get an idea of how to implement
|
||||
.context-menu-item.icon:before {}
|
||||
*/
|
||||
|
||||
.context-menu-item.icon {
|
||||
min-height: 18px;
|
||||
background-repeat: no-repeat;
|
||||
background-position: 10px 7px;
|
||||
}
|
||||
|
||||
.context-menu-item.icon:hover {
|
||||
min-height: 18px;
|
||||
background-repeat: no-repeat;
|
||||
background-position: 10px 7px;
|
||||
}
|
||||
|
||||
|
||||
/*.context-menu-item.icon-edit {
|
||||
background-image: url(images/page_white_edit.png);
|
||||
}
|
||||
|
||||
.context-menu-item.icon-cut {
|
||||
background-image: url(images/cut.png);
|
||||
}
|
||||
|
||||
.context-menu-item.icon-copy {
|
||||
background-image: url(images/page_white_copy.png);
|
||||
}
|
||||
|
||||
.context-menu-item.icon-paste {
|
||||
background-image: url(images/page_white_paste.png);
|
||||
}
|
||||
|
||||
.context-menu-item.icon-delete {
|
||||
background-image: url(images/page_white_delete.png);
|
||||
}
|
||||
|
||||
.context-menu-item.icon-add {
|
||||
background-image: url(images/page_white_add.png);
|
||||
}
|
||||
|
||||
.context-menu-item.icon-quit {
|
||||
background-image: url(images/door.png);
|
||||
}*/
|
||||
|
||||
|
||||
/* vertically align inside labels */
|
||||
|
||||
.context-menu-input>label>* {
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
|
||||
/* position checkboxes and radios as icons */
|
||||
|
||||
.context-menu-input>label>input[type="checkbox"],
|
||||
.context-menu-input>label>input[type="radio"] {
|
||||
margin-left: -17px;
|
||||
}
|
||||
|
||||
.context-menu-input>label>span {
|
||||
margin-left: 5px;
|
||||
}
|
||||
|
||||
.context-menu-input>label,
|
||||
.context-menu-input>label>input[type="text"],
|
||||
.context-menu-input>label>textarea,
|
||||
.context-menu-input>label>select {
|
||||
display: block;
|
||||
width: 100%;
|
||||
-webkit-box-sizing: border-box;
|
||||
-moz-box-sizing: border-box;
|
||||
-ms-box-sizing: border-box;
|
||||
-o-box-sizing: border-box;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.context-menu-input>label>textarea {
|
||||
height: 100px;
|
||||
}
|
||||
|
||||
.context-menu-item>.context-menu-list {
|
||||
display: none;
|
||||
/* re-positioned by js */
|
||||
right: -5px;
|
||||
top: 5px;
|
||||
}
|
||||
|
||||
|
||||
/*.context-menu-item.hover>.context-menu-list {
|
||||
display: block;
|
||||
padding-left: 5px;
|
||||
}*/
|
||||
|
||||
.context-menu-accesskey {
|
||||
text-decoration: underline;
|
||||
}
|
||||
1686 externals/jquery.contextMenu.js (vendored): diff suppressed because it is too large
@@ -3,7 +3,7 @@ const isCI = require("is-ci");
|
||||
module.exports = {
|
||||
launch: {
|
||||
headless: isCI,
|
||||
slowMo: 30,
|
||||
slowMo: 55,
|
||||
defaultViewport: null,
|
||||
ignoreHTTPSErrors: true,
|
||||
args: ["--disable-web-security"]
|
||||
|
||||
@@ -2082,7 +2082,8 @@ a:link {
|
||||
.resourceTreeAndTabs {
|
||||
display: flex;
|
||||
flex: 1 1 auto;
|
||||
overflow: auto;
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
@@ -3022,3 +3023,7 @@ settings-pane {
|
||||
.infoBoxContent a {
|
||||
color: @AccentMediumHigh
|
||||
}
|
||||
|
||||
.collapsibleSection :hover{
|
||||
cursor: pointer;
|
||||
}
|
||||
250 package-lock.json (generated)
@@ -76,7 +76,6 @@
|
||||
"version": "7.9.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.9.0.tgz",
|
||||
"integrity": "sha512-kWc7L0fw1xwvI0zi8OKVBuxRVefwGOrKSQMvrQ3dW+bIIavBY3/NpXmpjMy7bQnLgwgzWQZ8TlM57YHpHNHz4w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/code-frame": "^7.8.3",
|
||||
"@babel/generator": "^7.9.0",
|
||||
@@ -99,8 +98,7 @@
|
||||
"source-map": {
|
||||
"version": "0.5.7",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
|
||||
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
|
||||
"dev": true
|
||||
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w="
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -2326,83 +2324,83 @@
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-analytics-js": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-analytics-js/-/applicationinsights-analytics-js-2.5.8.tgz",
|
||||
"integrity": "sha512-2RsftiXa5ojNXuHRIC7RybWbN+Z7TMrLK2XGTza1Wq/KHRJNB48WmQuxjd6SzsNguqxRoHsH0sUogIwlK+NO8A==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-analytics-js/-/applicationinsights-analytics-js-2.5.9.tgz",
|
||||
"integrity": "sha512-9L3fb1H1as+J3J2j2EDx1HEMdrucjgR4INqahy+ZAxDPFvR3HCOedYzx645zObBIPu7QkH2LAjPk4fuNGHR1rg==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-common": "2.5.8",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-shims": "1.0.1",
|
||||
"@microsoft/applicationinsights-common": "2.5.9",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3",
|
||||
"@microsoft/dynamicproto-js": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-channel-js": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-channel-js/-/applicationinsights-channel-js-2.5.8.tgz",
|
||||
"integrity": "sha512-etVGzhNluflTikOpW9dlDxdg+B+8gt/nxmGeXqti9Hsqq0fTxcY9bmNklFIEKhOIwai4DodgJjflaDdgR0ObUQ==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-channel-js/-/applicationinsights-channel-js-2.5.9.tgz",
|
||||
"integrity": "sha512-NAQ/2wWmD+gaIZDCMzzwxm8RcbswDvUO5BYeuW9UHJaFuEZ9o9xpztKVz32u4CMv7OI/mLOqnmR4rb0d+kUMwQ==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-common": "2.5.8",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-shims": "1.0.1",
|
||||
"@microsoft/applicationinsights-common": "2.5.9",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3",
|
||||
"@microsoft/dynamicproto-js": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-common": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-common/-/applicationinsights-common-2.5.8.tgz",
|
||||
"integrity": "sha512-RIMAJTsF0H5wiRZxYIF9H8sbMe2W5Ig4yMuH0/lho69DcNZpHf8p6PSa4Qhhli0AnoWYfLE7/WlWO1eR5SkByw==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-common/-/applicationinsights-common-2.5.9.tgz",
|
||||
"integrity": "sha512-dKmXO9m55uRDhpoa0P7l+BApf+lsrqjgoLeKv+ABM8ygIyd9JH6CDcdaT3af+kUFtt9Oj3ChyfueKr1EVOdGkQ==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-core-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-shims": "1.0.1"
|
||||
"@microsoft/applicationinsights-core-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3"
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-core-js": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-core-js/-/applicationinsights-core-js-2.5.8.tgz",
|
||||
"integrity": "sha512-NxxIViHKuqgpla+KdQk7Qy48Dm8lN4oy2mMEpv9kM5GW5MBJ8nZ4A5RV1kokF3kXuDmTUTHlWBXeLR8hauA3qQ==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-core-js/-/applicationinsights-core-js-2.5.9.tgz",
|
||||
"integrity": "sha512-KE9h1wmC/Ckm7jYjsMF1SEWQnk0v0CRzZq1upSARgPH7BgmyClXz1kdnLtuTWz8Aha8IIH9dW2hUOfPCdR+BpQ==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-shims": "1.0.1",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3",
|
||||
"@microsoft/dynamicproto-js": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-dependencies-js": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-dependencies-js/-/applicationinsights-dependencies-js-2.5.8.tgz",
|
||||
"integrity": "sha512-jnXpjE/bnlLeew78OsN8aPTLOPZQJ4y1MOO8R3E+eUXdproD2TemynSk5kUfrMdry91DZOBZnrmJ2NCB+g5ArQ==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-dependencies-js/-/applicationinsights-dependencies-js-2.5.9.tgz",
|
||||
"integrity": "sha512-pNM/dkUOscV0ul/YJe928+77EBtRkRXO/le/VWzlunoUFaEEo4pirc7NycvPx9w/KxA62JMEogbQsWE6nAmqPg==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-common": "2.5.8",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-shims": "1.0.1",
|
||||
"@microsoft/applicationinsights-common": "2.5.9",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3",
|
||||
"@microsoft/dynamicproto-js": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-properties-js": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-properties-js/-/applicationinsights-properties-js-2.5.8.tgz",
|
||||
"integrity": "sha512-BAOSguXe07ua6SqYmAW+8+vVvBZb4qmmUP6s5zc7kNMMgEoEGsCn2VHmM8wHHxq4J/TmYxIqwoufS+XKbUvCeQ==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-properties-js/-/applicationinsights-properties-js-2.5.9.tgz",
|
||||
"integrity": "sha512-mZxaC8CZsURn38IwsPaUx+o9QXQU2vm81THZL+1Lc+7scPo55ATDTFgZ2awIj7CdTp69oGzUkpB7maOn6+OVOw==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-common": "2.5.8",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-shims": "1.0.1"
|
||||
"@microsoft/applicationinsights-common": "2.5.9",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3"
|
||||
}
|
||||
},
|
||||
"@microsoft/applicationinsights-shims": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-shims/-/applicationinsights-shims-1.0.1.tgz",
|
||||
"integrity": "sha512-nPjUBSpvX5Dnkovp2lZzrg0/uSvYNtbAclWwVP7t8J1hy5OJ3xr3KPNaz79+b84G16Rj861ybau9Gbk7inXkTg=="
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-shims/-/applicationinsights-shims-1.0.3.tgz",
|
||||
"integrity": "sha512-+S17aqEkOYpyBpmclhgwcEplwnxSo5AxYBdRg38GBobI1GKPSpZfnLssLzcjJ6XZCS5tqB5xjyTZs6gHj7ZJWQ=="
|
||||
},
|
||||
"@microsoft/applicationinsights-web": {
|
||||
"version": "2.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-web/-/applicationinsights-web-2.5.8.tgz",
|
||||
"integrity": "sha512-ZxCUkBJrCFNHa0LgWWbtwqu4TxJPlukuSvDrdLv6XV1yX2ETq6Q1kw/IUEtKhbtNbTZQ8aJ+x8Nc/iqbssdXTA==",
|
||||
"version": "2.5.9",
|
||||
"resolved": "https://registry.npmjs.org/@microsoft/applicationinsights-web/-/applicationinsights-web-2.5.9.tgz",
|
||||
"integrity": "sha512-dxg5XXbQqjWw9QmGdgbd7knb1qFA58FFYj9ObqRmlqiihk25kper7H15HH8LaV0lV6goClmBWc9KsNGA2veyeA==",
|
||||
"requires": {
|
||||
"@microsoft/applicationinsights-analytics-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-channel-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-common": "2.5.8",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-dependencies-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-properties-js": "2.5.8",
|
||||
"@microsoft/applicationinsights-shims": "1.0.1",
|
||||
"@microsoft/applicationinsights-analytics-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-channel-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-common": "2.5.9",
|
||||
"@microsoft/applicationinsights-core-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-dependencies-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-properties-js": "2.5.9",
|
||||
"@microsoft/applicationinsights-shims": "1.0.3",
|
||||
"@microsoft/dynamicproto-js": "^1.0.0"
|
||||
}
|
||||
},
|
||||
@@ -2805,12 +2803,13 @@
|
||||
}
|
||||
},
|
||||
"@nteract/monaco-editor": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@nteract/monaco-editor/-/monaco-editor-3.2.0.tgz",
|
||||
"integrity": "sha512-PGEUvy/GTBMECy4RUfh4wxO7GfA9YDBSV3hGt8MyrVz/GxUDtjB7FqrYS0ZhmVQPYl8hnV2i48F3YlypC+xIXA==",
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@nteract/monaco-editor/-/monaco-editor-3.2.2.tgz",
|
||||
"integrity": "sha512-51Pxt6v6qaAlbDY0BgEydk/Jxuu93t+uB8Geg3vJfE6VDphTEakB0wocBIfvcTKVV55Lx53/rTSp6QHqtaHiGg==",
|
||||
"requires": {
|
||||
"@nteract/core": "^14.0.0",
|
||||
"@nteract/messaging": "^7.0.10",
|
||||
"@nteract/messaging": "^7.0.12",
|
||||
"lodash.debounce": "^4.0.6",
|
||||
"monaco-editor": "0.18.1",
|
||||
"rxjs": "^6.3.3"
|
||||
},
|
||||
@@ -2859,6 +2858,40 @@
|
||||
"rxjs": "^6.3.3"
|
||||
}
|
||||
},
|
||||
"@nteract/messaging": {
|
||||
"version": "7.0.12",
|
||||
"resolved": "https://registry.npmjs.org/@nteract/messaging/-/messaging-7.0.12.tgz",
|
||||
"integrity": "sha512-5z2Ffd1hj7AsGBJTAoqJshLlUZ+ISJBjiZAdNDjb70PNEv0x8UOMk/di80RI3WBLK5MKxSJkGXfs4jfzfdW6bA==",
|
||||
"requires": {
|
||||
"@nteract/types": "^7.1.2",
|
||||
"@types/uuid": "^8.0.0",
|
||||
"lodash.clonedeep": "^4.5.0",
|
||||
"rxjs": "^6.6.0",
|
||||
"uuid": "^8.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@nteract/commutable": {
|
||||
"version": "7.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@nteract/commutable/-/commutable-7.3.4.tgz",
|
||||
"integrity": "sha512-Z6aUtIZN0CKUMJwbZjUUqaaBhT6P0RiEG5nHso+oG/FOXF20Qv+hf/TyvYhw9SXQVmmacaMk4zj0iOID20pIng==",
|
||||
"requires": {
|
||||
"immutable": "^4.0.0-rc.12",
|
||||
"uuid": "^8.0.0"
|
||||
}
|
||||
},
|
||||
"@nteract/types": {
|
||||
"version": "7.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@nteract/types/-/types-7.1.2.tgz",
|
||||
"integrity": "sha512-I/1TvaUC/m9I/LFk1HemsOUqB0eNdagu0KRLA1YEtChPh9pk5F9flglA7m5+0/j31gLXBISj5+6tL8ikA8BxOQ==",
|
||||
"requires": {
|
||||
"@nteract/commutable": "^7.3.4",
|
||||
"immutable": "^4.0.0-rc.12",
|
||||
"rxjs": "^6.6.0",
|
||||
"uuid": "^8.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@nteract/mythic-notifications": {
|
||||
"version": "0.1.9",
|
||||
"resolved": "https://registry.npmjs.org/@nteract/mythic-notifications/-/mythic-notifications-0.1.9.tgz",
|
||||
@@ -7203,11 +7236,10 @@
|
||||
}
|
||||
},
|
||||
"create-react-class": {
|
||||
"version": "15.6.3",
|
||||
"resolved": "https://registry.npmjs.org/create-react-class/-/create-react-class-15.6.3.tgz",
|
||||
"integrity": "sha512-M+/3Q6E6DLO6Yx3OwrWjwHBnvfXXYA7W+dFjt/ZDBemHO1DDZhsalX/NUtnTYclN6GfnBDRh4qRHjcDHmlJBJg==",
|
||||
"version": "15.7.0",
|
||||
"resolved": "https://registry.npmjs.org/create-react-class/-/create-react-class-15.7.0.tgz",
|
||||
"integrity": "sha512-QZv4sFWG9S5RUvkTYWbflxeZX+JG7Cz0Tn33rQBJ+WFQTqTfUTjMjiv9tnfXazjsO5r0KhPs+AqCjyrQX6h2ng==",
|
||||
"requires": {
|
||||
"fbjs": "^0.8.9",
|
||||
"loose-envify": "^1.3.1",
|
||||
"object-assign": "^4.1.1"
|
||||
}
|
||||
@@ -8504,24 +8536,6 @@
|
||||
"integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=",
|
||||
"dev": true
|
||||
},
|
||||
"encoding": {
|
||||
"version": "0.1.13",
|
||||
"resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
|
||||
"integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
|
||||
"requires": {
|
||||
"iconv-lite": "^0.6.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"iconv-lite": {
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.2.tgz",
|
||||
"integrity": "sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==",
|
||||
"requires": {
|
||||
"safer-buffer": ">= 2.1.2 < 3.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"end-of-stream": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
|
||||
@@ -9667,27 +9681,6 @@
|
||||
"bser": "2.1.1"
|
||||
}
|
||||
},
|
||||
"fbjs": {
|
||||
"version": "0.8.17",
|
||||
"resolved": "https://registry.npmjs.org/fbjs/-/fbjs-0.8.17.tgz",
|
||||
"integrity": "sha1-xNWY6taUkRJlPWWIsBpc3Nn5D90=",
|
||||
"requires": {
|
||||
"core-js": "^1.0.0",
|
||||
"isomorphic-fetch": "^2.1.1",
|
||||
"loose-envify": "^1.0.0",
|
||||
"object-assign": "^4.1.0",
|
||||
"promise": "^7.1.1",
|
||||
"setimmediate": "^1.0.5",
|
||||
"ua-parser-js": "^0.7.18"
|
||||
},
|
||||
"dependencies": {
|
||||
"core-js": {
|
||||
"version": "1.2.7",
|
||||
"resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz",
|
||||
"integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY="
|
||||
}
|
||||
}
|
||||
},
|
||||
"fd-slicer": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
|
||||
@@ -11888,26 +11881,6 @@
|
||||
"resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
|
||||
"integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8="
|
||||
},
|
||||
"isomorphic-fetch": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-2.2.1.tgz",
|
||||
"integrity": "sha1-YRrhrPFPXoH3KVB0coGf6XM1WKk=",
|
||||
"requires": {
|
||||
"node-fetch": "^1.0.1",
|
||||
"whatwg-fetch": ">=0.10.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"node-fetch": {
|
||||
"version": "1.7.3",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
|
||||
"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
|
||||
"requires": {
|
||||
"encoding": "^0.1.11",
|
||||
"is-stream": "^1.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"isstream": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
|
||||
@@ -13406,36 +13379,6 @@
|
||||
"micromatch": "^3.1.10",
|
||||
"pretty-format": "^24.9.0",
|
||||
"realpath-native": "^1.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/core": {
|
||||
"version": "7.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.11.6.tgz",
|
||||
"integrity": "sha512-Wpcv03AGnmkgm6uS6k8iwhIwTrcP0m17TL1n1sy7qD0qelDu4XNeW0dN0mHfa+Gei211yDaLoEe/VlbXQzM4Bg==",
|
||||
"requires": {
|
||||
"@babel/code-frame": "^7.10.4",
|
||||
"@babel/generator": "^7.11.6",
|
||||
"@babel/helper-module-transforms": "^7.11.0",
|
||||
"@babel/helpers": "^7.10.4",
|
||||
"@babel/parser": "^7.11.5",
|
||||
"@babel/template": "^7.10.4",
|
||||
"@babel/traverse": "^7.11.5",
|
||||
"@babel/types": "^7.11.5",
|
||||
"convert-source-map": "^1.7.0",
|
||||
"debug": "^4.1.0",
|
||||
"gensync": "^1.0.0-beta.1",
|
||||
"json5": "^2.1.2",
|
||||
"lodash": "^4.17.19",
|
||||
"resolve": "^1.3.2",
|
||||
"semver": "^5.4.1",
|
||||
"source-map": "^0.5.0"
|
||||
}
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.5.7",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
|
||||
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w="
|
||||
}
|
||||
}
|
||||
},
|
||||
"jest-dev-server": {
|
||||
@@ -14514,6 +14457,11 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz",
|
||||
"integrity": "sha1-JI42By7ekGUB11lmIAqG2riyMXA="
|
||||
},
|
||||
"lodash.debounce": {
|
||||
"version": "4.0.8",
|
||||
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
|
||||
"integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168="
|
||||
},
|
||||
"lodash.defaults": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||
@@ -16619,6 +16567,8 @@
|
||||
"version": "7.3.1",
|
||||
"resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
|
||||
"integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"asap": "~2.0.3"
|
||||
}
|
||||
@@ -18246,7 +18196,8 @@
|
||||
"setimmediate": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
|
||||
"integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU="
|
||||
"integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=",
|
||||
"dev": true
|
||||
},
|
||||
"setprototypeof": {
|
||||
"version": "1.1.1",
|
||||
@@ -20006,11 +19957,6 @@
|
||||
"free-style": "3.1.0"
|
||||
}
|
||||
},
|
||||
"ua-parser-js": {
|
||||
"version": "0.7.22",
|
||||
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.22.tgz",
|
||||
"integrity": "sha512-YUxzMjJ5T71w6a8WWVcMGM6YWOTX27rCoIQgLXiWaxqXSx9D7DNjiGWn1aJIRSQ5qr0xuhra77bSIh6voR/46Q=="
|
||||
},
|
||||
"uglify-js": {
|
||||
"version": "3.4.10",
|
||||
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.4.10.tgz",
|
||||
|
||||
10 package.json
@@ -8,7 +8,7 @@
|
||||
"@azure/cosmos-language-service": "0.0.4",
|
||||
"@jupyterlab/services": "6.0.0-rc.2",
|
||||
"@jupyterlab/terminal": "3.0.0-rc.2",
|
||||
"@microsoft/applicationinsights-web": "2.5.8",
|
||||
"@microsoft/applicationinsights-web": "2.5.9",
|
||||
"@nteract/commutable": "7.3.2",
|
||||
"@nteract/connected-components": "6.8.2",
|
||||
"@nteract/core": "15.1.0",
|
||||
@@ -21,7 +21,7 @@
|
||||
"@nteract/jupyter-widgets": "2.0.0",
|
||||
"@nteract/logos": "1.0.0",
|
||||
"@nteract/markdown": "4.4.0",
|
||||
"@nteract/monaco-editor": "3.2.0",
|
||||
"@nteract/monaco-editor": "3.2.2",
|
||||
"@nteract/octicons": "2.0.0",
|
||||
"@nteract/outputs": "3.0.9",
|
||||
"@nteract/presentational-components": "3.0.7",
|
||||
@@ -88,8 +88,8 @@
|
||||
"styled-components": "4.3.2",
|
||||
"text-encoding": "0.7.0",
|
||||
"underscore": "1.9.1",
|
||||
"utility-types": "3.10.0",
|
||||
"url-polyfill": "1.1.7",
|
||||
"utility-types": "3.10.0",
|
||||
"webcrypto-liner": "1.1.4",
|
||||
"webfontloader": "1.6.28",
|
||||
"whatwg-fetch": "3.0.0"
|
||||
@@ -194,8 +194,8 @@
|
||||
"compile": "tsc",
|
||||
"compile:contracts": "tsc -p ./tsconfig.contracts.json",
|
||||
"compile:strict": "tsc -p ./tsconfig.strict.json",
|
||||
"format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
|
||||
"lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
|
||||
"build:contracts": "npm run compile:contracts",
|
||||
"strictEligibleFiles": "node ./strict-migration-tools/index.js",
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { AutopilotTier } from "../Contracts/DataModels";
|
||||
import { HashMap } from "./HashMap";
|
||||
|
||||
export class AuthorizationEndpoints {
|
||||
@@ -117,7 +116,6 @@ export class Features {
|
||||
public static readonly enableGalleryPublish = "enablegallerypublish";
|
||||
public static readonly enableCodeOfConduct = "enablecodeofconduct";
|
||||
public static readonly enableLinkInjection = "enablelinkinjection";
|
||||
public static readonly enableSettingsV2 = "enablesettingsv2";
|
||||
public static readonly enableSpark = "enablespark";
|
||||
public static readonly livyEndpoint = "livyendpoint";
|
||||
public static readonly notebookServerUrl = "notebookserverurl";
|
||||
@@ -125,10 +123,17 @@ export class Features {
|
||||
public static readonly notebookBasePath = "notebookbasepath";
|
||||
public static readonly canExceedMaximumValue = "canexceedmaximumvalue";
|
||||
public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
|
||||
public static readonly enableAutoPilotV2 = "enableautopilotv2";
|
||||
public static readonly ttl90Days = "ttl90days";
|
||||
public static readonly enableRightPanelV2 = "enablerightpanelv2";
|
||||
public static readonly enableSchema = "enableschema";
|
||||
public static readonly enableSDKoperations = "enablesdkoperations";
|
||||
public static readonly showMinRUSurvey = "showminrusurvey";
|
||||
}
|
||||
|
||||
// flight names returned from the portal are always lowercase
|
||||
export class Flights {
|
||||
public static readonly SettingsV2 = "settingsv2";
|
||||
public static readonly MongoIndexEditor = "mongoindexeditor";
|
||||
}
|
||||
|
||||
export class AfecFeatures {
|
||||
@@ -257,7 +262,6 @@ export class HttpHeaders {
|
||||
public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere";
|
||||
public static autoPilotThroughput = "autoscaleSettings";
|
||||
public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings";
|
||||
public static autoPilotTier = "x-ms-cosmos-offer-autopilot-tier";
|
||||
public static partitionKey: string = "x-ms-documentdb-partitionkey";
|
||||
public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput";
|
||||
public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot";
|
||||
@@ -402,54 +406,6 @@ export enum ConflictOperationType {
|
||||
Delete = "delete"
|
||||
}
|
||||
|
||||
export class AutoPilot {
|
||||
public static tier1Text: string = "4,000 RU/s";
|
||||
public static tier2Text: string = "20,000 RU/s";
|
||||
public static tier3Text: string = "100,000 RU/s";
|
||||
public static tier4Text: string = "500,000 RU/s";
|
||||
|
||||
public static tierText = {
|
||||
[AutopilotTier.Tier1]: "Tier 1",
|
||||
[AutopilotTier.Tier2]: "Tier 2",
|
||||
[AutopilotTier.Tier3]: "Tier 3",
|
||||
[AutopilotTier.Tier4]: "Tier 4"
|
||||
};
|
||||
|
||||
public static tierMaxRus = {
|
||||
[AutopilotTier.Tier1]: 2000,
|
||||
[AutopilotTier.Tier2]: 20000,
|
||||
[AutopilotTier.Tier3]: 100000,
|
||||
[AutopilotTier.Tier4]: 500000
|
||||
};
|
||||
|
||||
public static tierMinRus = {
|
||||
[AutopilotTier.Tier1]: 0,
|
||||
[AutopilotTier.Tier2]: 0,
|
||||
[AutopilotTier.Tier3]: 0,
|
||||
[AutopilotTier.Tier4]: 0
|
||||
};
|
||||
|
||||
public static tierStorageInGB = {
|
||||
[AutopilotTier.Tier1]: 50,
|
||||
[AutopilotTier.Tier2]: 200,
|
||||
[AutopilotTier.Tier3]: 1000,
|
||||
[AutopilotTier.Tier4]: 5000
|
||||
};
|
||||
}
|
||||
|
||||
export class DataExplorerVersions {
|
||||
public static readonly v_1_0_0: string = "1.0.0";
|
||||
public static readonly v_1_0_1: string = "1.0.1";
|
||||
}
|
||||
|
||||
export class DataExplorerFeatures {
|
||||
public static offerCache: string = "OfferCache";
|
||||
}
|
||||
|
||||
export const DataExplorerFeaturesVersions: any = {
|
||||
OfferCache: DataExplorerVersions.v_1_0_1
|
||||
};
|
||||
|
||||
export const EmulatorMasterKey =
|
||||
//[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")]
|
||||
"C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";
|
||||
|
||||
@@ -112,7 +112,6 @@ describe("endpoint", () => {
|
||||
|
||||
describe("requestPlugin", () => {
|
||||
beforeEach(() => {
|
||||
delete window.dataExplorerPlatform;
|
||||
resetConfigContext();
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as Cosmos from "@azure/cosmos";
|
||||
import { RequestInfo, setAuthorizationTokenHeaderUsingMasterKey } from "@azure/cosmos";
|
||||
import { configContext, Platform } from "../ConfigContext";
|
||||
import { getErrorMessage } from "./ErrorHandlingUtils";
|
||||
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
|
||||
import { EmulatorMasterKey, HttpHeaders } from "./Constants";
|
||||
import { userContext } from "../UserContext";
|
||||
@@ -69,7 +70,7 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
|
||||
const result = JSON.parse(await response.json());
|
||||
return result;
|
||||
} catch (error) {
|
||||
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${JSON.stringify(error)}`);
|
||||
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,22 +72,6 @@ export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.Partiti
|
||||
return [partitionKeyValue];
|
||||
}
|
||||
|
||||
export function updateOffer(
|
||||
offer: DataModels.Offer,
|
||||
newOffer: DataModels.Offer,
|
||||
options?: RequestOptions
|
||||
): Q.Promise<DataModels.Offer> {
|
||||
return Q(
|
||||
client()
|
||||
.offer(offer.id)
|
||||
// TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
|
||||
.replace((newOffer as unknown) as OfferDefinition, options)
|
||||
.then(response => {
|
||||
return Promise.all([refreshCachedOffers(), refreshCachedResources()]).then(() => response.resource);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export function updateDocument(
|
||||
collection: ViewModels.CollectionBase,
|
||||
documentId: DocumentId,
|
||||
@@ -184,22 +168,6 @@ export function deleteConflict(
|
||||
);
|
||||
}
|
||||
|
||||
export function refreshCachedOffers(): Q.Promise<void> {
|
||||
if (configContext.platform === Platform.Portal) {
|
||||
return sendCachedDataMessage(MessageTypes.RefreshOffers, []);
|
||||
} else {
|
||||
return Q();
|
||||
}
|
||||
}
|
||||
|
||||
export function refreshCachedResources(options?: any): Q.Promise<void> {
|
||||
if (configContext.platform === Platform.Portal) {
|
||||
return sendCachedDataMessage(MessageTypes.RefreshResources, []);
|
||||
} else {
|
||||
return Q();
|
||||
}
|
||||
}
|
||||
|
||||
export function queryConflicts(
|
||||
databaseId: string,
|
||||
containerId: string,
|
||||
|
||||
@@ -1,17 +1,14 @@
|
||||
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
|
||||
import { RequestOptions } from "@azure/cosmos/dist-esm";
|
||||
import Q from "q";
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
import ConflictId from "../Explorer/Tree/ConflictId";
|
||||
import DocumentId from "../Explorer/Tree/DocumentId";
|
||||
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
|
||||
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
|
||||
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
|
||||
import * as Constants from "./Constants";
|
||||
import { sendNotificationForError } from "./dataAccess/sendNotificationForError";
|
||||
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
|
||||
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
|
||||
import * as Logger from "./Logger";
|
||||
import { handleError } from "./ErrorHandlingUtils";
|
||||
|
||||
// TODO: Log all promise resolutions and errors with verbosity levels
|
||||
export function queryDocuments(
|
||||
@@ -59,13 +56,11 @@ export function executeStoredProcedure(
|
||||
);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(
|
||||
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}: ${JSON.stringify(
|
||||
error
|
||||
)}`
|
||||
handleError(
|
||||
error,
|
||||
"ExecuteStoredProcedure",
|
||||
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
|
||||
);
|
||||
Logger.logError(JSON.stringify(error), "ExecuteStoredProcedure", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -93,9 +88,7 @@ export function queryDocumentsPage(
|
||||
deferred.resolve(result);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(`Failed to query ${entityName} for container ${resourceName}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "QueryDocumentsPage", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -116,9 +109,7 @@ export function readDocument(collection: ViewModels.CollectionBase, documentId:
|
||||
deferred.resolve(document);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(`Failed to read ${entityName} ${documentId.id()}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "ReadDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -144,44 +135,7 @@ export function updateDocument(
|
||||
deferred.resolve(updatedDocument);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(`Failed to update ${entityName} ${documentId.id()}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "UpdateDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
.finally(() => {
|
||||
clearMessage();
|
||||
});
|
||||
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
export function updateOffer(
|
||||
offer: DataModels.Offer,
|
||||
newOffer: DataModels.Offer,
|
||||
options: RequestOptions
|
||||
): Q.Promise<DataModels.Offer> {
|
||||
var deferred = Q.defer<any>();
|
||||
const clearMessage = logConsoleProgress(`Updating offer for resource ${offer.resource}`);
|
||||
DataAccessUtilityBase.updateOffer(offer, newOffer, options)
|
||||
.then(
|
||||
(replacedOffer: DataModels.Offer) => {
|
||||
logConsoleInfo(`Successfully updated offer for resource ${offer.resource}`);
|
||||
deferred.resolve(replacedOffer);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(`Error updating offer for resource ${offer.resource}: ${JSON.stringify(error)}`);
|
||||
Logger.logError(
|
||||
JSON.stringify({
|
||||
oldOffer: offer,
|
||||
newOffer: newOffer,
|
||||
error: error
|
||||
}),
|
||||
"UpdateOffer",
|
||||
error.code
|
||||
);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -203,11 +157,7 @@ export function createDocument(collection: ViewModels.CollectionBase, newDocumen
|
||||
deferred.resolve(savedDocument);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(
|
||||
`Error while creating new ${entityName} for container ${collection.id()}:\n ${JSON.stringify(error)}`
|
||||
);
|
||||
Logger.logError(JSON.stringify(error), "CreateDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -229,9 +179,7 @@ export function deleteDocument(collection: ViewModels.CollectionBase, documentId
|
||||
deferred.resolve(response);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(`Error while deleting ${entityName} ${documentId.id()}:\n ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "DeleteDocument", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -257,9 +205,7 @@ export function deleteConflict(
|
||||
deferred.resolve(response);
|
||||
},
|
||||
(error: any) => {
|
||||
logConsoleError(`Error while deleting conflict ${conflictId.id()}:\n ${JSON.stringify(error)}`);
|
||||
Logger.logError(JSON.stringify(error), "DeleteConflict", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
|
||||
deferred.reject(error);
|
||||
}
|
||||
)
|
||||
@@ -269,11 +215,3 @@ export function deleteConflict(
|
||||
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
export function refreshCachedResources(options: any = {}): Q.Promise<void> {
|
||||
return DataAccessUtilityBase.refreshCachedResources(options);
|
||||
}
|
||||
|
||||
export function refreshCachedOffers(): Q.Promise<void> {
|
||||
return DataAccessUtilityBase.refreshCachedOffers();
|
||||
}
|
||||
|
||||
56 src/Common/ErrorHandlingUtils.ts (new file)
@@ -0,0 +1,56 @@
import { ARMError } from "../Utils/arm/request";
import { HttpStatusCodes } from "./Constants";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/SubscriptionType";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler";

export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
  const errorMessage = getErrorMessage(error);
  const errorCode = error instanceof ARMError ? error.code : undefined;

  // logs error to data explorer console
  const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
  logConsoleError(consoleErrorMessage);

  // logs error to both app insight and kusto
  logError(errorMessage, area, errorCode);

  // checks for errors caused by firewall and sends them to portal to handle
  sendNotificationForError(errorMessage, errorCode);
};

export const getErrorMessage = (error: string | Error): string => {
  const errorMessage = typeof error === "string" ? error : error.message;
  return replaceKnownError(errorMessage);
};

export const getErrorStack = (error: string | Error): string => {
  return typeof error === "string" ? undefined : error.stack;
};

const sendNotificationForError = (errorMessage: string, errorCode: number | string): void => {
  if (errorCode === HttpStatusCodes.Forbidden) {
    if (errorMessage?.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
      return;
    }
    sendMessage({
      type: MessageTypes.ForbiddenError,
      reason: errorMessage
    });
  }
};

const replaceKnownError = (errorMessage: string): string => {
  if (
    window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
    errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0
  ) {
    return "Database throughput is not supported for internal subscriptions.";
  } else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) {
    return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
  }

  return errorMessage;
};
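The new module above is what the call sites earlier in this diff switch to: one `handleError` call replaces the separate `logConsoleError`, `Logger.logError`, and `sendNotificationForError` calls. A minimal sketch of the calling pattern (the `fetchResource` function, URL, and strings are illustrative, not from this change):

```ts
import { handleError, getErrorMessage } from "./ErrorHandlingUtils";

// Illustrative caller: the area and message strings are placeholders.
async function fetchResource(id: string): Promise<Response> {
  try {
    return await fetch(`https://localhost:8081/dbs/${id}`);
  } catch (error) {
    // Logs to the Data Explorer console and telemetry, and notifies the portal on 403s.
    handleError(error as Error, "FetchResource", `Failed to fetch resource ${id}`);
    throw new Error(getErrorMessage(error as Error));
  }
}
```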
@@ -1,24 +0,0 @@
|
||||
import * as ErrorParserUtility from "./ErrorParserUtility";
|
||||
|
||||
describe("Error Parser Utility", () => {
|
||||
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
|
||||
it("should parse a backend error correctly", () => {
|
||||
// A fake error matching what is thrown by the SDK on a bad collection create request
|
||||
const innerMessage =
|
||||
"The partition key component definition path '/asdwqr31 @#$#$WRadf' could not be accepted, failed near position '10'. Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
|
||||
const message = `Message: {\"Errors\":[\"${innerMessage}\"]}\r\nActivityId: 97b2e684-7505-4921-85f6-2513b9b28220, Request URI: /apps/89fdcf25-2a0b-4d2a-aab6-e161e565b26f/services/54911149-7bb1-4e7d-a1fa-22c8b36a4bb9/partitions/cc2a7a04-5f5a-4709-bcf7-8509b264963f/replicas/132304018743619218p, RequestStats: , SDK: Microsoft.Azure.Documents.Common/2.10.0`;
|
||||
const err = new Error(message) as any;
|
||||
err.code = 400;
|
||||
err.body = {
|
||||
code: "BadRequest",
|
||||
message
|
||||
};
|
||||
err.headers = {};
|
||||
err.activityId = "97b2e684-7505-4921-85f6-2513b9b28220";
|
||||
|
||||
const parsedError = ErrorParserUtility.parse(err);
|
||||
expect(parsedError.length).toBe(1);
|
||||
expect(parsedError[0].message).toBe(innerMessage);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,69 +0,0 @@
|
||||
import * as DataModels from "../Contracts/DataModels";
|
||||
import * as ViewModels from "../Contracts/ViewModels";
|
||||
|
||||
export function replaceKnownError(err: string): string {
|
||||
if (
|
||||
window.dataExplorer.subscriptionType() === ViewModels.SubscriptionType.Internal &&
|
||||
err.indexOf("SharedOffer is Disabled for your account") >= 0
|
||||
) {
|
||||
return "Database throughput is not supported for internal subscriptions.";
|
||||
} else if (err.indexOf("Partition key paths must contain only valid") >= 0) {
|
||||
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
export function parse(err: any): DataModels.ErrorDataModel[] {
|
||||
try {
|
||||
return _parse(err);
|
||||
} catch (e) {
|
||||
return [<DataModels.ErrorDataModel>{ message: JSON.stringify(err) }];
|
||||
}
|
||||
}
|
||||
|
||||
function _parse(err: any): DataModels.ErrorDataModel[] {
|
||||
var normalizedErrors: DataModels.ErrorDataModel[] = [];
|
||||
if (err.message && !err.code) {
|
||||
normalizedErrors.push(err);
|
||||
} else {
|
||||
const innerErrors: any[] = _getInnerErrors(err.message);
|
||||
normalizedErrors = innerErrors.map(innerError =>
|
||||
typeof innerError === "string" ? { message: innerError } : innerError
|
||||
);
|
||||
}
|
||||
|
||||
return normalizedErrors;
|
||||
}
|
||||
|
||||
function _getInnerErrors(message: string): any[] {
|
||||
/*
|
||||
The backend error message has an inner-message which is a stringified object.
|
||||
|
||||
For SQL errors, the "errors" property is an array of SqlErrorDataModel.
|
||||
Example:
|
||||
"Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p"
|
||||
For non-SQL errors the "Errors" propery is an array of string.
|
||||
Example:
|
||||
"Message: {"errors":[{"severity":"Error","location":{"start":7,"end":8},"code":"SC1001","message":"Syntax error, incorrect syntax near '.'."}]}\r\nActivityId: d3300016d4084e310a, Request URI: /apps/12401f9e1df77/services/dc100232b1f44545/partitions/f86f3bc0001a2f78/replicas/13085003638s"
|
||||
*/
|
||||
|
||||
let innerMessage: any = null;
|
||||
|
||||
const singleLineMessage = message.replace(/[\r\n]|\r|\n/g, "");
|
||||
try {
|
||||
// Multi-Partition error flavor
|
||||
const regExp = /^(.*)ActivityId: (.*)/g;
|
||||
const regString = regExp.exec(singleLineMessage);
|
||||
const innerMessageString = regString[1];
|
||||
innerMessage = JSON.parse(innerMessageString);
|
||||
} catch (e) {
|
||||
// Single-partition error flavor
|
||||
const regExp = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g;
|
||||
const regString = regExp.exec(singleLineMessage);
|
||||
const innerMessageString = regString[1];
|
||||
innerMessage = JSON.parse(innerMessageString);
|
||||
}
|
||||
|
||||
return innerMessage.errors ? innerMessage.errors : innerMessage.Errors;
|
||||
}
|
||||
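For context on what is being removed: the deleted parser above pulls a stringified JSON payload out of the backend message and returns its errors/Errors array. A minimal standalone sketch of that extraction (not part of the commit; the function name is illustrative, and it assumes the single-partition message shape shown in the deleted test):

// Extract the inner error array from a backend message such as
// 'Message: {"Errors":["..."]}\r\nActivityId: ..., Request URI: ...'
function extractInnerErrors(message: string): unknown[] {
  const singleLine = message.replace(/[\r\n]|\r|\n/g, "");
  const match = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g.exec(singleLine);
  if (!match) {
    return [];
  }
  const inner = JSON.parse(match[1]); // e.g. {"Errors":["Resource with specified id or name already exists"]}
  return inner.errors ?? inner.Errors ?? [];
}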
@@ -21,14 +21,8 @@ export function logWarning(message: string, area: string, code?: number): void {
return _logEntry(entry);
}

export function logError(message: string | Error, area: string, code?: number): void {
let logMessage: string;
if (typeof message === "string") {
logMessage = message;
} else {
logMessage = JSON.stringify(message, Object.getOwnPropertyNames(message));
}
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, logMessage, area, code);
export function logError(errorMessage: string, area: string, code?: number | string): void {
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, errorMessage, area, code);
return _logEntry(entry);
}

@@ -59,7 +53,7 @@ function _generateLogEntry(
level: Diagnostics.LogEntryLevel,
message: string,
area: string,
code?: number
code?: number | string
): Diagnostics.LogEntry {
return {
timestamp: new Date().getUTCSeconds(),
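The logError change above narrows the first parameter from string | Error to a plain string and widens code to number | string, so callers now pass an already-extracted message. A sketch of the migrated call shape, assuming (as the readOffers hunk later in this diff suggests) that getErrorMessage in ErrorHandlingUtils returns the message string and that handleError is the shared helper this diff swaps in for the separate logConsoleError / logError / sendNotificationForError calls; the wrapper function and its arguments are illustrative only:

import { logError } from "./Logger";
import { handleError, getErrorMessage } from "./ErrorHandlingUtils";

function reportCreateCollectionFailure(error: any, collectionId: string): void {
  // Narrowed logError now takes a string message (and the code may be a string)
  logError(getErrorMessage(error), "CreateCollection", error.code);
  // Or defer to the shared helper used throughout the rest of this diff
  handleError(error, "CreateCollection", `Error while creating container ${collectionId}`);
}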
@@ -1,46 +0,0 @@
import "jquery";
import * as Q from "q";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
import { userContext } from "../UserContext";

export class NotificationsClientBase {
private _extensionEndpoint: string;
private _notificationsApiSuffix: string;

protected constructor(notificationsApiSuffix: string) {
this._notificationsApiSuffix = notificationsApiSuffix;
}

public fetchNotifications(): Q.Promise<DataModels.Notification[]> {
const deferred: Q.Deferred<DataModels.Notification[]> = Q.defer<DataModels.Notification[]>();
const databaseAccount = userContext.databaseAccount;
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const url = `${this._extensionEndpoint}${this._notificationsApiSuffix}?accountName=${databaseAccount.name}&subscriptionId=${subscriptionId}&resourceGroup=${resourceGroup}`;
const authorizationHeader: ViewModels.AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
const headers: any = {};
headers[authorizationHeader.header] = authorizationHeader.token;

$.ajax({
url: url,
type: "GET",
headers: headers,
cache: false
}).then(
(notifications: DataModels.Notification[], textStatus: string, xhr: JQueryXHR<any>) => {
deferred.resolve(notifications);
},
(xhr: JQueryXHR<any>, textStatus: string, error: any) => {
deferred.reject(xhr.responseText);
}
);

return deferred.promise;
}

public setExtensionEndpoint(extensionEndpoint: string): void {
this._extensionEndpoint = extensionEndpoint;
}
}
src/Common/PortalNotifications.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
import { userContext } from "../UserContext";
import { configContext, Platform } from "../ConfigContext";

const notificationsPath = () => {
switch (configContext.platform) {
case Platform.Hosted:
return "/api/guest/notifications";
case Platform.Portal:
return "/api/notifications";
default:
throw new Error(`Unknown platform: ${configContext.platform}`);
}
};

export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
if (configContext.platform === Platform.Emulator) {
return [];
}

const databaseAccount = userContext.databaseAccount;
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const url = `${configContext.BACKEND_ENDPOINT}${notificationsPath()}?accountName=${
databaseAccount.name
}&subscriptionId=${subscriptionId}&resourceGroup=${resourceGroup}`;
const authorizationHeader: ViewModels.AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
const headers = { [authorizationHeader.header]: authorizationHeader.token };

const response = await window.fetch(url, {
headers
});

if (!response.ok) {
throw new Error(await response.text());
}

return (await response.json()) as DataModels.Notification[];
};
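Since the jQuery/Q-based NotificationsClientBase above is deleted in favor of this fetch-based module, here is a short usage sketch; the call site and import path are illustrative (assuming a caller in src/Common), not part of the commit:

import { fetchPortalNotifications } from "./PortalNotifications";

async function logNotificationCount(): Promise<void> {
  try {
    const notifications = await fetchPortalNotifications();
    console.log(`Fetched ${notifications.length} portal notifications`);
  } catch (error) {
    // fetchPortalNotifications throws with the response text when the request is not OK
    console.error(error);
  }
}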
@@ -12,8 +12,7 @@ import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
import { createCollection } from "./dataAccess/createCollection";
import * as ErrorParserUtility from "./ErrorParserUtility";
import * as Logger from "./Logger";
import { handleError } from "./ErrorHandlingUtils";

export class QueriesClient {
private static readonly PartitionKey: DataModels.PartitionKey = {
@@ -53,13 +52,8 @@ export class QueriesClient {
return Promise.resolve(collection);
},
(error: any) => {
const stringifiedError: string = JSON.stringify(error);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to set up account for saving queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "setupQueriesCollection");
return Promise.reject(stringifiedError);
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -102,19 +96,11 @@ export class QueriesClient {
return Promise.resolve();
},
(error: any) => {
let errorMessage: string;
const parsedError: DataModels.ErrorDataModel = ErrorParserUtility.parse(error)[0];
if (parsedError.code === HttpStatusCodes.Conflict.toString()) {
errorMessage = `Query ${query.queryName} already exists`;
} else {
errorMessage = parsedError.message;
if (error.code === HttpStatusCodes.Conflict.toString()) {
error = `Query ${query.queryName} already exists`;
}
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
Logger.logError(JSON.stringify(parsedError), "saveQuery");
return Promise.reject(errorMessage);
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -163,25 +149,15 @@ export class QueriesClient {
return Promise.resolve(queries);
},
(error: any) => {
const stringifiedError: string = JSON.stringify(error);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "getSavedQueries");
return Promise.reject(stringifiedError);
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
}
);
},
(error: any) => {
// should never get into this state but we handle this regardless
const stringifiedError: string = JSON.stringify(error);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "getSavedQueries");
return Promise.reject(stringifiedError);
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -232,13 +208,8 @@ export class QueriesClient {
return Promise.resolve();
},
(error: any) => {
const stringifiedError: string = JSON.stringify(error);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to delete query ${query.queryName}: ${stringifiedError}`
);
Logger.logError(stringifiedError, "deleteQuery");
return Promise.reject(stringifiedError);
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
@@ -1,5 +1,4 @@
import * as DataModels from "../../Contracts/DataModels";
import * as ErrorParserUtility from "../ErrorParserUtility";
import { AuthType } from "../../AuthType";
import { ContainerResponse, DatabaseResponse } from "@azure/cosmos";
import { ContainerRequest } from "@azure/cosmos/dist-esm/client/Container/ContainerRequest";
@@ -23,21 +22,19 @@ import {
getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleError, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { refreshCachedResources } from "../DataAccessUtilityBase";
import { sendNotificationForError } from "./sendNotificationForError";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { createDatabase } from "./createDatabase";
import * as TelemetryProcessor from "../../Shared/Telemetry/TelemetryProcessor";
import { Action, ActionModifiers } from "../../Shared/Telemetry/TelemetryConstants";
import { handleError } from "../ErrorHandlingUtils";

export const createCollection = async (params: DataModels.CreateCollectionParams): Promise<DataModels.Collection> => {
let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(
`Creating a new container ${params.collectionId} for database ${params.databaseId}`
);
try {
let collection: DataModels.Collection;
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
if (params.createNewDatabase) {
const createDatabaseParams: DataModels.CreateDatabaseParams = {
@@ -54,18 +51,15 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
} else {
collection = await createCollectionWithSDK(params);
}

logConsoleInfo(`Successfully created container ${params.collectionId}`);
return collection;
} catch (error) {
const sanitizedError = ErrorParserUtility.replaceKnownError(JSON.stringify(error));
logConsoleError(`Error while creating container ${params.collectionId}:\n ${sanitizedError}`);
logError(JSON.stringify(error), "CreateCollection", error.code);
sendNotificationForError(error);
clearMessage();
handleError(error, "CreateCollection", `Error while creating container ${params.collectionId}`);
throw error;
} finally {
clearMessage();
}
logConsoleInfo(`Successfully created container ${params.collectionId}`);
await refreshCachedResources();
clearMessage();
return collection;
};

const createCollectionWithARM = async (params: DataModels.CreateCollectionParams): Promise<DataModels.Collection> => {
@@ -24,36 +24,28 @@ import {
createUpdateGremlinDatabase,
getGremlinDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleProgress, logConsoleError, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { refreshCachedOffers, refreshCachedResources } from "../DataAccessUtilityBase";
import { sendNotificationForError } from "./sendNotificationForError";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createDatabase(params: DataModels.CreateDatabaseParams): Promise<DataModels.Database> {
let database: DataModels.Database;
const clearMessage = logConsoleProgress(`Creating a new database ${params.databaseId}`);
try {
if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
throw new Error("Creating database resources is not allowed for tables accounts");
}
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
database = await createDatabaseWithARM(params);
} else {
database = await createDatabaseWithSDK(params);
}
const database: DataModels.Database = await (window.authType === AuthType.AAD && !userContext.useSDKOperations
? createDatabaseWithARM(params)
: createDatabaseWithSDK(params));

logConsoleInfo(`Successfully created database ${params.databaseId}`);
return database;
} catch (error) {
logConsoleError(`Error while creating database ${params.databaseId}:\n ${error.message}`);
logError(JSON.stringify(error), "CreateDatabase", error.code);
sendNotificationForError(error);
clearMessage();
handleError(error, "CreateDatabase", `Error while creating database ${params.databaseId}`);
throw error;
} finally {
clearMessage();
}
logConsoleInfo(`Successfully created database ${params.databaseId}`);
await refreshCachedResources();
await refreshCachedOffers();
clearMessage();
return database;
}

async function createDatabaseWithARM(params: DataModels.CreateDatabaseParams): Promise<DataModels.Database> {
@@ -1,28 +1,78 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createStoredProcedure(
databaseId: string,
collectionId: string,
storedProcedure: StoredProcedureDefinition
): Promise<StoredProcedureDefinition & Resource> {
let createdStoredProcedure: StoredProcedureDefinition & Resource;
const clearMessage = logConsoleProgress(`Creating stored procedure ${storedProcedure.id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
try {
const getResponse = await getSqlStoredProcedure(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
storedProcedure.id
);
if (getResponse?.properties?.resource) {
throw new Error(
`Create stored procedure failed: stored procedure with id ${storedProcedure.id} already exists`
);
}
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
}

const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
storedProcedure.id,
createSprocParams
);
return rpResponse && (rpResponse.properties?.resource as StoredProcedureDefinition & Resource);
}

const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedures.create(storedProcedure);
createdStoredProcedure = response.resource;
return response?.resource;
} catch (error) {
logConsoleError(`Error while creating stored procedure ${storedProcedure.id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "CreateStoredProcedure", error.code);
sendNotificationForError(error);
handleError(error, "CreateStoredProcedure", `Error while creating stored procedure ${storedProcedure.id}`);
throw error;
} finally {
clearMessage();
}

clearMessage();
return createdStoredProcedure;
}
@@ -1,28 +1,73 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, TriggerDefinition } from "@azure/cosmos";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createTrigger(
databaseId: string,
collectionId: string,
trigger: TriggerDefinition
): Promise<TriggerDefinition & Resource> {
let createdTrigger: TriggerDefinition & Resource;
const clearMessage = logConsoleProgress(`Creating trigger ${trigger.id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
try {
const getResponse = await getSqlTrigger(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
trigger.id
);
if (getResponse?.properties?.resource) {
throw new Error(`Create trigger failed: ${trigger.id} already exists`);
}
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
}

const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
trigger.id,
createTriggerParams
);
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
}

const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
createdTrigger = response.resource;
return response.resource;
} catch (error) {
logConsoleError(`Error while creating trigger ${trigger.id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "CreateTrigger", error.code);
sendNotificationForError(error);
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
throw error;
} finally {
clearMessage();
}

clearMessage();
return createdTrigger;
}
@@ -1,28 +1,82 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createUserDefinedFunction(
databaseId: string,
collectionId: string,
userDefinedFunction: UserDefinedFunctionDefinition
): Promise<UserDefinedFunctionDefinition & Resource> {
let createdUserDefinedFunction: UserDefinedFunctionDefinition & Resource;
const clearMessage = logConsoleProgress(`Creating user defined function ${userDefinedFunction.id}`);
try {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
try {
const getResponse = await getSqlUserDefinedFunction(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
userDefinedFunction.id
);
if (getResponse?.properties?.resource) {
throw new Error(
`Create user defined function failed: user defined function with id ${userDefinedFunction.id} already exists`
);
}
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
}

const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
userDefinedFunction.id,
createUDFParams
);
return rpResponse && (rpResponse.properties?.resource as UserDefinedFunctionDefinition & Resource);
}

const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunctions.create(userDefinedFunction);
createdUserDefinedFunction = response.resource;
return response?.resource;
} catch (error) {
logConsoleError(`Error while creating user defined function ${userDefinedFunction.id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "CreateUserupdateUserDefinedFunction", error.code);
sendNotificationForError(error);
handleError(
error,
"CreateUserupdateUserDefinedFunction",
`Error while creating user defined function ${userDefinedFunction.id}`
);
throw error;
} finally {
clearMessage();
}

clearMessage();
return createdUserDefinedFunction;
}
@@ -7,7 +7,6 @@ import { AuthType } from "../../AuthType";
import { client } from "../CosmosClient";
import { updateUserContext } from "../../UserContext";
import { DatabaseAccount } from "../../Contracts/DataModels";
import { sendCachedDataMessage } from "../MessageHandler";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";

describe("deleteCollection", () => {
@@ -18,7 +17,6 @@ describe("deleteCollection", () => {
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
(sendCachedDataMessage as jest.Mock).mockResolvedValue(undefined);
});

it("should call ARM if logged in with AAD", async () => {

@@ -5,12 +5,10 @@ import { deleteCassandraTable } from "../../Utils/arm/generatedClients/2020-04-0
import { deleteMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { deleteGremlinGraph } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { deleteTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { client } from "../CosmosClient";
import { refreshCachedResources } from "../DataAccessUtilityBase";

export async function deleteCollection(databaseId: string, collectionId: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting container ${collectionId}`);
@@ -23,15 +21,13 @@ export async function deleteCollection(databaseId: string, collectionId: string)
.container(collectionId)
.delete();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) {
logConsoleError(`Error while deleting container ${collectionId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteCollection", error.code);
sendNotificationForError(error);
handleError(error, "DeleteCollection", `Error while deleting container ${collectionId}`);
throw error;
} finally {
clearMessage();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
clearMessage();
await refreshCachedResources();
}

function deleteCollectionWithARM(databaseId: string, collectionId: string): Promise<void> {
@@ -7,7 +7,6 @@ import { AuthType } from "../../AuthType";
import { client } from "../CosmosClient";
import { updateUserContext } from "../../UserContext";
import { DatabaseAccount } from "../../Contracts/DataModels";
import { sendCachedDataMessage } from "../MessageHandler";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";

describe("deleteDatabase", () => {
@@ -18,7 +17,6 @@ describe("deleteDatabase", () => {
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
(sendCachedDataMessage as jest.Mock).mockResolvedValue(undefined);
});

it("should call ARM if logged in with AAD", async () => {

@@ -4,12 +4,10 @@ import { deleteSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/s
import { deleteCassandraKeyspace } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { deleteMongoDBDatabase } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { deleteGremlinDatabase } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { client } from "../CosmosClient";
import { refreshCachedResources } from "../DataAccessUtilityBase";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";

export async function deleteDatabase(databaseId: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting database ${databaseId}`);
@@ -25,15 +23,13 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
.database(databaseId)
.delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {
logConsoleError(`Error while deleting database ${databaseId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteDatabase", error.code);
sendNotificationForError(error);
handleError(error, "DeleteDatabase", `Error while deleting database ${databaseId}`);
throw error;
} finally {
clearMessage();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
clearMessage();
await refreshCachedResources();
}

function deleteDatabaseWithARM(databaseId: string): Promise<void> {
@@ -1,7 +1,10 @@
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { deleteSqlStoredProcedure } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function deleteStoredProcedure(
databaseId: string,
@@ -10,17 +13,30 @@ export async function deleteStoredProcedure(
): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting stored procedure ${storedProcedureId}`);
try {
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
await deleteSqlStoredProcedure(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
storedProcedureId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
}
} catch (error) {
logConsoleError(`Error while deleting stored procedure ${storedProcedureId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteStoredProcedure", error.code);
sendNotificationForError(error);
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
throw error;
} finally {
clearMessage();
}

clearMessage();
return undefined;
}
@@ -1,22 +1,38 @@
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { deleteSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function deleteTrigger(databaseId: string, collectionId: string, triggerId: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting trigger ${triggerId}`);
try {
await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
await deleteSqlTrigger(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
triggerId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
}
} catch (error) {
logConsoleError(`Error while deleting trigger ${triggerId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteTrigger", error.code);
sendNotificationForError(error);
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
throw error;
} finally {
clearMessage();
}

clearMessage();
return undefined;
}
@@ -1,22 +1,38 @@
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { deleteSqlUserDefinedFunction } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function deleteUserDefinedFunction(databaseId: string, collectionId: string, id: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting user defined function ${id}`);
try {
await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
await deleteSqlUserDefinedFunction(
userContext.subscriptionId,
userContext.resourceGroup,
userContext.databaseAccount.name,
databaseId,
collectionId,
id
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
}
} catch (error) {
logConsoleError(`Error while deleting user defined function ${id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "DeleteUserDefinedFunction", error.code);
sendNotificationForError(error);
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
throw error;
} finally {
clearMessage();
}

clearMessage();
return undefined;
}

src/Common/dataAccess/getIndexTransformationProgress.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { AuthType } from "../../AuthType";

export async function getIndexTransformationProgress(databaseId: string, collectionId: string): Promise<number> {
if (window.authType !== AuthType.AAD) {
return undefined;
}
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });

indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
);
} catch (error) {
handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
throw error;
}
clearMessage();
return indexTransformationPercentage;
}
@@ -1,8 +1,7 @@
import * as DataModels from "../../Contracts/DataModels";
import { client } from "../CosmosClient";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

export async function readCollection(databaseId: string, collectionId: string): Promise<DataModels.Collection> {
let collection: DataModels.Collection;
@@ -14,9 +13,7 @@ export async function readCollection(databaseId: string, collectionId: string):
.read();
collection = response.resource as DataModels.Collection;
} catch (error) {
logConsoleError(`Error while querying container ${collectionId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollection", error.code);
sendNotificationForError(error);
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
throw error;
}
clearMessage();

@@ -4,15 +4,14 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOffers } from "./readOffers";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export const readCollectionOffer = async (
@@ -57,9 +56,7 @@ export const readCollectionOffer = async (
}
);
} catch (error) {
logConsoleError(`Error while querying offer for collection ${params.collectionId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollectionOffer", error.code);
sendNotificationForError(error);
handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
throw error;
} finally {
clearMessage();

@@ -5,9 +5,8 @@ import { ContainerDefinition, Resource } from "@azure/cosmos";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

interface ResourceWithStatistics {
statistics: DataModels.Statistic[];
@@ -38,9 +37,7 @@ export const readCollectionQuotaInfo = async (

return quota;
} catch (error) {
logConsoleError(`Error while querying quota info for container ${collection.id}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollectionQuotaInfo", error.code);
sendNotificationForError(error);
handleError(error, "ReadCollectionQuotaInfo", `Error while querying quota info for container ${collection.id}`);
throw error;
} finally {
clearMessage();
@@ -2,18 +2,16 @@ import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { listSqlContainers } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { listCassandraTables } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { listMongoDBCollections } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { listGremlinGraphs } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { listTables } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function readCollections(databaseId: string): Promise<DataModels.Collection[]> {
let collections: DataModels.Collection[];
const clearMessage = logConsoleProgress(`Querying containers for database ${databaseId}`);
try {
if (
@@ -22,22 +20,20 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
) {
collections = await readCollectionsWithARM(databaseId);
} else {
const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
collections = sdkResponse.resources as DataModels.Collection[];
return await readCollectionsWithARM(databaseId);
}

const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
logConsoleError(`Error while querying containers for database ${databaseId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadCollections", error.code);
sendNotificationForError(error);
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
throw error;
} finally {
clearMessage();
}
clearMessage();
return collections;
}

async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Collection[]> {
@@ -8,10 +8,9 @@ import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-
import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { readOffers } from "./readOffers";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";

export const readDatabaseOffer = async (
@@ -20,26 +19,14 @@ export const readDatabaseOffer = async (
const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`);
let offerId = params.offerId;
if (!offerId) {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
) {
try {
offerId = await getDatabaseOfferIdWithARM(params.databaseId);
} catch (error) {
clearMessage();
if (error.code !== "NotFound") {
throw new error();
}
return undefined;
}
} else {
offerId = await getDatabaseOfferIdWithSDK(params.databaseResourceId);
if (!offerId) {
clearMessage();
return undefined;
}
offerId = await (window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table
? getDatabaseOfferIdWithARM(params.databaseId)
: getDatabaseOfferIdWithSDK(params.databaseResourceId));
if (!offerId) {
clearMessage();
return undefined;
}
}

@@ -60,9 +47,7 @@ export const readDatabaseOffer = async (
}
);
} catch (error) {
logConsoleError(`Error while querying offer for database ${params.databaseId}:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadDatabaseOffer", error.code);
sendNotificationForError(error);
handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
throw error;
} finally {
clearMessage();
@@ -75,24 +60,32 @@ const getDatabaseOfferIdWithARM = async (databaseId: string): Promise<string> =>
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;
const defaultExperience = userContext.defaultExperience;
switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB:
rpResponse = await getSqlDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
case DefaultAccountExperienceType.MongoDB:
rpResponse = await getMongoDBDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
case DefaultAccountExperienceType.Cassandra:
rpResponse = await getCassandraKeyspaceThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
case DefaultAccountExperienceType.Graph:
rpResponse = await getGremlinDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}

return rpResponse?.name;
try {
switch (defaultExperience) {
case DefaultAccountExperienceType.DocumentDB:
rpResponse = await getSqlDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
case DefaultAccountExperienceType.MongoDB:
rpResponse = await getMongoDBDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
case DefaultAccountExperienceType.Cassandra:
rpResponse = await getCassandraKeyspaceThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
case DefaultAccountExperienceType.Graph:
rpResponse = await getGremlinDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
break;
default:
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}

return rpResponse?.name;
} catch (error) {
if (error.code !== "NotFound") {
throw error;
}
return undefined;
}
};

const getDatabaseOfferIdWithSDK = async (databaseResourceId: string): Promise<string> => {
@@ -2,13 +2,12 @@ import * as DataModels from "../../Contracts/DataModels";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { listSqlDatabases } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { listCassandraKeyspaces } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { listMongoDBDatabases } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { listGremlinDatabases } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function readDatabases(): Promise<DataModels.Database[]> {
@@ -18,9 +17,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
if (
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
userContext.defaultExperience !== DefaultAccountExperienceType.Table &&
userContext.defaultExperience !== DefaultAccountExperienceType.Cassandra
userContext.defaultExperience !== DefaultAccountExperienceType.Table
) {
databases = await readDatabasesWithARM();
} else {
@@ -30,9 +27,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {
logConsoleError(`Error while querying databases:\n ${JSON.stringify(error)}`);
logError(JSON.stringify(error), "ReadDatabases", error.code);
sendNotificationForError(error);
handleError(error, "ReadDatabases", `Error while querying databases`);
throw error;
}
clearMessage();

src/Common/dataAccess/readMongoDBCollection.tsx (new file, 30 lines)
@@ -0,0 +1,30 @@
import { userContext } from "../../UserContext";
import { getMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { MongoDBCollectionResource } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { handleError } from "../ErrorHandlingUtils";
import { AuthType } from "../../AuthType";

export async function readMongoDBCollectionThroughRP(
databaseId: string,
collectionId: string
): Promise<MongoDBCollectionResource> {
if (window.authType !== AuthType.AAD) {
return undefined;
}
let collection: MongoDBCollectionResource;
const subscriptionId = userContext.subscriptionId;
const resourceGroup = userContext.resourceGroup;
const accountName = userContext.databaseAccount.name;

const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
collection = response.properties.resource;
} catch (error) {
handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
throw error;
}
clearMessage();
return collection;
}
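A brief usage sketch for the new readMongoDBCollectionThroughRP helper above; the caller and import path are illustrative only (assuming a sibling module in src/Common/dataAccess), not part of the commit:

import { readMongoDBCollectionThroughRP } from "./readMongoDBCollection";

async function logMongoCollectionId(databaseId: string, collectionId: string): Promise<void> {
  // Outside AAD auth the helper returns undefined instead of calling the resource provider
  const collection = await readMongoDBCollectionThroughRP(databaseId, collectionId);
  if (collection) {
    console.log(`Resolved Mongo collection resource: ${collection.id}`);
  }
}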
@@ -1,26 +1,10 @@
import { Offer } from "../../Contracts/DataModels";
import { ClientDefaults } from "../Constants";
import { MessageTypes } from "../../Contracts/ExplorerContracts";
import { Platform, configContext } from "../../ConfigContext";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { client } from "../CosmosClient";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendCachedDataMessage } from "../MessageHandler";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";
import { handleError, getErrorMessage } from "../ErrorHandlingUtils";

export const readOffers = async (): Promise<Offer[]> => {
  const clearMessage = logConsoleProgress(`Querying offers`);
  try {
    if (configContext.platform === Platform.Portal) {
      return sendCachedDataMessage<Offer[]>(MessageTypes.AllOffers, [
        userContext.databaseAccount.id,
        ClientDefaults.portalCacheTimeoutMs
      ]);
    }
  } catch (error) {
    // If reading cached offers fails, continue on and read the offers via the SDK
  }

  try {
    const response = await client()
@@ -29,13 +13,11 @@ export const readOffers = async (): Promise<Offer[]> => {
    return response?.resources;
  } catch (error) {
    // This check should be removed once serverless accounts can also be identified when connected through a connection string.
    if (error.message.includes("Reading or replacing offers is not supported for serverless accounts")) {
    if (getErrorMessage(error)?.includes("Reading or replacing offers is not supported for serverless accounts")) {
      return [];
    }

    logConsoleError(`Error while querying offers:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "ReadOffers", error.code);
    sendNotificationForError(error);
    handleError(error, "ReadOffers", `Error while querying offers`);
    throw error;
  } finally {
    clearMessage();
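// getErrorMessage comes from ../ErrorHandlingUtils and its body is not shown in this comparison. The
// sketch below only illustrates what such a helper typically does and is an assumption, not the
// repository's actual implementation.
const getErrorMessageSketch = (error: unknown): string | undefined => {
  if (typeof error === "string") {
    return error;
  }
  return (error as { message?: string })?.message;
};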
@@ -1,27 +1,43 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { client } from "../CosmosClient";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { listSqlStoredProcedures } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function readStoredProcedures(
|
||||
databaseId: string,
|
||||
collectionId: string
|
||||
): Promise<(StoredProcedureDefinition & Resource)[]> {
|
||||
let sprocs: (StoredProcedureDefinition & Resource)[];
|
||||
const clearMessage = logConsoleProgress(`Querying stored procedures for container ${collectionId}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
const rpResponse = await listSqlStoredProcedures(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId
|
||||
);
|
||||
return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.storedProcedures.readAll()
|
||||
.fetchAll();
|
||||
sprocs = response.resources;
|
||||
return response?.resources;
|
||||
} catch (error) {
|
||||
logConsoleError(`Failed to query stored procedures for container ${collectionId}: ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "ReadStoredProcedures", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "ReadStoredProcedures", `Failed to query stored procedures for container ${collectionId}`);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
clearMessage();
|
||||
return sprocs;
|
||||
}
|
||||
|
||||
@@ -1,27 +1,43 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, TriggerDefinition } from "@azure/cosmos";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { client } from "../CosmosClient";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { listSqlTriggers } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
|
||||
export async function readTriggers(
|
||||
databaseId: string,
|
||||
collectionId: string
|
||||
): Promise<(TriggerDefinition & Resource)[]> {
|
||||
let triggers: (TriggerDefinition & Resource)[];
|
||||
const clearMessage = logConsoleProgress(`Querying triggers for container ${collectionId}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
const rpResponse = await listSqlTriggers(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId
|
||||
);
|
||||
return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.triggers.readAll()
|
||||
.fetchAll();
|
||||
triggers = response.resources;
|
||||
return response?.resources;
|
||||
} catch (error) {
|
||||
logConsoleError(`Failed to query triggers for container ${collectionId}: ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "ReadTriggers", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
clearMessage();
|
||||
return triggers;
|
||||
}
|
||||
|
||||
@@ -1,27 +1,47 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { client } from "../CosmosClient";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { listSqlUserDefinedFunctions } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function readUserDefinedFunctions(
|
||||
databaseId: string,
|
||||
collectionId: string
|
||||
): Promise<(UserDefinedFunctionDefinition & Resource)[]> {
|
||||
let udfs: (UserDefinedFunctionDefinition & Resource)[];
|
||||
const clearMessage = logConsoleProgress(`Querying user defined functions for container ${collectionId}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
const rpResponse = await listSqlUserDefinedFunctions(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId
|
||||
);
|
||||
return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.userDefinedFunctions.readAll()
|
||||
.fetchAll();
|
||||
udfs = response.resources;
|
||||
return response?.resources;
|
||||
} catch (error) {
|
||||
logConsoleError(`Failed to query user defined functions for container ${collectionId}: ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "ReadUserDefinedFunctions", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(
|
||||
error,
|
||||
"ReadUserDefinedFunctions",
|
||||
`Failed to query user defined functions for container ${collectionId}`
|
||||
);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
clearMessage();
|
||||
return udfs;
|
||||
}
|
||||
|
||||
@@ -1,20 +0,0 @@
import * as Constants from "../Constants";
import { sendMessage } from "../MessageHandler";
import { MessageTypes } from "../../Contracts/ExplorerContracts";

interface CosmosError {
  code: number;
  message?: string;
}

export function sendNotificationForError(error: CosmosError): void {
  if (error && error.code === Constants.HttpStatusCodes.Forbidden) {
    if (error.message && error.message.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
      return;
    }
    sendMessage({
      type: MessageTypes.ForbiddenError,
      reason: error && error.message ? error.message : error
    });
  }
}
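// Across these commits the logConsoleError / logError / sendNotificationForError triples are replaced by a
// single handleError(error, area, consoleMessage) call from ../ErrorHandlingUtils, and the file above is
// deleted. handleError's body is not part of this diff; the sketch below only suggests the shape a
// consolidated helper could take and is an assumption, not the repository's implementation.
type LoggedError = { code?: number | string; message?: string };

function handleErrorSketch(
  error: LoggedError,
  area: string,
  consoleMessage: string,
  log: (message: string, area: string, code?: number | string) => void = (m, a) => console.error(`[${a}] ${m}`)
): void {
  const detail = `${consoleMessage}: ${error.message ?? JSON.stringify(error)}`;
  log(detail, area, error.code); // one call in place of the old console-log / telemetry-log / notification trio
}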
@@ -3,7 +3,10 @@ import { Collection } from "../../Contracts/DataModels";
|
||||
import { ContainerDefinition } from "@azure/cosmos";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import {
|
||||
CreateUpdateOptions,
|
||||
ExtendedResourceProperties,
|
||||
MongoDBCollectionCreateUpdateParameters,
|
||||
MongoDBCollectionResource,
|
||||
SqlContainerCreateUpdateParameters,
|
||||
SqlContainerResource
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
@@ -23,10 +26,8 @@ import {
|
||||
getGremlinGraph
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
|
||||
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { logError } from "../Logger";
|
||||
import { refreshCachedResources } from "../DataAccessUtilityBase";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function updateCollection(
|
||||
@@ -41,6 +42,7 @@ export async function updateCollection(
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
|
||||
userContext.defaultExperience !== DefaultAccountExperienceType.Table
|
||||
) {
|
||||
@@ -50,18 +52,18 @@ export async function updateCollection(
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.replace(newCollection as ContainerDefinition, options);
|
||||
|
||||
collection = sdkResponse.resource as Collection;
|
||||
}
|
||||
|
||||
logConsoleInfo(`Successfully updated container ${collectionId}`);
|
||||
return collection;
|
||||
} catch (error) {
|
||||
logConsoleError(`Failed to update container ${collectionId}: ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "UpdateCollection", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "UpdateCollection", `Failed to update container ${collectionId}`);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
logConsoleInfo(`Successfully updated container ${collectionId}`);
|
||||
clearMessage();
|
||||
await refreshCachedResources();
|
||||
return collection;
|
||||
}
|
||||
|
||||
async function updateCollectionWithARM(
|
||||
@@ -77,15 +79,6 @@ async function updateCollectionWithARM(
|
||||
switch (defaultExperience) {
|
||||
case DefaultAccountExperienceType.DocumentDB:
|
||||
return updateSqlContainer(databaseId, collectionId, subscriptionId, resourceGroup, accountName, newCollection);
|
||||
case DefaultAccountExperienceType.MongoDB:
|
||||
return updateMongoDBCollection(
|
||||
databaseId,
|
||||
collectionId,
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
newCollection
|
||||
);
|
||||
case DefaultAccountExperienceType.Cassandra:
|
||||
return updateCassandraTable(databaseId, collectionId, subscriptionId, resourceGroup, accountName, newCollection);
|
||||
case DefaultAccountExperienceType.Graph:
|
||||
@@ -122,26 +115,35 @@ async function updateSqlContainer(
|
||||
throw new Error(`Sql container to update does not exist. Database id: ${databaseId} Collection id: ${collectionId}`);
|
||||
}
|
||||
|
||||
async function updateMongoDBCollection(
|
||||
export async function updateMongoDBCollectionThroughRP(
|
||||
databaseId: string,
|
||||
collectionId: string,
|
||||
subscriptionId: string,
|
||||
resourceGroup: string,
|
||||
accountName: string,
|
||||
newCollection: Collection
|
||||
): Promise<Collection> {
|
||||
newCollection: MongoDBCollectionResource,
|
||||
updateOptions?: CreateUpdateOptions
|
||||
): Promise<MongoDBCollectionResource> {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
const getResponse = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
|
||||
if (getResponse && getResponse.properties && getResponse.properties.resource) {
|
||||
getResponse.properties.resource = newCollection as SqlContainerResource & ExtendedResourceProperties;
|
||||
const updateParams: MongoDBCollectionCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: newCollection,
|
||||
options: updateOptions
|
||||
}
|
||||
};
|
||||
|
||||
const updateResponse = await createUpdateMongoDBCollection(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
databaseId,
|
||||
collectionId,
|
||||
getResponse as SqlContainerCreateUpdateParameters
|
||||
updateParams
|
||||
);
|
||||
return updateResponse && (updateResponse.properties.resource as Collection);
|
||||
|
||||
return updateResponse && (updateResponse.properties.resource as MongoDBCollectionResource);
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
|
||||
419  src/Common/dataAccess/updateOffer.ts  Normal file
@@ -0,0 +1,419 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { HttpHeaders } from "../Constants";
|
||||
import { Offer, UpdateOfferParams } from "../../Contracts/DataModels";
|
||||
import { OfferDefinition } from "@azure/cosmos";
|
||||
import { RequestOptions } from "@azure/cosmos/dist-esm";
|
||||
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { readCollectionOffer } from "./readCollectionOffer";
|
||||
import { readDatabaseOffer } from "./readDatabaseOffer";
|
||||
import {
|
||||
updateSqlDatabaseThroughput,
|
||||
migrateSqlDatabaseToAutoscale,
|
||||
migrateSqlDatabaseToManualThroughput,
|
||||
migrateSqlContainerToAutoscale,
|
||||
migrateSqlContainerToManualThroughput,
|
||||
updateSqlContainerThroughput
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import {
|
||||
updateCassandraKeyspaceThroughput,
|
||||
migrateCassandraKeyspaceToAutoscale,
|
||||
migrateCassandraKeyspaceToManualThroughput,
|
||||
migrateCassandraTableToAutoscale,
|
||||
migrateCassandraTableToManualThroughput,
|
||||
updateCassandraTableThroughput
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
|
||||
import {
|
||||
updateMongoDBDatabaseThroughput,
|
||||
migrateMongoDBDatabaseToAutoscale,
|
||||
migrateMongoDBDatabaseToManualThroughput,
|
||||
migrateMongoDBCollectionToAutoscale,
|
||||
migrateMongoDBCollectionToManualThroughput,
|
||||
updateMongoDBCollectionThroughput
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
|
||||
import {
|
||||
updateGremlinDatabaseThroughput,
|
||||
migrateGremlinDatabaseToAutoscale,
|
||||
migrateGremlinDatabaseToManualThroughput,
|
||||
migrateGremlinGraphToAutoscale,
|
||||
migrateGremlinGraphToManualThroughput,
|
||||
updateGremlinGraphThroughput
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { userContext } from "../../UserContext";
|
||||
import {
|
||||
migrateTableToAutoscale,
|
||||
migrateTableToManualThroughput,
|
||||
updateTableThroughput
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
|
||||
|
||||
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
|
||||
let updatedOffer: Offer;
|
||||
const offerResourceText: string = params.collectionId
|
||||
? `collection ${params.collectionId}`
|
||||
: `database ${params.databaseId}`;
|
||||
const clearMessage = logConsoleProgress(`Updating offer for ${offerResourceText}`);
|
||||
|
||||
try {
|
||||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
if (params.collectionId) {
|
||||
updatedOffer = await updateCollectionOfferWithARM(params);
|
||||
} else if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
|
||||
// update table's database offer with SDK since RP doesn't support it
|
||||
updatedOffer = await updateOfferWithSDK(params);
|
||||
} else {
|
||||
updatedOffer = await updateDatabaseOfferWithARM(params);
|
||||
}
|
||||
} else {
|
||||
updatedOffer = await updateOfferWithSDK(params);
|
||||
}
|
||||
logConsoleInfo(`Successfully updated offer for ${offerResourceText}`);
|
||||
return updatedOffer;
|
||||
} catch (error) {
|
||||
handleError(error, "UpdateCollection", `Error updating offer for ${offerResourceText}`);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
};
|
||||
|
||||
const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<Offer> => {
|
||||
try {
|
||||
switch (userContext.defaultExperience) {
|
||||
case DefaultAccountExperienceType.DocumentDB:
|
||||
await updateSqlContainerOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.MongoDB:
|
||||
await updateMongoCollectionOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.Cassandra:
|
||||
await updateCassandraTableOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.Graph:
|
||||
await updateGremlinGraphOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.Table:
|
||||
await updateTableOffer(params);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unsupported default experience type: ${userContext.defaultExperience}`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code !== "MethodNotAllowed") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return await readCollectionOffer({
|
||||
collectionId: params.collectionId,
|
||||
databaseId: params.databaseId,
|
||||
offerId: params.currentOffer.id
|
||||
});
|
||||
};
|
||||
|
||||
const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Offer> => {
|
||||
try {
|
||||
switch (userContext.defaultExperience) {
|
||||
case DefaultAccountExperienceType.DocumentDB:
|
||||
await updateSqlDatabaseOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.MongoDB:
|
||||
await updateMongoDatabaseOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.Cassandra:
|
||||
await updateCassandraKeyspaceOffer(params);
|
||||
break;
|
||||
case DefaultAccountExperienceType.Graph:
|
||||
await updateGremlinDatabaseOffer(params);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unsupported default experience type: ${userContext.defaultExperience}`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code !== "MethodNotAllowed") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return await readDatabaseOffer({
|
||||
databaseId: params.databaseId,
|
||||
offerId: params.currentOffer.id
|
||||
});
|
||||
};
|
||||
|
||||
const updateSqlContainerOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateSqlContainerToAutoscale(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateSqlContainerToManualThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateSqlContainerThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId,
|
||||
body
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const updateMongoCollectionOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateMongoDBCollectionToAutoscale(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateMongoDBCollectionToManualThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateMongoDBCollectionThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId,
|
||||
body
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const updateCassandraTableOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateCassandraTableToAutoscale(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateCassandraTableToManualThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateCassandraTableThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId,
|
||||
body
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const updateGremlinGraphOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateGremlinGraphToAutoscale(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateGremlinGraphToManualThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId
|
||||
);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateGremlinGraphThroughput(
|
||||
subscriptionId,
|
||||
resourceGroup,
|
||||
accountName,
|
||||
params.databaseId,
|
||||
params.collectionId,
|
||||
body
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const updateTableOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateTableToAutoscale(subscriptionId, resourceGroup, accountName, params.collectionId);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateTableToManualThroughput(subscriptionId, resourceGroup, accountName, params.collectionId);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateTableThroughput(subscriptionId, resourceGroup, accountName, params.collectionId, body);
|
||||
}
|
||||
};
|
||||
|
||||
const updateSqlDatabaseOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateSqlDatabaseToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateSqlDatabaseToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateSqlDatabaseThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
|
||||
}
|
||||
};
|
||||
|
||||
const updateMongoDatabaseOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateMongoDBDatabaseToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateMongoDBDatabaseToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateMongoDBDatabaseThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
|
||||
}
|
||||
};
|
||||
|
||||
const updateCassandraKeyspaceOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateCassandraKeyspaceToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateCassandraKeyspaceToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateCassandraKeyspaceThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
|
||||
}
|
||||
};
|
||||
|
||||
const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<void> => {
|
||||
const subscriptionId = userContext.subscriptionId;
|
||||
const resourceGroup = userContext.resourceGroup;
|
||||
const accountName = userContext.databaseAccount.name;
|
||||
|
||||
if (params.migrateToAutoPilot) {
|
||||
await migrateGremlinDatabaseToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else if (params.migrateToManual) {
|
||||
await migrateGremlinDatabaseToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
|
||||
} else {
|
||||
const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
|
||||
await updateGremlinDatabaseThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
|
||||
}
|
||||
};
|
||||
|
||||
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
|
||||
const body: ThroughputSettingsUpdateParameters = {
|
||||
properties: {
|
||||
resource: {}
|
||||
}
|
||||
};
|
||||
|
||||
if (params.autopilotThroughput) {
|
||||
body.properties.resource.autoscaleSettings = {
|
||||
maxThroughput: params.autopilotThroughput
|
||||
};
|
||||
} else {
|
||||
body.properties.resource.throughput = params.manualThroughput;
|
||||
}
|
||||
|
||||
return body;
|
||||
};
|
||||
|
||||
const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => {
|
||||
const currentOffer = params.currentOffer;
|
||||
const newOffer: Offer = {
|
||||
content: {
|
||||
offerThroughput: undefined,
|
||||
offerIsRUPerMinuteThroughputEnabled: false
|
||||
},
|
||||
_etag: undefined,
|
||||
_ts: undefined,
|
||||
_rid: currentOffer._rid,
|
||||
_self: currentOffer._self,
|
||||
id: currentOffer.id,
|
||||
offerResourceId: currentOffer.offerResourceId,
|
||||
offerVersion: currentOffer.offerVersion,
|
||||
offerType: currentOffer.offerType,
|
||||
resource: currentOffer.resource
|
||||
};
|
||||
|
||||
if (params.autopilotThroughput) {
|
||||
newOffer.content.offerAutopilotSettings = {
|
||||
maxThroughput: params.autopilotThroughput
|
||||
};
|
||||
} else {
|
||||
newOffer.content.offerThroughput = params.manualThroughput;
|
||||
}
|
||||
|
||||
const options: RequestOptions = {};
|
||||
if (params.migrateToAutoPilot) {
|
||||
options.initialHeaders = {
|
||||
[HttpHeaders.migrateOfferToAutopilot]: "true"
|
||||
};
|
||||
delete newOffer.content.offerAutopilotSettings;
|
||||
} else if (params.migrateToManual) {
|
||||
options.initialHeaders = {
|
||||
[HttpHeaders.migrateOfferToManualThroughput]: "true"
|
||||
};
|
||||
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
|
||||
}
|
||||
|
||||
const sdkResponse = await client()
|
||||
.offer(params.currentOffer.id)
|
||||
// TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
|
||||
.replace((newOffer as unknown) as OfferDefinition, options);
|
||||
return sdkResponse?.resource;
|
||||
};
|
||||
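// createUpdateOfferBody (above) builds the ARM ThroughputSettingsUpdateParameters payload shared by every
// update*Throughput call in updateOffer.ts. The two shapes it can produce are shown below; the throughput
// numbers are placeholders rather than values taken from these commits.
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";

const autoscaleBody: ThroughputSettingsUpdateParameters = {
  properties: { resource: { autoscaleSettings: { maxThroughput: 4000 } } }
};
const manualBody: ThroughputSettingsUpdateParameters = {
  properties: { resource: { throughput: 400 } }
};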
@@ -1,8 +1,9 @@
import { Platform, configContext } from "../../ConfigContext";
import { getAuthorizationHeader } from "../../Utils/AuthorizationUtils";
import { AutoPilotOfferSettings } from "../../Contracts/DataModels";
import { logConsoleProgress, logConsoleInfo, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { HttpHeaders } from "../Constants";
import { handleError } from "../ErrorHandlingUtils";

interface UpdateOfferThroughputRequest {
  subscriptionId: string;
@@ -44,8 +45,13 @@ export async function updateOfferThroughputBeyondLimit(request: UpdateOfferThrou
    clearMessage();
    return undefined;
  }

  const error = await response.json();
  logConsoleError(`Failed to request an increase in throughput for ${request.throughput}: ${error.message}`);
  handleError(
    error,
    "updateOfferThroughputBeyondLimit",
    `Failed to request an increase in throughput for ${request.throughput}`
  );
  clearMessage();
  throw new Error(error.message);
  throw error;
}
||||
@@ -1,29 +1,72 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import {
|
||||
SqlStoredProcedureCreateUpdateParameters,
|
||||
SqlStoredProcedureResource
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import {
|
||||
createUpdateSqlStoredProcedure,
|
||||
getSqlStoredProcedure
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function updateStoredProcedure(
|
||||
databaseId: string,
|
||||
collectionId: string,
|
||||
storedProcedure: StoredProcedureDefinition
|
||||
): Promise<StoredProcedureDefinition & Resource> {
|
||||
let updatedStoredProcedure: StoredProcedureDefinition & Resource;
|
||||
const clearMessage = logConsoleProgress(`Updating stored procedure ${storedProcedure.id}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
const getResponse = await getSqlStoredProcedure(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId,
|
||||
storedProcedure.id
|
||||
);
|
||||
|
||||
if (getResponse?.properties?.resource) {
|
||||
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: storedProcedure as SqlStoredProcedureResource,
|
||||
options: {}
|
||||
}
|
||||
};
|
||||
const rpResponse = await createUpdateSqlStoredProcedure(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId,
|
||||
storedProcedure.id,
|
||||
createSprocParams
|
||||
);
|
||||
return rpResponse && (rpResponse.properties?.resource as StoredProcedureDefinition & Resource);
|
||||
}
|
||||
|
||||
throw new Error(`Failed to update stored procedure: ${storedProcedure.id} does not exist.`);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.storedProcedure(storedProcedure.id)
|
||||
.replace(storedProcedure);
|
||||
updatedStoredProcedure = response.resource;
|
||||
return response?.resource;
|
||||
} catch (error) {
|
||||
logConsoleError(`Error while updating stored procedure ${storedProcedure.id}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "UpdateStoredProcedure", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "UpdateStoredProcedure", `Error while updating stored procedure ${storedProcedure.id}`);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
|
||||
clearMessage();
|
||||
return updatedStoredProcedure;
|
||||
}
|
||||
|
||||
@@ -1,29 +1,69 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import {
|
||||
SqlTriggerCreateUpdateParameters,
|
||||
SqlTriggerResource
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { TriggerDefinition } from "@azure/cosmos";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { client } from "../CosmosClient";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function updateTrigger(
|
||||
databaseId: string,
|
||||
collectionId: string,
|
||||
trigger: TriggerDefinition
|
||||
): Promise<TriggerDefinition> {
|
||||
let updatedTrigger: TriggerDefinition;
|
||||
const clearMessage = logConsoleProgress(`Updating trigger ${trigger.id}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
const getResponse = await getSqlTrigger(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId,
|
||||
trigger.id
|
||||
);
|
||||
|
||||
if (getResponse?.properties?.resource) {
|
||||
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: trigger as SqlTriggerResource,
|
||||
options: {}
|
||||
}
|
||||
};
|
||||
const rpResponse = await createUpdateSqlTrigger(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId,
|
||||
trigger.id,
|
||||
createTriggerParams
|
||||
);
|
||||
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition);
|
||||
}
|
||||
|
||||
throw new Error(`Failed to update trigger: ${trigger.id} does not exist.`);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.trigger(trigger.id)
|
||||
.replace(trigger);
|
||||
updatedTrigger = response.resource;
|
||||
return response?.resource;
|
||||
} catch (error) {
|
||||
logConsoleError(`Error while updating trigger ${trigger.id}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "UpdateTrigger", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(error, "UpdateTrigger", `Error while updating trigger ${trigger.id}`);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
|
||||
clearMessage();
|
||||
return updatedTrigger;
|
||||
}
|
||||
|
||||
@@ -1,29 +1,76 @@
|
||||
import { AuthType } from "../../AuthType";
|
||||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
|
||||
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import {
|
||||
SqlUserDefinedFunctionCreateUpdateParameters,
|
||||
SqlUserDefinedFunctionResource
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import { logError } from "../Logger";
|
||||
import { sendNotificationForError } from "./sendNotificationForError";
|
||||
import {
|
||||
createUpdateSqlUserDefinedFunction,
|
||||
getSqlUserDefinedFunction
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
import { userContext } from "../../UserContext";
|
||||
|
||||
export async function updateUserDefinedFunction(
|
||||
databaseId: string,
|
||||
collectionId: string,
|
||||
userDefinedFunction: UserDefinedFunctionDefinition
|
||||
): Promise<UserDefinedFunctionDefinition & Resource> {
|
||||
let updatedUserDefinedFunction: UserDefinedFunctionDefinition & Resource;
|
||||
const clearMessage = logConsoleProgress(`Updating user defined function ${userDefinedFunction.id}`);
|
||||
try {
|
||||
if (
|
||||
window.authType === AuthType.AAD &&
|
||||
!userContext.useSDKOperations &&
|
||||
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
|
||||
) {
|
||||
const getResponse = await getSqlUserDefinedFunction(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId,
|
||||
userDefinedFunction.id
|
||||
);
|
||||
|
||||
if (getResponse?.properties?.resource) {
|
||||
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
|
||||
options: {}
|
||||
}
|
||||
};
|
||||
const rpResponse = await createUpdateSqlUserDefinedFunction(
|
||||
userContext.subscriptionId,
|
||||
userContext.resourceGroup,
|
||||
userContext.databaseAccount.name,
|
||||
databaseId,
|
||||
collectionId,
|
||||
userDefinedFunction.id,
|
||||
createUDFParams
|
||||
);
|
||||
return rpResponse && (rpResponse.properties?.resource as UserDefinedFunctionDefinition & Resource);
|
||||
}
|
||||
|
||||
throw new Error(`Failed to update user defined function: ${userDefinedFunction.id} does not exist.`);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.userDefinedFunction(userDefinedFunction.id)
|
||||
.replace(userDefinedFunction);
|
||||
updatedUserDefinedFunction = response.resource;
|
||||
return response?.resource;
|
||||
} catch (error) {
|
||||
logConsoleError(`Error while updating user defined function ${userDefinedFunction.id}:\n ${JSON.stringify(error)}`);
|
||||
logError(JSON.stringify(error), "UpdateUserupdateUserDefinedFunction", error.code);
|
||||
sendNotificationForError(error);
|
||||
handleError(
|
||||
error,
|
||||
"UpdateUserupdateUserDefinedFunction",
|
||||
`Error while updating user defined function ${userDefinedFunction.id}`
|
||||
);
|
||||
throw error;
|
||||
} finally {
|
||||
clearMessage();
|
||||
}
|
||||
|
||||
clearMessage();
|
||||
return updatedUserDefinedFunction;
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ let configContext: Readonly<ConfigContext> = {
  allowedParentFrameOrigins: [
    `^https:\\/\\/cosmos\\.azure\\.(com|cn|us)$`,
    `^https:\\/\\/[\\.\\w]*portal\\.azure\\.(com|cn|us)$`,
    `^https:\\/\\/[\\.\\w]*portal\\.microsoftazure.de$`,
    `^https:\\/\\/[\\.\\w]*ext\\.azure\\.(com|cn|us)$`,
    `^https:\\/\\/[\\.\\w]*\\.ext\\.microsoftazure\\.de$`,
    `^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`
@@ -50,7 +51,8 @@ let configContext: Readonly<ConfigContext> = {
  ARCADIA_ENDPOINT: "https://workspaceartifacts.projectarcadia.net",
  ARCADIA_LIVY_ENDPOINT_DNS_ZONE: "dev.azuresynapse.net",
  GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/settings/applications/1189306
  JUNO_ENDPOINT: "https://tools.cosmos.azure.com"
  JUNO_ENDPOINT: "https://tools.cosmos.azure.com",
  BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
};

export function resetConfigContext(): void {
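// The allowedParentFrameOrigins entries above are regular-expression sources. A minimal sketch of how an
// incoming frame origin could be checked against them; the helper name is an assumption and the actual
// validation code is not part of this diff.
const isAllowedParentFrameOrigin = (origin: string, allowedPatterns: readonly string[]): boolean =>
  allowedPatterns.some(pattern => new RegExp(pattern).test(origin));

// Example: isAllowedParentFrameOrigin("https://portal.azure.com", configContext.allowedParentFrameOrigins) === true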
@@ -88,6 +88,38 @@ export interface Resource {
|
||||
id: string;
|
||||
}
|
||||
|
||||
export interface IType {
|
||||
name: string;
|
||||
code: number;
|
||||
}
|
||||
|
||||
export interface IDataField {
|
||||
dataType: IType;
|
||||
hasNulls: boolean;
|
||||
isArray: boolean;
|
||||
schemaType: IType;
|
||||
name: string;
|
||||
path: string;
|
||||
maxRepetitionLevel: number;
|
||||
maxDefinitionLevel: number;
|
||||
}
|
||||
|
||||
export interface ISchema {
|
||||
id: string;
|
||||
accountName: string;
|
||||
resource: string;
|
||||
fields: IDataField[];
|
||||
}
|
||||
|
||||
export interface ISchemaRequest {
|
||||
id: string;
|
||||
subscriptionId: string;
|
||||
resourceGroup: string;
|
||||
accountName: string;
|
||||
resource: string;
|
||||
status: string;
|
||||
}
|
||||
|
||||
export interface Collection extends Resource {
|
||||
defaultTtl?: number;
|
||||
indexingPolicy?: IndexingPolicy;
|
||||
@@ -98,6 +130,8 @@ export interface Collection extends Resource {
|
||||
changeFeedPolicy?: ChangeFeedPolicy;
|
||||
analyticalStorageTtl?: number;
|
||||
geospatialConfig?: GeospatialConfig;
|
||||
schema?: ISchema;
|
||||
requestSchema?: () => void;
|
||||
}
|
||||
|
||||
export interface Database extends Resource {
|
||||
@@ -216,18 +250,6 @@ export interface UniqueKey {
|
||||
paths: string[];
|
||||
}
|
||||
|
||||
// Returned by DocumentDb client proxy
|
||||
// Inner errors in BackendErrorDataModel when error is in SQL syntax
|
||||
export interface ErrorDataModel {
|
||||
message: string;
|
||||
severity?: string;
|
||||
location?: {
|
||||
start: string;
|
||||
end: string;
|
||||
};
|
||||
code?: string;
|
||||
}
|
||||
|
||||
export interface CreateDatabaseAndCollectionRequest {
|
||||
databaseId: string;
|
||||
collectionId: string;
|
||||
@@ -239,21 +261,12 @@ export interface CreateDatabaseAndCollectionRequest {
|
||||
uniqueKeyPolicy?: UniqueKeyPolicy;
|
||||
autoPilot?: AutoPilotCreationSettings;
|
||||
analyticalStorageTtl?: number;
|
||||
hasAutoPilotV2FeatureFlag?: boolean;
|
||||
}
|
||||
|
||||
export interface AutoPilotCreationSettings {
|
||||
autopilotTier?: AutopilotTier;
|
||||
maxThroughput?: number;
|
||||
}
|
||||
|
||||
export enum AutopilotTier {
|
||||
Tier1 = 1,
|
||||
Tier2 = 2,
|
||||
Tier3 = 3,
|
||||
Tier4 = 4
|
||||
}
|
||||
|
||||
export interface Query {
|
||||
id: string;
|
||||
resourceId: string;
|
||||
@@ -262,9 +275,7 @@ export interface Query {
|
||||
}
|
||||
|
||||
export interface AutoPilotOfferSettings {
|
||||
tier?: AutopilotTier;
|
||||
maximumTierThroughput?: number;
|
||||
targetTier?: AutopilotTier;
|
||||
maxThroughput?: number;
|
||||
targetMaxThroughput?: number;
|
||||
}
|
||||
@@ -303,6 +314,16 @@ export interface ReadCollectionOfferParams {
|
||||
offerId?: string;
|
||||
}
|
||||
|
||||
export interface UpdateOfferParams {
|
||||
currentOffer: Offer;
|
||||
databaseId: string;
|
||||
autopilotThroughput: number;
|
||||
manualThroughput: number;
|
||||
collectionId?: string;
|
||||
migrateToAutoPilot?: boolean;
|
||||
migrateToManual?: boolean;
|
||||
}
|
||||
|
||||
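// The UpdateOfferParams contract above is what updateOffer() in src/Common/dataAccess/updateOffer.ts
// consumes. A hedged example of migrating a collection-level offer to autoscale; the ids are placeholders
// and currentOffer is assumed to have been read earlier (for example via readCollectionOffer).
declare const previouslyReadOffer: Offer;

const exampleParams: UpdateOfferParams = {
  databaseId: "sampleDatabase",
  collectionId: "sampleCollection",
  currentOffer: previouslyReadOffer,
  autopilotThroughput: 4000,
  manualThroughput: 400, // ignored here: createUpdateOfferBody prefers autopilotThroughput when it is set
  migrateToAutoPilot: true
};
// await updateOffer(exampleParams);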
export interface Notification {
|
||||
id: string;
|
||||
kind: string;
|
||||
@@ -481,7 +502,6 @@ export interface MongoParameters extends RpParameters {
|
||||
rid?: string;
|
||||
rtype?: string;
|
||||
isAutoPilot?: Boolean;
|
||||
autoPilotTier?: string;
|
||||
autoPilotThroughput?: string;
|
||||
analyticalStorageTtl?: number;
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ export interface LogEntry {
  /**
   * The message code.
   */
  code?: number;
  code?: number | string;
  /**
   * Any additional data to be logged.
   */
7  src/Contracts/SubscriptionType.ts  Normal file
@@ -0,0 +1,7 @@
export enum SubscriptionType {
  Benefits,
  EA,
  Free,
  Internal,
  PAYG
}
@@ -17,6 +17,7 @@ import Trigger from "../Explorer/Tree/Trigger";
|
||||
import UserDefinedFunction from "../Explorer/Tree/UserDefinedFunction";
|
||||
import { UploadDetails } from "../workers/upload/definitions";
|
||||
import * as DataModels from "./DataModels";
|
||||
import { SubscriptionType } from "./SubscriptionType";
|
||||
|
||||
export interface TokenProvider {
|
||||
getAuthHeader(): Promise<Headers>;
|
||||
@@ -85,7 +86,7 @@ export interface Database extends TreeNode {
|
||||
collapseDatabase(): void;
|
||||
|
||||
loadCollections(): Promise<void>;
|
||||
findCollectionWithId(collectionRid: string): Collection;
|
||||
findCollectionWithId(collectionId: string): Collection;
|
||||
openAddCollection(database: Database, event: MouseEvent): void;
|
||||
onDeleteDatabaseContextMenuClick(source: Database, event: MouseEvent | KeyboardEvent): void;
|
||||
onSettingsClick: () => void;
|
||||
@@ -115,6 +116,8 @@ export interface CollectionBase extends TreeNode {
|
||||
export interface Collection extends CollectionBase {
|
||||
defaultTtl: ko.Observable<number>;
|
||||
analyticalStorageTtl: ko.Observable<number>;
|
||||
schema?: DataModels.ISchema;
|
||||
requestSchema?: () => void;
|
||||
indexingPolicy: ko.Observable<DataModels.IndexingPolicy>;
|
||||
uniqueKeyPolicy: DataModels.UniqueKeyPolicy;
|
||||
quotaInfo: ko.Observable<DataModels.CollectionQuotaInfo>;
|
||||
@@ -266,7 +269,6 @@ export interface TabOptions {
|
||||
tabKind: CollectionTabKind;
|
||||
title: string;
|
||||
tabPath: string;
|
||||
selfLink: string;
|
||||
isActive: ko.Observable<boolean>;
|
||||
hashLocation: string;
|
||||
onUpdateTabsButtons: (buttons: CommandButtonComponentProps[]) => void;
|
||||
@@ -290,6 +292,10 @@ export interface DocumentsTabOptions extends TabOptions {
|
||||
resourceTokenPartitionKey?: string;
|
||||
}
|
||||
|
||||
export interface SettingsTabV2Options extends TabOptions {
|
||||
getPendingNotification: Q.Promise<DataModels.Notification>;
|
||||
}
|
||||
|
||||
export interface ConflictsTabOptions extends TabOptions {
|
||||
partitionKey: DataModels.PartitionKey;
|
||||
conflictIds: ko.ObservableArray<ConflictId>;
|
||||
@@ -305,7 +311,6 @@ export interface QueryTabOptions extends TabOptions {
|
||||
export interface ScriptTabOption extends TabOptions {
|
||||
resource: any;
|
||||
isNew: boolean;
|
||||
collectionSelfLink?: string;
|
||||
partitionKey?: DataModels.PartitionKey;
|
||||
}
|
||||
|
||||
@@ -356,6 +361,7 @@ export enum CollectionTabKind {
|
||||
SparkMasterTab = 16,
|
||||
Gallery = 17,
|
||||
NotebookViewer = 18,
|
||||
Schema = 19,
|
||||
SettingsV2 = 19
|
||||
}
|
||||
|
||||
@@ -388,6 +394,7 @@ export interface DataExplorerInputsFrame {
|
||||
dataExplorerVersion?: string;
|
||||
isAuthWithresourceToken?: boolean;
|
||||
defaultCollectionThroughput?: CollectionCreationDefaults;
|
||||
flights?: readonly string[];
|
||||
}
|
||||
|
||||
export interface CollectionCreationDefaults {
|
||||
@@ -409,14 +416,6 @@ export interface ThroughputDefaults {
|
||||
shared: number;
|
||||
}
|
||||
|
||||
export enum SubscriptionType {
|
||||
Benefits,
|
||||
EA,
|
||||
Free,
|
||||
Internal,
|
||||
PAYG
|
||||
}
|
||||
|
||||
export class MonacoEditorSettings {
|
||||
public readonly language: string;
|
||||
public readonly readOnly: boolean;
|
||||
|
||||
42  src/Definitions/jquery.contextmenu.d.ts  vendored
@@ -1,42 +0,0 @@
|
||||
// Type definitions for jQuery contextMenu 1.7.0
|
||||
// Project: http://medialize.github.com/jQuery-contextMenu/
|
||||
// Definitions by: Natan Vivo <https://github.com/nvivo/>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
/// <reference path="jquery.d.ts" />
|
||||
|
||||
interface JQueryContextMenuOptions {
|
||||
selector: string;
|
||||
appendTo?: string;
|
||||
trigger?: string;
|
||||
autoHide?: boolean;
|
||||
delay?: number;
|
||||
determinePosition?: (menu: JQuery) => void;
|
||||
position?: (opt: JQuery, x: number, y: number) => void;
|
||||
positionSubmenu?: (menu: JQuery) => void;
|
||||
zIndex?: number;
|
||||
animation?: {
|
||||
duration?: number;
|
||||
show?: string;
|
||||
hide?: string;
|
||||
};
|
||||
events?: {
|
||||
show?: () => void;
|
||||
hide?: () => void;
|
||||
};
|
||||
callback?: (key: any, options: any) => any;
|
||||
items?: any;
|
||||
build?: (triggerElement: JQuery, e: Event) => any;
|
||||
reposition?: boolean;
|
||||
className?: string;
|
||||
itemClickEvent?: string;
|
||||
}
|
||||
|
||||
interface JQueryStatic {
|
||||
contextMenu(options?: JQueryContextMenuOptions): JQuery;
|
||||
contextMenu(type: string, selector?: any): JQuery;
|
||||
}
|
||||
|
||||
interface JQuery {
|
||||
contextMenu(options?: any): JQuery;
|
||||
}
|
||||
@@ -123,8 +123,4 @@ describe("Component Registerer", () => {
  it("should register dynamic-list component", () => {
    expect(ko.components.isRegistered("dynamic-list")).toBe(true);
  });

  it("should register throughput-input component", () => {
    expect(ko.components.isRegistered("throughput-input")).toBe(true);
  });
});
@@ -11,7 +11,6 @@ import { InputTypeaheadComponent } from "./Controls/InputTypeahead/InputTypeahea
import { JsonEditorComponent } from "./Controls/JsonEditor/JsonEditorComponent";
import { NewVertexComponent } from "./Graph/NewVertexComponent/NewVertexComponent";
import { TabsManagerKOComponent } from "./Tabs/TabsManager";
import { ThroughputInputComponent } from "./Controls/ThroughputInput/ThroughputInputComponent";
import { ThroughputInputComponentAutoPilotV3 } from "./Controls/ThroughputInput/ThroughputInputComponentAutoPilotV3";

ko.components.register("input-typeahead", new InputTypeaheadComponent());
@@ -23,12 +22,11 @@ ko.components.register("editor", new EditorComponent());
ko.components.register("json-editor", new JsonEditorComponent());
ko.components.register("diff-editor", new DiffEditorComponent());
ko.components.register("dynamic-list", DynamicListComponent);
ko.components.register("throughput-input", ThroughputInputComponent);
ko.components.register("throughput-input-autopilot-v3", ThroughputInputComponentAutoPilotV3);
ko.components.register("tabs-manager", TabsManagerKOComponent());

// Collection Tabs
ko.components.register("documents-tab", new TabComponents.DocumentsTab());
ko.components.register("documents-tab", new TabComponents.MongoDocumentsTabV2());
ko.components.register("mongo-documents-tab", new TabComponents.MongoDocumentsTab());
ko.components.register("stored-procedure-tab", new TabComponents.StoredProcedureTab());
ko.components.register("trigger-tab", new TabComponents.TriggerTab());
@@ -1,12 +1,9 @@
import * as React from "react";
import { ArcadiaWorkspace, SparkPool } from "../../../Contracts/DataModels";
import { DefaultButton, IButtonStyles } from "office-ui-fabric-react/lib/Button";
import {
  IContextualMenuItem,
  IContextualMenuProps,
  ContextualMenuItemType
} from "office-ui-fabric-react/lib/ContextualMenu";
import { IContextualMenuItem, IContextualMenuProps } from "office-ui-fabric-react/lib/ContextualMenu";
import * as Logger from "../../../Common/Logger";
import { getErrorMessage } from "../../../Common/ErrorHandlingUtils";

export interface ArcadiaMenuPickerProps {
  selectText?: string;
@@ -47,7 +44,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
        selectedSparkPool: item.text
      });
    } catch (error) {
      Logger.logError(error, "ArcadiaMenuPicker/_onSparkPoolClicked");
      Logger.logError(getErrorMessage(error), "ArcadiaMenuPicker/_onSparkPoolClicked");
      throw error;
    }
  };
@@ -0,0 +1,14 @@
import { shallow } from "enzyme";
import React from "react";
import { CollapsibleSectionComponent, CollapsibleSectionProps } from "./CollapsibleSectionComponent";

describe("CollapsibleSectionComponent", () => {
  it("renders", () => {
    const props: CollapsibleSectionProps = {
      title: "Sample title"
    };

    const wrapper = shallow(<CollapsibleSectionComponent {...props} />);
    expect(wrapper).toMatchSnapshot();
  });
});
@@ -0,0 +1,36 @@
|
||||
import { Icon, Label, Stack } from "office-ui-fabric-react";
|
||||
import * as React from "react";
|
||||
import { accordionIconStyles, accordionStackTokens } from "../Settings/SettingsRenderUtils";
|
||||
|
||||
export interface CollapsibleSectionProps {
|
||||
title: string;
|
||||
}
|
||||
|
||||
export interface CollapsibleSectionState {
|
||||
isExpanded: boolean;
|
||||
}
|
||||
|
||||
export class CollapsibleSectionComponent extends React.Component<CollapsibleSectionProps, CollapsibleSectionState> {
|
||||
constructor(props: CollapsibleSectionProps) {
|
||||
super(props);
|
||||
this.state = {
|
||||
isExpanded: true
|
||||
};
|
||||
}
|
||||
|
||||
private toggleCollapsed = (): void => {
|
||||
this.setState({ isExpanded: !this.state.isExpanded });
|
||||
};
|
||||
|
||||
public render(): JSX.Element {
|
||||
return (
|
||||
<>
|
||||
<Stack className="collapsibleSection" horizontal tokens={accordionStackTokens} onClick={this.toggleCollapsed}>
|
||||
<Icon iconName={this.state.isExpanded ? "ChevronDown" : "ChevronRight"} styles={accordionIconStyles} />
|
||||
<Label>{this.props.title}</Label>
|
||||
</Stack>
|
||||
{this.state.isExpanded && this.props.children}
|
||||
</>
|
||||
);
|
||||
}
|
||||
}
|
||||
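For orientation, a minimal usage sketch of the new CollapsibleSectionComponent (hypothetical consumer code, not part of this diff; the import path and the IndexingPolicyPanel name are assumptions):

import * as React from "react";
// Import path is an assumption for illustration; use the component's actual location under src/Explorer/Controls.
import { CollapsibleSectionComponent } from "./CollapsibleSectionComponent";

// Children render while the section is expanded and are hidden after the
// header row (chevron + label) is clicked, which flips isExpanded in state.
export const IndexingPolicyPanel: React.FunctionComponent = () => (
  <CollapsibleSectionComponent title="Indexing policy">
    <div>Indexing policy editor goes here</div>
  </CollapsibleSectionComponent>
);
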
@@ -0,0 +1,30 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`CollapsibleSectionComponent renders 1`] = `
<Fragment>
  <Stack
    className="collapsibleSection"
    horizontal={true}
    onClick={[Function]}
    tokens={
      Object {
        "childrenGap": 10,
      }
    }
  >
    <StyledIconBase
      iconName="ChevronDown"
      styles={
        Object {
          "root": Object {
            "paddingTop": 7,
          },
        }
      }
    />
    <StyledLabelBase>
      Sample title
    </StyledLabelBase>
  </Stack>
</Fragment>
`;
@@ -3,7 +3,7 @@ import { Dialog, DialogType, DialogFooter, IDialogProps } from "office-ui-fabric
import { IButtonProps, PrimaryButton, DefaultButton } from "office-ui-fabric-react/lib/Button";
import { ITextFieldProps, TextField } from "office-ui-fabric-react/lib/TextField";
import { Link } from "office-ui-fabric-react/lib/Link";
import { FontIcon } from "office-ui-fabric-react";
import { ChoiceGroup, FontIcon, IChoiceGroupProps } from "office-ui-fabric-react";

export interface TextFieldProps extends ITextFieldProps {
  label: string;
@@ -24,6 +24,7 @@ export interface DialogProps {
  subText: string;
  isModal: boolean;
  visible: boolean;
  choiceGroupProps?: IChoiceGroupProps;
  textFieldProps?: TextFieldProps;
  linkProps?: LinkProps;
  primaryButtonText: string;
@@ -65,6 +66,7 @@ export class DialogComponent extends React.Component<DialogProps, {}> {
      minWidth: DIALOG_MIN_WIDTH,
      maxWidth: DIALOG_MAX_WIDTH
    };
    const choiceGroupProps: IChoiceGroupProps = this.props.choiceGroupProps;
    const textFieldProps: ITextFieldProps = this.props.textFieldProps;
    const linkProps: LinkProps = this.props.linkProps;
    const primaryButtonProps: IButtonProps = {
@@ -82,6 +84,7 @@ export class DialogComponent extends React.Component<DialogProps, {}> {

    return (
      <Dialog {...dialogProps}>
        {choiceGroupProps && <ChoiceGroup {...choiceGroupProps} />}
        {textFieldProps && <TextField {...textFieldProps} />}
        {linkProps && (
          <Link href={linkProps.linkUrl} target="_blank">

@@ -55,7 +55,6 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
      label: "Enable Injecting Notebook Viewer Link into the first cell",
      value: "true"
    },
    { key: "feature.enablesettingsv2", label: "Enable SettingsV2 Tab", value: "true" },
    { key: "feature.canexceedmaximumvalue", label: "Can exceed max value", value: "true" },
    {
      key: "feature.enablefixedcollectionwithsharedthroughput",

@@ -178,12 +178,6 @@ exports[`Feature panel renders all flags 1`] = `
    className="checkboxRow"
    horizontalAlign="space-between"
  >
    <StyledCheckboxBase
      checked={false}
      key="feature.enablesettingsv2"
      label="Enable SettingsV2 Tab"
      onChange={[Function]}
    />
    <StyledCheckboxBase
      checked={false}
      key="feature.canexceedmaximumvalue"

@@ -4,12 +4,10 @@

import * as React from "react";
import * as DataModels from "../../../Contracts/DataModels";
import * as Logger from "../../../Common/Logger";
import * as NotificationConsoleUtils from "../../../Utils/NotificationConsoleUtils";
import { ConsoleDataType } from "../../Menus/NotificationConsole/NotificationConsoleComponent";
import { StringUtils } from "../../../Utils/StringUtils";
import { userContext } from "../../../UserContext";
import { TerminalQueryParams } from "../../../Common/Constants";
import { handleError } from "../../../Common/ErrorHandlingUtils";

export interface NotebookTerminalComponentProps {
  notebookServerInfo: DataModels.NotebookWorkspaceConnectionInfo;
@@ -71,9 +69,10 @@ export class NotebookTerminalComponent extends React.Component<NotebookTerminalC
    params: Map<string, string>
  ): string {
    if (!serverInfo.notebookServerEndpoint) {
      const error = "Notebook server endpoint not defined. Terminal will fail to connect to jupyter server.";
      Logger.logError(error, "NotebookTerminalComponent/createNotebookAppSrc");
      NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, error);
      handleError(
        "Notebook server endpoint not defined. Terminal will fail to connect to jupyter server.",
        "NotebookTerminalComponent/createNotebookAppSrc"
      );
      return "";
    }

@@ -18,7 +18,9 @@ describe("GalleryCardComponent", () => {
      downloads: 0,
      favorites: 0,
      views: 0,
      newCellId: undefined
      newCellId: undefined,
      policyViolations: undefined,
      pendingScanJobIds: undefined
    },
    isFavorite: false,
    showDownload: true,

@@ -1,9 +1,8 @@
import * as React from "react";
import { JunoClient } from "../../../Juno/JunoClient";
import { HttpStatusCodes, CodeOfConductEndpoints } from "../../../Common/Constants";
import * as Logger from "../../../Common/Logger";
import { logConsoleError } from "../../../Utils/NotificationConsoleUtils";
import { Stack, Text, Checkbox, PrimaryButton, Link } from "office-ui-fabric-react";
import { handleError } from "../../../Common/ErrorHandlingUtils";

export interface CodeOfConductComponentProps {
  junoClient: JunoClient;
@@ -45,9 +44,7 @@ export class CodeOfConductComponent extends React.Component<CodeOfConductCompone

      this.props.onAcceptCodeOfConduct(response.data);
    } catch (error) {
      const message = `Failed to accept code of conduct: ${error}`;
      Logger.logError(message, "CodeOfConductComponent/acceptCodeOfConduct");
      logConsoleError(message);
      handleError(error, "CodeOfConductComponent/acceptCodeOfConduct", "Failed to accept code of conduct");
    }
  }

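Several hunks above collapse the paired Logger.logError / NotificationConsoleUtils calls into a single handleError(error, area, failureMessage) call imported from Common/ErrorHandlingUtils. That module's implementation is not shown in this diff; a rough sketch of the shape such a helper could take, with assumed import paths and internals, is:

import * as Logger from "./Logger";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";

// Sketch only: normalize the error to a string, then report it to both the
// structured logger and the notification console in one call.
export const getErrorMessage = (error: string | Error): string =>
  typeof error === "string" ? error : error.message;

export const handleError = (error: string | Error, area: string, failureMessage?: string): void => {
  const errorMessage = getErrorMessage(error);
  const consoleMessage = failureMessage ? `${failureMessage}: ${errorMessage}` : errorMessage;
  Logger.logError(errorMessage, area);
  logConsoleError(consoleMessage);
};
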
@@ -1,6 +1,7 @@
import {
  Dropdown,
  FocusZone,
  FontWeights,
  IDropdownOption,
  IPageSpecification,
  IPivotItemProps,
@@ -11,14 +12,12 @@ import {
  Pivot,
  PivotItem,
  SearchBox,
  Stack
  Stack,
  Text
} from "office-ui-fabric-react";
import * as React from "react";
import * as Logger from "../../../Common/Logger";
import { IGalleryItem, JunoClient, IJunoResponse, IPublicGalleryData } from "../../../Juno/JunoClient";
import * as GalleryUtils from "../../../Utils/GalleryUtils";
import * as NotificationConsoleUtils from "../../../Utils/NotificationConsoleUtils";
import { ConsoleDataType } from "../../Menus/NotificationConsole/NotificationConsoleComponent";
import { DialogComponent, DialogProps } from "../DialogReactComponent/DialogComponent";
import { GalleryCardComponent, GalleryCardComponentProps } from "./Cards/GalleryCardComponent";
import "./GalleryViewerComponent.less";
@@ -26,6 +25,7 @@ import { HttpStatusCodes } from "../../../Common/Constants";
import Explorer from "../../Explorer";
import { CodeOfConductComponent } from "./CodeOfConductComponent";
import { InfoComponent } from "./InfoComponent/InfoComponent";
import { handleError } from "../../../Common/ErrorHandlingUtils";

export interface GalleryViewerComponentProps {
  container?: Explorer;
@@ -151,7 +151,7 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
    // explicitly checking if isCodeOfConductAccepted is not false, as it is initially undefined.
    // Displaying code of conduct component on gallery load should not be the default behavior.
    if (this.state.isCodeOfConductAccepted !== false) {
      tabs.push(this.createTab(GalleryTab.Published, this.state.publishedNotebooks));
      tabs.push(this.createPublishedNotebooksTab(GalleryTab.Published, this.state.publishedNotebooks));
    }
  }

@@ -197,10 +197,59 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
  private createTab(tab: GalleryTab, data: IGalleryItem[]): GalleryTabInfo {
    return {
      tab,
      content: this.createTabContent(data)
      content: this.createSearchBarHeader(this.createCardsTabContent(data))
    };
  }

  private createPublishedNotebooksTab = (tab: GalleryTab, data: IGalleryItem[]): GalleryTabInfo => {
    return {
      tab,
      content: this.createPublishedNotebooksTabContent(data)
    };
  };

  private createPublishedNotebooksTabContent = (data: IGalleryItem[]): JSX.Element => {
    const { published, underReview, removed } = GalleryUtils.filterPublishedNotebooks(data);
    const content = (
      <Stack tokens={{ childrenGap: 10 }}>
        {published?.length > 0 &&
          this.createPublishedNotebooksSectionContent(
            undefined,
            "You have successfully published the following notebook(s) to public gallery and shared with other Azure Cosmos DB users.",
            this.createCardsTabContent(published)
          )}
        {underReview?.length > 0 &&
          this.createPublishedNotebooksSectionContent(
            "Under Review",
            "Content of a notebook you published is currently being scanned for illegal content. It will not be available to public gallery until the review is completed (may take a few days)",
            this.createCardsTabContent(underReview)
          )}
        {removed?.length > 0 &&
          this.createPublishedNotebooksSectionContent(
            "Removed",
            "These notebooks were found to contain illegal content and has been taken down.",
            this.createPolicyViolationsListContent(removed)
          )}
      </Stack>
    );

    return this.createSearchBarHeader(content);
  };

  private createPublishedNotebooksSectionContent = (
    title: string,
    description: string,
    content: JSX.Element
  ): JSX.Element => {
    return (
      <Stack tokens={{ childrenGap: 5 }}>
        {title && <Text styles={{ root: { fontWeight: FontWeights.semibold } }}>{title}</Text>}
        {description && <Text>{description}</Text>}
        {content}
      </Stack>
    );
  };

  private createPublicGalleryTabContent(data: IGalleryItem[], acceptedCodeOfConduct: boolean): JSX.Element {
    return acceptedCodeOfConduct === false ? (
      <CodeOfConductComponent
@@ -210,11 +259,11 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
        }}
      />
    ) : (
      this.createTabContent(data)
      this.createSearchBarHeader(this.createCardsTabContent(data))
    );
  }

  private createTabContent(data: IGalleryItem[]): JSX.Element {
  private createSearchBarHeader(content: JSX.Element): JSX.Element {
    return (
      <Stack tokens={{ childrenGap: 10 }}>
        <Stack horizontal tokens={{ childrenGap: 20, padding: 10 }}>
@@ -227,13 +276,13 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
          <Stack.Item styles={{ root: { minWidth: 200 } }}>
            <Dropdown options={this.sortingOptions} selectedKey={this.state.sortBy} onChange={this.onDropdownChange} />
          </Stack.Item>
          {this.props.container?.isGalleryPublishEnabled() && (
          {(!this.props.container || this.props.container.isGalleryPublishEnabled()) && (
            <Stack.Item>
              <InfoComponent />
            </Stack.Item>
          )}
        </Stack>
        {data && this.createCardsTabContent(data)}
        <Stack.Item>{content}</Stack.Item>
      </Stack>
    );
  }
@@ -251,6 +300,25 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
    );
  }

  private createPolicyViolationsListContent(data: IGalleryItem[]): JSX.Element {
    return (
      <table>
        <tbody>
          <tr>
            <th>Name</th>
            <th>Policy violations</th>
          </tr>
          {data.map(item => (
            <tr key={`policy-violations-tr-${item.id}`}>
              <td>{item.name}</td>
              <td>{item.policyViolations.join(", ")}</td>
            </tr>
          ))}
        </tbody>
      </table>
    );
  }

  private loadTabContent(tab: GalleryTab, searchText: string, sortBy: SortBy, offline: boolean): void {
    switch (tab) {
      case GalleryTab.OfficialSamples:
@@ -284,9 +352,7 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone

      this.sampleNotebooks = response.data;
    } catch (error) {
      const message = `Failed to load sample notebooks: ${error}`;
      Logger.logError(message, "GalleryViewerComponent/loadSampleNotebooks");
      NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
      handleError(error, "GalleryViewerComponent/loadSampleNotebooks", "Failed to load sample notebooks");
    }
  }

@@ -312,9 +378,7 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone
        throw new Error(`Received HTTP ${response.status} when loading public notebooks`);
      }
    } catch (error) {
      const message = `Failed to load public notebooks: ${error}`;
      Logger.logError(message, "GalleryViewerComponent/loadPublicNotebooks");
      NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
      handleError(error, "GalleryViewerComponent/loadPublicNotebooks", "Failed to load public notebooks");
    }
  }

@@ -334,9 +398,7 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone

      this.favoriteNotebooks = response.data;
    } catch (error) {
      const message = `Failed to load favorite notebooks: ${error}`;
      Logger.logError(message, "GalleryViewerComponent/loadFavoriteNotebooks");
      NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
      handleError(error, "GalleryViewerComponent/loadFavoriteNotebooks", "Failed to load favorite notebooks");
    }
  }

@@ -362,9 +424,7 @@ export class GalleryViewerComponent extends React.Component<GalleryViewerCompone

      this.publishedNotebooks = response.data;
    } catch (error) {
      const message = `Failed to load published notebooks: ${error}`;
      Logger.logError(message, "GalleryViewerComponent/loadPublishedNotebooks");
      NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Error, message);
      handleError(error, "GalleryViewerComponent/loadPublishedNotebooks", "Failed to load published notebooks");
    }
  }