Mirror of https://github.com/Azure/cosmos-explorer.git (synced 2025-12-23 10:51:30 +00:00)

Compare commits: remove-red ... replace_jq (86 commits)
| SHA1 |
|---|
| 04b00be334 |
| 5374fd5b6e |
| c3058ee5a9 |
| b000631a0c |
| e8f4c8f93c |
| 16bde97e47 |
| 6da43ee27b |
| ebae484b8f |
| dfb1b50621 |
| f54e8eb692 |
| ea39c1d092 |
| c21f42159f |
| 31e4b49f11 |
| 40491ec9c5 |
| e133df18dd |
| 0532ed26a2 |
| fd60c9c15e |
| 04ab1f3918 |
| b784ac0f86 |
| 28899f63d7 |
| 9cbf632577 |
| 17fd2185dc |
| a93c8509cd |
| 5c93c11bd9 |
| 85d2378d3a |
| 84b6075ee8 |
| d880723be9 |
| 4ce9dcc024 |
| addcfedd5e |
| a133134b8b |
| 79dec6a8a8 |
| 53a8cea95e |
| 5f1f7a8266 |
| a009a8ba5f |
| 3e782527d0 |
| e6ca1d25c9 |
| 473f722dcc |
| 5741802c25 |
| e2e58f73b1 |
| 79769e9689 |
| 542abf4d3a |
| 4bee46ce55 |
| fe58722002 |
| 94ff6b3e81 |
| b4219e2994 |
| 294270b6aa |
| e4bab1de4b |
| 703ceacd3f |
| 734df3dd18 |
| 1e19f02fd7 |
| 24b5b754ca |
| e09730d782 |
| 09a95fded4 |
| 7ffa18a190 |
| 30353c26f3 |
| 34d8704071 |
| 23714831bd |
| 9a5d46b6e0 |
| b9245101bc |
| 274deb13be |
| 9d50577800 |
| bd00e5eb9b |
| 821f665e78 |
| 39f7ef331a |
| 9933a4988a |
| d525afa142 |
| cfb9a0b321 |
| 3b64d75322 |
| daba1c4ed4 |
| a698e08638 |
| 88d630fef4 |
| 5ffa746adb |
| a9a57f4ba9 |
| 47cc6fd7a8 |
| 14cdf19efb |
| 5191ae3f3a |
| ba862a8106 |
| fe085b3e5a |
| 8028734cb0 |
| 444f663733 |
| 8c1ca35420 |
| b69174788d |
| ff03c79399 |
| 0382628249 |
| d346ebe054 |
| f5ecb8a04f |
.env.example (11 lines changed)

@@ -1,7 +1,14 @@
# These options are only needed if running end to end tests locally
PORTAL_RUNNER_USERNAME=
PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_CONNECTION_STRING=
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_CONNECTION_STRING=
NOTEBOOKS_TEST_RUNNER_TENANT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING=
DATA_EXPLORER_ENDPOINT=https://localhost:1234/hostedExplorer.html
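These values feed the end to end test runners through `process.env`. A minimal sketch of how a test setup might load them, assuming the common `dotenv` package is used to read `.env` (an assumption; this diff does not show the loading code):

```ts
// Hypothetical setup sketch; "dotenv" and the fallback endpoint are assumptions.
import { config } from "dotenv";

config(); // copies KEY=value pairs from .env into process.env

const endpoint = process.env.DATA_EXPLORER_ENDPOINT ?? "https://localhost:1234/hostedExplorer.html";
const connectionString = process.env.PORTAL_RUNNER_CONNECTION_STRING;

if (!connectionString) {
  throw new Error("PORTAL_RUNNER_CONNECTION_STRING is not set; copy .env.example to .env and fill it in");
}
console.log(`Running end to end tests against ${endpoint}`);
```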
(file header not preserved in this view; the hunk context shows a list of source paths with entries being removed)

@@ -14,9 +14,6 @@ src/Common/DataAccessUtilityBase.ts
src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/ErrorParserUtility.test.ts
src/Common/ErrorParserUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
@@ -204,8 +201,6 @@ src/Explorer/Tabs/QueryTab.test.ts
src/Explorer/Tabs/QueryTab.ts
src/Explorer/Tabs/QueryTablesTab.ts
src/Explorer/Tabs/ScriptTabBase.ts
src/Explorer/Tabs/SettingsTab.test.ts
src/Explorer/Tabs/SettingsTab.ts
src/Explorer/Tabs/SparkMasterTab.ts
src/Explorer/Tabs/StoredProcedureTab.ts
src/Explorer/Tabs/TabComponents.ts
@@ -292,8 +287,6 @@ src/Utils/DatabaseAccountUtils.ts
src/Utils/JunoUtils.ts
src/Utils/MessageValidation.ts
src/Utils/NotebookConfigurationUtils.ts
src/Utils/OfferUtils.test.ts
src/Utils/OfferUtils.ts
src/Utils/PricingUtils.test.ts
src/Utils/QueryUtils.test.ts
src/Utils/QueryUtils.ts
@@ -398,19 +391,5 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx
(file header not preserved in this view; the hunk context shows the ESLint config)

@@ -42,6 +42,13 @@ module.exports = {
    "no-null/no-null": "error",
    "@typescript-eslint/no-explicit-any": "error",
    "prefer-arrow/prefer-arrow-functions": ["error", { allowStandaloneDeclarations: true }],
    eqeqeq: "error"
    eqeqeq: "error",
    "no-restricted-syntax": [
      "error",
      {
        selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
        message: "Do not use JSON.stringify(error). It will print '{}'"
      }
    ]
  }
};
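The rationale for the new `no-restricted-syntax` entry: an `Error`'s `message` and `stack` are non-enumerable properties, so `JSON.stringify` skips them and an error serializes to `"{}"`, losing all of the useful information. A quick demonstration:

```ts
const error = new Error("request failed");

// message and stack are non-enumerable, so they are lost in serialization:
console.log(JSON.stringify(error)); // {}

// Serialize the fields explicitly instead:
console.log(JSON.stringify({ message: error.message, stack: error.stack }));
```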
.github/workflows/automerge.yml (vendored, 29 lines deleted)

@@ -1,29 +0,0 @@
name: automerge
on:
  pull_request:
    types:
      - labeled
      - unlabeled
      - synchronize
      - opened
      - edited
      - ready_for_review
      - reopened
      - unlocked
  pull_request_review:
    types:
      - submitted
  check_suite:
    types:
      - completed
  status: {}
jobs:
  automerge:
    runs-on: ubuntu-latest
    steps:
      - name: automerge
        uses: "pascalgn/automerge-action@v0.11.0"
        env:
          GITHUB_TOKEN: "${{ secrets.AUTOMERGE_GITHUB_PAT }}"
          MERGE_METHOD: "squash"
          MERGE_COMMIT_MESSAGE: "pull-request-title"
.github/workflows/ci.yml (vendored, 141 lines changed)

@@ -79,100 +79,32 @@ jobs:
          name: dist
          path: dist/
  endtoendemulator:
    name: "End To End Tests | Emulator | SQL"
    name: "End To End Emulator Tests"
    needs: [lint, format, compile, unittest]
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - uses: southpolesteve/cosmos-emulator-github-action@v1
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Restore Cypress Binary Cache
        uses: actions/cache@v2
        with:
          path: ~/.cache/Cypress
          key: ${{ runner.os }}-cypress-binary-cache
      - name: End to End Tests
        run: |
          npm ci
          npm start &
          npm ci --prefix ./cypress
          npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
        shell: bash
        env:
          EMULATOR_ENDPOINT: https://0.0.0.0:8081/
          NODE_TLS_REJECT_UNAUTHORIZED: 0
          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
  endtoendsql:
    name: "End To End Tests | SQL"
    needs: [lint, format, compile, unittest]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Restore Cypress Binary Cache
        uses: actions/cache@v2
        with:
          path: ~/.cache/Cypress
          key: ${{ runner.os }}-cypress-binary-cache
      - run: npm ci
      - name: End to End Tests
        run: |
          npm start &
          cd cypress
          npm ci
          node cleanup.js
          npm run wait-for-server
          npx cypress run --browser chrome --headless --spec "./integration/dataexplorer/SQL/*"
          npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
        shell: bash
        env:
          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
          PLATFORM: "Emulator"
          NODE_TLS_REJECT_UNAUTHORIZED: 0
          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
          CYPRESS_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
      - uses: actions/upload-artifact@v2
        name: videos
        if: ${{ failure() }}
        if: failure()
        with:
          path: "**/*.mp4"
  endtoendmongo:
    name: "End To End Tests | Mongo"
    needs: [lint, format, compile, unittest]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Restore Cypress Binary Cache
        uses: actions/cache@v2
        with:
          path: ~/.cache/Cypress
          key: ${{ runner.os }}-cypress-binary-cache
      - name: End to End Tests
        run: |
          npm ci
          npm start &
          cd cypress
          npm ci
          node cleanup.js
          npm run wait-for-server
          npx cypress run --browser chrome --headless --spec "./integration/dataexplorer/MONGO/*"
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
          CYPRESS_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
      - uses: actions/upload-artifact@v2
        if: ${{ failure() }}
        name: videos
        with:
          path: "**/*.mp4"
          name: screenshots
          path: failed-*
  accessibility:
    name: "Accessibility | Hosted"
    needs: [lint, format, compile, unittest]
@@ -191,13 +123,13 @@ jobs:
          sudo sysctl -p
          npm ci
          npm start &
          npx wait-on -i 5000 https-get://0.0.0.0:1234/
          npx wait-on -i 5000 https-get://0.0.0.0:1234/
          node utils/accesibilityCheck.js
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
  endtoendpuppeteer:
    name: "End to end puppeteer tests"
  endtoendhosted:
    name: "End to End Hosted Tests"
    needs: [lint, format, compile, unittest]
    runs-on: ubuntu-latest
    steps:
@@ -206,7 +138,7 @@ jobs:
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: End to End Puppeteer Tests
      - name: End to End Hosted Tests
        run: |
          npm ci
          npm start &
@@ -215,13 +147,27 @@ jobs:
        shell: bash
        env:
          NODE_TLS_REJECT_UNAUTHORIZED: 0
          PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
          PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
          PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
          PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
          NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
          NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
          NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
          PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
          MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
          CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
          TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: screenshots
          path: failed-*
  nuget:
    name: Publish Nuget
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendsql, endtoendmongo]
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -245,7 +191,7 @@ jobs:
  nugetmpac:
    name: Publish Nuget MPAC
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendsql, endtoendmongo]
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -267,3 +213,28 @@ jobs:
          name: packages
        with:
          path: "*.nupkg"
  nugetie:
    name: Publish Nuget IE
    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
    runs-on: ubuntu-latest
    env:
      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
      AZURE_DEVOPS_PAT: ${{ secrets.AZURE_DEVOPS_PAT }}
    steps:
      - uses: nuget/setup-nuget@v1
        with:
          nuget-api-key: ${{ secrets.NUGET_API_KEY }}
      - name: Download Dist Folder
        uses: actions/download-artifact@v2
        with:
          name: dist
      - run: cp ./configs/prod.json config.json
      - run: sed -i 's/Azure.Cosmos.DB.Data.Explorer/Azure.Cosmos.DB.Data.Explorer.IE/g' DataExplorer.nuspec
      - run: nuget sources add -Name "ADO" -Source "$NUGET_SOURCE" -UserName "GitHub" -Password "$AZURE_DEVOPS_PAT"
      - run: nuget pack -Version "2.0.0-github-${GITHUB_SHA}"
      - run: nuget push -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
      - uses: actions/upload-artifact@v2
        name: packages
        with:
          path: "*.nupkg"
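Both remaining Cypress-era jobs call `npm run wait-for-server` before launching the browser. That script is not part of this diff; a minimal sketch of the idea — poll the dev server until it answers, tolerating its self-signed certificate — might look like this (hypothetical, using Node's built-in https module):

```ts
// Hypothetical wait-for-server sketch; the real script is not shown in this diff.
import https from "https";

const ENDPOINT = "https://localhost:1234/";

const ping = (): Promise<void> =>
  new Promise((resolve, reject) => {
    // rejectUnauthorized: false tolerates the dev server's self-signed cert
    https
      .get(ENDPOINT, { rejectUnauthorized: false }, res => {
        res.resume(); // drain the response body
        resolve();
      })
      .on("error", reject);
  });

async function waitForServer(retries = 60, delayMs = 5000): Promise<void> {
  for (let i = 0; i < retries; i++) {
    try {
      return await ping();
    } catch {
      await new Promise(r => setTimeout(r, delayMs));
    }
  }
  throw new Error(`Server at ${ENDPOINT} did not come up`);
}

waitForServer().then(() => process.exit(0), () => process.exit(1));
```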
.github/workflows/runners.yml (vendored, 25 lines deleted)

@@ -1,25 +0,0 @@
name: Runners
on:
  schedule:
    - cron: "0 * 1 * *"
jobs:
  sqlcreatecollection:
    runs-on: ubuntu-latest
    name: "SQL | Create Collection"
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
      - run: npm ci
      - run: npm run test:e2e
        env:
          PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
          PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
          PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
          PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
          PORTAL_RUNNER_RESOURCE_GROUP: runners
          PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
      - uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: screenshots
          path: failure.png
.gitignore (vendored, 3 lines changed)

@@ -9,9 +9,6 @@ pkg/DataExplorer/*
test/out/*
workers/**/*.js
*.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/*
Contracts/*
.DS_Store
.vs/slnx.sqlite (BIN, new file — binary file not shown)
README.md (60 lines changed)

@@ -1,4 +1,4 @@
# CosmosDB Explorer
# Cosmos DB Explorer

UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), https://cosmos.azure.com/, and the [Cosmos DB Emulator](https://docs.microsoft.com/en-us/azure/cosmos-db/local-emulator)

@@ -13,29 +13,18 @@ UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), ht

### Watch mode

Run `npm run watch` to start the development server and automatically rebuild on changes
Run `npm start` to start the development server and automatically rebuild on changes

### Specifying Development Platform
### Hosted Development (https://cosmos.azure.com)

Setting the environment variable `PLATFORM` during the build process will force the explorer to load the specified platform. By default in development it will run in `Hosted` mode. Valid options:

- Hosted
- Emulator
- Portal

`PLATFORM=Emulator npm run watch`

### Hosted Development

The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.

To run pure hosted mode, in `webpack.config.js` change the index HtmlWebpackPlugin to use hostedExplorer.html and change the entry for index to use HostedExplorer.ts.
- Visit: `https://localhost:1234/hostedExplorer.html`
- Local sign in via AAD will NOT work. Connection string only in dev mode. Use the Portal if you need AAD auth.
- The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.

### Emulator Development

In a Windows environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-Windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.

`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
- Start the Cosmos Emulator
- Visit: https://localhost:1234/index.html

#### Setting up a Remote Emulator

@@ -55,16 +44,8 @@ The Cosmos emulator currently only runs in Windows environments. You can still d

### Portal Development

The Cosmos Portal that consumes this repo is not currently open source. If you have access to this project, `npm run build` will copy the built files over to the portal where they will be loaded by the portal development environment

You can however load a locally running instance of data explorer in the production portal.

1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Whitelist the `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html

Live reload will occur, but data explorer will not properly integrate again with the parent iframe. You will have to manually reload the page.
- Visit: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
- You may have to manually visit https://localhost:1234/explorer.html first and click through any SSL certificate warnings

### Testing

@@ -76,24 +57,17 @@ Unit tests are located adjacent to the code under test and run with [Jest](https

#### End to End CI Tests

[Cypress](https://www.cypress.io/) is used for end to end tests, which are contained in `cypress/`. Currently it operates as a sub project with its own typescript config and dependencies. It also only operates against the emulator. To run cypress tests:
Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:

1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into the `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run cypress headless (`npm run test`) or in interactive mode (`npm run test:debug`)

#### End to End Production Runners

Jest and Puppeteer are used for end to end production runners and are contained in `test/`. To run these tests locally:

1. Copy .env.example to .env and fill in all variables
2. Run `npm run test:e2e`
1. Copy .env.example to .env
2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
3. Make sure all packages are installed: `npm install`
4. Run the server `npm run start` and wait for it to start
5. Run `npm run test:e2e`

### Releasing

We generally adhear to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.

# Contributing
(file header not preserved in this view; the hunk context shows an Azure Pipelines config)

@@ -7,7 +7,7 @@ trigger:
  - master

pool:
  vmImage: 'ubuntu-latest'
  vmImage: "ubuntu-latest"

steps:
  - task: ComponentGovernanceComponentDetection@0
  - task: ComponentGovernanceComponentDetection@0
cypress/.gitignore (vendored, 4 lines deleted)

@@ -1,4 +0,0 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos
@@ -1,51 +0,0 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");

// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");

async function cleanup() {
  const connectionString = process.env.CYPRESS_CONNECTION_STRING;
  if (!connectionString) {
    throw new Error("Connection string not provided");
  }

  let client;
  switch (true) {
    case connectionString.includes("mongodb://"): {
      const [, key, accountName] = connectionString.match(mongoRegex);
      client = new CosmosClient({
        key,
        endpoint: `https://${accountName}.documents.azure.com:443/`
      });
      break;
    }
    // TODO: Add support for other API connection strings
    default:
      client = new CosmosClient(connectionString);
      break;
  }

  const response = await client.databases.readAll().fetchAll();
  return Promise.all(
    response.resources.map(async db => {
      const dbTimestamp = new Date(db._ts * 1000);
      const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
      if (dbTimestamp < twentyMinutesAgo) {
        await client.database(db.id).delete();
        console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
      } else {
        console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
      }
    })
  );
}

cleanup()
  .then(() => {
    process.exit(0);
  })
  .catch(error => {
    console.error(error);
    process.exit(1);
  });
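The deleted cleanup script's Mongo branch leans on positional capture groups to pull the account key and account name out of the connection string; a quick check of that extraction (the connection string values here are hypothetical placeholders):

```ts
// Same regex as the script above; the sample connection string is a placeholder.
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");
const sample = "mongodb://my-account:secretKey123@my-account.mongo.cosmos.azure.com:10255/?ssl=true";

const match = sample.match(mongoRegex);
if (match) {
  const [, key, accountName] = match; // [full match, group 1, group 2]
  console.log(key);         // "secretKey123"
  console.log(accountName); // "my-account"
}
```

The script then rewrites the account name into an `https://<account>.documents.azure.com:443/` endpoint so the SQL-flavored `CosmosClient` can enumerate and delete databases on a Mongo API account.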
@@ -1,15 +0,0 @@
{
  "integrationFolder": "./integration",
  "pluginsFile": false,
  "fixturesFolder": false,
  "supportFile": "./support/index.js",
  "defaultCommandTimeout": 90000,
  "chromeWebSecurity": false,
  "reporter": "mochawesome",
  "reporterOptions": {
    "reportDir": "cypress/report",
    "json": true,
    "overwrite": false,
    "html": false
  }
}
@@ -1,66 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Cassandra API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
  });

  it("Create a new table in Cassandra API", () => {
    const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
    const tableId = `TableId112`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Table"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[id="keyspace-id"]')
        .should("be.visible")
        .type(keyspaceId);

      cy.wrap($body)
        .find('input[class="textfontclr"]')
        .type(tableId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", tableId);
    });
  });
});
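Every spec in this group repeats the same maneuver for reaching into the Data Explorer iframe: grab the iframe, take its body, and wrap it so Cypress commands can query inside it. A hypothetical helper (not part of the repo) capturing that pattern:

```ts
// Hypothetical helper, not from the repo: yields the iframe's <body>
// as a Cypress subject so specs can .find() elements inside it.
const getIframeBody = () =>
  cy
    .get("iframe")
    .its("0.contentDocument.body")
    .should("not.be.empty")
    .then(cy.wrap);

// Usage mirroring the specs:
// getIframeBody().find('button[data-test="New Table"]').click();
```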
@@ -1,81 +0,0 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Graph API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.graph);
  });

  it("Create a new graph in Graph API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
    const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Graph"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .should("be.visible")
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(graphId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(partitionKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", graphId);
    });
  });
});
@@ -1,80 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test - createDatabase", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find("#submitBtnAddCollection")
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
@@ -1,96 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in Mongo API - Autopilot", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('div[class="throughputModeContainer"]')
        .should("be.visible")
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
          expect(input.get(1).textContent, "second item").contains("Manual");
        });

      cy.wrap($body)
        .find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
        .check();

      cy.wrap($body)
        .find('select[name="autoPilotTiers"]')
        // .eq(1).should('contain', '4,000 RU/s');
        // // .select('4,000 RU/s').should('have.value', '1');
        .find('option[value="2"]')
        .then($element => $element.get(1).setAttribute("selected", "selected"));

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
@@ -1,67 +0,0 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it.skip("Create a new collection in existing database in Mongo API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find('span[class="nodeLabel"]')
        .should("be.visible")
        .then($span => {
          const dbId1 = $span.text();
          cy.log("DBBB", dbId1);

          cy.wrap($body)
            .find('div[class="commandBarContainer"]')
            .should("be.visible")
            .find('button[data-test="New Collection"]')
            .should("be.visible")
            .click();

          cy.wrap($body)
            .find('div[class="contextual-pane-in"]')
            .should("be.visible")
            .find('span[id="containerTitle"]');

          cy.wrap($body)
            .find('input[data-test="addCollection-existingDatabase"]')
            .check();

          cy.wrap($body)
            .find('input[data-test="addCollection-existingDatabase"]')
            .type(dbId1);

          cy.wrap($body)
            .find('input[data-test="addCollection-collectionId"]')
            .type(collectionId);

          cy.wrap($body)
            .find('input[data-test="addCollection-partitionKeyValue"]')
            .type(sharedKey);

          cy.wrap($body)
            .find('input[data-test="addCollection-createCollection"]')
            .click();

          cy.wait(10000);

          cy.wrap($body)
            .find('div[data-test="resourceTreeId"]')
            .should("exist")
            .find('div[class="treeComponent dataResourceTree"]')
            .click()
            .should("contain", collectionId);
        });
    });
  });
});
@@ -1,203 +0,0 @@
const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context.skip("Mongo API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new collection in Mongo API - Provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find(".createNewDatabaseOrUseExisting")
        .should("have.length", 2)
        .and(input => {
          expect(input.get(0).textContent, "first item").contains("Create new");
          expect(input.get(1).textContent, "second item").contains("Use existing");
        });

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const collectionIdTitle = `Add Collection`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .uncheck();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[id="tab2"]')
        .check({ force: true });

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });

  it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Collection"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .uncheck();

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[id="tab1"]')
        .check({ force: true });

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId)
        .click()
        .should("contain", collectionId);
    });
  });
});
@@ -1,79 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("SQL API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString();
  });

  it("Create a new container in SQL API", () => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
    connectionString.loginUsingConnectionString();

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Container"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-createNewDatabase"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
        .check();

      cy.wrap($body)
        .find('input[data-test="addCollection-newDatabaseId"]')
        .type(dbId);

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-partitionKeyValue"]')
        .type(sharedKey);

      cy.wrap($body)
        .find("#submitBtnAddCollection")
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", dbId);
    });
  });
});
@@ -1,60 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

const connectionString = require("../../../utilities/connectionString");

let crypt = require("crypto");

context("Table API Test", () => {
  beforeEach(() => {
    connectionString.loginUsingConnectionString(connectionString.constants.table);
  });

  it("Create a new table in Table API", () => {
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");
      cy.wrap($body)
        .find('div[class="commandBarContainer"]')
        .should("be.visible")
        .find('button[data-test="New Table"]')
        .should("be.visible")
        .click();

      cy.wrap($body)
        .find('div[class="contextual-pane-in"]')
        .should("be.visible")
        .find('span[id="containerTitle"]');

      cy.wrap($body)
        .find('input[data-test="addCollection-collectionId"]')
        .type(collectionId);

      cy.wrap($body)
        .find('input[data-test="databaseThroughputValue"]')
        .should("have.value", "400");

      cy.wrap($body)
        .find('input[data-test="addCollection-createCollection"]')
        .click();

      cy.wait(10000);

      cy.wrap($body)
        .find('div[data-test="resourceTreeId"]')
        .should("exist")
        .find('div[class="treeComponent dataResourceTree"]')
        .should("contain", collectionId);
    });
  });
});
@@ -1,55 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appear in the Data Explorer list on the left side of the screen.

let crypt = require("crypto");

context("Emulator - createDatabase", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
  const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
  const collectionIdTitle = `Add Collection`;
  const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;

  it("Create a new collection", () => {
    cy.contains("New Container").click();

    // cy.contains(collectionIdTitle);

    cy.get(".createNewDatabaseOrUseExisting")
      .should("have.length", 2)
      .and(input => {
        expect(input.get(0).textContent, "first item").contains("Create new");
        expect(input.get(1).textContent, "second item").contains("Use existing");
      });

    cy.get('input[data-test="addCollection-createNewDatabase"]').check();

    cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);

    cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);

    cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");

    cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);

    cy.get('input[data-test="addCollection-createCollection"]').click();

    cy.get('div[data-test="resourceTreeId"]').should("exist");

    cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);

    cy.get('div[data-test="databaseList"]').should("contain", collectionId);
  });
});
@@ -1,65 +0,0 @@
// 1. Click on "New Database" on the command bar
// 2. A pane with the title "Add Database" should appear on the right side of the screen
//    i. It includes an input box for the database Id.
//    ii. It includes a checkbox called "Provision throughput".
//    iii. When the checkbox is marked, a new input with a throughput control lets you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have a list item called "Scale" that, once clicked, shows the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permitted range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab, reopen it, and verify that the input contains the last saved value.

const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;

context("Emulator - Create database -> container -> item", () => {
  beforeEach(async () => {
    const { resources } = await client.databases.readAll().fetchAll();
    for (const database of resources) {
      await client.database(database.id).delete();
    }
  });

  it("creates a new database", () => {
    cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
    cy.contains("New Container").click();
    cy.get("[data-test=addCollection-newDatabaseId]").click();
    cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
    cy.get("[data-test=addCollection-collectionId]").click();
    cy.get("[data-test=addCollection-collectionId]").type(collectionId);
    cy.get("[data-test=addCollection-partitionKeyValue]").click();
    cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
    cy.get('input[name="createCollection"]').click();
    cy.get(".dataResourceTree").should("contain", databaseId);
    cy.get(".dataResourceTree")
      .contains(databaseId)
      .click();
    cy.get(".dataResourceTree").should("contain", collectionId);
    cy.get(".dataResourceTree")
      .contains(collectionId)
      .click();
    cy.get(".dataResourceTree")
      .contains("Items")
      .click();
    cy.get(".dataResourceTree")
      .contains("Items")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer")
      .contains("New Item")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".commandBarContainer")
      .contains("Save")
      .click();
    cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
    cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
  });
});
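The spec above seeds a clean emulator through `utilities/cosmosClient`, which this diff does not show. A plausible sketch, assuming it simply points `@azure/cosmos` at the local emulator using the emulator's well-known, non-secret master key:

```ts
// Hypothetical sketch of utilities/cosmosClient (the real file is not in this diff).
import { CosmosClient } from "@azure/cosmos";

// The Cosmos DB Emulator ships with this fixed, publicly documented key.
const EMULATOR_KEY =
  "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";

export const client = new CosmosClient({
  endpoint: "https://localhost:8081/",
  key: EMULATOR_KEY
});
```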
@@ -1,46 +0,0 @@
// 1. Click last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select "Delete Container" option in the dropdown
// 5. On Selection, Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted collection should not appear under the database

let crypt = require("crypto");

context("Emulator - deleteCollection", () => {
  beforeEach(() => {
    cy.visit("http://localhost:1234/explorer.html");
  });

  it("Delete a collection", () => {
    cy.get(".databaseId")
      .last()
      .click();

    cy.get(".collectionList")
      .last()
      .then($id => {
        const collectionId = $id.text();

        cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");

        cy.get('span[data-test="collectionEllipsisMenu"]')
          .invoke("show")
          .last()
          .click();

        cy.get('div[data-test="collectionContextMenu"]')
          .contains("Delete Container")
          .click({ force: true });

        cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());

        cy.get('input[data-test="deleteCollection"]').click();

        cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);

        cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
      });
  });
});
@@ -1,83 +0,0 @@
// 1. Click the last database in the resource tree
// 2. Select the context menu within the database
// 3. Select the "Delete Database" option in the dropdown
// 4. On selection, the Delete Database pane opens on the right side
// 5. Enter the same database id that is to be deleted and click OK
// 6. The resource tree then refreshes; the deleted database should no longer appear in the resource tree

let crypt = require("crypto");

context("Emulator - deleteDatabase", () => {
  beforeEach(() => {
    const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
    const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
    let db_rid = "";
    const date = new Date().toUTCString();
    let authToken = "";
    cy.visit("http://localhost:1234/explorer.html");

    // Create an auth token for database creation
    cy.request({
      method: "GET",
      url: "https://localhost:8081/_explorer/authorization/post/dbs/",
      headers: {
        "x-ms-date": date,
        authorization: "-"
      }
    })
      .then(response => {
        authToken = response.body.Token; // auth token used by the create request below
        return new Cypress.Promise((resolve, reject) => {
          return resolve();
        });
      })
      .then(() => {
        cy.request({
          method: "POST",
          url: "https://localhost:8081/dbs",
          headers: {
            "x-ms-date": date,
            authorization: authToken,
            "x-ms-version": "2018-12-31"
          },
          body: {
            id: dbId
          }
        }).then(response => {
          cy.log("Response", response);
          db_rid = response.body._rid;
          return new Cypress.Promise((resolve, reject) => {
            cy.log("Rid", db_rid);
            return resolve();
          });
        });
      });
  });

  it("Delete a database", () => {
    cy.get('span[data-test="refreshTree"]').click();

    cy.get(".databaseId")
      .last()
      .then($id => {
        const dbId = $id.text();

        cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");

        cy.get('span[data-test="databaseEllipsisMenu"]')
          .invoke("show")
          .last()
          .click();

        cy.get('div[data-test="databaseContextMenu"]')
          .contains("Delete Database")
          .click({ force: true });

        cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());

        cy.get('input[data-test="deleteDatabase"]').click();

        cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
      });
  });
});
@@ -1,35 +0,0 @@
# Notebook end-to-end tests

This describes how to run the tests locally.

## Stand up a local notebook container instance

Instructions on how to build and run the container are [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29).

## Run a local data explorer

Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).

Make sure you can run Data Explorer locally from the web browser.

## Run cypress tests

1. Edit the URL for your DataExplorer in the `.spec.ts` file.
2. Run the test:

```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```

To run in debug mode:

```bash
npm run test:debug
```

This opens the Cypress UI.
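For specs that log in through the hosted explorer, there is an alternative to editing the URL by hand: the connection-string helper in `utilities/` (shown later in this change) reads `Cypress.env("TEST_ENDPOINT")`, and Cypress exposes `CYPRESS_`-prefixed environment variables through `Cypress.env()`. A possible invocation, assuming the spec uses that helper (the URL below is just a placeholder):

```bash
# Point the hosted-explorer login helper at a different Data Explorer
# instance without editing the spec file.
CYPRESS_TEST_ENDPOINT="https://localhost:1234/hostedExplorer.html" \
  npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```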

## Troubleshooting

* Test runs are recorded in the `videos` folder.
* Cypress does not support hover; workarounds are [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).

## References

* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)
@@ -1,93 +0,0 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create a new notebook and run some code", () => {
    // Create new notebook
    cy.contains("New Notebook").click();

    // Check tab name
    cy.get("li.tabList .tabNavText").should($span => {
      const text = $span.text();
      expect(text).to.match(/^Untitled.*\.ipynb$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container")
      .as("cellContainer")
      .click();

    // Type in some code
    cy.get("@cellContainer").type("2+4");

    // Execute
    cy.get('[data-test="Run"]')
      .first()
      .click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "6");
    });

    // Restart kernel
    cy.get('[data-test="Run"] button')
      .eq(-1)
      .click();
    cy.get("li")
      .contains("Restart Kernel")
      .click();

    // Wait for python3 | restarting status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*restarting$/);
    });

    // Wait for python3 | idle status
    cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
      const text = $p.text();
      expect(text).to.match(/^python3.*idle$/);
    });

    // Click on a cell
    cy.get(".cell-container")
      .as("cellContainer")
      .find(".input")
      .as("codeInput")
      .click();

    // Type in some code
    cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");

    // Execute
    cy.get('[data-test="Run"]')
      .first()
      .click();

    // Verify results
    cy.get("@cellContainer").within(() => {
      cy.get("pre code span").should("contain", "9");
    });
  });
});
@@ -1,172 +0,0 @@
context("Resource tree notebook file manipulation", () => {
  const timeout = 15000; // in ms
  const explorerUrl =
    "https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";

  /**
   * Wait for UI to be ready
   */
  const waitForReady = () => {
    cy.get(".splashScreenContainer", { timeout }).should("be.visible");
  };

  const clickContextMenuAndSelectOption = (nodeLabel, option) => {
    cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains(option)
      .click();
  };

  const createFolder = folder => {
    clickContextMenuAndSelectOption("My Notebooks/", "New Directory");

    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]').type(folder);
      cy.get("form").submit();
    });
  };

  const deleteItem = nodeName => {
    clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
  };

  beforeEach(() => {
    cy.visit(explorerUrl);
    waitForReady();
  });

  it("Create and remove a directory", () => {
    const folder = "e2etest_folder1";
    createFolder(folder);
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
    deleteItem(`${folder}/`);
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
  });

  it("Create and rename a directory", () => {
    const folder = "e2etest_folder2";
    const renamedFolder = "e2etest_folder2_renamed";
    createFolder(folder);

    // Rename
    clickContextMenuAndSelectOption(`${folder}/`, "Rename");
    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]')
        .clear()
        .type(renamedFolder);
      cy.get("form").submit();
    });
    cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");

    deleteItem(`${renamedFolder}/`);
    cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
  });

  it("Create a notebook inside a directory", () => {
    const folder = "e2etest_folder3";
    const newNotebookName = "Untitled.ipynb";
    createFolder(folder);
    clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");

    // Verify tab is open
    cy.get(".tabList")
      .contains(newNotebookName)
      .should("exist");

    // Close tab
    cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
      .find(".cancelButton")
      .click();
    // When running from the command line, closing the tab is too fast
    cy.get("body").then($body => {
      if ($body.find(".ms-Dialog-main").length) {
        // For some reason, this does not work:
        // cy.get(".ms-Dialog-main").contains("Close").click();
        cy.get(".ms-Dialog-main .ms-Button--primary").click();
      }
    });

    // Expand folder node
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");

    // Delete notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Delete")
      .click();

    // Confirm
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");

    deleteItem(`${folder}/`);
  });

  it("Create and rename a notebook inside a directory", () => {
    const folder = "e2etest_folder4";
    const newNotebookName = "Untitled.ipynb";
    const renamedNotebookName = "mynotebook.ipynb";
    createFolder(folder);
    clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");

    // Close tab
    cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
      .find(".cancelButton")
      .click();
    cy.get("body").then($body => {
      if ($body.find(".ms-Dialog-main").length) {
        // For some reason, this does not work:
        // cy.get(".ms-Dialog-main").contains("Close").click();
        cy.get(".ms-Dialog-main .ms-Button--primary").click();
      }
    });

    // Expand folder node
    cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");

    // Rename notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Rename")
      .click();

    cy.get("#stringInputPane").within(() => {
      cy.get('input[name="collectionIdConfirmation"]')
        .clear()
        .type(renamedNotebookName);
      cy.get("form").submit();
    });
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");

    // Delete notebook
    cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
      .find("button.treeMenuEllipsis")
      .click();
    cy.get('[data-test="treeComponentMenuItemContainer"]')
      .contains("Delete")
      .click();

    // Confirm
    cy.get(".ms-Dialog-main")
      .contains("Delete")
      .click();
    // Give it time to settle
    cy.wait(1000);
    deleteItem(`${folder}/`);
  });
});
3066 cypress/package-lock.json (generated): file diff suppressed because it is too large
@@ -1,25 +0,0 @@
{
  "name": "cosmos-explorer-cypress",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "cypress run",
    "wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
    "test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
    "test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
    "test:debug": "cypress open"
  },
  "devDependencies": {
    "cypress": "^4.8.0",
    "mocha": "^7.0.1",
    "mochawesome": "^4.1.0",
    "mochawesome-merge": "^4.0.1",
    "mochawesome-report-generator": "^4.1.0",
    "typescript": "3.4.3",
    "wait-on": "^4.0.2"
  },
  "dependencies": {
    "@microsoft/applicationinsights-web": "^2.5.2"
  }
}
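For context on the `test:ci` script above: it simply gates the headless Cypress run on both endpoints answering. A rough local equivalent, assuming the dev server and the emulator are already listening on these ports:

```bash
# Block until the dev server (1234) and the emulator gateway (8081) respond,
# then run the suite headlessly, mirroring the test:ci script above.
npx wait-on -t 240000 -i 5000 -v \
  "https-get://0.0.0.0:1234/" \
  "https-get://0.0.0.0:8081/_explorer/index.html"
npx cypress run --browser edge --headless
```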
@@ -1,23 +0,0 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");

const appInsights = new appInsightsLib.ApplicationInsights({
  config: {
    instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
    /* ...Other Configuration Options... */
  }
});

appInsights.loadAppInsights();

Cypress.on("fail", (error, runnable) => {
  // App Insights will record the failed tests for Create Collection
  let message = JSON.stringify(runnable.title);
  appInsights.trackTrace({
    message: `${message}`,
    properties: {
      passed: false,
      error: error
    }
  });
  throw error; // rethrow so the test still fails
});
@@ -1,11 +0,0 @@
{
  "compilerOptions": {
    "strict": true,
    "noEmit": true,
    "module": "commonjs",
    "target": "es5",
    "lib": ["es5", "dom", "es6"],
    "types": ["cypress", "node"]
  },
  "include": ["**/*.ts", "**/*.spec.ts"]
}
@@ -1,41 +0,0 @@
module.exports = {
  loginUsingConnectionString: function() {
    const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
    const timeout = 15000;

    cy.visit(prodUrl);
    cy.get('iframe[id="explorerMenu"]').should("be.visible");

    cy.get("iframe").then($element => {
      const $body = $element.contents().find("body");

      cy.wrap($body)
        .find("#connectExplorer")
        .should("exist")
        .find("div[class='connectExplorer']")
        .should("exist")
        .find("p[class='welcomeText']")
        .should("exist");

      cy.wrap($body.find("div > p.switchConnectTypeText"))
        .should("exist")
        .last()
        .click({ force: true });

      const secret = Cypress.env("CONNECTION_STRING");

      cy.wrap($body)
        .find("input[class='inputToken']")
        .should("exist")
        .type(secret, {
          force: true
        });

      cy.wrap($body.find("input[value='Connect']"), { timeout })
        .first()
        .click({ force: true });

      cy.wait(15000);
    });
  }
};
@@ -1,6 +0,0 @@
const { CosmosClient } = require("@azure/cosmos");

module.exports = new CosmosClient({
  endpoint: "https://0.0.0.0:8081",
  key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});
177 externals/jquery.contextMenu.css (vendored)
@@ -1,177 +0,0 @@
/*!
 * jQuery contextMenu - Plugin for simple contextMenu handling
 *
 * Version: 1.6.6
 *
 * Authors: Rodney Rehm, Addy Osmani (patches for FF)
 * Web: http://medialize.github.com/jQuery-contextMenu/
 *
 * Licensed under
 *   MIT License http://www.opensource.org/licenses/mit-license
 *   GPL v3 http://opensource.org/licenses/GPL-3.0
 *
 */

.context-menu-list {
  z-index: 1001;
  position: fixed;
  background: white;
  border: solid 1px gainsboro;
  box-shadow: 4px 4px 4px -2px #888888;
  padding: 8px 0px 8px 0px;
  line-height: 25px;
  width: 254px;
  list-style: none;
  margin-left: -10px;
  outline: 0px #fff;
}

.context-menu-item {
  padding: 2px 2px 2px 31px;
  background-color: #fff;
  position: relative;
  -webkit-user-select: none;
  -moz-user-select: -moz-none;
  -ms-user-select: none;
  user-select: none;
}

.context-menu-separator {
  padding-bottom: 0;
  border-bottom: 1px solid #DDD;
}

.context-menu-item>label>input,
.context-menu-item>label>textarea {
  -webkit-user-select: text;
  -moz-user-select: text;
  -ms-user-select: text;
  user-select: text;
  margin-left: -10px;
}

.context-menu-item:hover {
  cursor: pointer;
  background-color: #eeeeee;
}

.context-menu-item.disabled {
  color: #666;
}

.context-menu-input.hover,
.context-menu-item.disabled.hover {
  cursor: default;
  background-color: #EEE;
}

.context-menu-submenu:after {
  content: ">";
  color: #666;
  position: absolute;
  top: 0;
  right: 3px;
  z-index: 1;
}

/* icons
   #protip:
   In case you want to use sprites for icons (which I would suggest you do) have a look at
   http://css-tricks.com/13224-pseudo-spriting/ to get an idea of how to implement
   .context-menu-item.icon:before {}
*/

.context-menu-item.icon {
  min-height: 18px;
  background-repeat: no-repeat;
  background-position: 10px 7px;
}

.context-menu-item.icon:hover {
  min-height: 18px;
  background-repeat: no-repeat;
  background-position: 10px 7px;
}

/*.context-menu-item.icon-edit {
  background-image: url(images/page_white_edit.png);
}

.context-menu-item.icon-cut {
  background-image: url(images/cut.png);
}

.context-menu-item.icon-copy {
  background-image: url(images/page_white_copy.png);
}

.context-menu-item.icon-paste {
  background-image: url(images/page_white_paste.png);
}

.context-menu-item.icon-delete {
  background-image: url(images/page_white_delete.png);
}

.context-menu-item.icon-add {
  background-image: url(images/page_white_add.png);
}

.context-menu-item.icon-quit {
  background-image: url(images/door.png);
}*/

/* vertically align inside labels */

.context-menu-input>label>* {
  vertical-align: top;
}

/* position checkboxes and radios as icons */

.context-menu-input>label>input[type="checkbox"],
.context-menu-input>label>input[type="radio"] {
  margin-left: -17px;
}

.context-menu-input>label>span {
  margin-left: 5px;
}

.context-menu-input>label,
.context-menu-input>label>input[type="text"],
.context-menu-input>label>textarea,
.context-menu-input>label>select {
  display: block;
  width: 100%;
  -webkit-box-sizing: border-box;
  -moz-box-sizing: border-box;
  -ms-box-sizing: border-box;
  -o-box-sizing: border-box;
  box-sizing: border-box;
}

.context-menu-input>label>textarea {
  height: 100px;
}

.context-menu-item>.context-menu-list {
  display: none;
  /* re-positioned by js */
  right: -5px;
  top: 5px;
}

/*.context-menu-item.hover>.context-menu-list {
  display: block;
  padding-left: 5px;
}*/

.context-menu-accesskey {
  text-decoration: underline;
}
1686 externals/jquery.contextMenu.js (vendored): file diff suppressed because it is too large
@@ -3,7 +3,7 @@ const isCI = require("is-ci");
 module.exports = {
   launch: {
     headless: isCI,
-    slowMo: 30,
+    slowMo: 55,
     defaultViewport: null,
     ignoreHTTPSErrors: true,
     args: ["--disable-web-security"]
@@ -2082,7 +2082,8 @@ a:link {
 .resourceTreeAndTabs {
   display: flex;
   flex: 1 1 auto;
-  overflow: auto;
+  overflow-x: auto;
+  overflow-y: hidden;
   height: 100%;
 }

@@ -3022,3 +3023,7 @@ settings-pane {
 .infoBoxContent a {
   color: @AccentMediumHigh
 }
+
+.collapsibleSection :hover{
+  cursor: pointer;
+}
3792 package-lock.json (generated): file diff suppressed because it is too large

18 package.json
@@ -4,11 +4,13 @@
   "description": "Cosmos Explorer",
   "main": "index.js",
   "dependencies": {
     "@azure/arm-cosmosdb": "9.1.0",
     "@azure/cosmos": "3.9.0",
-    "@azure/cosmos-language-service": "0.0.4",
+    "@azure/identity": "1.1.0",
+    "@azure/cosmos-language-service": "0.0.5",
     "@jupyterlab/services": "6.0.0-rc.2",
     "@jupyterlab/terminal": "3.0.0-rc.2",
-    "@microsoft/applicationinsights-web": "2.5.8",
+    "@microsoft/applicationinsights-web": "2.5.9",
     "@nteract/commutable": "7.3.2",
     "@nteract/connected-components": "6.8.2",
     "@nteract/core": "15.1.0",
@@ -21,7 +23,7 @@
     "@nteract/jupyter-widgets": "2.0.0",
     "@nteract/logos": "1.0.0",
     "@nteract/markdown": "4.4.0",
-    "@nteract/monaco-editor": "3.2.0",
+    "@nteract/monaco-editor": "3.2.2",
     "@nteract/octicons": "2.0.0",
     "@nteract/outputs": "3.0.9",
     "@nteract/presentational-components": "3.0.7",
@@ -66,7 +68,7 @@
     "jquery-ui-dist": "1.12.1",
     "knockout": "3.5.1",
     "mkdirp": "1.0.4",
-    "monaco-editor": "0.15.6",
+    "monaco-editor": "0.18.1",
     "object.entries": "1.1.0",
     "office-ui-fabric-react": "7.134.1",
     "p-retry": "4.2.0",
@@ -88,8 +90,8 @@
     "styled-components": "4.3.2",
     "text-encoding": "0.7.0",
     "underscore": "1.9.1",
-    "utility-types": "3.10.0",
+    "url-polyfill": "1.1.7",
+    "utility-types": "3.10.0",
     "webcrypto-liner": "1.1.4",
     "webfontloader": "1.6.28",
     "whatwg-fetch": "3.0.0"
@@ -115,7 +117,7 @@
     "@types/prop-types": "15.5.8",
     "@types/puppeteer": "3.0.1",
     "@types/q": "1.5.1",
-    "@types/react": "16.9.49",
+    "@types/react": "16.9.56",
     "@types/react-dom": "16.0.7",
     "@types/react-notification-system": "0.2.39",
     "@types/react-redux": "7.1.7",
@@ -194,8 +196,8 @@
     "compile": "tsc",
     "compile:contracts": "tsc -p ./tsconfig.contracts.json",
     "compile:strict": "tsc -p ./tsconfig.strict.json",
-    "format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
-    "format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
+    "format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
+    "format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
     "lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
     "build:contracts": "npm run compile:contracts",
     "strictEligibleFiles": "node ./strict-migration-tools/index.js",
@@ -3,7 +3,6 @@
   "offerThroughput": 400,
   "databaseLevelThroughput": false,
   "collectionId": "Persons",
-  "rupmEnabled": false,
   "partitionKey": { "kind": "Hash", "paths": ["/name"] },
   "data": [
     "g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)",
@@ -13,4 +12,4 @@
     "g.V('1').addE('knows').to(g.V('2')).outV().addE('knows').to(g.V('3'))",
     "g.V('3').addE('knows').to(g.V('4'))"
   ]
 }
@@ -1,4 +1,3 @@
-import { AutopilotTier } from "../Contracts/DataModels";
 import { HashMap } from "./HashMap";

 export class AuthorizationEndpoints {
@@ -109,15 +108,12 @@ export class CapabilityNames {
export class Features {
  public static readonly cosmosdb = "cosmosdb";
  public static readonly enableChangeFeedPolicy = "enablechangefeedpolicy";
  public static readonly enableRupm = "enablerupm";
  public static readonly executeSproc = "dataexplorerexecutesproc";
  public static readonly hostedDataExplorer = "hosteddataexplorerenabled";
  public static readonly enableTtl = "enablettl";
  public static readonly enableNotebooks = "enablenotebooks";
  public static readonly enableGalleryPublish = "enablegallerypublish";
  public static readonly enableCodeOfConduct = "enablecodeofconduct";
  public static readonly enableLinkInjection = "enablelinkinjection";
  public static readonly enableSettingsV2 = "enablesettingsv2";
  public static readonly enableSpark = "enablespark";
  public static readonly livyEndpoint = "livyendpoint";
  public static readonly notebookServerUrl = "notebookserverurl";
@@ -125,10 +121,17 @@ export class Features {
  public static readonly notebookBasePath = "notebookbasepath";
  public static readonly canExceedMaximumValue = "canexceedmaximumvalue";
  public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
  public static readonly enableAutoPilotV2 = "enableautopilotv2";
  public static readonly ttl90Days = "ttl90days";
  public static readonly enableRightPanelV2 = "enablerightpanelv2";
  public static readonly enableSchema = "enableschema";
  public static readonly enableSDKoperations = "enablesdkoperations";
  public static readonly showMinRUSurvey = "showminrusurvey";
}

// flight names returned from the portal are always lowercase
export class Flights {
  public static readonly SettingsV2 = "settingsv2";
  public static readonly MongoIndexEditor = "mongoindexeditor";
}

export class AfecFeatures {
@@ -176,11 +179,6 @@ export class CassandraBackend {
   public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
 }

-export class RUPMStates {
-  public static on: string = "on";
-  public static off: string = "off";
-}
-
 export class Queries {
   public static CustomPageOption: string = "custom";
   public static UnlimitedPageOption: string = "unlimited";
@@ -257,7 +255,6 @@ export class HttpHeaders {
   public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere";
   public static autoPilotThroughput = "autoscaleSettings";
   public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings";
-  public static autoPilotTier = "x-ms-cosmos-offer-autopilot-tier";
   public static partitionKey: string = "x-ms-documentdb-partitionkey";
   public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput";
   public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot";
@@ -402,54 +399,6 @@ export enum ConflictOperationType {
   Delete = "delete"
 }

-export class AutoPilot {
-  public static tier1Text: string = "4,000 RU/s";
-  public static tier2Text: string = "20,000 RU/s";
-  public static tier3Text: string = "100,000 RU/s";
-  public static tier4Text: string = "500,000 RU/s";
-
-  public static tierText = {
-    [AutopilotTier.Tier1]: "Tier 1",
-    [AutopilotTier.Tier2]: "Tier 2",
-    [AutopilotTier.Tier3]: "Tier 3",
-    [AutopilotTier.Tier4]: "Tier 4"
-  };
-
-  public static tierMaxRus = {
-    [AutopilotTier.Tier1]: 2000,
-    [AutopilotTier.Tier2]: 20000,
-    [AutopilotTier.Tier3]: 100000,
-    [AutopilotTier.Tier4]: 500000
-  };
-
-  public static tierMinRus = {
-    [AutopilotTier.Tier1]: 0,
-    [AutopilotTier.Tier2]: 0,
-    [AutopilotTier.Tier3]: 0,
-    [AutopilotTier.Tier4]: 0
-  };
-
-  public static tierStorageInGB = {
-    [AutopilotTier.Tier1]: 50,
-    [AutopilotTier.Tier2]: 200,
-    [AutopilotTier.Tier3]: 1000,
-    [AutopilotTier.Tier4]: 5000
-  };
-}
-
-export class DataExplorerVersions {
-  public static readonly v_1_0_0: string = "1.0.0";
-  public static readonly v_1_0_1: string = "1.0.1";
-}
-
-export class DataExplorerFeatures {
-  public static offerCache: string = "OfferCache";
-}
-
-export const DataExplorerFeaturesVersions: any = {
-  OfferCache: DataExplorerVersions.v_1_0_1
-};
-
+export const EmulatorMasterKey =
+  //[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")]
+  "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";
@@ -112,7 +112,6 @@ describe("endpoint", () => {

 describe("requestPlugin", () => {
   beforeEach(() => {
-    delete window.dataExplorerPlatform;
     resetConfigContext();
   });

@@ -1,6 +1,7 @@
 import * as Cosmos from "@azure/cosmos";
 import { RequestInfo, setAuthorizationTokenHeaderUsingMasterKey } from "@azure/cosmos";
 import { configContext, Platform } from "../ConfigContext";
+import { getErrorMessage } from "./ErrorHandlingUtils";
 import { logConsoleError } from "../Utils/NotificationConsoleUtils";
 import { EmulatorMasterKey, HttpHeaders } from "./Constants";
 import { userContext } from "../UserContext";
@@ -69,7 +70,7 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
     const result = JSON.parse(await response.json());
     return result;
   } catch (error) {
-    logConsoleError(`Failed to get authorization headers for ${resourceType}: ${JSON.stringify(error)}`);
+    logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
     return Promise.reject(error);
   }
 }
@@ -1,214 +0,0 @@
import {
  ConflictDefinition,
  FeedOptions,
  ItemDefinition,
  OfferDefinition,
  QueryIterator,
  Resource
} from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import Q from "q";
import { configContext, Platform } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import { OfferUtils } from "../Utils/OfferUtils";
import * as Constants from "./Constants";
import { client } from "./CosmosClient";
import * as HeadersUtility from "./HeadersUtility";
import { sendCachedDataMessage } from "./MessageHandler";

export function getCommonQueryOptions(options: FeedOptions): any {
  const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
  options = options || {};
  options.populateQueryMetrics = true;
  options.enableScanInQuery = options.enableScanInQuery || true;
  if (!options.partitionKey) {
    options.forceQueryPlan = true;
  }
  options.maxItemCount =
    options.maxItemCount ||
    (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
    Constants.Queries.itemsPerPage;
  options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);

  return options;
}

export function queryDocuments(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
  options = getCommonQueryOptions(options);
  const documentsIterator = client()
    .database(databaseId)
    .container(containerId)
    .items.query(query, options);
  return Q(documentsIterator);
}

export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
  const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
  const partitionKeyValue: any = conflictId.partitionKeyValue;

  return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
}

export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
  if (!partitionKeyDefinition) {
    return undefined;
  }

  if (partitionKeyValue === undefined) {
    return [{}];
  }

  return [partitionKeyValue];
}

export function updateOffer(
  offer: DataModels.Offer,
  newOffer: DataModels.Offer,
  options?: RequestOptions
): Q.Promise<DataModels.Offer> {
  return Q(
    client()
      .offer(offer.id)
      // TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
      .replace((newOffer as unknown) as OfferDefinition, options)
      .then(response => {
        return Promise.all([refreshCachedOffers(), refreshCachedResources()]).then(() => response.resource);
      })
  );
}

export function updateDocument(
  collection: ViewModels.CollectionBase,
  documentId: DocumentId,
  newDocument: any
): Q.Promise<any> {
  const partitionKey = documentId.partitionKeyValue;

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), partitionKey)
      .replace(newDocument)
      .then(response => response.resource)
  );
}

export function executeStoredProcedure(
  collection: ViewModels.Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: any,
  params: any[]
): Q.Promise<any> {
  // TODO remove this deferred. Kept it because of timeout code at bottom of function
  const deferred = Q.defer<any>();

  client()
    .database(collection.databaseId)
    .container(collection.id())
    .scripts.storedProcedure(storedProcedure.id())
    .execute(partitionKeyValue, params, { enableScriptLogging: true })
    .then(response =>
      deferred.resolve({
        result: response.resource,
        scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
      })
    )
    .catch(error => deferred.reject(error));

  return deferred.promise.timeout(
    Constants.ClientDefaults.requestTimeoutMs,
    `Request timed out while executing stored procedure ${storedProcedure.id()}`
  );
}

export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .items.create(newDocument)
      .then(response => response.resource)
  );
}

export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  const partitionKey = documentId.partitionKeyValue;

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), partitionKey)
      .read()
      .then(response => response.resource)
  );
}

export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  const partitionKey = documentId.partitionKeyValue;

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), partitionKey)
      .delete()
  );
}

export function deleteConflict(
  collection: ViewModels.CollectionBase,
  conflictId: ConflictId,
  options: any = {}
): Q.Promise<any> {
  options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);

  return Q(
    client()
      .database(collection.databaseId)
      .container(collection.id())
      .conflict(conflictId.id())
      .delete(options)
  );
}

export function refreshCachedOffers(): Q.Promise<void> {
  if (configContext.platform === Platform.Portal) {
    return sendCachedDataMessage(MessageTypes.RefreshOffers, []);
  } else {
    return Q();
  }
}

export function refreshCachedResources(options?: any): Q.Promise<void> {
  if (configContext.platform === Platform.Portal) {
    return sendCachedDataMessage(MessageTypes.RefreshResources, []);
  } else {
    return Q();
  }
}

export function queryConflicts(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
  const documentsIterator = client()
    .database(databaseId)
    .container(containerId)
    .conflicts.query(query, options);
  return Q(documentsIterator);
}
@@ -1,279 +0,0 @@
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import Q from "q";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
import * as Constants from "./Constants";
import { sendNotificationForError } from "./dataAccess/sendNotificationForError";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
import * as Logger from "./Logger";

// TODO: Log all promise resolutions and errors with verbosity levels
export function queryDocuments(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
  return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
}

export function queryConflicts(
  databaseId: string,
  containerId: string,
  query: string,
  options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
  return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
}

export function getEntityName() {
  const defaultExperience =
    window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
  if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
    return "document";
  }
  return "item";
}

export function executeStoredProcedure(
  collection: ViewModels.Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: any,
  params: any[]
): Q.Promise<any> {
  var deferred = Q.defer<any>();

  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
  DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
    .then(
      (response: any) => {
        deferred.resolve(response);
        logConsoleInfo(
          `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
        );
      },
      (error: any) => {
        logConsoleError(
          `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}: ${JSON.stringify(
            error
          )}`
        );
        Logger.logError(JSON.stringify(error), "ExecuteStoredProcedure", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function queryDocumentsPage(
  resourceName: string,
  documentsIterator: MinimalQueryIterator,
  firstItemIndex: number,
  options: any
): Q.Promise<ViewModels.QueryResults> {
  var deferred = Q.defer<ViewModels.QueryResults>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
  Q(nextPage(documentsIterator, firstItemIndex))
    .then(
      (result: ViewModels.QueryResults) => {
        const itemCount = (result.documents && result.documents.length) || 0;
        logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
        deferred.resolve(result);
      },
      (error: any) => {
        logConsoleError(`Failed to query ${entityName} for container ${resourceName}: ${JSON.stringify(error)}`);
        Logger.logError(JSON.stringify(error), "QueryDocumentsPage", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
  DataAccessUtilityBase.readDocument(collection, documentId)
    .then(
      (document: any) => {
        deferred.resolve(document);
      },
      (error: any) => {
        logConsoleError(`Failed to read ${entityName} ${documentId.id()}: ${JSON.stringify(error)}`);
        Logger.logError(JSON.stringify(error), "ReadDocument", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function updateDocument(
  collection: ViewModels.CollectionBase,
  documentId: DocumentId,
  newDocument: any
): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
  DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
    .then(
      (updatedDocument: any) => {
        logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
        deferred.resolve(updatedDocument);
      },
      (error: any) => {
        logConsoleError(`Failed to update ${entityName} ${documentId.id()}: ${JSON.stringify(error)}`);
        Logger.logError(JSON.stringify(error), "UpdateDocument", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function updateOffer(
  offer: DataModels.Offer,
  newOffer: DataModels.Offer,
  options: RequestOptions
): Q.Promise<DataModels.Offer> {
  var deferred = Q.defer<any>();
  const clearMessage = logConsoleProgress(`Updating offer for resource ${offer.resource}`);
  DataAccessUtilityBase.updateOffer(offer, newOffer, options)
    .then(
      (replacedOffer: DataModels.Offer) => {
        logConsoleInfo(`Successfully updated offer for resource ${offer.resource}`);
        deferred.resolve(replacedOffer);
      },
      (error: any) => {
        logConsoleError(`Error updating offer for resource ${offer.resource}: ${JSON.stringify(error)}`);
        Logger.logError(
          JSON.stringify({
            oldOffer: offer,
            newOffer: newOffer,
            error: error
          }),
          "UpdateOffer",
          error.code
        );
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
  DataAccessUtilityBase.createDocument(collection, newDocument)
    .then(
      (savedDocument: any) => {
        logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
        deferred.resolve(savedDocument);
      },
      (error: any) => {
        logConsoleError(
          `Error while creating new ${entityName} for container ${collection.id()}:\n ${JSON.stringify(error)}`
        );
        Logger.logError(JSON.stringify(error), "CreateDocument", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
  var deferred = Q.defer<any>();
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
  DataAccessUtilityBase.deleteDocument(collection, documentId)
    .then(
      (response: any) => {
        logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
        deferred.resolve(response);
      },
      (error: any) => {
        logConsoleError(`Error while deleting ${entityName} ${documentId.id()}:\n ${JSON.stringify(error)}`);
        Logger.logError(JSON.stringify(error), "DeleteDocument", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function deleteConflict(
  collection: ViewModels.CollectionBase,
  conflictId: ConflictId,
  options?: any
): Q.Promise<any> {
  var deferred = Q.defer<any>();

  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
  DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
    .then(
      (response: any) => {
        logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
        deferred.resolve(response);
      },
      (error: any) => {
        logConsoleError(`Error while deleting conflict ${conflictId.id()}:\n ${JSON.stringify(error)}`);
        Logger.logError(JSON.stringify(error), "DeleteConflict", error.code);
        sendNotificationForError(error);
        deferred.reject(error);
      }
    )
    .finally(() => {
      clearMessage();
    });

  return deferred.promise;
}

export function refreshCachedResources(options: any = {}): Q.Promise<void> {
  return DataAccessUtilityBase.refreshCachedResources(options);
}

export function refreshCachedOffers(): Q.Promise<void> {
  return DataAccessUtilityBase.refreshCachedOffers();
}
10 src/Common/DocumentUtility.ts (new file)
@@ -0,0 +1,10 @@
import { DefaultAccountExperienceType } from "../DefaultAccountExperienceType";
import { userContext } from "../UserContext";

export const getEntityName = (): string => {
  if (userContext.defaultExperience === DefaultAccountExperienceType.MongoDB) {
    return "document";
  }

  return "item";
};
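A brief usage sketch for the new helper (the call site below is hypothetical; only `getEntityName` comes from the file above). It mirrors how the legacy `DocumentClientUtilityBase` built its console messages:

```ts
import { getEntityName } from "./DocumentUtility";

// "document" on Mongo API accounts, "item" everywhere else, so the message
// reads e.g. "Creating new document for container orders".
const logCreateMessage = (containerId: string): string =>
  `Creating new ${getEntityName()} for container ${containerId}`;
```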
@@ -1,8 +1,6 @@
-export default class EnvironmentUtility {
-  public static normalizeArmEndpointUri(uri: string): string {
-    if (uri && uri.slice(-1) !== "/") {
-      return `${uri}/`;
-    }
-    return uri;
-  }
+export function normalizeArmEndpoint(uri: string): string {
+  if (uri && uri.slice(-1) !== "/") {
+    return `${uri}/`;
+  }
+  return uri;
 }
56 src/Common/ErrorHandlingUtils.ts (new file)
@@ -0,0 +1,56 @@
import { ARMError } from "../Utils/arm/request";
import { HttpStatusCodes } from "./Constants";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/SubscriptionType";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler";

export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
  const errorMessage = getErrorMessage(error);
  const errorCode = error instanceof ARMError ? error.code : undefined;

  // logs error to data explorer console
  const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
  logConsoleError(consoleErrorMessage);

  // logs error to both app insight and kusto
  logError(errorMessage, area, errorCode);

  // checks for errors caused by firewall and sends them to portal to handle
  sendNotificationForError(errorMessage, errorCode);
};

export const getErrorMessage = (error: string | Error): string => {
  const errorMessage = typeof error === "string" ? error : error.message;
  return replaceKnownError(errorMessage);
};

export const getErrorStack = (error: string | Error): string => {
  return typeof error === "string" ? undefined : error.stack;
};

const sendNotificationForError = (errorMessage: string, errorCode: number | string): void => {
  if (errorCode === HttpStatusCodes.Forbidden) {
    if (errorMessage?.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
      return;
    }
    sendMessage({
      type: MessageTypes.ForbiddenError,
      reason: errorMessage
    });
  }
};

const replaceKnownError = (errorMessage: string): string => {
  if (
    window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
    errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0
  ) {
    return "Database throughput is not supported for internal subscriptions.";
  } else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) {
    return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
  }

  return errorMessage;
};
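A minimal usage sketch for the new error utilities (the wrapper below is hypothetical; only `handleError` and `getErrorMessage` come from the file above): one call funnels a failure to the console, to App Insights/Kusto, and, for firewall-type 403s, to the portal.

```ts
import { getErrorMessage, handleError } from "./ErrorHandlingUtils";

// Hypothetical helper wrapping any data-access call.
async function withErrorHandling<T>(operation: () => Promise<T>, area: string, prefix: string): Promise<T> {
  try {
    return await operation();
  } catch (error) {
    handleError(error, area, prefix); // console + telemetry + portal notification
    throw new Error(getErrorMessage(error)); // rethrow with the cleaned-up message
  }
}
```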
@@ -1,24 +0,0 @@
import * as ErrorParserUtility from "./ErrorParserUtility";

describe("Error Parser Utility", () => {
  describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
    it("should parse a backend error correctly", () => {
      // A fake error matching what is thrown by the SDK on a bad collection create request
      const innerMessage =
        "The partition key component definition path '/asdwqr31 @#$#$WRadf' could not be accepted, failed near position '10'. Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
      const message = `Message: {\"Errors\":[\"${innerMessage}\"]}\r\nActivityId: 97b2e684-7505-4921-85f6-2513b9b28220, Request URI: /apps/89fdcf25-2a0b-4d2a-aab6-e161e565b26f/services/54911149-7bb1-4e7d-a1fa-22c8b36a4bb9/partitions/cc2a7a04-5f5a-4709-bcf7-8509b264963f/replicas/132304018743619218p, RequestStats: , SDK: Microsoft.Azure.Documents.Common/2.10.0`;
      const err = new Error(message) as any;
      err.code = 400;
      err.body = {
        code: "BadRequest",
        message
      };
      err.headers = {};
      err.activityId = "97b2e684-7505-4921-85f6-2513b9b28220";

      const parsedError = ErrorParserUtility.parse(err);
      expect(parsedError.length).toBe(1);
      expect(parsedError[0].message).toBe(innerMessage);
    });
  });
});
@@ -1,69 +0,0 @@
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";

export function replaceKnownError(err: string): string {
  if (
    window.dataExplorer.subscriptionType() === ViewModels.SubscriptionType.Internal &&
    err.indexOf("SharedOffer is Disabled for your account") >= 0
  ) {
    return "Database throughput is not supported for internal subscriptions.";
  } else if (err.indexOf("Partition key paths must contain only valid") >= 0) {
    return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
  }

  return err;
}

export function parse(err: any): DataModels.ErrorDataModel[] {
  try {
    return _parse(err);
  } catch (e) {
    return [<DataModels.ErrorDataModel>{ message: JSON.stringify(err) }];
  }
}

function _parse(err: any): DataModels.ErrorDataModel[] {
  var normalizedErrors: DataModels.ErrorDataModel[] = [];
  if (err.message && !err.code) {
    normalizedErrors.push(err);
  } else {
    const innerErrors: any[] = _getInnerErrors(err.message);
    normalizedErrors = innerErrors.map(innerError =>
      typeof innerError === "string" ? { message: innerError } : innerError
    );
  }

  return normalizedErrors;
}

function _getInnerErrors(message: string): any[] {
  /*
  The backend error message has an inner-message which is a stringified object.

  For SQL errors, the "errors" property is an array of SqlErrorDataModel.
  Example:
    "Message: {"errors":[{"severity":"Error","location":{"start":7,"end":8},"code":"SC1001","message":"Syntax error, incorrect syntax near '.'."}]}\r\nActivityId: d3300016d4084e310a, Request URI: /apps/12401f9e1df77/services/dc100232b1f44545/partitions/f86f3bc0001a2f78/replicas/13085003638s"

  For non-SQL errors the "Errors" property is an array of strings.
  Example:
    "Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p"
  */

  let innerMessage: any = null;

  const singleLineMessage = message.replace(/[\r\n]|\r|\n/g, "");
  try {
    // Multi-Partition error flavor
    const regExp = /^(.*)ActivityId: (.*)/g;
    const regString = regExp.exec(singleLineMessage);
    const innerMessageString = regString[1];
    innerMessage = JSON.parse(innerMessageString);
  } catch (e) {
    // Single-partition error flavor
    const regExp = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g;
    const regString = regExp.exec(singleLineMessage);
    const innerMessageString = regString[1];
    innerMessage = JSON.parse(innerMessageString);
  }

  return innerMessage.errors ? innerMessage.errors : innerMessage.Errors;
}
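The deleted parser's comment is easier to follow with the extraction run on a concrete message. A self-contained sketch of the same regex-and-JSON.parse step (the sample message is illustrative; this is not the deleted module itself):

// Example: pull the stringified inner object out of a single-partition
// gateway message and read its error array, whichever casing is present.
const raw =
  'Message: {"Errors":["Resource with specified id or name already exists"]}\r\n' +
  "ActivityId: 80005000008d40b6a, Request URI: /apps/example/replicas/1p";
const singleLine = raw.replace(/[\r\n]/g, "");
const match = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/.exec(singleLine);
const inner = match ? JSON.parse(match[1]) : undefined;
const errors = inner?.errors ?? inner?.Errors ?? [];
// errors === ["Resource with specified id or name already exists"]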
@@ -21,14 +21,8 @@ export function logWarning(message: string, area: string, code?: number): void {
  return _logEntry(entry);
}

export function logError(message: string | Error, area: string, code?: number): void {
  let logMessage: string;
  if (typeof message === "string") {
    logMessage = message;
  } else {
    logMessage = JSON.stringify(message, Object.getOwnPropertyNames(message));
  }
  const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, logMessage, area, code);
export function logError(errorMessage: string, area: string, code?: number | string): void {
  const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, errorMessage, area, code);
  return _logEntry(entry);
}

@@ -59,7 +53,7 @@ function _generateLogEntry(
  level: Diagnostics.LogEntryLevel,
  message: string,
  area: string,
  code?: number
  code?: number | string
): Diagnostics.LogEntry {
  return {
    timestamp: new Date().getUTCSeconds(),
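Widening `code` to `number | string` matters because the two data paths report differently: the SDK surfaces numeric HTTP status codes, while ARM surfaces string codes such as "BadRequest". Both call shapes are now accepted (the messages and area tags below are hypothetical):

// Example (illustrative values only)
logError("Rate limited while querying items", "QueryTab", 429);
logError("Subscription not registered for Cosmos DB", "CreateCollection", "BadRequest");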
@@ -1,46 +0,0 @@
import "jquery";
import * as Q from "q";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
import { userContext } from "../UserContext";

export class NotificationsClientBase {
  private _extensionEndpoint: string;
  private _notificationsApiSuffix: string;

  protected constructor(notificationsApiSuffix: string) {
    this._notificationsApiSuffix = notificationsApiSuffix;
  }

  public fetchNotifications(): Q.Promise<DataModels.Notification[]> {
    const deferred: Q.Deferred<DataModels.Notification[]> = Q.defer<DataModels.Notification[]>();
    const databaseAccount = userContext.databaseAccount;
    const subscriptionId = userContext.subscriptionId;
    const resourceGroup = userContext.resourceGroup;
    const url = `${this._extensionEndpoint}${this._notificationsApiSuffix}?accountName=${databaseAccount.name}&subscriptionId=${subscriptionId}&resourceGroup=${resourceGroup}`;
    const authorizationHeader: ViewModels.AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
    const headers: any = {};
    headers[authorizationHeader.header] = authorizationHeader.token;

    $.ajax({
      url: url,
      type: "GET",
      headers: headers,
      cache: false
    }).then(
      (notifications: DataModels.Notification[], textStatus: string, xhr: JQueryXHR<any>) => {
        deferred.resolve(notifications);
      },
      (xhr: JQueryXHR<any>, textStatus: string, error: any) => {
        deferred.reject(xhr.responseText);
      }
    );

    return deferred.promise;
  }

  public setExtensionEndpoint(extensionEndpoint: string): void {
    this._extensionEndpoint = extensionEndpoint;
  }
}
64 src/Common/OfferUtility.test.ts Normal file
@@ -0,0 +1,64 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";

describe("parseSDKOfferResponse", () => {
  it("manual throughput", () => {
    const mockOfferDefinition = {
      content: {
        offerThroughput: 500,
        collectionThroughputInfo: {
          minimumRUForCollection: 400,
          numPhysicalPartitions: 1
        }
      },
      id: "test"
    } as SDKOfferDefinition;

    const mockResponse = {
      resource: mockOfferDefinition
    } as OfferResponse;

    const expectedResult: Offer = {
      manualThroughput: 500,
      autoscaleMaxThroughput: undefined,
      minimumThroughput: 400,
      id: "test",
      offerDefinition: mockOfferDefinition,
      offerReplacePending: false
    };

    expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
  });

  it("autoscale throughput", () => {
    const mockOfferDefinition = {
      content: {
        offerThroughput: 400,
        collectionThroughputInfo: {
          minimumRUForCollection: 400,
          numPhysicalPartitions: 1
        },
        offerAutopilotSettings: {
          maxThroughput: 5000
        }
      },
      id: "test"
    } as SDKOfferDefinition;

    const mockResponse = {
      resource: mockOfferDefinition
    } as OfferResponse;

    const expectedResult: Offer = {
      manualThroughput: undefined,
      autoscaleMaxThroughput: 5000,
      minimumThroughput: 400,
      id: "test",
      offerDefinition: mockOfferDefinition,
      offerReplacePending: false
    };

    expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
  });
});
34 src/Common/OfferUtility.ts Normal file
@@ -0,0 +1,34 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
import { HttpHeaders } from "./Constants";

export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
  const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
  const offerContent = offerDefinition.content;
  if (!offerContent) {
    return undefined;
  }

  const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
  const autopilotSettings = offerContent.offerAutopilotSettings;

  if (autopilotSettings) {
    return {
      id: offerDefinition.id,
      autoscaleMaxThroughput: autopilotSettings.maxThroughput,
      manualThroughput: undefined,
      minimumThroughput,
      offerDefinition,
      offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
    };
  }

  return {
    id: offerDefinition.id,
    autoscaleMaxThroughput: undefined,
    manualThroughput: offerContent.offerThroughput,
    minimumThroughput,
    offerDefinition,
    offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
  };
};
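Exactly one of `manualThroughput` and `autoscaleMaxThroughput` is defined on the parsed offer (the presence of `offerAutopilotSettings` decides which), so consumers can branch on a single field. A hedged sketch of a consumer (`offerResponse` and the label text are illustrative):

// Example (illustrative consumer)
const offer = parseSDKOfferResponse(offerResponse);
const label =
  offer.autoscaleMaxThroughput !== undefined
    ? `Autoscale, max ${offer.autoscaleMaxThroughput} RU/s`
    : `Manual, ${offer.manualThroughput} RU/s`;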
41 src/Common/PortalNotifications.ts Normal file
@@ -0,0 +1,41 @@
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import { getAuthorizationHeader } from "../Utils/AuthorizationUtils";
import { userContext } from "../UserContext";
import { configContext, Platform } from "../ConfigContext";

const notificationsPath = () => {
  switch (configContext.platform) {
    case Platform.Hosted:
      return "/api/guest/notifications";
    case Platform.Portal:
      return "/api/notifications";
    default:
      throw new Error(`Unknown platform: ${configContext.platform}`);
  }
};

export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
  if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
    return [];
  }

  const databaseAccount = userContext.databaseAccount;
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const url = `${configContext.BACKEND_ENDPOINT}${notificationsPath()}?accountName=${
    databaseAccount.name
  }&subscriptionId=${subscriptionId}&resourceGroup=${resourceGroup}`;
  const authorizationHeader: ViewModels.AuthorizationTokenHeaderMetadata = getAuthorizationHeader();
  const headers = { [authorizationHeader.header]: authorizationHeader.token };

  const response = await window.fetch(url, {
    headers
  });

  if (!response.ok) {
    throw new Error(await response.text());
  }

  return (await response.json()) as DataModels.Notification[];
};
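This module replaces the jQuery/Q-based `NotificationsClientBase` deleted above with native `fetch` and promises, in line with the branch's jQuery-removal theme. The shape generalizes to any authorized GET; a hedged sketch (the `fetchJson` helper is hypothetical, not part of this diff):

// Example (hypothetical helper mirroring the pattern above)
async function fetchJson<T>(url: string, headerName: string, token: string): Promise<T> {
  const response = await window.fetch(url, { headers: { [headerName]: token } });
  if (!response.ok) {
    // Same convention as fetchPortalNotifications: reject with the body text
    throw new Error(await response.text());
  }
  return (await response.json()) as T;
}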
@@ -3,17 +3,18 @@ import * as _ from "underscore";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import Explorer from "../Explorer/Explorer";
import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
import DocumentId from "../Explorer/Tree/DocumentId";
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
import { QueryUtils } from "../Utils/QueryUtils";
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
import { createCollection } from "./dataAccess/createCollection";
import * as ErrorParserUtility from "./ErrorParserUtility";
import * as Logger from "./Logger";
import { handleError } from "./ErrorHandlingUtils";
import { createDocument } from "./dataAccess/createDocument";
import { deleteDocument } from "./dataAccess/deleteDocument";
import { queryDocuments } from "./dataAccess/queryDocuments";

export class QueriesClient {
  private static readonly PartitionKey: DataModels.PartitionKey = {
@@ -32,10 +33,7 @@ export class QueriesClient {
      return Promise.resolve(queriesCollection.rawDataModel);
    }

    const id = NotificationConsoleUtils.logConsoleMessage(
      ConsoleDataType.InProgress,
      "Setting up account for saving queries"
    );
    const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
    return createCollection({
      collectionId: SavedQueries.CollectionName,
      createNewDatabase: true,
@@ -46,33 +44,22 @@ export class QueriesClient {
    })
      .then(
        (collection: DataModels.Collection) => {
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Info,
            "Successfully set up account for saving queries"
          );
          NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
          return Promise.resolve(collection);
        },
        (error: any) => {
          const stringifiedError: string = JSON.stringify(error);
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Error,
            `Failed to set up account for saving queries: ${stringifiedError}`
          );
          Logger.logError(stringifiedError, "setupQueriesCollection");
          return Promise.reject(stringifiedError);
          handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
          return Promise.reject(error);
        }
      )
      .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
      .finally(() => clearMessage());
  }

  public async saveQuery(query: DataModels.Query): Promise<void> {
    const queriesCollection = this.findQueriesCollection();
    if (!queriesCollection) {
      const errorMessage: string = "Account not set up to perform saved query operations";
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to save query ${query.queryName}: ${errorMessage}`
      );
      NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
      return Promise.reject(errorMessage);
    }

@@ -80,121 +67,85 @@ export class QueriesClient {
      this.validateQuery(query);
    } catch (error) {
      const errorMessage: string = "Invalid query specified";
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to save query ${query.queryName}: ${errorMessage}`
      );
      NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
      return Promise.reject(errorMessage);
    }

    const id = NotificationConsoleUtils.logConsoleMessage(
      ConsoleDataType.InProgress,
      `Saving query ${query.queryName}`
    );
    const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
    query.id = query.queryName;
    return createDocument(queriesCollection, query)
      .then(
        (savedQuery: DataModels.Query) => {
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Info,
            `Successfully saved query ${query.queryName}`
          );
          NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
          return Promise.resolve();
        },
        (error: any) => {
          let errorMessage: string;
          const parsedError: DataModels.ErrorDataModel = ErrorParserUtility.parse(error)[0];
          if (parsedError.code === HttpStatusCodes.Conflict.toString()) {
            errorMessage = `Query ${query.queryName} already exists`;
          } else {
            errorMessage = parsedError.message;
          if (error.code === HttpStatusCodes.Conflict.toString()) {
            error = `Query ${query.queryName} already exists`;
          }
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Error,
            `Failed to save query ${query.queryName}: ${errorMessage}`
          );
          Logger.logError(JSON.stringify(parsedError), "saveQuery");
          return Promise.reject(errorMessage);
          handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
          return Promise.reject(error);
        }
      )
      .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
      .finally(() => clearMessage());
  }

  public async getQueries(): Promise<DataModels.Query[]> {
    const queriesCollection = this.findQueriesCollection();
    if (!queriesCollection) {
      const errorMessage: string = "Account not set up to perform saved query operations";
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to fetch saved queries: ${errorMessage}`
      );
      NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
      return Promise.reject(errorMessage);
    }

    const options: any = { enableCrossPartitionQuery: true };
    const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries");
    return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options)
    const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
    const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
      SavedQueries.DatabaseName,
      SavedQueries.CollectionName,
      this.fetchQueriesQuery(),
      options
    );
    const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
      await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
    return QueryUtils.queryAllPages(fetchQueries)
      .then(
        (queryIterator: QueryIterator<ItemDefinition & Resource>) => {
          const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> =>
            queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options);
          return QueryUtils.queryAllPages(fetchQueries).then(
            (results: ViewModels.QueryResults) => {
              let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
                if (!document) {
                  return undefined;
                }
                const { id, resourceId, query, queryName } = document;
                const parsedQuery: DataModels.Query = {
                  resourceId: resourceId,
                  queryName: queryName,
                  query: query,
                  id: id
                };
                try {
                  this.validateQuery(parsedQuery);
                  return parsedQuery;
                } catch (error) {
                  return undefined;
                }
              });
              queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
              NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries");
              return Promise.resolve(queries);
            },
            (error: any) => {
              const stringifiedError: string = JSON.stringify(error);
              NotificationConsoleUtils.logConsoleMessage(
                ConsoleDataType.Error,
                `Failed to fetch saved queries: ${stringifiedError}`
              );
              Logger.logError(stringifiedError, "getSavedQueries");
              return Promise.reject(stringifiedError);
        (results: ViewModels.QueryResults) => {
          let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
            if (!document) {
              return undefined;
            }
          );
            const { id, resourceId, query, queryName } = document;
            const parsedQuery: DataModels.Query = {
              resourceId: resourceId,
              queryName: queryName,
              query: query,
              id: id
            };
            try {
              this.validateQuery(parsedQuery);
              return parsedQuery;
            } catch (error) {
              return undefined;
            }
          });
          queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
          NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
          return Promise.resolve(queries);
        },
        (error: any) => {
          // should never get into this state but we handle this regardless
          const stringifiedError: string = JSON.stringify(error);
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Error,
            `Failed to fetch saved queries: ${stringifiedError}`
          );
          Logger.logError(stringifiedError, "getSavedQueries");
          return Promise.reject(stringifiedError);
          handleError(error, "getSavedQueries", "Failed to fetch saved queries");
          return Promise.reject(error);
        }
      )
      .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
      .finally(() => clearMessage());
  }

  public async deleteQuery(query: DataModels.Query): Promise<void> {
    const queriesCollection = this.findQueriesCollection();
    if (!queriesCollection) {
      const errorMessage: string = "Account not set up to perform saved query operations";
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to fetch saved queries: ${errorMessage}`
      );
      NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
      return Promise.reject(errorMessage);
    }

@@ -202,16 +153,10 @@ export class QueriesClient {
      this.validateQuery(query);
    } catch (error) {
      const errorMessage: string = "Invalid query specified";
      NotificationConsoleUtils.logConsoleMessage(
        ConsoleDataType.Error,
        `Failed to delete query ${query.queryName}: ${errorMessage}`
      );
      NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
    }

    const id = NotificationConsoleUtils.logConsoleMessage(
      ConsoleDataType.InProgress,
      `Deleting query ${query.queryName}`
    );
    const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
    query.id = query.queryName;
    const documentId = new DocumentId(
      {
@@ -225,23 +170,15 @@ export class QueriesClient {
    return deleteDocument(queriesCollection, documentId)
      .then(
        () => {
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Info,
            `Successfully deleted query ${query.queryName}`
          );
          NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
          return Promise.resolve();
        },
        (error: any) => {
          const stringifiedError: string = JSON.stringify(error);
          NotificationConsoleUtils.logConsoleMessage(
            ConsoleDataType.Error,
            `Failed to delete query ${query.queryName}: ${stringifiedError}`
          );
          Logger.logError(stringifiedError, "deleteQuery");
          return Promise.reject(stringifiedError);
          handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
          return Promise.reject(error);
        }
      )
      .finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
      .finally(() => clearMessage());
  }

  public getResourceId(): string {
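The recurring refactor in this class: the `logConsoleMessage(ConsoleDataType.InProgress, ...)` / `clearInProgressMessageWithId(id)` pair becomes `logConsoleProgress(...)`, which returns the clear callback directly, and the stringify-and-log error branches become single `handleError` calls. The resulting shape, sketched with a hypothetical `doWork` operation:

// Example (illustrative; doWork and the strings are stand-ins)
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Doing work");
try {
  await doWork();
  NotificationConsoleUtils.logConsoleInfo("Successfully did work");
} catch (error) {
  handleError(error, "doWork", "Failed to do work");
  throw error;
} finally {
  clearMessage();
}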
@@ -1,6 +1,5 @@
jest.mock("../../Utils/arm/request");
jest.mock("../CosmosClient");
jest.mock("../DataAccessUtilityBase");
import { AuthType } from "../../AuthType";
import { CreateCollectionParams, DatabaseAccount } from "../../Contracts/DataModels";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
@@ -1,5 +1,4 @@
import * as DataModels from "../../Contracts/DataModels";
import * as ErrorParserUtility from "../ErrorParserUtility";
import { AuthType } from "../../AuthType";
import { ContainerResponse, DatabaseResponse } from "@azure/cosmos";
import { ContainerRequest } from "@azure/cosmos/dist-esm/client/Container/ContainerRequest";
@@ -23,21 +22,19 @@ import {
  getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleError, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { refreshCachedResources } from "../DataAccessUtilityBase";
import { sendNotificationForError } from "./sendNotificationForError";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { createDatabase } from "./createDatabase";
import * as TelemetryProcessor from "../../Shared/Telemetry/TelemetryProcessor";
import { Action, ActionModifiers } from "../../Shared/Telemetry/TelemetryConstants";
import { handleError } from "../ErrorHandlingUtils";

export const createCollection = async (params: DataModels.CreateCollectionParams): Promise<DataModels.Collection> => {
  let collection: DataModels.Collection;
  const clearMessage = logConsoleProgress(
    `Creating a new container ${params.collectionId} for database ${params.databaseId}`
  );
  try {
    let collection: DataModels.Collection;
    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
      if (params.createNewDatabase) {
        const createDatabaseParams: DataModels.CreateDatabaseParams = {
@@ -54,18 +51,15 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
    } else {
      collection = await createCollectionWithSDK(params);
    }

    logConsoleInfo(`Successfully created container ${params.collectionId}`);
    return collection;
  } catch (error) {
    const sanitizedError = ErrorParserUtility.replaceKnownError(JSON.stringify(error));
    logConsoleError(`Error while creating container ${params.collectionId}:\n ${sanitizedError}`);
    logError(JSON.stringify(error), "CreateCollection", error.code);
    sendNotificationForError(error);
    clearMessage();
    handleError(error, "CreateCollection", `Error while creating container ${params.collectionId}`);
    throw error;
  } finally {
    clearMessage();
  }
  logConsoleInfo(`Successfully created container ${params.collectionId}`);
  await refreshCachedResources();
  clearMessage();
  return collection;
};

const createCollectionWithARM = async (params: DataModels.CreateCollectionParams): Promise<DataModels.Collection> => {
@@ -24,36 +24,28 @@ import {
  createUpdateGremlinDatabase,
  getGremlinDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleProgress, logConsoleError, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { refreshCachedOffers, refreshCachedResources } from "../DataAccessUtilityBase";
import { sendNotificationForError } from "./sendNotificationForError";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createDatabase(params: DataModels.CreateDatabaseParams): Promise<DataModels.Database> {
  let database: DataModels.Database;
  const clearMessage = logConsoleProgress(`Creating a new database ${params.databaseId}`);
  try {
    if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
      throw new Error("Creating database resources is not allowed for tables accounts");
    }
    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
      database = await createDatabaseWithARM(params);
    } else {
      database = await createDatabaseWithSDK(params);
    }
    const database: DataModels.Database = await (window.authType === AuthType.AAD && !userContext.useSDKOperations
      ? createDatabaseWithARM(params)
      : createDatabaseWithSDK(params));

    logConsoleInfo(`Successfully created database ${params.databaseId}`);
    return database;
  } catch (error) {
    logConsoleError(`Error while creating database ${params.databaseId}:\n ${error.message}`);
    logError(JSON.stringify(error), "CreateDatabase", error.code);
    sendNotificationForError(error);
    clearMessage();
    handleError(error, "CreateDatabase", `Error while creating database ${params.databaseId}`);
    throw error;
  } finally {
    clearMessage();
  }
  logConsoleInfo(`Successfully created database ${params.databaseId}`);
  await refreshCachedResources();
  await refreshCachedOffers();
  clearMessage();
  return database;
}

async function createDatabaseWithARM(params: DataModels.CreateDatabaseParams): Promise<DataModels.Database> {
25 src/Common/dataAccess/createDocument.ts Normal file
@@ -0,0 +1,25 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

export const createDocument = async (collection: CollectionBase, newDocument: unknown): Promise<unknown> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .items.create(newDocument);

    logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
    return response?.resource;
  } catch (error) {
    handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
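A hedged usage sketch (`collection` stands in for a real `CollectionBase` from the explorer tree, and the document body is sample data):

// Example (illustrative)
const created = await createDocument(collection, { id: "item1", name: "sample" });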
@@ -1,28 +1,78 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import {
  SqlStoredProcedureCreateUpdateParameters,
  SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import {
  createUpdateSqlStoredProcedure,
  getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createStoredProcedure(
  databaseId: string,
  collectionId: string,
  storedProcedure: StoredProcedureDefinition
): Promise<StoredProcedureDefinition & Resource> {
  let createdStoredProcedure: StoredProcedureDefinition & Resource;
  const clearMessage = logConsoleProgress(`Creating stored procedure ${storedProcedure.id}`);
  try {
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
    ) {
      try {
        const getResponse = await getSqlStoredProcedure(
          userContext.subscriptionId,
          userContext.resourceGroup,
          userContext.databaseAccount.name,
          databaseId,
          collectionId,
          storedProcedure.id
        );
        if (getResponse?.properties?.resource) {
          throw new Error(
            `Create stored procedure failed: stored procedure with id ${storedProcedure.id} already exists`
          );
        }
      } catch (error) {
        if (error.code !== "NotFound") {
          throw error;
        }
      }

      const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
        properties: {
          resource: storedProcedure as SqlStoredProcedureResource,
          options: {}
        }
      };
      const rpResponse = await createUpdateSqlStoredProcedure(
        userContext.subscriptionId,
        userContext.resourceGroup,
        userContext.databaseAccount.name,
        databaseId,
        collectionId,
        storedProcedure.id,
        createSprocParams
      );
      return rpResponse && (rpResponse.properties?.resource as StoredProcedureDefinition & Resource);
    }

    const response = await client()
      .database(databaseId)
      .container(collectionId)
      .scripts.storedProcedures.create(storedProcedure);
    createdStoredProcedure = response.resource;
    return response?.resource;
  } catch (error) {
    logConsoleError(`Error while creating stored procedure ${storedProcedure.id}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "CreateStoredProcedure", error.code);
    sendNotificationForError(error);
    handleError(error, "CreateStoredProcedure", `Error while creating stored procedure ${storedProcedure.id}`);
    throw error;
  } finally {
    clearMessage();
  }

  clearMessage();
  return createdStoredProcedure;
}
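The ARM path above is check-then-create: read the script first, treat a hit as a conflict, and let only NotFound through, so an existing stored procedure is never silently overwritten by the create-or-update call. Condensed, with `getResource()` standing in for the generated ARM client:

// Example (condensed sketch of the guard; getResource is hypothetical)
try {
  const existing = await getResource();
  if (existing?.properties?.resource) {
    throw new Error("Create failed: id already exists");
  }
} catch (error) {
  if (error.code !== "NotFound") {
    throw error; // anything other than "not found" is a real failure
  }
}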
@@ -1,28 +1,73 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, TriggerDefinition } from "@azure/cosmos";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import {
  SqlTriggerCreateUpdateParameters,
  SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createTrigger(
  databaseId: string,
  collectionId: string,
  trigger: TriggerDefinition
): Promise<TriggerDefinition & Resource> {
  let createdTrigger: TriggerDefinition & Resource;
  const clearMessage = logConsoleProgress(`Creating trigger ${trigger.id}`);
  try {
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
    ) {
      try {
        const getResponse = await getSqlTrigger(
          userContext.subscriptionId,
          userContext.resourceGroup,
          userContext.databaseAccount.name,
          databaseId,
          collectionId,
          trigger.id
        );
        if (getResponse?.properties?.resource) {
          throw new Error(`Create trigger failed: ${trigger.id} already exists`);
        }
      } catch (error) {
        if (error.code !== "NotFound") {
          throw error;
        }
      }

      const createTriggerParams: SqlTriggerCreateUpdateParameters = {
        properties: {
          resource: trigger as SqlTriggerResource,
          options: {}
        }
      };
      const rpResponse = await createUpdateSqlTrigger(
        userContext.subscriptionId,
        userContext.resourceGroup,
        userContext.databaseAccount.name,
        databaseId,
        collectionId,
        trigger.id,
        createTriggerParams
      );
      return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
    }

    const response = await client()
      .database(databaseId)
      .container(collectionId)
      .scripts.triggers.create(trigger);
    createdTrigger = response.resource;
    return response.resource;
  } catch (error) {
    logConsoleError(`Error while creating trigger ${trigger.id}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "CreateTrigger", error.code);
    sendNotificationForError(error);
    handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
    throw error;
  } finally {
    clearMessage();
  }

  clearMessage();
  return createdTrigger;
}
@@ -1,28 +1,82 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import {
  SqlUserDefinedFunctionCreateUpdateParameters,
  SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import {
  createUpdateSqlUserDefinedFunction,
  getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function createUserDefinedFunction(
  databaseId: string,
  collectionId: string,
  userDefinedFunction: UserDefinedFunctionDefinition
): Promise<UserDefinedFunctionDefinition & Resource> {
  let createdUserDefinedFunction: UserDefinedFunctionDefinition & Resource;
  const clearMessage = logConsoleProgress(`Creating user defined function ${userDefinedFunction.id}`);
  try {
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
    ) {
      try {
        const getResponse = await getSqlUserDefinedFunction(
          userContext.subscriptionId,
          userContext.resourceGroup,
          userContext.databaseAccount.name,
          databaseId,
          collectionId,
          userDefinedFunction.id
        );
        if (getResponse?.properties?.resource) {
          throw new Error(
            `Create user defined function failed: user defined function with id ${userDefinedFunction.id} already exists`
          );
        }
      } catch (error) {
        if (error.code !== "NotFound") {
          throw error;
        }
      }

      const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
        properties: {
          resource: userDefinedFunction as SqlUserDefinedFunctionResource,
          options: {}
        }
      };
      const rpResponse = await createUpdateSqlUserDefinedFunction(
        userContext.subscriptionId,
        userContext.resourceGroup,
        userContext.databaseAccount.name,
        databaseId,
        collectionId,
        userDefinedFunction.id,
        createUDFParams
      );
      return rpResponse && (rpResponse.properties?.resource as UserDefinedFunctionDefinition & Resource);
    }

    const response = await client()
      .database(databaseId)
      .container(collectionId)
      .scripts.userDefinedFunctions.create(userDefinedFunction);
    createdUserDefinedFunction = response.resource;
    return response?.resource;
  } catch (error) {
    logConsoleError(`Error while creating user defined function ${userDefinedFunction.id}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "CreateUserupdateUserDefinedFunction", error.code);
    sendNotificationForError(error);
    handleError(
      error,
      "CreateUserupdateUserDefinedFunction",
      `Error while creating user defined function ${userDefinedFunction.id}`
    );
    throw error;
  } finally {
    clearMessage();
  }

  clearMessage();
  return createdUserDefinedFunction;
}
@@ -7,7 +7,6 @@ import { AuthType } from "../../AuthType";
import { client } from "../CosmosClient";
import { updateUserContext } from "../../UserContext";
import { DatabaseAccount } from "../../Contracts/DataModels";
import { sendCachedDataMessage } from "../MessageHandler";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";

describe("deleteCollection", () => {
@@ -18,7 +17,6 @@ describe("deleteCollection", () => {
      } as DatabaseAccount,
      defaultExperience: DefaultAccountExperienceType.DocumentDB
    });
    (sendCachedDataMessage as jest.Mock).mockResolvedValue(undefined);
  });

  it("should call ARM if logged in with AAD", async () => {
@@ -5,12 +5,10 @@ import { deleteCassandraTable } from "../../Utils/arm/generatedClients/2020-04-0
import { deleteMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { deleteGremlinGraph } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { deleteTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { client } from "../CosmosClient";
import { refreshCachedResources } from "../DataAccessUtilityBase";

export async function deleteCollection(databaseId: string, collectionId: string): Promise<void> {
  const clearMessage = logConsoleProgress(`Deleting container ${collectionId}`);
@@ -23,15 +21,13 @@ export async function deleteCollection(databaseId: string, collectionId: string)
      .container(collectionId)
      .delete();
  }
  logConsoleInfo(`Successfully deleted container ${collectionId}`);
  } catch (error) {
    logConsoleError(`Error while deleting container ${collectionId}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "DeleteCollection", error.code);
    sendNotificationForError(error);
    handleError(error, "DeleteCollection", `Error while deleting container ${collectionId}`);
    throw error;
  } finally {
    clearMessage();
  }
  logConsoleInfo(`Successfully deleted container ${collectionId}`);
  clearMessage();
  await refreshCachedResources();
}

function deleteCollectionWithARM(databaseId: string, collectionId: string): Promise<void> {
36 src/Common/dataAccess/deleteConflict.ts Normal file
@@ -0,0 +1,36 @@
import ConflictId from "../../Explorer/Tree/ConflictId";
import { CollectionBase } from "../../Contracts/ViewModels";
import { RequestOptions } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";

export const deleteConflict = async (collection: CollectionBase, conflictId: ConflictId): Promise<void> => {
  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);

  try {
    const options = {
      partitionKey: getPartitionKeyHeaderForConflict(conflictId)
    };

    await client()
      .database(collection.databaseId)
      .container(collection.id())
      .conflict(conflictId.id())
      .delete(options as RequestOptions);
    logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
  } catch (error) {
    handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};

const getPartitionKeyHeaderForConflict = (conflictId: ConflictId): unknown => {
  if (!conflictId.partitionKey) {
    return undefined;
  }

  return conflictId.partitionKeyValue === undefined ? [{}] : [conflictId.partitionKeyValue];
};
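Note the header shape: the SDK receives the partition key as an array, with an empty object as the sentinel for a defined-but-absent value. A hedged mirror of the logic with plain inputs (not real `ConflictId`s):

// Example (illustrative mirror of getPartitionKeyHeaderForConflict)
const headerFor = (hasPartitionKey: boolean, value?: string): unknown =>
  !hasPartitionKey ? undefined : value === undefined ? [{}] : [value];
headerFor(false); // undefined: collection has no partition key
headerFor(true); // [{}]: key defined but this conflict has no value
headerFor(true, "user42"); // ["user42"]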
@@ -7,7 +7,6 @@ import { AuthType } from "../../AuthType";
import { client } from "../CosmosClient";
import { updateUserContext } from "../../UserContext";
import { DatabaseAccount } from "../../Contracts/DataModels";
import { sendCachedDataMessage } from "../MessageHandler";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";

describe("deleteDatabase", () => {
@@ -18,7 +17,6 @@ describe("deleteDatabase", () => {
      } as DatabaseAccount,
      defaultExperience: DefaultAccountExperienceType.DocumentDB
    });
    (sendCachedDataMessage as jest.Mock).mockResolvedValue(undefined);
  });

  it("should call ARM if logged in with AAD", async () => {
@@ -4,12 +4,10 @@ import { deleteSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/s
import { deleteCassandraKeyspace } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import { deleteMongoDBDatabase } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { deleteGremlinDatabase } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";
import { client } from "../CosmosClient";
import { refreshCachedResources } from "../DataAccessUtilityBase";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";

export async function deleteDatabase(databaseId: string): Promise<void> {
  const clearMessage = logConsoleProgress(`Deleting database ${databaseId}`);
@@ -25,15 +23,13 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
      .database(databaseId)
      .delete();
  }
  logConsoleInfo(`Successfully deleted database ${databaseId}`);
  } catch (error) {
    logConsoleError(`Error while deleting database ${databaseId}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "DeleteDatabase", error.code);
    sendNotificationForError(error);
    handleError(error, "DeleteDatabase", `Error while deleting database ${databaseId}`);
    throw error;
  } finally {
    clearMessage();
  }
  logConsoleInfo(`Successfully deleted database ${databaseId}`);
  clearMessage();
  await refreshCachedResources();
}

function deleteDatabaseWithARM(databaseId: string): Promise<void> {
25 src/Common/dataAccess/deleteDocument.ts Normal file
@@ -0,0 +1,25 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
  const entityName: string = getEntityName();
  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);

  try {
    await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .delete();
    logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
  } catch (error) {
    handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
@@ -1,7 +1,10 @@
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { deleteSqlStoredProcedure } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function deleteStoredProcedure(
  databaseId: string,
@@ -10,17 +13,30 @@ export async function deleteStoredProcedure(
): Promise<void> {
  const clearMessage = logConsoleProgress(`Deleting stored procedure ${storedProcedureId}`);
  try {
    await client()
      .database(databaseId)
      .container(collectionId)
      .scripts.storedProcedure(storedProcedureId)
      .delete();
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
    ) {
      await deleteSqlStoredProcedure(
        userContext.subscriptionId,
        userContext.resourceGroup,
        userContext.databaseAccount.name,
        databaseId,
        collectionId,
        storedProcedureId
      );
    } else {
      await client()
        .database(databaseId)
        .container(collectionId)
        .scripts.storedProcedure(storedProcedureId)
        .delete();
    }
  } catch (error) {
    logConsoleError(`Error while deleting stored procedure ${storedProcedureId}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "DeleteStoredProcedure", error.code);
    sendNotificationForError(error);
    handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
    throw error;
  } finally {
    clearMessage();
  }

  clearMessage();
  return undefined;
}
@@ -1,22 +1,38 @@
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { deleteSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function deleteTrigger(databaseId: string, collectionId: string, triggerId: string): Promise<void> {
  const clearMessage = logConsoleProgress(`Deleting trigger ${triggerId}`);
  try {
    await client()
      .database(databaseId)
      .container(collectionId)
      .scripts.trigger(triggerId)
      .delete();
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
    ) {
      await deleteSqlTrigger(
        userContext.subscriptionId,
        userContext.resourceGroup,
        userContext.databaseAccount.name,
        databaseId,
        collectionId,
        triggerId
      );
    } else {
      await client()
        .database(databaseId)
        .container(collectionId)
        .scripts.trigger(triggerId)
        .delete();
    }
  } catch (error) {
    logConsoleError(`Error while deleting trigger ${triggerId}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "DeleteTrigger", error.code);
    sendNotificationForError(error);
    handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
    throw error;
  } finally {
    clearMessage();
  }

  clearMessage();
  return undefined;
}
@@ -1,22 +1,38 @@
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { client } from "../CosmosClient";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { deleteSqlUserDefinedFunction } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { userContext } from "../../UserContext";

export async function deleteUserDefinedFunction(databaseId: string, collectionId: string, id: string): Promise<void> {
  const clearMessage = logConsoleProgress(`Deleting user defined function ${id}`);
  try {
    await client()
      .database(databaseId)
      .container(collectionId)
      .scripts.userDefinedFunction(id)
      .delete();
    if (
      window.authType === AuthType.AAD &&
      !userContext.useSDKOperations &&
      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
    ) {
      await deleteSqlUserDefinedFunction(
        userContext.subscriptionId,
        userContext.resourceGroup,
        userContext.databaseAccount.name,
        databaseId,
        collectionId,
        id
      );
    } else {
      await client()
        .database(databaseId)
        .container(collectionId)
        .scripts.userDefinedFunction(id)
        .delete();
    }
  } catch (error) {
    logConsoleError(`Error while deleting user defined function ${id}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "DeleteUserDefinedFunction", error.code);
    sendNotificationForError(error);
    handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
    throw error;
  } finally {
    clearMessage();
  }

  clearMessage();
  return undefined;
}
48 src/Common/dataAccess/executeStoredProcedure.ts Normal file
@@ -0,0 +1,48 @@
import { Collection } from "../../Contracts/ViewModels";
import { ClientDefaults, HttpHeaders } from "../Constants";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import StoredProcedure from "../../Explorer/Tree/StoredProcedure";

export interface ExecuteSprocResult {
  result: StoredProcedure;
  scriptLogs: string;
}

export const executeStoredProcedure = async (
  collection: Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: string,
  params: string[]
): Promise<ExecuteSprocResult> => {
  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
  const timeout = setTimeout(() => {
    throw Error(`Request timed out while executing stored procedure ${storedProcedure.id()}`);
  }, ClientDefaults.requestTimeoutMs);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .scripts.storedProcedure(storedProcedure.id())
      .execute(partitionKeyValue, params, { enableScriptLogging: true });
    clearTimeout(timeout);
    logConsoleInfo(
      `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
    );
    return {
      result: response.resource,
      scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string
    };
  } catch (error) {
    handleError(
      error,
      "ExecuteStoredProcedure",
      `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
    );
    throw error;
  } finally {
    clearMessage();
  }
};
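A usage sketch, assuming the collection and stored procedure objects come from the explorer tree. One caveat worth noting: a throw inside the setTimeout callback above cannot reject the awaited promise, so the timeout surfaces as an unhandled error rather than a failed call.

import { Collection } from "../../Contracts/ViewModels";
import StoredProcedure from "../../Explorer/Tree/StoredProcedure";
import { executeStoredProcedure } from "./executeStoredProcedure";

// Hypothetical: execute with a partition key value and two parameters, then
// show both the sproc's return value and its captured console output.
async function runSproc(collection: Collection, storedProcedure: StoredProcedure): Promise<void> {
  const { result, scriptLogs } = await executeStoredProcedure(collection, storedProcedure, "pk-value", ["a", "b"]);
  console.log(result, scriptLogs);
}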
92 src/Common/dataAccess/getCollectionDataUsageSize.ts Normal file
@@ -0,0 +1,92 @@
import { AuthType } from "../../AuthType";
import { armRequest } from "../../Utils/arm/request";
import { configContext } from "../../ConfigContext";
import { handleError } from "../ErrorHandlingUtils";
import { userContext } from "../../UserContext";

interface TimeSeriesData {
  data: {
    timeStamp: string;
    total: number;
  }[];
  metadatavalues: {
    name: {
      localizedValue: string;
      value: string;
    };
    value: string;
  };
}

interface MetricsData {
  displayDescription: string;
  errorCode: string;
  id: string;
  name: {
    value: string;
    localizedValue: string;
  };
  timeseries: TimeSeriesData[];
  type: string;
  unit: string;
}

interface MetricsResponse {
  cost: number;
  interval: string;
  namespace: string;
  resourceregion: string;
  timespan: string;
  value: MetricsData[];
}

export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }

  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;
  const filter = `DatabaseName eq '${databaseName}' and CollectionName eq '${containerName}'`;
  const metricNames = "DataUsage,IndexUsage";
  const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/providers/microsoft.insights/metrics`;

  try {
    const metricsResponse: MetricsResponse = await armRequest({
      host: configContext.ARM_ENDPOINT,
      path,
      method: "GET",
      apiVersion: "2018-01-01",
      queryParams: {
        filter,
        metricNames
      }
    });

    if (metricsResponse?.value?.length !== 2) {
      return undefined;
    }

    const dataUsageData: MetricsData = metricsResponse.value[0];
    const indexUsagedata: MetricsData = metricsResponse.value[1];
    const dataUsageSizeInKb: number = getUsageSizeInKb(dataUsageData);
    const indexUsageSizeInKb: number = getUsageSizeInKb(indexUsagedata);

    return dataUsageSizeInKb + indexUsageSizeInKb;
  } catch (error) {
    handleError(error, "getCollectionUsageSize");
    throw error;
  }
};

const getUsageSizeInKb = (metricsData: MetricsData): number => {
  if (metricsData?.errorCode !== "Success") {
    throw Error(`Get collection usage size failed: ${metricsData.errorCode}`);
  }

  const timeSeriesData: TimeSeriesData = metricsData?.timeseries?.[0];
  const usageSizeInBytes: number = timeSeriesData?.data?.[0]?.total;

  return usageSizeInBytes ? usageSizeInBytes / 1024 : 0;
};
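The exported helper sums the DataUsage and IndexUsage metrics that Azure Monitor returns for the account; a sketch of a caller (the database and container names are placeholders):

import { getCollectionUsageSizeInKB } from "./getCollectionDataUsageSize";

// Hypothetical: report combined data + index size; undefined outside AAD auth.
async function reportUsage(): Promise<void> {
  const usageInKB = await getCollectionUsageSizeInKB("myDatabase", "myContainer");
  if (usageInKB !== undefined) {
    console.log(`~${(usageInKB / 1024).toFixed(1)} MB used (data + index)`);
  }
}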
28 src/Common/dataAccess/getIndexTransformationProgress.ts Normal file
@@ -0,0 +1,28 @@
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import * as Constants from "../Constants";
import { AuthType } from "../../AuthType";

export async function getIndexTransformationProgress(databaseId: string, collectionId: string): Promise<number> {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }
  let indexTransformationPercentage: number;
  const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
  try {
    const response = await client()
      .database(databaseId)
      .container(collectionId)
      .read({ populateQuotaInfo: true });

    indexTransformationPercentage = parseInt(
      response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
    );
  } catch (error) {
    handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
    throw error;
  }
  clearMessage();
  return indexTransformationPercentage;
}
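The progress value is parsed from the index-transformation-progress response header populated by the quota-info read; a hypothetical polling sketch (the interval is arbitrary):

import { getIndexTransformationProgress } from "./getIndexTransformationProgress";

// Poll until the transformation reports 100%. parseInt yields NaN when the
// header is absent, and NaN < 100 is false, so the loop also ends then.
async function waitForReindex(databaseId: string, collectionId: string): Promise<void> {
  let progress = await getIndexTransformationProgress(databaseId, collectionId);
  while (progress < 100) {
    await new Promise(resolve => setTimeout(resolve, 5000));
    progress = await getIndexTransformationProgress(databaseId, collectionId);
  }
}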
14 src/Common/dataAccess/queryConflicts.ts Normal file
@@ -0,0 +1,14 @@
import { ConflictDefinition, FeedOptions, QueryIterator, Resource } from "@azure/cosmos";
import { client } from "../CosmosClient";

export const queryConflicts = (
  databaseId: string,
  containerId: string,
  query: string,
  options: FeedOptions
): QueryIterator<ConflictDefinition & Resource> => {
  return client()
    .database(databaseId)
    .container(containerId)
    .conflicts.query(query, options);
};
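A usage sketch for the conflicts iterator; the names are placeholders:

import { queryConflicts } from "./queryConflicts";

// Hypothetical: drain all conflict-feed pages for a container.
async function listConflicts(): Promise<void> {
  const iterator = queryConflicts("myDatabase", "myContainer", "SELECT * FROM c", { maxItemCount: 100 });
  while (iterator.hasMoreResults()) {
    const { resources } = await iterator.fetchNext();
    console.log(resources);
  }
}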
@@ -1,13 +1,13 @@
-import { getCommonQueryOptions } from "./DataAccessUtilityBase";
-import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
+import { getCommonQueryOptions } from "./queryDocuments";
+import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";

 describe("getCommonQueryOptions", () => {
   it("builds the correct default options objects", () => {
     expect(getCommonQueryOptions({})).toMatchSnapshot();
   });
   it("reads from localStorage", () => {
     LocalStorageUtility.setEntryNumber(StorageKey.ActualItemPerPage, 37);
     LocalStorageUtility.setEntryNumber(StorageKey.MaxDegreeOfParellism, 17);
     expect(getCommonQueryOptions({})).toMatchSnapshot();
   });
 });
34 src/Common/dataAccess/queryDocuments.ts Normal file
@@ -0,0 +1,34 @@
import { Queries } from "../Constants";
import { FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
import { client } from "../CosmosClient";

export const queryDocuments = (
  databaseId: string,
  containerId: string,
  query: string,
  options: FeedOptions
): QueryIterator<ItemDefinition & Resource> => {
  options = getCommonQueryOptions(options);
  return client()
    .database(databaseId)
    .container(containerId)
    .items.query(query, options);
};

export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {
  const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
  options = options || {};
  options.populateQueryMetrics = true;
  options.enableScanInQuery = options.enableScanInQuery || true;
  if (!options.partitionKey) {
    options.forceQueryPlan = true;
  }
  options.maxItemCount =
    options.maxItemCount ||
    (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
    Queries.itemsPerPage;
  options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);

  return options;
};
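getCommonQueryOptions layers defaults over whatever the caller passes: query metrics always on, page size and parallelism read from local storage, and a forced query plan when no partition key is given. A sketch of the resulting call pattern (names are placeholders):

import { queryDocuments } from "./queryDocuments";

// Hypothetical: issue a query and fetch the first page.
async function firstPage(): Promise<void> {
  const iterator = queryDocuments("myDatabase", "myContainer", "SELECT * FROM c", {});
  const { resources } = await iterator.fetchNext();
  console.log(`fetched ${resources.length} items`);
}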
26 src/Common/dataAccess/queryDocumentsPage.ts Normal file
@@ -0,0 +1,26 @@
import { QueryResults } from "../../Contracts/ViewModels";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { MinimalQueryIterator, nextPage } from "../IteratorUtilities";
import { handleError } from "../ErrorHandlingUtils";
import { getEntityName } from "../DocumentUtility";

export const queryDocumentsPage = async (
  resourceName: string,
  documentsIterator: MinimalQueryIterator,
  firstItemIndex: number
): Promise<QueryResults> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);

  try {
    const result: QueryResults = await nextPage(documentsIterator, firstItemIndex);
    const itemCount = (result.documents && result.documents.length) || 0;
    logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
    return result;
  } catch (error) {
    handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
    throw error;
  } finally {
    clearMessage();
  }
};
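queryDocumentsPage wraps a single nextPage call with progress and error reporting; a sketch combining it with the iterator from queryDocuments (names are placeholders):

import { queryDocuments } from "./queryDocuments";
import { queryDocumentsPage } from "./queryDocumentsPage";

// Hypothetical: fetch the page starting at a given item index.
async function loadPage(firstItemIndex: number): Promise<void> {
  const iterator = queryDocuments("myDatabase", "myContainer", "SELECT * FROM c", {});
  const results = await queryDocumentsPage("myContainer", iterator, firstItemIndex);
  console.log(results.documents.length);
}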
@@ -1,8 +1,7 @@
 import * as DataModels from "../../Contracts/DataModels";
 import { client } from "../CosmosClient";
-import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { handleError } from "../ErrorHandlingUtils";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

 export async function readCollection(databaseId: string, collectionId: string): Promise<DataModels.Collection> {
   let collection: DataModels.Collection;
@@ -14,9 +13,7 @@ export async function readCollection(databaseId: string, collectionId: string):
       .read();
     collection = response.resource as DataModels.Collection;
   } catch (error) {
-    logConsoleError(`Error while querying container ${collectionId}:\n ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadCollection", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
     throw error;
   }
   clearMessage();
@@ -1,126 +1,123 @@
-import * as DataModels from "../../Contracts/DataModels";
 import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
-import { HttpHeaders } from "../Constants";
-import { RequestOptions } from "@azure/cosmos/dist-esm";
-import { client } from "../CosmosClient";
+import { Offer, ReadCollectionOfferParams } from "../../Contracts/DataModels";
+import { handleError } from "../ErrorHandlingUtils";
 import { getSqlContainerThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { getMongoDBCollectionThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import { getCassandraTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import { getGremlinGraphThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { getTableThroughput } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
-import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { readOffers } from "./readOffers";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
+import { readOfferWithSDK } from "./readOfferWithSDK";
 import { userContext } from "../../UserContext";

-export const readCollectionOffer = async (
-  params: DataModels.ReadCollectionOfferParams
-): Promise<DataModels.OfferWithHeaders> => {
+export const readCollectionOffer = async (params: ReadCollectionOfferParams): Promise<Offer> => {
   const clearMessage = logConsoleProgress(`Querying offer for collection ${params.collectionId}`);
-  let offerId = params.offerId;
-  if (!offerId) {
-    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
-      try {
-        offerId = await getCollectionOfferIdWithARM(params.databaseId, params.collectionId);
-      } catch (error) {
-        clearMessage();
-        if (error.code !== "NotFound") {
-          throw error;
-        }
-        return undefined;
-      }
-    } else {
-      offerId = await getCollectionOfferIdWithSDK(params.collectionResourceId);
-      if (!offerId) {
-        clearMessage();
-        return undefined;
-      }
-    }
-  }
-
-  const options: RequestOptions = {
-    initialHeaders: {
-      [HttpHeaders.populateCollectionThroughputInfo]: true
-    }
-  };

   try {
-    const response = await client()
-      .offer(offerId)
-      .read(options);
-    return (
-      response && {
-        ...response.resource,
-        headers: response.headers
-      }
-    );
+    if (
+      window.authType === AuthType.AAD &&
+      !userContext.useSDKOperations &&
+      userContext.defaultExperience !== DefaultAccountExperienceType.Table
+    ) {
+      return await readCollectionOfferWithARM(params.databaseId, params.collectionId);
+    }
+
+    return await readOfferWithSDK(params.offerId, params.collectionResourceId);
   } catch (error) {
-    logConsoleError(`Error while querying offer for collection ${params.collectionId}:\n ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadCollectionOffer", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadCollectionOffer", `Error while querying offer for collection ${params.collectionId}`);
     throw error;
   } finally {
     clearMessage();
   }
 };

-const getCollectionOfferIdWithARM = async (databaseId: string, collectionId: string): Promise<string> => {
-  let rpResponse;
+const readCollectionOfferWithARM = async (databaseId: string, collectionId: string): Promise<Offer> => {
   const subscriptionId = userContext.subscriptionId;
   const resourceGroup = userContext.resourceGroup;
   const accountName = userContext.databaseAccount.name;
   const defaultExperience = userContext.defaultExperience;
-  switch (defaultExperience) {
-    case DefaultAccountExperienceType.DocumentDB:
-      rpResponse = await getSqlContainerThroughput(
-        subscriptionId,
-        resourceGroup,
-        accountName,
-        databaseId,
-        collectionId
-      );
-      break;
-    case DefaultAccountExperienceType.MongoDB:
-      rpResponse = await getMongoDBCollectionThroughput(
-        subscriptionId,
-        resourceGroup,
-        accountName,
-        databaseId,
-        collectionId
-      );
-      break;
-    case DefaultAccountExperienceType.Cassandra:
-      rpResponse = await getCassandraTableThroughput(
-        subscriptionId,
-        resourceGroup,
-        accountName,
-        databaseId,
-        collectionId
-      );
-      break;
-    case DefaultAccountExperienceType.Graph:
-      rpResponse = await getGremlinGraphThroughput(
-        subscriptionId,
-        resourceGroup,
-        accountName,
-        databaseId,
-        collectionId
-      );
-      break;
-    case DefaultAccountExperienceType.Table:
-      rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
-      break;
-    default:
-      throw new Error(`Unsupported default experience type: ${defaultExperience}`);
+
+  let rpResponse;
+  try {
+    switch (defaultExperience) {
+      case DefaultAccountExperienceType.DocumentDB:
+        rpResponse = await getSqlContainerThroughput(
+          subscriptionId,
+          resourceGroup,
+          accountName,
+          databaseId,
+          collectionId
+        );
+        break;
+      case DefaultAccountExperienceType.MongoDB:
+        rpResponse = await getMongoDBCollectionThroughput(
+          subscriptionId,
+          resourceGroup,
+          accountName,
+          databaseId,
+          collectionId
+        );
+        break;
+      case DefaultAccountExperienceType.Cassandra:
+        rpResponse = await getCassandraTableThroughput(
+          subscriptionId,
+          resourceGroup,
+          accountName,
+          databaseId,
+          collectionId
+        );
+        break;
+      case DefaultAccountExperienceType.Graph:
+        rpResponse = await getGremlinGraphThroughput(
+          subscriptionId,
+          resourceGroup,
+          accountName,
+          databaseId,
+          collectionId
+        );
+        break;
+      case DefaultAccountExperienceType.Table:
+        rpResponse = await getTableThroughput(subscriptionId, resourceGroup, accountName, collectionId);
+        break;
+      default:
+        throw new Error(`Unsupported default experience type: ${defaultExperience}`);
+    }
+  } catch (error) {
+    if (error.code !== "NotFound") {
+      throw error;
+    }
+
+    return undefined;
   }

-  return rpResponse?.name;
-};
+  const resource = rpResponse?.properties?.resource;
+  if (resource) {
+    const offerId: string = rpResponse.name;
+    const minimumThroughput: number =
+      typeof resource.minimumThroughput === "string"
+        ? parseInt(resource.minimumThroughput)
+        : resource.minimumThroughput;
+    const autoscaleSettings = resource.autoscaleSettings;

-const getCollectionOfferIdWithSDK = async (collectionResourceId: string): Promise<string> => {
-  const offers = await readOffers();
-  const offer = offers.find(offer => offer.resource === collectionResourceId);
-  return offer?.id;
-};
+    if (autoscaleSettings) {
+      return {
+        id: offerId,
+        autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
+        manualThroughput: undefined,
+        minimumThroughput,
+        offerReplacePending: resource.offerReplacePending === "true"
+      };
+    }
+
+    return {
+      id: offerId,
+      autoscaleMaxThroughput: undefined,
+      manualThroughput: resource.throughput,
+      minimumThroughput,
+      offerReplacePending: resource.offerReplacePending === "true"
+    };
+  }
+
+  return undefined;
+};
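The ARM path now returns a normalized Offer rather than a raw offer-with-headers, so callers can branch on autoscale versus manual throughput directly. A sketch, using the parameter fields exactly as they appear in the diff above (the resource id value is assumed):

import { readCollectionOffer } from "./readCollectionOffer";

// Hypothetical: print a container's provisioned throughput, if any.
async function showThroughput(): Promise<void> {
  const offer = await readCollectionOffer({
    databaseId: "myDatabase",
    collectionId: "myContainer",
    collectionResourceId: "<collection _self link>", // used only on the SDK fallback path
    offerId: undefined
  });
  if (offer) {
    console.log(offer.autoscaleMaxThroughput ?? offer.manualThroughput, "RU/s");
  }
}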
@@ -1,48 +0,0 @@
import * as DataModels from "../../Contracts/DataModels";
import * as HeadersUtility from "../HeadersUtility";
import * as ViewModels from "../../Contracts/ViewModels";
import { ContainerDefinition, Resource } from "@azure/cosmos";
import { HttpHeaders } from "../Constants";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";

interface ResourceWithStatistics {
  statistics: DataModels.Statistic[];
}

export const readCollectionQuotaInfo = async (
  collection: ViewModels.Collection
): Promise<DataModels.CollectionQuotaInfo> => {
  const clearMessage = logConsoleProgress(`Querying containers for database ${collection.id}`);
  const options: RequestOptions = {};
  options.populateQuotaInfo = true;
  options.initialHeaders = options.initialHeaders || {};
  options.initialHeaders[HttpHeaders.populatePartitionStatistics] = true;

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .read(options);
    const quota: DataModels.CollectionQuotaInfo = HeadersUtility.getQuota(response.headers);
    const resource = response.resource as ContainerDefinition & Resource & ResourceWithStatistics;
    quota["usageSizeInKB"] = resource.statistics.reduce(
      (previousValue: number, currentValue: DataModels.Statistic) => previousValue + currentValue.sizeInKB,
      0
    );
    quota["numPartitions"] = resource.statistics.length;
    quota["uniqueKeyPolicy"] = collection.uniqueKeyPolicy; // TODO: Remove after refactoring (#119617)

    return quota;
  } catch (error) {
    logConsoleError(`Error while querying quota info for container ${collection.id}:\n ${JSON.stringify(error)}`);
    logError(JSON.stringify(error), "ReadCollectionQuotaInfo", error.code);
    sendNotificationForError(error);
    throw error;
  } finally {
    clearMessage();
  }
};
@@ -2,18 +2,16 @@ import * as DataModels from "../../Contracts/DataModels";
 import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import { client } from "../CosmosClient";
+import { handleError } from "../ErrorHandlingUtils";
 import { listSqlContainers } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { listCassandraTables } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import { listMongoDBCollections } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import { listGremlinGraphs } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { listTables } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
-import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { userContext } from "../../UserContext";

 export async function readCollections(databaseId: string): Promise<DataModels.Collection[]> {
-  let collections: DataModels.Collection[];
   const clearMessage = logConsoleProgress(`Querying containers for database ${databaseId}`);
   try {
     if (
@@ -22,22 +20,20 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
       userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
       userContext.defaultExperience !== DefaultAccountExperienceType.Table
     ) {
-      collections = await readCollectionsWithARM(databaseId);
-    } else {
-      const sdkResponse = await client()
-        .database(databaseId)
-        .containers.readAll()
-        .fetchAll();
-      collections = sdkResponse.resources as DataModels.Collection[];
+      return await readCollectionsWithARM(databaseId);
     }

+    const sdkResponse = await client()
+      .database(databaseId)
+      .containers.readAll()
+      .fetchAll();
+    return sdkResponse.resources as DataModels.Collection[];
   } catch (error) {
-    logConsoleError(`Error while querying containers for database ${databaseId}:\n ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadCollections", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
     throw error;
   } finally {
     clearMessage();
   }
-  clearMessage();
-  return collections;
 }

 async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Collection[]> {
@@ -1,102 +1,95 @@
-import * as DataModels from "../../Contracts/DataModels";
 import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
-import { HttpHeaders } from "../Constants";
-import { RequestOptions } from "@azure/cosmos/dist-esm";
-import { client } from "../CosmosClient";
+import { Offer, ReadDatabaseOfferParams } from "../../Contracts/DataModels";
 import { getSqlDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { getMongoDBDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import { getCassandraKeyspaceThroughput } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import { getGremlinDatabaseThroughput } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
-import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { readOffers } from "./readOffers";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { handleError } from "../ErrorHandlingUtils";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
+import { readOfferWithSDK } from "./readOfferWithSDK";
 import { userContext } from "../../UserContext";

-export const readDatabaseOffer = async (
-  params: DataModels.ReadDatabaseOfferParams
-): Promise<DataModels.OfferWithHeaders> => {
+export const readDatabaseOffer = async (params: ReadDatabaseOfferParams): Promise<Offer> => {
   const clearMessage = logConsoleProgress(`Querying offer for database ${params.databaseId}`);
-  let offerId = params.offerId;
-  if (!offerId) {
-    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
-      try {
-        offerId = await getDatabaseOfferIdWithARM(params.databaseId);
-      } catch (error) {
-        clearMessage();
-        if (error.code !== "NotFound") {
-          throw new error();
-        }
-        return undefined;
-      }
-    } else {
-      offerId = await getDatabaseOfferIdWithSDK(params.databaseResourceId);
-      if (!offerId) {
-        clearMessage();
-        return undefined;
-      }
-    }
-  }
-
-  const options: RequestOptions = {
-    initialHeaders: {
-      [HttpHeaders.populateCollectionThroughputInfo]: true
-    }
-  };

   try {
-    const response = await client()
-      .offer(offerId)
-      .read(options);
-    return (
-      response && {
-        ...response.resource,
-        headers: response.headers
-      }
-    );
+    if (
+      window.authType === AuthType.AAD &&
+      !userContext.useSDKOperations &&
+      userContext.defaultExperience !== DefaultAccountExperienceType.Table
+    ) {
+      return await readDatabaseOfferWithARM(params.databaseId);
+    }
+
+    return await readOfferWithSDK(params.offerId, params.databaseResourceId);
   } catch (error) {
-    logConsoleError(`Error while querying offer for database ${params.databaseId}:\n ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadDatabaseOffer", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadDatabaseOffer", `Error while querying offer for database ${params.databaseId}`);
     throw error;
   } finally {
     clearMessage();
   }
 };

-const getDatabaseOfferIdWithARM = async (databaseId: string): Promise<string> => {
-  let rpResponse;
+const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
   const subscriptionId = userContext.subscriptionId;
   const resourceGroup = userContext.resourceGroup;
   const accountName = userContext.databaseAccount.name;
   const defaultExperience = userContext.defaultExperience;
-  switch (defaultExperience) {
-    case DefaultAccountExperienceType.DocumentDB:
-      rpResponse = await getSqlDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
-      break;
-    case DefaultAccountExperienceType.MongoDB:
-      rpResponse = await getMongoDBDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
-      break;
-    case DefaultAccountExperienceType.Cassandra:
-      rpResponse = await getCassandraKeyspaceThroughput(subscriptionId, resourceGroup, accountName, databaseId);
-      break;
-    case DefaultAccountExperienceType.Graph:
-      rpResponse = await getGremlinDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
-      break;
-    default:
-      throw new Error(`Unsupported default experience type: ${defaultExperience}`);
+
+  let rpResponse;
+  try {
+    switch (defaultExperience) {
+      case DefaultAccountExperienceType.DocumentDB:
+        rpResponse = await getSqlDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
+        break;
+      case DefaultAccountExperienceType.MongoDB:
+        rpResponse = await getMongoDBDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
+        break;
+      case DefaultAccountExperienceType.Cassandra:
+        rpResponse = await getCassandraKeyspaceThroughput(subscriptionId, resourceGroup, accountName, databaseId);
+        break;
+      case DefaultAccountExperienceType.Graph:
+        rpResponse = await getGremlinDatabaseThroughput(subscriptionId, resourceGroup, accountName, databaseId);
+        break;
+      default:
+        throw new Error(`Unsupported default experience type: ${defaultExperience}`);
+    }
+  } catch (error) {
+    if (error.code !== "NotFound") {
+      throw error;
+    }
+
+    return undefined;
   }

-  return rpResponse?.name;
-};
+  const resource = rpResponse?.properties?.resource;
+  if (resource) {
+    const offerId: string = rpResponse.name;
+    const minimumThroughput: number =
+      typeof resource.minimumThroughput === "string"
+        ? parseInt(resource.minimumThroughput)
+        : resource.minimumThroughput;
+    const autoscaleSettings = resource.autoscaleSettings;

-const getDatabaseOfferIdWithSDK = async (databaseResourceId: string): Promise<string> => {
-  const offers = await readOffers();
-  const offer = offers.find(offer => offer.resource === databaseResourceId);
-  return offer?.id;
-};
+    if (autoscaleSettings) {
+      return {
+        id: offerId,
+        autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
+        manualThroughput: undefined,
+        minimumThroughput,
+        offerReplacePending: resource.offerReplacePending === "true"
+      };
+    }
+
+    return {
+      id: offerId,
+      autoscaleMaxThroughput: undefined,
+      manualThroughput: resource.throughput,
+      minimumThroughput,
+      offerReplacePending: resource.offerReplacePending === "true"
+    };
+  }
+
+  return undefined;
+};
@@ -2,13 +2,12 @@ import * as DataModels from "../../Contracts/DataModels";
 import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import { client } from "../CosmosClient";
+import { handleError } from "../ErrorHandlingUtils";
 import { listSqlDatabases } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { listCassandraKeyspaces } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import { listMongoDBDatabases } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import { listGremlinDatabases } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
-import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { userContext } from "../../UserContext";

 export async function readDatabases(): Promise<DataModels.Database[]> {
@@ -18,9 +17,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
     if (
       window.authType === AuthType.AAD &&
       !userContext.useSDKOperations &&
       userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
-      userContext.defaultExperience !== DefaultAccountExperienceType.Table &&
-      userContext.defaultExperience !== DefaultAccountExperienceType.Cassandra
+      userContext.defaultExperience !== DefaultAccountExperienceType.Table
     ) {
       databases = await readDatabasesWithARM();
     } else {
@@ -30,9 +27,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
       databases = sdkResponse.resources as DataModels.Database[];
     }
   } catch (error) {
-    logConsoleError(`Error while querying databases:\n ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadDatabases", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadDatabases", `Error while querying databases`);
     throw error;
   }
   clearMessage();
27 src/Common/dataAccess/readDocument.ts Normal file
@@ -0,0 +1,27 @@
import { Item } from "@azure/cosmos";
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const readDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<Item> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .read();

    return response?.resource;
  } catch (error) {
    handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
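A usage sketch; the collection and document id are assumed to come from the explorer tree selection:

import { CollectionBase } from "../../Contracts/ViewModels";
import DocumentId from "../../Explorer/Tree/DocumentId";
import { readDocument } from "./readDocument";

// Hypothetical viewer: read one item and pretty-print it.
async function openDocument(collection: CollectionBase, documentId: DocumentId): Promise<void> {
  const item = await readDocument(collection, documentId);
  console.log(JSON.stringify(item, undefined, 2));
}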
30 src/Common/dataAccess/readMongoDBCollection.tsx Normal file
@@ -0,0 +1,30 @@
import { userContext } from "../../UserContext";
import { getMongoDBCollection } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import { MongoDBCollectionResource } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { handleError } from "../ErrorHandlingUtils";
import { AuthType } from "../../AuthType";

export async function readMongoDBCollectionThroughRP(
  databaseId: string,
  collectionId: string
): Promise<MongoDBCollectionResource> {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }
  let collection: MongoDBCollectionResource;
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
  try {
    const response = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
    collection = response.properties.resource;
  } catch (error) {
    handleError(error, "ReadMongoDBCollection", `Error while reading container ${collectionId}`);
    throw error;
  }
  clearMessage();
  return collection;
}
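A usage sketch; the resource shape comes from the generated ARM types, and the function returns undefined outside AAD auth:

import { readMongoDBCollectionThroughRP } from "./readMongoDBCollection";

// Hypothetical: fetch the resource-provider-side collection definition.
async function inspectCollection(databaseId: string, collectionId: string): Promise<void> {
  const resource = await readMongoDBCollectionThroughRP(databaseId, collectionId);
  console.log(resource);
}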
29 src/Common/dataAccess/readOfferWithSDK.ts Normal file
@@ -0,0 +1,29 @@
import { HttpHeaders } from "../Constants";
import { Offer } from "../../Contracts/DataModels";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readOffers } from "./readOffers";

export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
  if (!offerId) {
    const offers = await readOffers();
    const offer = offers.find(offer => offer.resource === resourceId);

    if (!offer) {
      return undefined;
    }
    offerId = offer.id;
  }

  const options: RequestOptions = {
    initialHeaders: {
      [HttpHeaders.populateCollectionThroughputInfo]: true
    }
  };
  const response = await client()
    .offer(offerId)
    .read(options);

  return parseSDKOfferResponse(response);
};
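When the caller does not already know the offer id, the helper resolves it by scanning readOffers() for a matching resource link before reading the offer with throughput info populated. A sketch (the self-link value is assumed):

import { readOfferWithSDK } from "./readOfferWithSDK";

// Hypothetical: resolve and read an offer purely by the resource's _self link.
async function readByLink(resourceSelfLink: string): Promise<void> {
  const offer = await readOfferWithSDK(undefined, resourceSelfLink);
  console.log(offer?.manualThroughput ?? offer?.autoscaleMaxThroughput);
}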
@@ -1,26 +1,10 @@
-import { Offer } from "../../Contracts/DataModels";
-import { ClientDefaults } from "../Constants";
-import { MessageTypes } from "../../Contracts/ExplorerContracts";
-import { Platform, configContext } from "../../ConfigContext";
+import { SDKOfferDefinition } from "../../Contracts/DataModels";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { client } from "../CosmosClient";
-import { logConsoleProgress, logConsoleError } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { sendCachedDataMessage } from "../MessageHandler";
-import { sendNotificationForError } from "./sendNotificationForError";
-import { userContext } from "../../UserContext";
+import { handleError, getErrorMessage } from "../ErrorHandlingUtils";

-export const readOffers = async (): Promise<Offer[]> => {
+export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
   const clearMessage = logConsoleProgress(`Querying offers`);
-  try {
-    if (configContext.platform === Platform.Portal) {
-      return sendCachedDataMessage<Offer[]>(MessageTypes.AllOffers, [
-        userContext.databaseAccount.id,
-        ClientDefaults.portalCacheTimeoutMs
-      ]);
-    }
-  } catch (error) {
-    // If error getting cached Offers, continue on and read via SDK
-  }
-
   try {
     const response = await client()
@@ -29,13 +13,11 @@ export const readOffers = async (): Promise<Offer[]> => {
     return response?.resources;
   } catch (error) {
     // This should be removed when we can correctly identify if an account is serverless when connected using connection string too.
-    if (error.message.includes("Reading or replacing offers is not supported for serverless accounts")) {
+    if (getErrorMessage(error)?.includes("Reading or replacing offers is not supported for serverless accounts")) {
       return [];
     }

-    logConsoleError(`Error while querying offers:\n ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadOffers", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadOffers", `Error while querying offers`);
     throw error;
   } finally {
     clearMessage();
@@ -1,27 +1,43 @@
+import { AuthType } from "../../AuthType";
+import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
-import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { client } from "../CosmosClient";
-import { logError } from "../Logger";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { handleError } from "../ErrorHandlingUtils";
+import { listSqlStoredProcedures } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
+import { userContext } from "../../UserContext";

 export async function readStoredProcedures(
   databaseId: string,
   collectionId: string
 ): Promise<(StoredProcedureDefinition & Resource)[]> {
-  let sprocs: (StoredProcedureDefinition & Resource)[];
   const clearMessage = logConsoleProgress(`Querying stored procedures for container ${collectionId}`);
   try {
+    if (
+      window.authType === AuthType.AAD &&
+      !userContext.useSDKOperations &&
+      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
+    ) {
+      const rpResponse = await listSqlStoredProcedures(
+        userContext.subscriptionId,
+        userContext.resourceGroup,
+        userContext.databaseAccount.name,
+        databaseId,
+        collectionId
+      );
+      return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
+    }
+
     const response = await client()
       .database(databaseId)
       .container(collectionId)
       .scripts.storedProcedures.readAll()
       .fetchAll();
-    sprocs = response.resources;
+    return response?.resources;
   } catch (error) {
-    logConsoleError(`Failed to query stored procedures for container ${collectionId}: ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadStoredProcedures", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadStoredProcedures", `Failed to query stored procedures for container ${collectionId}`);
     throw error;
+  } finally {
+    clearMessage();
   }
-  clearMessage();
-  return sprocs;
 }
@@ -1,27 +1,43 @@
+import { AuthType } from "../../AuthType";
+import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import { Resource, TriggerDefinition } from "@azure/cosmos";
-import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { client } from "../CosmosClient";
-import { logError } from "../Logger";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { listSqlTriggers } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
+import { userContext } from "../../UserContext";
+import { handleError } from "../ErrorHandlingUtils";

 export async function readTriggers(
   databaseId: string,
   collectionId: string
 ): Promise<(TriggerDefinition & Resource)[]> {
-  let triggers: (TriggerDefinition & Resource)[];
   const clearMessage = logConsoleProgress(`Querying triggers for container ${collectionId}`);
   try {
+    if (
+      window.authType === AuthType.AAD &&
+      !userContext.useSDKOperations &&
+      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
+    ) {
+      const rpResponse = await listSqlTriggers(
+        userContext.subscriptionId,
+        userContext.resourceGroup,
+        userContext.databaseAccount.name,
+        databaseId,
+        collectionId
+      );
+      return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
+    }
+
     const response = await client()
       .database(databaseId)
       .container(collectionId)
       .scripts.triggers.readAll()
       .fetchAll();
-    triggers = response.resources;
+    return response?.resources;
   } catch (error) {
-    logConsoleError(`Failed to query triggers for container ${collectionId}: ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadTriggers", error.code);
-    sendNotificationForError(error);
+    handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
     throw error;
+  } finally {
+    clearMessage();
   }
-  clearMessage();
-  return triggers;
 }
@@ -1,27 +1,47 @@
+import { AuthType } from "../../AuthType";
+import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
-import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { client } from "../CosmosClient";
-import { logError } from "../Logger";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { handleError } from "../ErrorHandlingUtils";
+import { listSqlUserDefinedFunctions } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
+import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
+import { userContext } from "../../UserContext";

 export async function readUserDefinedFunctions(
   databaseId: string,
   collectionId: string
 ): Promise<(UserDefinedFunctionDefinition & Resource)[]> {
-  let udfs: (UserDefinedFunctionDefinition & Resource)[];
   const clearMessage = logConsoleProgress(`Querying user defined functions for container ${collectionId}`);
   try {
+    if (
+      window.authType === AuthType.AAD &&
+      !userContext.useSDKOperations &&
+      userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
+    ) {
+      const rpResponse = await listSqlUserDefinedFunctions(
+        userContext.subscriptionId,
+        userContext.resourceGroup,
+        userContext.databaseAccount.name,
+        databaseId,
+        collectionId
+      );
+      return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
+    }
+
     const response = await client()
       .database(databaseId)
       .container(collectionId)
       .scripts.userDefinedFunctions.readAll()
       .fetchAll();
-    udfs = response.resources;
+    return response?.resources;
   } catch (error) {
-    logConsoleError(`Failed to query user defined functions for container ${collectionId}: ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "ReadUserDefinedFunctions", error.code);
-    sendNotificationForError(error);
+    handleError(
+      error,
+      "ReadUserDefinedFunctions",
+      `Failed to query user defined functions for container ${collectionId}`
+    );
     throw error;
+  } finally {
+    clearMessage();
   }
-  clearMessage();
-  return udfs;
 }
@@ -1,20 +0,0 @@
import * as Constants from "../Constants";
import { sendMessage } from "../MessageHandler";
import { MessageTypes } from "../../Contracts/ExplorerContracts";

interface CosmosError {
  code: number;
  message?: string;
}

export function sendNotificationForError(error: CosmosError): void {
  if (error && error.code === Constants.HttpStatusCodes.Forbidden) {
    if (error.message && error.message.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
      return;
    }
    sendMessage({
      type: MessageTypes.ForbiddenError,
      reason: error && error.message ? error.message : error
    });
  }
}
@@ -3,7 +3,10 @@ import { Collection } from "../../Contracts/DataModels";
 import { ContainerDefinition } from "@azure/cosmos";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import {
+  CreateUpdateOptions,
   ExtendedResourceProperties,
+  MongoDBCollectionCreateUpdateParameters,
+  MongoDBCollectionResource,
   SqlContainerCreateUpdateParameters,
   SqlContainerResource
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
@@ -23,10 +26,8 @@ import {
   getGremlinGraph
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
-import { logConsoleError, logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import { logError } from "../Logger";
-import { refreshCachedResources } from "../DataAccessUtilityBase";
-import { sendNotificationForError } from "./sendNotificationForError";
+import { handleError } from "../ErrorHandlingUtils";
+import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
 import { userContext } from "../../UserContext";

 export async function updateCollection(
@@ -41,6 +42,7 @@ export async function updateCollection(
   try {
     if (
       window.authType === AuthType.AAD &&
+      !userContext.useSDKOperations &&
       userContext.defaultExperience !== DefaultAccountExperienceType.MongoDB &&
       userContext.defaultExperience !== DefaultAccountExperienceType.Table
     ) {
@@ -50,18 +52,18 @@ export async function updateCollection(
         .database(databaseId)
         .container(collectionId)
         .replace(newCollection as ContainerDefinition, options);

       collection = sdkResponse.resource as Collection;
     }

+    logConsoleInfo(`Successfully updated container ${collectionId}`);
+    return collection;
   } catch (error) {
-    logConsoleError(`Failed to update container ${collectionId}: ${JSON.stringify(error)}`);
-    logError(JSON.stringify(error), "UpdateCollection", error.code);
-    sendNotificationForError(error);
+    handleError(error, "UpdateCollection", `Failed to update container ${collectionId}`);
     throw error;
+  } finally {
+    clearMessage();
   }
-  logConsoleInfo(`Successfully updated container ${collectionId}`);
-  clearMessage();
-  await refreshCachedResources();
-  return collection;
 }

 async function updateCollectionWithARM(
@@ -77,15 +79,6 @@ async function updateCollectionWithARM(
   switch (defaultExperience) {
     case DefaultAccountExperienceType.DocumentDB:
       return updateSqlContainer(databaseId, collectionId, subscriptionId, resourceGroup, accountName, newCollection);
-    case DefaultAccountExperienceType.MongoDB:
-      return updateMongoDBCollection(
-        databaseId,
-        collectionId,
-        subscriptionId,
-        resourceGroup,
-        accountName,
-        newCollection
-      );
     case DefaultAccountExperienceType.Cassandra:
       return updateCassandraTable(databaseId, collectionId, subscriptionId, resourceGroup, accountName, newCollection);
     case DefaultAccountExperienceType.Graph:
@@ -122,26 +115,35 @@ async function updateSqlContainer(
   throw new Error(`Sql container to update does not exist. Database id: ${databaseId} Collection id: ${collectionId}`);
 }

-async function updateMongoDBCollection(
+export async function updateMongoDBCollectionThroughRP(
   databaseId: string,
   collectionId: string,
-  subscriptionId: string,
-  resourceGroup: string,
-  accountName: string,
-  newCollection: Collection
-): Promise<Collection> {
+  newCollection: MongoDBCollectionResource,
+  updateOptions?: CreateUpdateOptions
+): Promise<MongoDBCollectionResource> {
+  const subscriptionId = userContext.subscriptionId;
+  const resourceGroup = userContext.resourceGroup;
+  const accountName = userContext.databaseAccount.name;
+
   const getResponse = await getMongoDBCollection(subscriptionId, resourceGroup, accountName, databaseId, collectionId);
   if (getResponse && getResponse.properties && getResponse.properties.resource) {
-    getResponse.properties.resource = newCollection as SqlContainerResource & ExtendedResourceProperties;
+    const updateParams: MongoDBCollectionCreateUpdateParameters = {
+      properties: {
+        resource: newCollection,
+        options: updateOptions
+      }
+    };
+
     const updateResponse = await createUpdateMongoDBCollection(
       subscriptionId,
       resourceGroup,
       accountName,
       databaseId,
       collectionId,
-      getResponse as SqlContainerCreateUpdateParameters
+      updateParams
     );
-    return updateResponse && (updateResponse.properties.resource as Collection);
+
+    return updateResponse && (updateResponse.properties.resource as MongoDBCollectionResource);
   }

   throw new Error(
32
src/Common/dataAccess/updateDocument.ts
Normal file
32
src/Common/dataAccess/updateDocument.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { CollectionBase } from "../../Contracts/ViewModels";
import { Item } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const updateDocument = async (
  collection: CollectionBase,
  documentId: DocumentId,
  newDocument: Item
): Promise<Item> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .replace(newDocument);

    logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
    return response?.resource;
  } catch (error) {
    handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
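A sketch of the calling pattern; in Data Explorer the collection and documentId arguments come from the tree/tab infrastructure, and the document body below is invented for illustration:

// Illustrative only: replace() overwrites the whole document, so the new body
// must carry every field that should survive, including the id.
import { Item } from "@azure/cosmos";
import { CollectionBase } from "../../Contracts/ViewModels";
import DocumentId from "../../Explorer/Tree/DocumentId";
import { updateDocument } from "./updateDocument";

async function markReviewed(collection: CollectionBase, documentId: DocumentId): Promise<Item> {
  // The cast mirrors updateDocument's loose Item typing above.
  const newBody = { id: documentId.id(), status: "reviewed" } as Item;
  return updateDocument(collection, documentId, newBody);
}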
src/Common/dataAccess/updateOffer.ts (new file, 421 lines)
@@ -0,0 +1,421 @@
import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import { HttpHeaders } from "../Constants";
import { Offer, SDKOfferDefinition, UpdateOfferParams } from "../../Contracts/DataModels";
import { OfferDefinition } from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { parseSDKOfferResponse } from "../OfferUtility";
import { readCollectionOffer } from "./readCollectionOffer";
import { readDatabaseOffer } from "./readDatabaseOffer";
import {
  updateSqlDatabaseThroughput,
  migrateSqlDatabaseToAutoscale,
  migrateSqlDatabaseToManualThroughput,
  migrateSqlContainerToAutoscale,
  migrateSqlContainerToManualThroughput,
  updateSqlContainerThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
  updateCassandraKeyspaceThroughput,
  migrateCassandraKeyspaceToAutoscale,
  migrateCassandraKeyspaceToManualThroughput,
  migrateCassandraTableToAutoscale,
  migrateCassandraTableToManualThroughput,
  updateCassandraTableThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
  updateMongoDBDatabaseThroughput,
  migrateMongoDBDatabaseToAutoscale,
  migrateMongoDBDatabaseToManualThroughput,
  migrateMongoDBCollectionToAutoscale,
  migrateMongoDBCollectionToManualThroughput,
  updateMongoDBCollectionThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
  updateGremlinDatabaseThroughput,
  migrateGremlinDatabaseToAutoscale,
  migrateGremlinDatabaseToManualThroughput,
  migrateGremlinGraphToAutoscale,
  migrateGremlinGraphToManualThroughput,
  updateGremlinGraphThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { userContext } from "../../UserContext";
import {
  migrateTableToAutoscale,
  migrateTableToManualThroughput,
  updateTableThroughput
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";

export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
  let updatedOffer: Offer;
  const offerResourceText: string = params.collectionId
    ? `collection ${params.collectionId}`
    : `database ${params.databaseId}`;
  const clearMessage = logConsoleProgress(`Updating offer for ${offerResourceText}`);

  try {
    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
      if (params.collectionId) {
        updatedOffer = await updateCollectionOfferWithARM(params);
      } else if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
        // update table's database offer with SDK since RP doesn't support it
        updatedOffer = await updateOfferWithSDK(params);
      } else {
        updatedOffer = await updateDatabaseOfferWithARM(params);
      }
    } else {
      updatedOffer = await updateOfferWithSDK(params);
    }
    logConsoleInfo(`Successfully updated offer for ${offerResourceText}`);
    return updatedOffer;
  } catch (error) {
    handleError(error, "UpdateCollection", `Error updating offer for ${offerResourceText}`);
    throw error;
  } finally {
    clearMessage();
  }
};

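For orientation, a sketch of two common call shapes for updateOffer; the UpdateOfferParams field names (and their optionality) are assumed from their use in this file, since the contract in Contracts/DataModels is not shown in this diff:

// Hypothetical callers: `currentOffer` would come from readCollectionOffer in practice.
import { Offer } from "../../Contracts/DataModels";
import { updateOffer } from "./updateOffer";

// Set a container's manual throughput to 1000 RU/s.
const scaleContainer = (databaseId: string, collectionId: string, currentOffer: Offer): Promise<Offer> =>
  updateOffer({ databaseId, collectionId, currentOffer, manualThroughput: 1000 });

// Migrate the same container from manual to autoscale throughput.
const migrateContainerToAutoscale = (databaseId: string, collectionId: string, currentOffer: Offer): Promise<Offer> =>
  updateOffer({ databaseId, collectionId, currentOffer, migrateToAutoPilot: true });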
const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<Offer> => {
  try {
    switch (userContext.defaultExperience) {
      case DefaultAccountExperienceType.DocumentDB:
        await updateSqlContainerOffer(params);
        break;
      case DefaultAccountExperienceType.MongoDB:
        await updateMongoCollectionOffer(params);
        break;
      case DefaultAccountExperienceType.Cassandra:
        await updateCassandraTableOffer(params);
        break;
      case DefaultAccountExperienceType.Graph:
        await updateGremlinGraphOffer(params);
        break;
      case DefaultAccountExperienceType.Table:
        await updateTableOffer(params);
        break;
      default:
        throw new Error(`Unsupported default experience type: ${userContext.defaultExperience}`);
    }
  } catch (error) {
    if (error.code !== "MethodNotAllowed") {
      throw error;
    }
  }

  return await readCollectionOffer({
    collectionId: params.collectionId,
    databaseId: params.databaseId,
    offerId: params.currentOffer.id
  });
};

const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Offer> => {
  try {
    switch (userContext.defaultExperience) {
      case DefaultAccountExperienceType.DocumentDB:
        await updateSqlDatabaseOffer(params);
        break;
      case DefaultAccountExperienceType.MongoDB:
        await updateMongoDatabaseOffer(params);
        break;
      case DefaultAccountExperienceType.Cassandra:
        await updateCassandraKeyspaceOffer(params);
        break;
      case DefaultAccountExperienceType.Graph:
        await updateGremlinDatabaseOffer(params);
        break;
      default:
        throw new Error(`Unsupported default experience type: ${userContext.defaultExperience}`);
    }
  } catch (error) {
    if (error.code !== "MethodNotAllowed") {
      throw error;
    }
  }

  return await readDatabaseOffer({
    databaseId: params.databaseId,
    offerId: params.currentOffer.id
  });
};

const updateSqlContainerOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateSqlContainerToAutoscale(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else if (params.migrateToManual) {
    await migrateSqlContainerToManualThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateSqlContainerThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId,
      body
    );
  }
};

const updateMongoCollectionOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateMongoDBCollectionToAutoscale(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else if (params.migrateToManual) {
    await migrateMongoDBCollectionToManualThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateMongoDBCollectionThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId,
      body
    );
  }
};

const updateCassandraTableOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateCassandraTableToAutoscale(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else if (params.migrateToManual) {
    await migrateCassandraTableToManualThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateCassandraTableThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId,
      body
    );
  }
};

const updateGremlinGraphOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateGremlinGraphToAutoscale(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else if (params.migrateToManual) {
    await migrateGremlinGraphToManualThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId
    );
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateGremlinGraphThroughput(
      subscriptionId,
      resourceGroup,
      accountName,
      params.databaseId,
      params.collectionId,
      body
    );
  }
};

const updateTableOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateTableToAutoscale(subscriptionId, resourceGroup, accountName, params.collectionId);
  } else if (params.migrateToManual) {
    await migrateTableToManualThroughput(subscriptionId, resourceGroup, accountName, params.collectionId);
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateTableThroughput(subscriptionId, resourceGroup, accountName, params.collectionId, body);
  }
};

const updateSqlDatabaseOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateSqlDatabaseToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else if (params.migrateToManual) {
    await migrateSqlDatabaseToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateSqlDatabaseThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
  }
};

const updateMongoDatabaseOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateMongoDBDatabaseToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else if (params.migrateToManual) {
    await migrateMongoDBDatabaseToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateMongoDBDatabaseThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
  }
};

const updateCassandraKeyspaceOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateCassandraKeyspaceToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else if (params.migrateToManual) {
    await migrateCassandraKeyspaceToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateCassandraKeyspaceThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
  }
};

const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<void> => {
  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;

  if (params.migrateToAutoPilot) {
    await migrateGremlinDatabaseToAutoscale(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else if (params.migrateToManual) {
    await migrateGremlinDatabaseToManualThroughput(subscriptionId, resourceGroup, accountName, params.databaseId);
  } else {
    const body: ThroughputSettingsUpdateParameters = createUpdateOfferBody(params);
    await updateGremlinDatabaseThroughput(subscriptionId, resourceGroup, accountName, params.databaseId, body);
  }
};

const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
  const body: ThroughputSettingsUpdateParameters = {
    properties: {
      resource: {}
    }
  };

  if (params.autopilotThroughput) {
    body.properties.resource.autoscaleSettings = {
      maxThroughput: params.autopilotThroughput
    };
  } else {
    body.properties.resource.throughput = params.manualThroughput;
  }

  return body;
};

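Traced through createUpdateOfferBody, the two possible ARM payload shapes look like this (throughput values illustrative):

// params.autopilotThroughput = 4000  ->  autoscale payload
// { properties: { resource: { autoscaleSettings: { maxThroughput: 4000 } } } }

// params.manualThroughput = 1000    ->  fixed-throughput payload
// { properties: { resource: { throughput: 1000 } } }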
const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> => {
  const sdkOfferDefinition = params.currentOffer.offerDefinition;
  const newOffer: SDKOfferDefinition = {
    content: {
      offerThroughput: undefined,
      offerIsRUPerMinuteThroughputEnabled: false
    },
    _etag: undefined,
    _ts: undefined,
    _rid: sdkOfferDefinition._rid,
    _self: sdkOfferDefinition._self,
    id: sdkOfferDefinition.id,
    offerResourceId: sdkOfferDefinition.offerResourceId,
    offerVersion: sdkOfferDefinition.offerVersion,
    offerType: sdkOfferDefinition.offerType,
    resource: sdkOfferDefinition.resource
  };

  if (params.autopilotThroughput) {
    newOffer.content.offerAutopilotSettings = {
      maxThroughput: params.autopilotThroughput
    };
  } else {
    newOffer.content.offerThroughput = params.manualThroughput;
  }

  const options: RequestOptions = {};
  if (params.migrateToAutoPilot) {
    options.initialHeaders = {
      [HttpHeaders.migrateOfferToAutopilot]: "true"
    };
    delete newOffer.content.offerAutopilotSettings;
  } else if (params.migrateToManual) {
    options.initialHeaders = {
      [HttpHeaders.migrateOfferToManualThroughput]: "true"
    };
    newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
  }

  const sdkResponse = await client()
    .offer(params.currentOffer.id)
    // TODO Remove casting when SDK types are fixed (https://github.com/Azure/azure-sdk-for-js/issues/10660)
    .replace((newOffer as unknown) as OfferDefinition, options);

  return parseSDKOfferResponse(sdkResponse);
};
@@ -1,25 +0,0 @@
import { updateOfferThroughputBeyondLimit } from "./updateOfferThroughputBeyondLimit";

describe("updateOfferThroughputBeyondLimit", () => {
  it("should call fetch", async () => {
    window.fetch = jest.fn(() => {
      return {
        ok: true
      };
    });
    window.dataExplorer = {
      logConsoleData: jest.fn(),
      deleteInProgressConsoleDataWithId: jest.fn()
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    } as any;
    await updateOfferThroughputBeyondLimit({
      subscriptionId: "foo",
      resourceGroup: "foo",
      databaseAccountName: "foo",
      databaseName: "foo",
      throughput: 1000000000,
      offerIsRUPerMinuteThroughputEnabled: false
    });
    expect(window.fetch).toHaveBeenCalled();
  });
});
@@ -1,51 +0,0 @@
import { Platform, configContext } from "../../ConfigContext";
import { getAuthorizationHeader } from "../../Utils/AuthorizationUtils";
import { AutoPilotOfferSettings } from "../../Contracts/DataModels";
import { logConsoleProgress, logConsoleInfo, logConsoleError } from "../../Utils/NotificationConsoleUtils";
import { HttpHeaders } from "../Constants";

interface UpdateOfferThroughputRequest {
  subscriptionId: string;
  resourceGroup: string;
  databaseAccountName: string;
  databaseName: string;
  collectionName?: string;
  throughput: number;
  offerIsRUPerMinuteThroughputEnabled: boolean;
  offerAutopilotSettings?: AutoPilotOfferSettings;
}

export async function updateOfferThroughputBeyondLimit(request: UpdateOfferThroughputRequest): Promise<void> {
  if (configContext.platform !== Platform.Portal) {
    throw new Error("Updating throughput beyond specified limit is not supported on this platform");
  }

  const resourceDescriptionInfo = request.collectionName
    ? `database ${request.databaseName} and container ${request.collectionName}`
    : `database ${request.databaseName}`;

  const clearMessage = logConsoleProgress(
    `Requesting increase in throughput to ${request.throughput} for ${resourceDescriptionInfo}`
  );

  const url = `${configContext.BACKEND_ENDPOINT}/api/offerthroughputrequest/updatebeyondspecifiedlimit`;
  const authorizationHeader = getAuthorizationHeader();

  const response = await fetch(url, {
    method: "POST",
    body: JSON.stringify(request),
    headers: { [authorizationHeader.header]: authorizationHeader.token, [HttpHeaders.contentType]: "application/json" }
  });

  if (response.ok) {
    logConsoleInfo(
      `Successfully requested an increase in throughput to ${request.throughput} for ${resourceDescriptionInfo}`
    );
    clearMessage();
    return undefined;
  }
  const error = await response.json();
  logConsoleError(`Failed to request an increase in throughput for ${request.throughput}: ${error.message}`);
  clearMessage();
  throw new Error(error.message);
}