Compare commits

...

86 Commits

Author SHA1 Message Date
Balaji Sridharan
5b0a98dce7 Removing TODO comments 2021-02-17 10:43:23 -08:00
Balaji Sridharan
67ebce444f Stylecop changes 2021-02-16 00:21:44 -08:00
Balaji Sridharan
a09bcc7197 Merge branch 'users/fnbalaji/PortalChangesForDGW' of https://github.com/Azure/cosmos-explorer into users/fnbalaji/PortalChangesForDGW 2021-02-15 23:49:54 -08:00
Balaji Sridharan
b2390e23e7 Portal changes for DedicatedGateway. CR feedback 2021-02-15 23:49:14 -08:00
fnbalaji
f922103e5c Merge branch 'master' into users/fnbalaji/PortalChangesForDGW 2021-02-15 23:39:50 -08:00
victor-meng
22d8a7a1be Move database settings tab to react (#386)
Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2021-02-10 16:06:14 -06:00
victor-meng
4210e0752b Move delete collection confirmation pane to react (#417) 2021-02-10 13:44:00 -08:00
Steve Faulkner
b217d4be1b Delete Cassandra tables/keyspaces via ARM (#436) 2021-02-08 18:52:53 -06:00
victor-meng
81fd442fad Make getCollectionDataUsageSize call fail gracefully (#434) 2021-02-08 16:02:02 -08:00
Steve Faulkner
87f7dd2230 Rename Feedback -> Report Issue (#425)
Co-authored-by: victor-meng <56978073+victor-meng@users.noreply.github.com>
2021-02-08 14:23:55 -06:00
Srinath Narayanan
9926fd97a2 Test explorer changes (#420)
* Changes to publish pane

* fixed format errors

* fixed failing test

* added test explorer changes for mongo accounts

* added log for test

* fixed lint errors

* added secrets to ci.yml file

* fixed failing self serve test
2021-02-08 09:42:16 -08:00
Balaji Sridharan
faa98de9e9 Portal changes for DedicatedGateway
Changes to support creation and deletion of DedicatedGateway resource.

Tested locally with various scenarios.
2021-02-08 05:16:10 -08:00
Tanuj Mittal
2a7546e0de Skip SelfServe e2e test (#432) 2021-02-06 02:30:46 +05:30
Tanuj Mittal
4b442dd869 Use GET instead of PATCH for some Juno endpoints (#431) 2021-02-06 02:01:34 +05:30
Tanuj Mittal
f0b4737313 Update gallery colors (#430)
* Update gallery colors

* fix lint error

* Fix test
2021-02-06 01:19:36 +05:30
Srinath Narayanan
8dc5ed590a Added Spinner for public gallery (#427)
* Added more publish changes

* addressed PR comments

* fixed lint errors

Co-authored-by: Tanuj Mittal <tamitta@microsoft.com>
2021-02-05 11:32:26 -08:00
Tanuj Mittal
afaa844d28 Telemetry updates (#429) 2021-02-06 00:26:20 +05:30
Tanuj Mittal
3e5a876ef2 Fix setting isGalleryPublishEnabled when flight is enabled (#428) 2021-02-05 20:55:08 +05:30
Tanuj Mittal
51abf1560a Disable dark overlay for Dialog (#423) 2021-02-05 19:28:57 +05:30
Srinath Narayanan
1c0fed88c0 Changes to cards in notebook gallery (#422)
* added gallery changes

* addressed PR comments
2021-02-05 02:32:55 -08:00
Tanuj Mittal
93cfd52e36 Update Code of Conduct Overlay and other minor changes (#424)
* Use light theme for coc-overlay

* Updates

* Fix vertical height for COC overlay
2021-02-05 14:56:50 +05:30
Tanuj Mittal
3fd014ddad Disable caching for config.json file (#421)
* Disable caching for config.json file

* Disable cache when fetching config.json
2021-02-04 15:25:13 +05:30
Srinath Narayanan
3b6fda4fa5 Changes to notebook publish pane (#419)
* Changes to publish pane

* fixed format errors

* fixed failing test

Co-authored-by: Tanuj Mittal <tamitta@microsoft.com>
2021-02-03 10:46:51 -08:00
Tanuj Mittal
db7c45c9b8 Enable gallery publishing in MPAC (#416)
* Enable gallery publishing in MPAC

* Address feedback

* Use ENABLE_GALLERY_PUBLISH config in standalone gallery

* Fix test
2021-02-03 23:42:11 +05:30
Tanuj Mittal
4f6b75fe79 More gallery updates (#418)
* More gallery updates

* Add PublishContent icon

* Address feedback
2021-02-03 23:24:27 +05:30
Tanuj Mittal
5038a01079 Add telemetry for Notebooks Gallery and other updates (#413)
* Add telemetry for Notebooks Gallery

* More changes

* Address feedback and fix lint error

* Fix margins for My published work
2021-02-03 14:48:50 +05:30
Tanuj Mittal
e0063c76d9 Add support for gallerypublish flight (#412) 2021-02-03 02:05:19 +05:30
Tanuj Mittal
9278654479 Public gallery improvements (#409)
- [x] Don't show extension in name field for publish
- [x] Open "Your published work" tab after publishing
- [x] Continue showing dialog for Report Abuse status
- [x] When showing the COC in the Public Gallery tab, show the thumbnails as a backdrop
- [x] Liked -> My Favorites & Your published work -> My published work
2021-01-29 17:04:38 +00:00
Tanuj Mittal
59113d7bbf Update Juno endpoints (#405) 2021-01-29 20:28:20 +05:30
Tim Sander
88d8200c14 Check that customer is using Mongo 3.6 before applying index everything policy (#410) 2021-01-28 15:26:47 -06:00
Srinath Narayanan
6aaddd9c60 Added localization for the Self Serve Model (#406)
* added localization for selfserve model

* added comment

* addressed PR comments

* fixed format errors

* Addressed PR comments
2021-01-28 11:17:02 -08:00
Jordi Bunster
f8ede0cc1e Remove Q from ViewModels (#390)
I got cold feet at the thought of merging #324 in one go, so I'm going to split it into smaller chunks and keep rebasing the large one until there's no more Q.
2021-01-28 18:13:26 +00:00
Laurent Nguyen
bddb288a89 Update package versions and package-lock.json (#404)
The file `package-lock.json` is not in sync with `package.json` anymore. This causes build issues when upgrading a package.
This change syncs `package-lock.json` and fixes the build issues.
2021-01-28 08:50:24 +00:00
Steve Faulkner
a14d20a88e Fix applyExplorerBindings call in Portal (#408) 2021-01-27 20:37:14 -06:00
Steve Faulkner
f1db1ed978 Region Select Button (#407) 2021-01-27 15:32:53 -06:00
Laurent Nguyen
86a483c3a4 Fix notebook cell selection bug (#402)
This fixes a bug that prevents a text cell from getting focus (effectively preventing editing) when the window height is small and a neighboring code cell is double-clicked.
Selecting a text cell is broken, likely because of a behavior change in MonacoEditor that keeps focus on the code cell. The selection issue will probably be resolved when the text cell is migrated to Monaco (which will acquire and keep focus the same way); for now, this disables the faulty code, which no longer appears to work (presumably the auto-scrolling to the cell).
2021-01-27 09:09:54 +00:00
Tanuj Mittal
263262a040 Update Juno endpoints to pass subscriptionId (#339)
The corresponding [server side change](https://msdata.visualstudio.com/CosmosDB/_git/CosmosDB-portal/pullrequest/464443?_a=overview) has been deployed to Prod, so we can now go ahead with the DE-side changes.
2021-01-27 08:08:58 +00:00
victor-meng
bd4d8da065 Move notification console to react (#400) 2021-01-26 15:32:37 -08:00
Steve Faulkner
59ec18cd9b Add basic static code metrics (#396) 2021-01-26 13:13:13 -06:00
Srinath Narayanan
49bf8c60db Added more Self Serve functionalities (#401)
* added recursion and inition decorators

* working version

* added todo comment and removed console.log

* Added Recursive add

* removed type requirement

* proper resolution of promises

* added custom element and base class

* Made selfServe standalone page

* Added custom renderer as async type

* Added overall defaults

* added initial open from data explorer

* removed landingpage

* added feature for self serve type

* renamed sqlx->example and added invalid type

* Added comments for Example

* removed unnecessary changes

* Resolved PR comments

Added tests
Moved onSubmit and initialize inside base class
Moved testExplorer to separate folder
made fields of SelfServe Class non static

* fixed lint errors

* fixed compilation errors

* Removed reactbinding changes

* renamed dropdown -> choice

* Added SelfServeComponent

* Addressed PR comments

* added toggle, visibility, text display, commandbar

* added sqlx example

* added onRefresh

* formatting changes

* removed radioswitch display

* updated smartui tests

* Added more tests

* onSubmit -> onSave

* Resolved PR comments
2021-01-26 09:44:14 -08:00
Steve Faulkner
b0b973b21a Refactor explorer config into useKnockoutExplorer hook (#397)
Co-authored-by: Steve Faulkner <stfaul@microsoft.com>
2021-01-25 13:56:15 -06:00
Chris-MS-896
3529e80f0d no message (#398) 2021-01-22 10:02:35 -06:00
Srinath Narayanan
a298fd8389 Added message to indicate compound indexes are not supported in Mongo Index editor (#395)
* mongo message

* Added test and bug fix in Main.tsx

* format changes

* added new formatting

* added null check
2021-01-21 10:56:05 -08:00
Steve Faulkner
1ecc467f60 Remove IE nuget (#394) 2021-01-20 12:46:12 -06:00
Steve Faulkner
b3cafe3468 Add telemetry to Spark+Synapse Pools (#392) 2021-01-20 11:08:29 -06:00
Steve Faulkner
4be53284b5 Prettier 2.0 (#393) 2021-01-20 09:15:01 -06:00
Srinath Narayanan
c1937ca464 Added the Self Serve Data Model (#367)
* added recursion and inition decorators

* working version

* added todo comment and removed console.log

* Added Recursive add

* removed type requirement

* proper resolution of promises

* added custom element and base class

* Made selfServe standalone page

* Added custom renderer as async type

* Added overall defaults

* added initial open from data explorer

* removed landingpage

* added feature for self serve type

* renamed sqlx->example and added invalid type

* Added comments for Example

* removed unnecessary changes

* Resolved PR comments

Added tests
Moved onSubmit and initialize inside base class
Moved testExplorer to separate folder
made fields of SelfServe Class non static

* fixed lint errors

* fixed compilation errors

* Removed reactbinding changes

* renamed dropdown -> choice

* Added SelfServeComponent

* Addressed PR comments

* merged master

* added selfservetype.none for emulator and hosted experience

* fixed formatting errors

* Removed "any" type

* undid package.json changes
2021-01-19 22:42:45 -08:00
Steve Faulkner
2b2de7c645 Migrated Hosted Explorer to React (#360)
Co-authored-by: Victor Meng <vimeng@microsoft.com>
Co-authored-by: Steve Faulkner <stfaul@microsoft.com>
2021-01-19 16:31:55 -06:00
Deborah Chen
8c40df0fa1 Adding in experimentation for autoscale test (#345)
* Adding autoscale flight info

* Add flight info to cassandra collection pane

* Add telemetry for autoscale toggle on/off in create resource blade and scale/settings

* Run formatting and add expected properties to test file

* removing empty line

* Updating to pass unit tests

Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2021-01-15 17:15:15 -06:00
Steve Faulkner
fcbc9474ea Remove Preview for Synapse Link (#389) 2021-01-15 09:51:14 -06:00
Steve Faulkner
81f861af39 Empty commit to refresh nuget after transient failures 2021-01-14 17:37:24 -06:00
victor-meng
9afa29cdb6 Properly construct the query to delete Cassandra row (#388) 2021-01-14 16:59:31 -06:00
Chris-MS-896
9a1e8b2d87 Add rest of three utils files to Master (#370)
* 'minor change'
2021-01-13 17:49:06 -06:00
Tim Sander
babda4d9cb fix issue where Mongo indexing checkbox stops adding wildcard index (#384) 2021-01-12 18:38:16 -06:00
Steve Faulkner
9d20a13dd4 Warn on SubQuery (#378) 2021-01-12 13:53:15 -06:00
Chris-MS-896
3effbe6991 no message (#372) 2021-01-12 13:09:20 -06:00
Chris-MS-896
af53697ff4 Add file of Terminal to Master (#371)
* "minor changes"
2021-01-12 12:55:47 -06:00
Chris-MS-896
b1ad80480e Add two files of notebook component in Master (#363)
* “minor changes”
2021-01-12 12:55:21 -06:00
Armando Trejo Oliver
9247a6c4a2 A11y fixes - Add a skip link and remove duplicate ids (#381)
* Add a skip link to give people who navigate sequentially through content more direct access to the primary content of the Data Explorer

Co-authored-by: Chris Cao (Zen3 Infosolutions America Inc) <v-yiqcao@microsoft.com>

* Rename the id of the partition key field in the Add Collection Pane to ensure no elements contain duplicate id attributes.

Co-authored-by: Chris Cao (Zen3 Infosolutions America Inc) <v-yiqcao@microsoft.com>
2021-01-12 09:55:04 -08:00
Steve Faulkner
767d46480e Revert TablesEntitiyListViewModel changes (#382) 2021-01-11 16:16:40 -06:00
Chris-MS-896
2d98c5d269 add ArraysByKeyCache.ts (#366)
* 'add ArraysByKeyCache'
* "minor change"
2021-01-08 22:51:50 -06:00
Steve Faulkner
6627172a52 Add Architecture Diagram to README (#380) 2021-01-08 22:20:40 -06:00
Steve Faulkner
19fa5e17a5 Fix JSONEditor bug with undefined value (#379) 2021-01-08 22:20:06 -06:00
Chris-MS-896
a4a367a212 Add all ARM request related files to Master (#373)
* “minor changes”
* 'changes for unit test'
2021-01-08 21:56:29 -06:00
Chris-MS-896
983c9201bb Add two files of GraphExplorer component in Master (#365) 2021-01-08 21:14:53 -06:00
Chris-MS-896
76d7f00a90 Add two files of Table to master (#364) 2021-01-08 20:56:59 -06:00
Chris-MS-896
6490597736 add CollapsiblePanel/CollapsiblePanelComponent.ts and /ErrorDisplayComponent to Master (#357) 2021-01-08 20:29:15 -06:00
Chris-MS-896
229119e697 add file offerUtility to tsconfig (#356) 2021-01-08 20:14:12 -06:00
Steve Faulkner
ceefd7c615 Fix Conflict Resolution path setting (#377)
* Fix Conflict Resolution path setting

* Fix test
2021-01-08 12:36:44 -06:00
Laurent Nguyen
6e619175c6 Fix missing scrollbar in left pane when too many collections/notebooks (#375)
Constrain the left pane container to height: 100% so that the scrollbar shows up when the content overflows.
The `main` classname seems too generic, but I left it alone (so I don't break anything), since this part will eventually be ported to React.
2021-01-08 14:00:26 +00:00
victor-meng
08e8bf4bcf Fix two settings tab issues (#374) 2021-01-07 15:38:13 -06:00
Chris-MS-896
89dc0f394b Add Splitter file to Master (#358) 2021-01-06 12:51:42 -06:00
Chris-MS-896
30e0001b7f no message (#359) 2021-01-05 16:45:13 -06:00
Steve Faulkner
4a8f408112 Add UX for Mongo indexing experiment (#368)
Co-authored-by: Tim Sander <tisande@microsoft.com>
2021-01-05 16:04:55 -06:00
Armando Trejo Oliver
e801364800 Remove stale .main class from tree.less (#362)
The .main CSS class has a naming conflict with Monaco editor CSS classes, and this is causing a11y issues with the Monaco editor.

This class should no longer be used since we moved to the new tree component in React, so I am removing it. From my testing, this does not affect anything.

If we find any styling issue later, we should fix without adding back this class.
2021-01-05 10:53:55 -08:00
victor-meng
a55f2d0de9 Free tier improvements in DE (#348)
Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2021-01-04 12:56:55 -08:00
Steve Faulkner
d40b1aa9b5 Remove Empty Query Logging (#361) 2021-01-04 13:58:01 -06:00
Steve Faulkner
cc63cdc1fd Remove dependency on canvas (#354) 2020-12-26 21:56:37 -06:00
Steve Faulkner
c3058ee5a9 Check for undefined query results (#350) 2020-12-18 19:55:32 -06:00
Steve Faulkner
b000631a0c Revert web.config changes (#349) 2020-12-18 19:26:10 -06:00
vchske
e8f4c8f93c Cost Estimate Changes (#342)
* Initial change of estimated cost to table format

* Converted cost estimate to table format and added different data for current vs updated cost estimates.

* lint fixes

* Changed the names of some interfaces

* Refactored a unit call to use an argument interface to avoid future confusion.

* Changed the severity of the save warning

* Format fix

* Fixed test due to styling change

Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2020-12-18 16:15:55 -08:00
Steve Faulkner
16bde97e47 Rewrite URL for IE users (#340) 2020-12-18 16:08:40 -06:00
Steve Faulkner
6da43ee27b Publish IE specific Nuget package (#347)
* Publish IE specific Nuget package

* Require a11y tests to pass
2020-12-17 17:41:38 -06:00
Gahl Levy
ebae484b8f Fix duplicate settings tabs (#343)
Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2020-12-17 13:01:36 -08:00
Steve Faulkner
dfb1b50621 Explorer.ts Cleanup (#341)
Co-authored-by: victor-meng <56978073+victor-meng@users.noreply.github.com>
2020-12-16 20:00:39 -06:00
victor-meng
f54e8eb692 Move queryDocuments out of DataAccessUtility (#334) 2020-12-16 15:27:17 -08:00
603 changed files with 58571 additions and 54392 deletions


@@ -14,7 +14,6 @@ src/Common/DataAccessUtilityBase.ts
src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
@@ -43,7 +42,6 @@ src/Contracts/ViewModels.ts
src/Controls/Heatmap/Heatmap.test.ts
src/Controls/Heatmap/Heatmap.ts
src/Controls/Heatmap/HeatmapDatatypes.ts
src/Definitions/adal.d.ts
src/Definitions/datatables.d.ts
src/Definitions/gif.d.ts
src/Definitions/globals.d.ts
@@ -89,7 +87,7 @@ src/Explorer/DataSamples/ContainerSampleGenerator.test.ts
src/Explorer/DataSamples/ContainerSampleGenerator.ts
src/Explorer/DataSamples/DataSamplesUtil.test.ts
src/Explorer/DataSamples/DataSamplesUtil.ts
src/Explorer/Explorer.ts
src/Explorer/Explorer.tsx
src/Explorer/Graph/GraphExplorerComponent/ArraysByKeyCache.test.ts
src/Explorer/Graph/GraphExplorerComponent/ArraysByKeyCache.ts
src/Explorer/Graph/GraphExplorerComponent/D3ForceGraph.test.ts
@@ -243,9 +241,6 @@ src/Platform/Hosted/Authorization.ts
src/Platform/Hosted/DataAccessUtility.ts
src/Platform/Hosted/ExplorerFactory.ts
src/Platform/Hosted/Helpers/ConnectionStringParser.test.ts
src/Platform/Hosted/Helpers/ConnectionStringParser.ts
src/Platform/Hosted/HostedUtils.test.ts
src/Platform/Hosted/HostedUtils.ts
src/Platform/Hosted/Main.ts
src/Platform/Hosted/Maint.test.ts
src/Platform/Hosted/NotificationsClient.ts


@@ -1,41 +1,39 @@
module.exports = {
env: {
browser: true,
es6: true
es6: true,
},
plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
globals: {
Atomics: "readonly",
SharedArrayBuffer: "readonly"
SharedArrayBuffer: "readonly",
},
parser: "@typescript-eslint/parser",
parserOptions: {
ecmaFeatures: {
jsx: true
jsx: true,
},
ecmaVersion: 2018,
sourceType: "module"
sourceType: "module",
},
overrides: [
{
files: ["**/*.tsx"],
env: {
jest: true
},
extends: ["plugin:react/recommended"],
plugins: ["react"]
extends: ["plugin:react/recommended"], // TODO: Add react-hooks
plugins: ["react"],
},
{
files: ["**/*.{test,spec}.{ts,tsx}"],
env: {
jest: true
jest: true,
},
extends: ["plugin:jest/recommended"],
plugins: ["jest"]
}
plugins: ["jest"],
},
],
rules: {
"no-console": ["error", { allow: ["error", "warn", "dir"] }],
curly: "error",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/no-extraneous-class": "error",
@@ -43,12 +41,13 @@ module.exports = {
"@typescript-eslint/no-explicit-any": "error",
"prefer-arrow/prefer-arrow-functions": ["error", { allowStandaloneDeclarations: true }],
eqeqeq: "error",
"react/display-name": "off",
"no-restricted-syntax": [
"error",
{
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'"
}
]
}
message: "Do not use JSON.stringify(error). It will print '{}'",
},
],
},
};
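
The `no-restricted-syntax` rule in this config exists because `Error` objects serialize badly: `message` and `stack` are non-enumerable properties, so `JSON.stringify(error)` really does produce the string "{}" and the useful detail is lost. A minimal TypeScript sketch of the pitfall and a safer pattern (illustrative only, not code from this repo):

// Illustrative only; not code from this repo.
const err = new Error("Request to the backend failed");
JSON.stringify(err); // '{}' because message and stack are non-enumerable
JSON.stringify(err.message); // '"Request to the backend failed"'; stringify the fields you need
console.error("Request failed", err); // or hand the Error object itself to the logger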


@@ -9,6 +9,20 @@ on:
branches:
- master
jobs:
codemetrics:
runs-on: ubuntu-latest
name: "Log Code Metrics"
if: github.ref == 'refs/heads/master'
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- run: npm ci
- run: node utils/codeMetrics.js
env:
CODE_METRICS_APP_ID: ${{ secrets.CODE_METRICS_APP_ID }}
compile:
runs-on: ubuntu-latest
name: "Compile TypeScript"
@@ -94,13 +108,14 @@ jobs:
npm ci
npm start &
npm run wait-for-server
npx jest -c ./jest.config.e2e.js --detectOpenHandles sql
npx jest -c ./jest.config.e2e.js --detectOpenHandles test/sql/container.spec.ts
shell: bash
env:
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
PLATFORM: "Emulator"
NODE_TLS_REJECT_UNAUTHORIZED: 0
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failed-*
@@ -141,6 +156,7 @@ jobs:
run: |
npm ci
npm start &
node utils/cleanupDBs.js
npm run wait-for-server
npm run test:e2e
shell: bash
@@ -150,6 +166,8 @@ jobs:
PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT }}
PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY }}
NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
@@ -159,13 +177,14 @@ jobs:
TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failed-*
nuget:
name: Publish Nuget
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -189,7 +208,7 @@ jobs:
nugetmpac:
name: Publish Nuget MPAC
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}

.vs/slnx.sqlite (new binary file; contents not shown)


@@ -13,29 +13,18 @@ UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), ht
### Watch mode
Run `npm run watch` to start the development server and automatically rebuild on changes
Run `npm start` to start the development server and automatically rebuild on changes
### Specifying Development Platform
### Hosted Development (https://cosmos.azure.com)
Setting the environment variable `PLATFORM` during the build process will force the explorer to load the specified platform. By default in development it will run in `Hosted` mode. Valid options:
- Hosted
- Emulator
- Portal
`PLATFORM=Emulator npm run watch`
### Hosted Development
The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
To run pure hosted mode, in `webpack.config.js` change index HtmlWebpackPlugin to use hostedExplorer.html and change entry for index to use HostedExplorer.ts.
- Visit: `https://localhost:1234/hostedExplorer.html`
- Local sign in via AAD will NOT work. Connection string only in dev mode. Use the Portal if you need AAD auth.
- The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
### Emulator Development
In a Windows environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-Windows environment, you can specify an alternate endpoint using `EMULATOR_ENDPOINT`, and the webpack dev server will proxy requests for you.
`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
- Start the Cosmos Emulator
- Visit: https://localhost:1234/index.html
#### Setting up a Remote Emulator
@@ -55,16 +44,8 @@ The Cosmos emulator currently only runs in Windows environments. You can still d
### Portal Development
The Cosmos Portal that consumes this repo is not currently open source. If you have access to this project, `npm run build` will copy the built files over to the portal where they will be loaded by the portal development environment
You can however load a local running instance of data explorer in the production portal.
1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Allowlist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
Live reload will occur, but data explorer will not properly integrate again with the parent iframe. You will have to manually reload the page.
- Visit: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
- You may have to manually visit https://localhost:1234/explorer.html first and click through any SSL certificate warnings
### Testing
@@ -88,6 +69,10 @@ Jest and Puppeteer are used for end to end browser based tests and are contained
We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
### Architecture
[![](https://mermaid.ink/img/eyJjb2RlIjoiZ3JhcGggTFJcbiAgaG9zdGVkKGh0dHBzOi8vY29zbW9zLmF6dXJlLmNvbSlcbiAgcG9ydGFsKFBvcnRhbClcbiAgZW11bGF0b3IoRW11bGF0b3IpXG4gIGFhZFtBQURdXG4gIHJlc291cmNlVG9rZW5bUmVzb3VyY2UgVG9rZW5dXG4gIGNvbm5lY3Rpb25TdHJpbmdbQ29ubmVjdGlvbiBTdHJpbmddXG4gIHBvcnRhbFRva2VuW0VuY3J5cHRlZCBQb3J0YWwgVG9rZW5dXG4gIG1hc3RlcktleVtNYXN0ZXIgS2V5XVxuICBhcm1bQVJNIFJlc291cmNlIFByb3ZpZGVyXVxuICBkYXRhcGxhbmVbRGF0YSBQbGFuZV1cbiAgcHJveHlbUG9ydGFsIEFQSSBQcm94eV1cbiAgc3FsW1NRTF1cbiAgbW9uZ29bTW9uZ29dXG4gIHRhYmxlc1tUYWJsZXNdXG4gIGNhc3NhbmRyYVtDYXNzYW5kcmFdXG4gIGdyYWZbR3JhcGhdXG5cblxuICBlbXVsYXRvciAtLT4gbWFzdGVyS2V5IC0tLS0-IGRhdGFwbGFuZVxuICBwb3J0YWwgLS0-IGFhZFxuICBob3N0ZWQgLS0-IHBvcnRhbFRva2VuICYgcmVzb3VyY2VUb2tlbiAmIGNvbm5lY3Rpb25TdHJpbmcgJiBhYWRcbiAgYWFkIC0tLT4gYXJtXG4gIGFhZCAtLS0-IGRhdGFwbGFuZVxuICBhYWQgLS0tPiBwcm94eVxuICByZXNvdXJjZVRva2VuIC0tLT4gc3FsIC0tPiBkYXRhcGxhbmVcbiAgcG9ydGFsVG9rZW4gLS0tPiBwcm94eVxuICBwcm94eSAtLT4gZGF0YXBsYW5lXG4gIGNvbm5lY3Rpb25TdHJpbmcgLS0-IHNxbCAmIG1vbmdvICYgY2Fzc2FuZHJhICYgZ3JhZiAmIHRhYmxlc1xuICBzcWwgLS0-IGRhdGFwbGFuZVxuICB0YWJsZXMgLS0-IGRhdGFwbGFuZVxuICBtb25nbyAtLT4gcHJveHlcbiAgY2Fzc2FuZHJhIC0tPiBwcm94eVxuICBncmFmIC0tPiBwcm94eVxuXG5cdFx0IiwibWVybWFpZCI6eyJ0aGVtZSI6ImRlZmF1bHQifSwidXBkYXRlRWRpdG9yIjpmYWxzZX0)](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoiZ3JhcGggTFJcbiAgaG9zdGVkKGh0dHBzOi8vY29zbW9zLmF6dXJlLmNvbSlcbiAgcG9ydGFsKFBvcnRhbClcbiAgZW11bGF0b3IoRW11bGF0b3IpXG4gIGFhZFtBQURdXG4gIHJlc291cmNlVG9rZW5bUmVzb3VyY2UgVG9rZW5dXG4gIGNvbm5lY3Rpb25TdHJpbmdbQ29ubmVjdGlvbiBTdHJpbmddXG4gIHBvcnRhbFRva2VuW0VuY3J5cHRlZCBQb3J0YWwgVG9rZW5dXG4gIG1hc3RlcktleVtNYXN0ZXIgS2V5XVxuICBhcm1bQVJNIFJlc291cmNlIFByb3ZpZGVyXVxuICBkYXRhcGxhbmVbRGF0YSBQbGFuZV1cbiAgcHJveHlbUG9ydGFsIEFQSSBQcm94eV1cbiAgc3FsW1NRTF1cbiAgbW9uZ29bTW9uZ29dXG4gIHRhYmxlc1tUYWJsZXNdXG4gIGNhc3NhbmRyYVtDYXNzYW5kcmFdXG4gIGdyYWZbR3JhcGhdXG5cblxuICBlbXVsYXRvciAtLT4gbWFzdGVyS2V5IC0tLS0-IGRhdGFwbGFuZVxuICBwb3J0YWwgLS0-IGFhZFxuICBob3N0ZWQgLS0-IHBvcnRhbFRva2VuICYgcmVzb3VyY2VUb2tlbiAmIGNvbm5lY3Rpb25TdHJpbmcgJiBhYWRcbiAgYWFkIC0tLT4gYXJtXG4gIGFhZCAtLS0-IGRhdGFwbGFuZVxuICBhYWQgLS0tPiBwcm94eVxuICByZXNvdXJjZVRva2VuIC0tLT4gc3FsIC0tPiBkYXRhcGxhbmVcbiAgcG9ydGFsVG9rZW4gLS0tPiBwcm94eVxuICBwcm94eSAtLT4gZGF0YXBsYW5lXG4gIGNvbm5lY3Rpb25TdHJpbmcgLS0-IHNxbCAmIG1vbmdvICYgY2Fzc2FuZHJhICYgZ3JhZiAmIHRhYmxlc1xuICBzcWwgLS0-IGRhdGFwbGFuZVxuICB0YWJsZXMgLS0-IGRhdGFwbGFuZVxuICBtb25nbyAtLT4gcHJveHlcbiAgY2Fzc2FuZHJhIC0tPiBwcm94eVxuICBncmFmIC0tPiBwcm94eVxuXG5cdFx0IiwibWVybWFpZCI6eyJ0aGVtZSI6ImRlZmF1bHQifSwidXBkYXRlRWRpdG9yIjpmYWxzZX0)
# Contributing
Please read the [contribution guidelines](./CONTRIBUTING.md).


@@ -1,3 +1,4 @@
module.exports = {
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
plugins: [["@babel/plugin-proposal-decorators", { legacy: true }]],
};
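
The `@babel/plugin-proposal-decorators` entry added here (legacy mode) lets the build compile the decorator syntax mentioned in the Self Serve commits above, and `reflect-metadata` (added in the package.json diff further down) is the usual companion for reading decorator metadata at runtime. A small hypothetical TypeScript sketch of the kind of property decorator this setup enables; the decorator and metadata key names are illustrative, not taken from the repo:

// Hypothetical example; decorator and metadata key names are not from this repo.
import "reflect-metadata";

function label(text: string) {
  return (target: object, propertyKey: string | symbol): void => {
    // Store the display label as metadata on the decorated property.
    Reflect.defineMetadata("selfserve:label", text, target, propertyKey);
  };
}

class ExampleSettings {
  @label("Throughput (RU/s)")
  throughput = 400;
}

// Read the metadata back at runtime, e.g. when rendering a settings form.
console.log(Reflect.getMetadata("selfserve:label", ExampleSettings.prototype, "throughput")); // "Throughput (RU/s)"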

canvas/README.md (new file)

@@ -0,0 +1,7 @@
# Why?
This adds a mock module for `canvas`. Nteract has an ignored require and an undeclared dependency on this module. `canvas` is a server-side node module and is not used in browser-side code for nteract.
Installing it locally (`npm install canvas`) would resolve the problem, but it is a native module, so the install is flaky depending on the system, node version, processor architecture, etc. This mock provides a simpler, more robust solution.
Remove this workaround if [this bug](https://github.com/nteract/any-vega/issues/2) ever gets resolved.

canvas/index.js (new file)

@@ -0,0 +1 @@
module.exports = {}

canvas/package.json (new file)

@@ -0,0 +1,11 @@
{
"name": "canvas",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}
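
Taken together, the three new `canvas/` files above form a local stub package; the package.json change further down in this diff points the dependency at it ("canvas": "file:./canvas"), so nteract's require of `canvas` resolves to the empty mock instead of pulling in the native node-canvas build. A tiny illustrative check, not code from the repo:

// Illustrative only: with "canvas": "file:./canvas" in package.json,
// requiring the module resolves to the local stub above rather than a native build.
const canvas = require("canvas");
console.log(canvas); // {} (the empty mock exported by canvas/index.js)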


@@ -1,3 +1,4 @@
{
"JUNO_ENDPOINT": "https://tools-staging.cosmos.azure.com"
"JUNO_ENDPOINT": "https://tools-staging.cosmos.azure.com",
"ENABLE_GALLERY_PUBLISH": true
}

externals/adal.js (vendored; diff too large to show)


@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.31449 2.01439L4.00103 5.31963L3.26105 4.57965L7.8407 0L12.4203 4.57965L11.6804 5.31963L8.36691 2.01439V12.8428H7.31449V2.01439ZM13.629 12.8428H14.6814V16H1V12.8428H2.05242V14.9476H13.629V12.8428Z" fill="#0078D4"/>
</svg>



@@ -6,6 +6,6 @@ module.exports = {
slowMo: 55,
defaultViewport: null,
ignoreHTTPSErrors: true,
args: ["--disable-web-security"]
}
args: ["--disable-web-security"],
},
};


@@ -1,5 +1,5 @@
module.exports = {
preset: "jest-puppeteer",
testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
setupFiles: ["dotenv/config"]
setupFiles: ["dotenv/config"],
};


@@ -39,11 +39,11 @@ module.exports = {
// An object that configures minimum threshold enforcement for coverage results
coverageThreshold: {
global: {
branches: 20,
functions: 24,
lines: 30,
statements: 29.0
}
branches: 22,
functions: 28,
lines: 33,
statements: 31,
},
},
// Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs",
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.html?$": "html-loader-jest",
"^.+\\.[t|j]sx?$": "babel-jest"
"^.+\\.[t|j]sx?$": "babel-jest",
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/", "/externals/"]
transformIgnorePatterns: ["/node_modules/", "/externals/"],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,


@@ -3,8 +3,8 @@
/******************************************************************************/
@font-face {
font-family: wf_segoe-ui_normal;
src: url('../../fonts/segoe-ui/west-european/normal/latest.woff');
font-family: wf_segoe-ui_normal;
src: url("../../fonts/segoe-ui/west-european/normal/latest.woff");
}
@DataExplorerFont: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif;
@@ -20,26 +20,26 @@
COLORS
/******************************************************************************/
@AccentMediumHigh: #0058AD;
@AccentMedium: #004E87;
@AccentHigh: #1EBAED;
@AccentExtraHigh: #55B3FF;
@AccentLow: #EDF6FF;
@AccentMediumLow: #DDEEFE;
@AccentLight: #EEF7FF;
@AccentExtra: #DDF0FF;
@AccentMediumHigh: #0058ad;
@AccentMedium: #004e87;
@AccentHigh: #1ebaed;
@AccentExtraHigh: #55b3ff;
@AccentLow: #edf6ff;
@AccentMediumLow: #ddeefe;
@AccentLight: #eef7ff;
@AccentExtra: #ddf0ff;
@SelectionHigh: #B91F26;
@BaseLight: #FFFFFF;
@SelectionHigh: #b91f26;
@BaseLight: #ffffff;
@BaseDark: #000000;
@NotificationLow: #FFF4CE;
@NotificationHigh: #F9E9B0;
@Purple1: #8A2DA5;
@NotificationLow: #fff4ce;
@NotificationHigh: #f9e9b0;
@Purple1: #8a2da5;
@Dirty: #9b4f96;
@BaseLow: #F2F2F2;
@BaseMediumLow: #E6E6E6;
@BaseMedium: #CCCCCC;
@BaseLow: #f2f2f2;
@BaseMediumLow: #e6e6e6;
@BaseMedium: #cccccc;
@BaseMediumHigh: #767676;
@BaseHigh: #393939;
@@ -53,10 +53,17 @@
@ErrorColor: @SelectionHigh;
@SelectionColor: #3074B0;
@SelectionColor: #3074b0;
@FocusColor: #605e5c;
@GalleryBackgroundColor: #fdfdfd;
//Icons
@InfoIconColor: #0072c6;
@WarningIconColor: #db7500;
@ErrorIconColor: #b91f26;
/******************************************************************************
METRICS
/******************************************************************************/
@@ -80,7 +87,7 @@
@ImgWidth: 14px;
@ImgHeight: 14px;
@toggleFontWeight:700;
@toggleFontWeight: 700;
//Resource Tree
@TreeLineHeight: 17px;
@@ -144,16 +151,16 @@
/**********************************************************************************/
.flex-display(@display: flex) {
display: ~"-webkit-@{display}";
display: ~"-ms-@{display}box"; // IE10 uses -ms-flexbox
display: ~"-ms-@{display}"; // IE11
display: @display;
display: ~"-webkit-@{display}";
display: ~"-ms-@{display}box"; // IE10 uses -ms-flexbox
display: ~"-ms-@{display}"; // IE11
display: @display;
}
.flex-direction(@direction: column) {
-webkit-flex-direction: @direction;
-ms-flex-direction: @direction;
flex-direction: @direction;
-ms-flex-direction: @direction;
flex-direction: @direction;
}
/*************************************************************************************
@@ -161,32 +168,31 @@
**************************************************************************************/
@media all and (-ms-high-contrast: none), (-ms-high-contrast: active) {
.selectedRadio,
.selectedRadio:hover,
.selectedRadio:active,
.selectedRadio.dirty,
.tab [type=radio]:checked ~ label,
.tab [type=radio]:checked ~ label:hover {
-ms-high-contrast-adjust: none;
-webkit-text-fill-color: HighlightText;
color: HighlightText;
border-color: HighlightText;
background-color: Highlight;
}
.queryMetricsSummaryTuple {
th, td {
&:nth-child(2) {
width: @IETableDataWidth;
}
&:nth-child(3) {
width: 50%;
}
}
.selectedRadio,
.selectedRadio:hover,
.selectedRadio:active,
.selectedRadio.dirty,
.tab [type="radio"]:checked ~ label,
.tab [type="radio"]:checked ~ label:hover {
-ms-high-contrast-adjust: none;
-webkit-text-fill-color: HighlightText;
color: HighlightText;
border-color: HighlightText;
background-color: Highlight;
}
.queryMetricsSummaryTuple {
th,
td {
&:nth-child(2) {
width: @IETableDataWidth;
}
&:nth-child(3) {
width: 50%;
}
}
}
}
/********************************************************************************************
@@ -194,15 +200,15 @@
*********************************************************************************************/
.hover() {
background-color: @AccentLight;
background-color: @AccentLight;
}
.active() {
background-color: @AccentExtra;
background-color: @AccentExtra;
}
.focus() {
outline: 1px dashed @FocusColor;
outline: 1px dashed @FocusColor;
}
/************************************************************************************************
@@ -212,63 +218,87 @@
@ToggleWidth: 180px;
.toggleSwitch() {
max-width: 100%;
margin-bottom: @SmallSpace;
padding: @SmallSpace;
cursor: pointer;
color: @BaseHigh;
font-weight: 400;
font-size: @mediumFontSize;
font-family: @DataExplorerFont;
max-width: 100%;
margin-bottom: @SmallSpace;
padding: @SmallSpace;
cursor: pointer;
color: @BaseHigh;
font-weight: 400;
font-size: @mediumFontSize;
font-family: @DataExplorerFont;
}
.selectedToggle() {
border-bottom: 2px solid @BaseHigh;
border-bottom: 2px solid @BaseHigh;
}
.unselectedToggle() {
color: @AccentMediumHigh;
color: @AccentMediumHigh;
}
/********************************************************************************************************
Common Data Explorer Icons
*********************************************************************************************************/
.dataExplorerIcons() {
cursor: pointer;
width: @ImgWidth;
height: @ImgHeight;
cursor: pointer;
width: @ImgWidth;
height: @ImgHeight;
}
/*********************************************************************************************************
Info Tooltip
**********************************************************************************************************/
.infoTooltip() {
position: relative;
display: inline-block;
position: relative;
display: inline-block;
}
.tooltipText(@textColor: @BaseLight, @backgroundColor: @BaseHigh) {
visibility: hidden;
background-color: @backgroundColor;
color: @textColor;
position: absolute;
z-index: 1;
left: @MediumSpace;
padding: @MediumSpace;
visibility: hidden;
background-color: @backgroundColor;
color: @textColor;
position: absolute;
z-index: 1;
left: @MediumSpace;
padding: @MediumSpace;
}
.tooltipTextAfter(@color: @BaseDark) {
content: "";
position: absolute;
right: 100%;
border-style: solid;
border-color: transparent @color transparent transparent;
left: 0px;
width: 0;
height: 0;
border-color: @InfoPointerColor transparent;
content: "";
position: absolute;
right: 100%;
border-style: solid;
border-color: transparent @color transparent transparent;
left: 0px;
width: 0;
height: 0;
border-color: @InfoPointerColor transparent;
}
.tooltipVisible() {
visibility: visible;
visibility: visible;
}
.inputTooltip() {
position: relative;
}
.inputTooltipText(@textColor: @BaseLight, @backgroundColor: @BaseHigh) {
background-color: @backgroundColor;
color: @textColor;
position: absolute;
z-index: 1;
padding: @MediumSpace;
}
.inputTooltipTextAfter(@color: @BaseDark) {
content: "";
position: absolute;
right: 100%;
border-style: solid;
border-color: transparent @color transparent transparent;
left: 10px;
width: 0;
height: 0;
border-color: @InfoPointerColor transparent;
}

(File diff suppressed because it is too large to show.)


@@ -13,6 +13,11 @@
@NavMediumSpace: 10px;
@NavLargeSpace: 15px;
.skip-link {
position: fixed;
top: -200px;
}
html {
font-family: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif;
padding: 0px;


@@ -1,20 +1,12 @@
@import "./Common/Constants";
.main {
width: 100%;
float: left;
transition: all .0s ease-in-out;
-ms-transition: all 0s ease-in-out;
-webkit-transition: all 0s ease-in-out;
-moz-transition: all .0s ease-in-out;
height: 100%;
background-color: white;
border-left: 0px solid white;
}
.resourceTree {
height: 100%;
flex: 0 0 auto;
.main {
height: 100%;
}
}
.resourceTreeScroll {

package-lock.json (generated; diff too large to show)


@@ -6,12 +6,14 @@
"dependencies": {
"@azure/arm-cosmosdb": "9.1.0",
"@azure/cosmos": "3.9.0",
"@azure/identity": "1.1.0",
"@azure/cosmos-language-service": "0.0.5",
"@jupyterlab/services": "6.0.0-rc.2",
"@jupyterlab/terminal": "3.0.0-rc.2",
"@azure/identity": "1.2.1",
"@babel/plugin-proposal-class-properties": "7.12.1",
"@babel/plugin-proposal-decorators": "7.12.12",
"@jupyterlab/services": "6.0.2",
"@jupyterlab/terminal": "3.0.3",
"@microsoft/applicationinsights-web": "2.5.9",
"@nteract/commutable": "7.3.2",
"@nteract/commutable": "7.4.2",
"@nteract/connected-components": "6.8.2",
"@nteract/core": "15.1.0",
"@nteract/data-explorer": "8.0.3",
@@ -36,6 +38,7 @@
"@nteract/transform-vega": "7.0.6",
"@octokit/rest": "17.9.2",
"@phosphor/widgets": "1.9.3",
"@testing-library/jest-dom": "5.11.9",
"@types/mkdirp": "1.0.1",
"@types/node-fetch": "2.5.7",
"@uifabric/react-cards": "0.109.110",
@@ -44,7 +47,7 @@
"applicationinsights": "1.8.0",
"babel-polyfill": "6.26.0",
"bootstrap": "3.4.1",
"canvas": "2.6.1",
"canvas": "file:./canvas",
"clean-webpack-plugin": "0.1.19",
"copy-webpack-plugin": "6.0.2",
"crossroads": "0.12.2",
@@ -61,6 +64,9 @@
"eslint-plugin-react": "7.20.0",
"hasher": "1.2.0",
"html2canvas": "1.0.0-rc.5",
"i18next": "19.8.4",
"i18next-browser-languagedetector": "6.0.1",
"i18next-http-backend": "1.0.23",
"immutable": "4.0.0-rc.12",
"is-ci": "2.0.0",
"jquery": "3.5.1",
@@ -69,6 +75,7 @@
"knockout": "3.5.1",
"mkdirp": "1.0.4",
"monaco-editor": "0.18.1",
"msal": "1.4.4",
"object.entries": "1.1.0",
"office-ui-fabric-react": "7.134.1",
"p-retry": "4.2.0",
@@ -80,14 +87,17 @@
"react-animate-height": "2.0.8",
"react-dnd": "9.4.0",
"react-dnd-html5-backend": "9.4.0",
"react-dom": "16.9.0",
"react-dom": "16.13.1",
"react-hotkeys": "2.0.0",
"react-i18next": "11.8.5",
"react-notification-system": "0.2.17",
"react-redux": "7.1.3",
"redux": "4.0.4",
"reflect-metadata": "0.1.13",
"rx-jupyter": "5.5.12",
"rxjs": "6.6.3",
"styled-components": "4.3.2",
"swr": "0.4.0",
"text-encoding": "0.7.0",
"underscore": "1.9.1",
"url-polyfill": "1.1.7",
@@ -101,6 +111,7 @@
"@babel/preset-env": "7.9.0",
"@babel/preset-react": "7.9.4",
"@babel/preset-typescript": "7.9.0",
"@testing-library/react": "11.2.3",
"@types/applicationinsights-js": "1.0.7",
"@types/codemirror": "0.0.56",
"@types/crossroads": "0.0.30",
@@ -109,7 +120,7 @@
"@types/enzyme-adapter-react-16": "1.0.6",
"@types/expect-puppeteer": "4.4.3",
"@types/hasher": "0.0.31",
"@types/jest": "23.3.10",
"@types/jest": "26.0.20",
"@types/jest-environment-puppeteer": "4.3.2",
"@types/memoize-one": "4.1.1",
"@types/node": "12.11.1",
@@ -117,8 +128,8 @@
"@types/prop-types": "15.5.8",
"@types/puppeteer": "3.0.1",
"@types/q": "1.5.1",
"@types/react": "16.9.56",
"@types/react-dom": "16.0.7",
"@types/react": "17.0.0",
"@types/react-dom": "17.0.0",
"@types/react-notification-system": "0.2.39",
"@types/react-redux": "7.1.7",
"@types/sinon": "2.3.3",
@@ -128,7 +139,6 @@
"@types/webfontloader": "1.6.29",
"@typescript-eslint/eslint-plugin": "4.0.1",
"@typescript-eslint/parser": "4.0.1",
"adal-angular": "1.0.15",
"axe-puppeteer": "1.1.0",
"babel-jest": "24.9.0",
"babel-loader": "8.1.0",
@@ -143,7 +153,9 @@
"eslint-cli": "1.1.1",
"eslint-plugin-no-null": "1.0.2",
"eslint-plugin-prefer-arrow": "1.2.2",
"eslint-plugin-react-hooks": "4.2.0",
"expose-loader": "0.7.5",
"fast-glob": "3.2.5",
"file-loader": "2.0.0",
"fs-extra": "7.0.0",
"html-loader": "0.5.5",
@@ -160,7 +172,7 @@
"mini-css-extract-plugin": "0.4.3",
"monaco-editor-webpack-plugin": "1.7.0",
"node-fetch": "2.6.1",
"prettier": "1.19.1",
"prettier": "2.2.1",
"puppeteer": "4.0.0",
"raw-loader": "0.5.1",
"rimraf": "3.0.0",


@@ -2,5 +2,6 @@ export enum AuthType {
AAD = "aad",
EncryptedToken = "encryptedtoken",
MasterKey = "masterkey",
ResourceToken = "resourcetoken"
ResourceToken = "resourcetoken",
ConnectionString = "connectionstring",
}


@@ -1,5 +1,6 @@
import * as ko from "knockout";
import * as ReactBindingHandler from "./ReactBindingHandler";
import "../Explorer/Tables/DataTable/DataTableBindingManager";
export class BindingHandlersRegisterer {
public static registerBindingHandlers() {
@@ -13,7 +14,7 @@ export class BindingHandlersRegisterer {
) {
const value = ko.unwrap(wrappedValueAccessor());
bindingContext?.$data.isTemplateReady(value);
}
},
} as ko.BindingHandler;
ReactBindingHandler.Registerer.register();


@@ -42,7 +42,7 @@ export class Registerer {
// Initial rendering at mount point
ReactDOM.render(adapter.renderComponent(), element);
}
},
} as ko.BindingHandler;
}
}


@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key);
if (values) {
values.forEach(value => iteratorFct(value));
values.forEach((value) => iteratorFct(value));
}
}


@@ -1,10 +1,3 @@
import { HashMap } from "./HashMap";
export class AuthorizationEndpoints {
public static arm: string = "https://management.core.windows.net/";
public static common: string = "https://login.windows.net/";
}
export class CodeOfConductEndpoints {
public static privacyStatement: string = "https://aka.ms/ms-privacy-policy";
public static codeOfConduct: string = "https://aka.ms/cosmos-code-of-conduct";
@@ -14,7 +7,7 @@ export class CodeOfConductEndpoints {
export class EndpointsRegex {
public static readonly cassandra = [
"AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com"
"HostName=(.*).cassandra.cosmos.azure.com",
];
public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -126,12 +119,18 @@ export class Features {
public static readonly enableSchema = "enableschema";
public static readonly enableSDKoperations = "enablesdkoperations";
public static readonly showMinRUSurvey = "showminrusurvey";
public static readonly enableDatabaseSettingsTabV1 = "enabledbsettingsv1";
public static readonly selfServeType = "selfservetype";
public static readonly enableKOPanel = "enablekopanel";
}
// flight names returned from the portal are always lowercase
export class Flights {
public static readonly SettingsV2 = "settingsv2";
public static readonly MongoIndexEditor = "mongoindexeditor";
public static readonly MongoIndexing = "mongoindexing";
public static readonly AutoscaleTest = "autoscaletest";
public static readonly GalleryPublish = "gallerypublish";
}
export class AfecFeatures {
@@ -140,19 +139,6 @@ export class AfecFeatures {
public static readonly StorageAnalytics = "storageanalytics-public-preview";
}
export class Spark {
public static readonly MaxWorkerCount = 10;
public static readonly SKUs: HashMap<string> = new HashMap({
"Cosmos.Spark.D1s": "D1s / 1 core / 4GB RAM",
"Cosmos.Spark.D2s": "D2s / 2 cores / 8GB RAM",
"Cosmos.Spark.D4s": "D4s / 4 cores / 16GB RAM",
"Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
"Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
"Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
"Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
});
}
export class TagNames {
public static defaultExperience: string = "defaultExperience";
}
@@ -164,7 +150,7 @@ export class MongoDBAccounts {
export enum MongoBackendEndpointType {
local,
remote
remote,
}
// TODO: 435619 Add default endpoints per cloud and use regional only when available
@@ -291,7 +277,7 @@ export class HttpStatusCodes {
HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable,
HttpStatusCodes.GatewayTimeout
HttpStatusCodes.GatewayTimeout,
];
}
@@ -347,10 +333,7 @@ export class HashRoutePrefixes {
public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
return transformedDatabasePrefix
.replace("{coll_id}", collectionId)
.replace("{doc_id}", docId)
.replace("/", ""); // strip the first slash since hasher adds it
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
}
}
@@ -396,7 +379,7 @@ export class OfferVersions {
export enum ConflictOperationType {
Replace = "replace",
Create = "create",
Delete = "delete"
Delete = "delete",
}
export const EmulatorMasterKey =


@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
resourceId: "",
resourceType: "dbs" as ResourceType,
headers: {},
getAuthorizationTokenUsingMasterKey: () => ""
getAuthorizationTokenUsingMasterKey: () => "",
};
beforeEach(() => {
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
headers: new Map()
headers: new Map(),
};
});
});
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
it("does not call the auth service if a master key is set", async () => {
updateUserContext({
masterKey: "foo"
masterKey: "foo",
});
await tokenProvider(options);
expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
headers: new Map()
headers: new Map(),
};
});
});
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in production", () => {
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in dev", () => {
updateConfigContext({
BACKEND_ENDPOINT: "https://localhost:1234"
BACKEND_ENDPOINT: "https://localhost:1234",
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo"
}
}
cassandraEndpoint: "foo",
},
},
});
expect(endpoint()).toEqual("bar");
});
it("uses _endpoint if set", () => {
updateUserContext({
endpoint: "baz"
endpoint: "baz",
});
expect(endpoint()).toEqual("baz");
});
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
updateConfigContext({
platform: Platform.Hosted,
BACKEND_ENDPOINT: "https://localhost:1234",
PROXY_PATH: "/proxy"
PROXY_PATH: "/proxy",
});
const headers = {};
const endpoint = "https://docs.azure.com";


@@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
method: "POST",
headers: {
"content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken
"x-ms-encrypted-auth-token": userContext.accessToken,
},
body: JSON.stringify({
verb,
resourceType,
resourceId
})
resourceId,
}),
});
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json());
@@ -77,13 +77,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
export function client(): Cosmos.CosmosClient {
const options: Cosmos.CosmosClientOptions = {
endpoint: endpoint() || " ", // CosmosClient gets upset if we pass a falsy value here
endpoint: endpoint() || "https://cosmos.azure.com", // CosmosClient gets upset if we pass a bad URL. This should never actually get called
key: userContext.masterKey,
tokenProvider,
connectionPolicy: {
enableEndpointDiscovery: false
enableEndpointDiscovery: false,
},
userAgentSuffix: "Azure Portal"
userAgentSuffix: "Azure Portal",
};
if (configContext.PROXY_PATH !== undefined) {


@@ -1,169 +0,0 @@
import { ConflictDefinition, FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import Q from "q";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import * as Constants from "./Constants";
import { client } from "./CosmosClient";
export function getCommonQueryOptions(options: FeedOptions): any {
const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
options = options || {};
options.populateQueryMetrics = true;
options.enableScanInQuery = options.enableScanInQuery || true;
if (!options.partitionKey) {
options.forceQueryPlan = true;
}
options.maxItemCount =
options.maxItemCount ||
(storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
Constants.Queries.itemsPerPage;
options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
return options;
}
export function queryDocuments(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
options = getCommonQueryOptions(options);
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.items.query(query, options);
return Q(documentsIterator);
}
export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
const partitionKeyValue: any = conflictId.partitionKeyValue;
return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
}
export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
if (!partitionKeyDefinition) {
return undefined;
}
if (partitionKeyValue === undefined) {
return [{}];
}
return [partitionKeyValue];
}
export function updateDocument(
collection: ViewModels.CollectionBase,
documentId: DocumentId,
newDocument: any
): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.replace(newDocument)
.then(response => response.resource)
);
}
export function executeStoredProcedure(
collection: ViewModels.Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: any,
params: any[]
): Q.Promise<any> {
// TODO remove this deferred. Kept it because of timeout code at bottom of function
const deferred = Q.defer<any>();
client()
.database(collection.databaseId)
.container(collection.id())
.scripts.storedProcedure(storedProcedure.id())
.execute(partitionKeyValue, params, { enableScriptLogging: true })
.then(response =>
deferred.resolve({
result: response.resource,
scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
})
)
.catch(error => deferred.reject(error));
return deferred.promise.timeout(
Constants.ClientDefaults.requestTimeoutMs,
`Request timed out while executing stored procedure ${storedProcedure.id()}`
);
}
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.items.create(newDocument)
.then(response => response.resource)
);
}
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.read()
.then(response => response.resource)
);
}
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.delete()
);
}
export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options: any = {}
): Q.Promise<any> {
options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options)
);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return Q(documentsIterator);
}


@@ -1,217 +0,0 @@
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import Q from "q";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
import * as Constants from "./Constants";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
import { handleError } from "./ErrorHandlingUtils";
// TODO: Log all promise resolutions and errors with verbosity levels
export function queryDocuments(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
}
export function getEntityName() {
const defaultExperience =
window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
return "document";
}
return "item";
}
export function executeStoredProcedure(
collection: ViewModels.Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: any,
params: any[]
): Q.Promise<any> {
var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
.then(
(response: any) => {
deferred.resolve(response);
logConsoleInfo(
`Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
},
(error: any) => {
handleError(
error,
"ExecuteStoredProcedure",
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function queryDocumentsPage(
resourceName: string,
documentsIterator: MinimalQueryIterator,
firstItemIndex: number,
options: any
): Q.Promise<ViewModels.QueryResults> {
var deferred = Q.defer<ViewModels.QueryResults>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
Q(nextPage(documentsIterator, firstItemIndex))
.then(
(result: ViewModels.QueryResults) => {
const itemCount = (result.documents && result.documents.length) || 0;
logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
deferred.resolve(result);
},
(error: any) => {
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.readDocument(collection, documentId)
.then(
(document: any) => {
deferred.resolve(document);
},
(error: any) => {
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function updateDocument(
collection: ViewModels.CollectionBase,
documentId: DocumentId,
newDocument: any
): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
.then(
(updatedDocument: any) => {
logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
deferred.resolve(updatedDocument);
},
(error: any) => {
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
DataAccessUtilityBase.createDocument(collection, newDocument)
.then(
(savedDocument: any) => {
logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
deferred.resolve(savedDocument);
},
(error: any) => {
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.deleteDocument(collection, documentId)
.then(
(response: any) => {
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
deferred.resolve(response);
},
(error: any) => {
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options?: any
): Q.Promise<any> {
var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
.then(
(response: any) => {
logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
deferred.resolve(response);
},
(error: any) => {
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}

View File

@@ -0,0 +1,10 @@
import { DefaultAccountExperienceType } from "../DefaultAccountExperienceType";
import { userContext } from "../UserContext";
export const getEntityName = (): string => {
if (userContext.defaultExperience === DefaultAccountExperienceType.MongoDB) {
return "document";
}
return "item";
};
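
A minimal sketch of how this new helper is consumed; the relative import path is an assumption, though the dataAccess modules further down import it as "../DocumentUtility".

import { getEntityName } from "./DocumentUtility"; // path assumed for a caller in the same folder

const entityName = getEntityName(); // "document" for Mongo API accounts, otherwise "item"
// e.g. used to build console messages such as `Deleting ${entityName} ...`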

View File

@@ -73,7 +73,7 @@ export default class EditableUtility {
return false;
});
observable.subscribe(edit => {
observable.subscribe((edit) => {
var edits = observable.edits && observable.edits();
if (!edits) {
return;
@@ -83,9 +83,9 @@ export default class EditableUtility {
});
observable.editableIsValid = ko.observable<boolean>(true);
observable.subscribe(value => {
observable.subscribe((value) => {
const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
const isValid = validations.every(validate => validate(value));
const isValid = validations.every((validate) => validate(value));
observable.editableIsValid(isValid);
});

View File

@@ -1,8 +1,6 @@
export default class EnvironmentUtility {
public static normalizeArmEndpointUri(uri: string): string {
if (uri && uri.slice(-1) !== "/") {
return `${uri}/`;
}
return uri;
export function normalizeArmEndpoint(uri: string): string {
if (uri && uri.slice(-1) !== "/") {
return `${uri}/`;
}
return uri;
}
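
The refactor drops the EnvironmentUtility class wrapper but keeps the behavior; roughly:

normalizeArmEndpoint("https://management.azure.com");  // "https://management.azure.com/"
normalizeArmEndpoint("https://management.azure.com/"); // unchanged, already ends with "/"
normalizeArmEndpoint("");                              // "" — falsy input is returned as-is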

View File

@@ -21,7 +21,7 @@ export const handleError = (error: string | ARMError | Error, area: string, cons
sendNotificationForError(errorMessage, errorCode);
};
export const getErrorMessage = (error: string | Error): string => {
export const getErrorMessage = (error: string | Error = ""): string => {
const errorMessage = typeof error === "string" ? error : error.message;
return replaceKnownError(errorMessage);
};
@@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
}
sendMessage({
type: MessageTypes.ForbiddenError,
reason: errorMessage
reason: errorMessage,
});
}
};
@@ -45,10 +45,10 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
const replaceKnownError = (errorMessage: string): string => {
if (
window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0
errorMessage?.indexOf("SharedOffer is Disabled for your account") >= 0
) {
return "Database throughput is not supported for internal subscriptions.";
} else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) {
} else if (errorMessage?.indexOf("Partition key paths must contain only valid") >= 0) {
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
}

View File

@@ -11,8 +11,8 @@ describe("nextPage", () => {
queryMetrics: {},
requestCharge: 1,
headers: {},
activityId: "foo"
})
activityId: "foo",
}),
};
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();

View File

@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
// Pick<QueryIterator<any>, "fetchNext">;
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
return documentsIterator.fetchNext().then(response => {
return documentsIterator.fetchNext().then((response) => {
const documents = response.resources;
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
lastItemIndex: Number(firstItemIndex) + Number(itemCount),
headers,
activityId: response.activityId,
requestCharge: response.requestCharge
requestCharge: response.requestCharge,
};
});
}

View File

@@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
function _logEntry(entry: Diagnostics.LogEntry): void {
sendMessage({
type: MessageTypes.LogInfo,
data: JSON.stringify(entry)
data: JSON.stringify(entry),
});
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -60,6 +60,6 @@ function _generateLogEntry(
level,
message,
area,
code
code,
};
}

View File

@@ -6,7 +6,7 @@ describe("Message Handler", () => {
let mockPromise = {
id: "123",
startTime: new Date(),
deferred: Q.defer<any>()
deferred: Q.defer<any>(),
};
let mockMessage = { message: { id: "123", data: "{}" } };
MessageHandler.RequestMap[mockPromise.id] = mockPromise;
@@ -18,7 +18,7 @@ describe("Message Handler", () => {
let message = {
id: "123",
startTime: new Date(),
deferred: Q.defer<any>()
deferred: Q.defer<any>(),
};
MessageHandler.handleCachedDataMessage(message);

View File

@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
deferred: Q.defer<TResponseDataModel>(),
startTime: new Date(),
id: _.uniqueId()
id: _.uniqueId(),
};
RequestMap[cachedDataPromise.id] = cachedDataPromise;
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
portalChildWindow.parent.postMessage(
{
signature: "pcIframe",
data: data
data: data,
},
portalChildWindow.document.referrer
);

View File

@@ -14,7 +14,7 @@ const fetchMock = () => {
ok: true,
text: () => "{}",
json: () => "{}",
headers: new Map()
headers: new Map(),
});
};
@@ -27,8 +27,8 @@ const collection = {
partitionKey: {
paths: ["/pk"],
kind: "Hash",
version: 1
}
version: 1,
},
} as Collection;
const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
partitionKey: {
paths: ["/pk"],
kind: "Hash",
version: 1
}
version: 1,
},
} as unknown) as DocumentId;
const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo"
}
cassandraEndpoint: "foo",
},
} as DatabaseAccount;
describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount
databaseAccount,
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount
databaseAccount,
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount
databaseAccount,
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount
databaseAccount,
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount
databaseAccount,
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
resetConfigContext();
delete window.authType;
updateUserContext({
databaseAccount
databaseAccount,
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
});

View File

@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
const defaultHeaders = {
[HttpHeaders.apiType]: ApiType.MongoDB.toString(),
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
};
function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
let continuationToken: string;
return {
fetchNext: () => {
return queryDocuments(databaseId, collection, false, query).then(response => {
return queryDocuments(databaseId, collection, false, query).then((response) => {
continuationToken = response.continuationToken;
const headers: { [key: string]: string | number } = {};
response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
headers,
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
hasMoreResults: !!continuationToken
hasMoreResults: !!continuationToken,
};
});
}
},
};
}
@@ -74,7 +74,9 @@ export function queryDocuments(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperty
: "",
};
const endpoint = getEndpoint() || "";
@@ -87,7 +89,7 @@ export function queryDocuments(
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json"
[HttpHeaders.contentType]: "application/query+json",
};
if (continuationToken) {
@@ -100,14 +102,14 @@ export function queryDocuments(
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify({ query }),
headers
headers,
})
.then(async response => {
.then(async (response) => {
if (response.ok) {
return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers
headers: response.headers,
};
}
errorHandling(response, "querying documents", params);
@@ -135,7 +137,9 @@ export function readDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
};
const endpoint = getEndpoint();
@@ -147,10 +151,10 @@ export function readDocument(
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader())
)
}
),
},
})
.then(response => {
.then((response) => {
if (response.ok) {
return response.json();
}
@@ -175,7 +179,7 @@ export function createDocument(
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
};
const endpoint = getEndpoint();
@@ -186,10 +190,10 @@ export function createDocument(
body: JSON.stringify(documentContent),
headers: {
...defaultHeaders,
...authHeaders()
}
...authHeaders(),
},
})
.then(response => {
.then((response) => {
if (response.ok) {
return response.json();
}
@@ -218,7 +222,9 @@ export function updateDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
};
const endpoint = getEndpoint();
@@ -230,10 +236,10 @@ export function updateDocument(
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
})
.then(response => {
.then((response) => {
if (response.ok) {
return response.json();
}
@@ -257,7 +263,9 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
};
const endpoint = getEndpoint();
@@ -268,10 +276,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
})
.then(response => {
.then((response) => {
if (response.ok) {
return undefined;
}
@@ -299,7 +307,7 @@ export function createMongoCollectionWithProxy(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput,
autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
};
const endpoint = getEndpoint();
@@ -314,11 +322,11 @@ export function createMongoCollectionWithProxy(
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json"
}
[HttpHeaders.contentType]: "application/json",
},
}
)
.then(response => {
.then((response) => {
if (response.ok) {
return response.json();
}

View File

@@ -14,7 +14,7 @@
*/
export default class MongoUtility {
public static tojson = function(x: any, indent: string, nolint: boolean) {
public static tojson = function (x: any, indent: string, nolint: boolean) {
if (x === null || x === undefined) {
return String(x);
}
@@ -71,7 +71,7 @@ export default class MongoUtility {
}
};
private static tojsonObject = function(x: any, indent: string, nolint: boolean) {
private static tojsonObject = function (x: any, indent: string, nolint: boolean) {
var lineEnding = nolint ? " " : "\n";
var tabSpace = nolint ? "" : "\t";
indent = indent || "";
@@ -114,7 +114,7 @@ export default class MongoUtility {
}
}
// Add proper line endings, indents, and commas to each line
s += $.map(pairs, function(pair) {
s += $.map(pairs, function (pair) {
return lineEnding + indent + pair;
}).join(",");
s += lineEnding;
@@ -124,7 +124,7 @@ export default class MongoUtility {
return s + indent + "}";
};
private static tojsonArray = function(a: any, indent: string, nolint: boolean) {
private static tojsonArray = function (a: any, indent: string, nolint: boolean) {
if (a.length === 0) {
return "[ ]";
}
@@ -151,7 +151,7 @@ export default class MongoUtility {
return s;
};
private static hasDefinedProperty = function(obj: any, prop: string): boolean {
private static hasDefinedProperty = function (obj: any, prop: string): boolean {
if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) {
return false;
} else if (obj.hasOwnProperty(prop)) {

View File

@@ -9,14 +9,14 @@ describe("parseSDKOfferResponse", () => {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1
}
numPhysicalPartitions: 1,
},
},
id: "test"
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
@@ -25,7 +25,7 @@ describe("parseSDKOfferResponse", () => {
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
offerReplacePending: false
offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
@@ -37,17 +37,17 @@ describe("parseSDKOfferResponse", () => {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1
numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
maxThroughput: 5000
}
maxThroughput: 5000,
},
},
id: "test"
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
@@ -56,7 +56,7 @@ describe("parseSDKOfferResponse", () => {
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
offerReplacePending: false
offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);

View File

@@ -2,8 +2,11 @@ import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
import { HttpHeaders } from "./Constants";
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | undefined => {
const offerDefinition: SDKOfferDefinition | undefined = offerResponse?.resource;
if (!offerDefinition) {
return undefined;
}
const offerContent = offerDefinition.content;
if (!offerContent) {
return undefined;
@@ -12,14 +15,14 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
const autopilotSettings = offerContent.offerAutopilotSettings;
if (autopilotSettings) {
if (autopilotSettings && autopilotSettings.maxThroughput && minimumThroughput) {
return {
id: offerDefinition.id,
autoscaleMaxThroughput: autopilotSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
}
@@ -29,6 +32,6 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
};
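
A small sketch of the widened contract: a missing resource now yields undefined instead of an exception, and the autoscale branch is only taken when maxThroughput and the minimum RU figure are both present. The response literals below are mocks mirroring the shapes in the OfferUtility test above.

parseSDKOfferResponse(({ resource: undefined } as unknown) as OfferResponse); // undefined

const manualOffer = ({
  resource: {
    id: "test",
    content: { offerThroughput: 500, collectionThroughputInfo: { minimumRUForCollection: 400 } },
  },
} as unknown) as OfferResponse;
parseSDKOfferResponse(manualOffer)?.manualThroughput; // 500; autoscaleMaxThroughput stays undefined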

View File

@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
const headers = { [authorizationHeader.header]: authorizationHeader.token };
const response = await window.fetch(url, {
headers
headers,
});
if (!response.ok) {

View File

@@ -3,22 +3,24 @@ import * as _ from "underscore";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import Explorer from "../Explorer/Explorer";
import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
import DocumentId from "../Explorer/Tree/DocumentId";
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
import { QueryUtils } from "../Utils/QueryUtils";
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
import { createCollection } from "./dataAccess/createCollection";
import { handleError } from "./ErrorHandlingUtils";
import { createDocument } from "./dataAccess/createDocument";
import { deleteDocument } from "./dataAccess/deleteDocument";
import { queryDocuments } from "./dataAccess/queryDocuments";
export class QueriesClient {
private static readonly PartitionKey: DataModels.PartitionKey = {
paths: [`/${SavedQueries.PartitionKeyProperty}`],
kind: BackendDefaults.partitionKeyKind,
version: BackendDefaults.partitionKeyVersion
version: BackendDefaults.partitionKeyVersion,
};
private static readonly FetchQuery: string = "SELECT * FROM c";
private static readonly FetchMongoQuery: string = "{}";
@@ -31,24 +33,18 @@ export class QueriesClient {
return Promise.resolve(queriesCollection.rawDataModel);
}
const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress,
"Setting up account for saving queries"
);
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
return createCollection({
collectionId: SavedQueries.CollectionName,
createNewDatabase: true,
databaseId: SavedQueries.DatabaseName,
partitionKey: QueriesClient.PartitionKey,
offerThroughput: SavedQueries.OfferThroughput,
databaseLevelThroughput: false
databaseLevelThroughput: false,
})
.then(
(collection: DataModels.Collection) => {
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info,
"Successfully set up account for saving queries"
);
NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
return Promise.resolve(collection);
},
(error: any) => {
@@ -56,17 +52,14 @@ export class QueriesClient {
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
.finally(() => clearMessage());
}
public async saveQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
return Promise.reject(errorMessage);
}
@@ -74,25 +67,16 @@ export class QueriesClient {
this.validateQuery(query);
} catch (error) {
const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
return Promise.reject(errorMessage);
}
const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress,
`Saving query ${query.queryName}`
);
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
query.id = query.queryName;
return createDocument(queriesCollection, query)
.then(
(savedQuery: DataModels.Query) => {
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info,
`Successfully saved query ${query.queryName}`
);
NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
return Promise.resolve();
},
(error: any) => {
@@ -103,74 +87,65 @@ export class QueriesClient {
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
.finally(() => clearMessage());
}
public async getQueries(): Promise<DataModels.Query[]> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${errorMessage}`
);
NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
return Promise.reject(errorMessage);
}
const options: any = { enableCrossPartitionQuery: true };
const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries");
return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options)
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
SavedQueries.DatabaseName,
SavedQueries.CollectionName,
this.fetchQueriesQuery(),
options
);
const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
return QueryUtils.queryAllPages(fetchQueries)
.then(
(queryIterator: QueryIterator<ItemDefinition & Resource>) => {
const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> =>
queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options);
return QueryUtils.queryAllPages(fetchQueries).then(
(results: ViewModels.QueryResults) => {
let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
if (!document) {
return undefined;
}
const { id, resourceId, query, queryName } = document;
const parsedQuery: DataModels.Query = {
resourceId: resourceId,
queryName: queryName,
query: query,
id: id
};
try {
this.validateQuery(parsedQuery);
return parsedQuery;
} catch (error) {
return undefined;
}
});
queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries");
return Promise.resolve(queries);
},
(error: any) => {
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
(results: ViewModels.QueryResults) => {
let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
if (!document) {
return undefined;
}
);
const { id, resourceId, query, queryName } = document;
const parsedQuery: DataModels.Query = {
resourceId: resourceId,
queryName: queryName,
query: query,
id: id,
};
try {
this.validateQuery(parsedQuery);
return parsedQuery;
} catch (error) {
return undefined;
}
});
queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
return Promise.resolve(queries);
},
(error: any) => {
// should never get into this state but we handle this regardless
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
.finally(() => clearMessage());
}
public async deleteQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${errorMessage}`
);
NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
return Promise.reject(errorMessage);
}
@@ -178,21 +153,15 @@ export class QueriesClient {
this.validateQuery(query);
} catch (error) {
const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to delete query ${query.queryName}: ${errorMessage}`
);
NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
}
const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress,
`Deleting query ${query.queryName}`
);
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
query.id = query.queryName;
const documentId = new DocumentId(
{
partitionKey: QueriesClient.PartitionKey,
partitionKeyProperty: "id"
partitionKeyProperty: "id",
} as DocumentsTab,
query,
query.queryName
@@ -201,10 +170,7 @@ export class QueriesClient {
return deleteDocument(queriesCollection, documentId)
.then(
() => {
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info,
`Successfully deleted query ${query.queryName}`
);
NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
return Promise.resolve();
},
(error: any) => {
@@ -212,7 +178,7 @@ export class QueriesClient {
return Promise.reject(error);
}
)
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
.finally(() => clearMessage());
}
public getResourceId(): string {

View File

@@ -4,7 +4,7 @@ import { SplitterMetrics } from "./Constants";
export enum SplitterDirection {
Horizontal = "horizontal",
Vertical = "vertical"
Vertical = "vertical",
}
export interface SplitterBounds {
@@ -23,10 +23,10 @@ export class Splitter {
public splitterId: string;
public leftSideId: string;
public splitter: HTMLElement;
public leftSide: HTMLElement;
public lastX: number;
public lastWidth: number;
public splitter!: HTMLElement;
public leftSide!: HTMLElement;
public lastX!: number;
public lastWidth!: number;
private isCollapsed: ko.Observable<boolean>;
private bounds: SplitterBounds;
@@ -42,15 +42,16 @@ export class Splitter {
}
public initialize() {
this.splitter = document.getElementById(this.splitterId);
this.leftSide = document.getElementById(this.leftSideId);
if (document.getElementById(this.splitterId) !== null && document.getElementById(this.leftSideId) != null) {
this.splitter = <HTMLElement>document.getElementById(this.splitterId);
this.leftSide = <HTMLElement>document.getElementById(this.leftSideId);
}
const isVerticalSplitter: boolean = this.direction === SplitterDirection.Vertical;
const splitterOptions: JQueryUI.ResizableOptions = {
animate: true,
animateDuration: "fast",
start: this.onResizeStart,
stop: this.onResizeStop
stop: this.onResizeStop,
};
if (isVerticalSplitter) {
@@ -90,9 +91,7 @@ export class Splitter {
this.lastWidth = $(this.leftSide).width();
$(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
$(this.leftSide).css("width", "");
$(this.leftSide)
.resizable("option", "disabled", true)
.removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.splitter).removeClass("ui-resizable-e");
this.isCollapsed(true);
}

View File

@@ -32,8 +32,8 @@ export default class UrlUtility {
type: type,
objectBody: {
id: id,
self: resourcePath
}
self: resourcePath,
},
};
return result;

View File

@@ -1,6 +1,5 @@
jest.mock("../../Utils/arm/request");
jest.mock("../CosmosClient");
jest.mock("../DataAccessUtilityBase");
import { AuthType } from "../../AuthType";
import { CreateCollectionParams, DatabaseAccount } from "../../Contracts/DataModels";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
@@ -15,15 +14,15 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: true,
offerThroughput: 400
offerThroughput: 400,
};
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -41,12 +40,12 @@ describe("createCollection", () => {
return {
database: {
containers: {
create: () => ({})
}
}
create: () => ({}),
},
},
};
}
}
},
},
});
await createCollection(createCollectionParams);
expect(client).toHaveBeenCalled();
@@ -60,7 +59,7 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400
offerThroughput: 400,
};
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -70,12 +69,12 @@ describe("createCollection", () => {
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
autoPilotMaxThroughput: 4000
autoPilotMaxThroughput: 4000,
};
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
autoscaleSettings: {
maxThroughput: 4000
}
maxThroughput: 4000,
},
});
});
});

View File

@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput
offerThroughput: params.offerThroughput,
};
await createDatabase(createDatabaseParams);
}
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields."
message: "Mongo Collection created with wildcard index on all fields.",
});
}
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = {
id: params.collectionId
id: params.collectionId,
};
const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput
}
maxThroughput: params.autoPilotMaxThroughput,
},
};
}
return {
throughput: params.offerThroughput
throughput: params.offerThroughput,
};
};
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl
analyticalStorageTtl: params.analyticalStorageTtl,
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId };
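
For the throughput payloads, constructRpOptions (a few hunks up) produces one of two shapes, matching the expectations in the createCollection test earlier; params below stands in for the remaining required CreateCollectionParams fields.

// params is a placeholder for the other required fields (databaseId, collectionId, ...)
constructRpOptions({ ...params, offerThroughput: 400 });
// => { throughput: 400 }

constructRpOptions({ ...params, offerThroughput: 400, autoPilotMaxThroughput: 4000 });
// => { autoscaleSettings: { maxThroughput: 4000 } }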

View File

@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions
CreateUpdateOptions,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraKeyspace,
getCassandraKeyspace
getCassandraKeyspace,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBDatabase,
getMongoDBDatabase
getMongoDBDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinDatabase,
getGremlinDatabase
getGremlinDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput
}
maxThroughput: params.autoPilotMaxThroughput,
},
};
}
return {
throughput: params.offerThroughput
throughput: params.offerThroughput,
};
}

View File

@@ -0,0 +1,25 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
export const createDocument = async (collection: CollectionBase, newDocument: unknown): Promise<unknown> => {
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
try {
const response = await client()
.database(collection.databaseId)
.container(collection.id())
.items.create(newDocument);
logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
return response?.resource;
} catch (error) {
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
throw error;
} finally {
clearMessage();
}
};
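
Rough call-site sketch (inside an async function); the import path is the one QueriesClient uses above, while the collection object and document payload are placeholders.

import { createDocument } from "./dataAccess/createDocument";

// collection is a CollectionBase from the explorer tree; the payload is an arbitrary sample
const saved = await createDocument(collection, { id: "item-1", pk: "partition-1" });
// resolves with response.resource; failures are logged through handleError and re-thrown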

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,

View File

@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
@@ -44,8 +44,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,
@@ -59,10 +59,7 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
}
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
return response.resource;
} catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,

View File

@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
return {
container: () => {
return {
delete: (): unknown => undefined
delete: (): unknown => undefined,
};
}
},
};
}
},
});
await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -16,10 +16,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId);
} else {
await client()
.database(databaseId)
.container(collectionId)
.delete();
await client().database(databaseId).container(collectionId).delete();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) {

View File

@@ -0,0 +1,36 @@
import ConflictId from "../../Explorer/Tree/ConflictId";
import { CollectionBase } from "../../Contracts/ViewModels";
import { RequestOptions } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
export const deleteConflict = async (collection: CollectionBase, conflictId: ConflictId): Promise<void> => {
const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
try {
const options = {
partitionKey: getPartitionKeyHeaderForConflict(conflictId),
};
await client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options as RequestOptions);
logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
} catch (error) {
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
throw error;
} finally {
clearMessage();
}
};
const getPartitionKeyHeaderForConflict = (conflictId: ConflictId): unknown => {
if (!conflictId.partitionKey) {
return undefined;
}
return conflictId.partitionKeyValue === undefined ? [{}] : [conflictId.partitionKeyValue];
};
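
The partition-key header logic that used to live in getPartitionKeyHeaderForConflict/getPartitionKeyHeader is now a private helper here, so callers only pass the tree objects. A hedged sketch, assuming the module sits alongside the other dataAccess files:

import { deleteConflict } from "./dataAccess/deleteConflict"; // assumed path

// collection and conflictId come from the conflicts tab; there is no options argument any more
await deleteConflict(collection, conflictId);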

View File

@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
(client as jest.Mock).mockReturnValue({
database: () => {
return {
delete: (): unknown => undefined
delete: (): unknown => undefined,
};
}
},
});
await deleteDatabase("database");
expect(client).toHaveBeenCalled();

View File

@@ -19,9 +19,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId);
} else {
await client()
.database(databaseId)
.delete();
await client().database(databaseId).delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {

View File

@@ -0,0 +1,25 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";
export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
const entityName: string = getEntityName();
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
try {
await client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), documentId.partitionKeyValue)
.delete();
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
} catch (error) {
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
throw error;
} finally {
clearMessage();
}
};
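
Call-site sketch; the path is the one QueriesClient imports above, and the DocumentId already carries both the id and the partition key value, so no extra options are needed.

import { deleteDocument } from "./dataAccess/deleteDocument";

await deleteDocument(collection, documentId); // resolves to void; errors are logged and re-thrown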

View File

@@ -27,11 +27,7 @@ export async function deleteStoredProcedure(
storedProcedureId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
}
} catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);

View File

@@ -23,11 +23,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
triggerId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
}
} catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);

View File

@@ -23,11 +23,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
id
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
}
} catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);

View File

@@ -0,0 +1,48 @@
import { Collection } from "../../Contracts/ViewModels";
import { ClientDefaults, HttpHeaders } from "../Constants";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import StoredProcedure from "../../Explorer/Tree/StoredProcedure";
export interface ExecuteSprocResult {
result: StoredProcedure;
scriptLogs: string;
}
export const executeStoredProcedure = async (
collection: Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: string,
params: string[]
): Promise<ExecuteSprocResult> => {
const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
const timeout = setTimeout(() => {
throw Error(`Request timed out while executing stored procedure ${storedProcedure.id()}`);
}, ClientDefaults.requestTimeoutMs);
try {
const response = await client()
.database(collection.databaseId)
.container(collection.id())
.scripts.storedProcedure(storedProcedure.id())
.execute(partitionKeyValue, params, { enableScriptLogging: true });
clearTimeout(timeout);
logConsoleInfo(
`Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
return {
result: response.resource,
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string,
};
} catch (error) {
handleError(
error,
"ExecuteStoredProcedure",
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
throw error;
} finally {
clearMessage();
}
};
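
A hedged usage sketch for the new executeStoredProcedure helper. The caller and import path are assumptions; the signature and ExecuteSprocResult shape are taken from the diff above.

import { Collection } from "../../Contracts/ViewModels";
import StoredProcedure from "../../Explorer/Tree/StoredProcedure";
import { ExecuteSprocResult, executeStoredProcedure } from "./executeStoredProcedure"; // assumed path

// Hypothetical caller: execute a stored procedure against a single partition and surface its log output.
const runStoredProcedure = async (collection: Collection, sproc: StoredProcedure): Promise<void> => {
  const result: ExecuteSprocResult = await executeStoredProcedure(collection, sproc, "myPartitionKeyValue", ["param1"]);
  // scriptLogs carries the sproc's console output because the helper passes enableScriptLogging: true.
  console.log(result.scriptLogs);
};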

View File

@@ -60,8 +60,8 @@ export const getCollectionUsageSizeInKB = async (databaseName: string, container
apiVersion: "2018-01-01",
queryParams: {
filter,
metricNames
}
metricNames,
},
});
if (metricsResponse?.value?.length !== 2) {
@@ -76,7 +76,7 @@ export const getCollectionUsageSizeInKB = async (databaseName: string, container
return dataUsageSizeInKb + indexUsageSizeInKb;
} catch (error) {
handleError(error, "getCollectionUsageSize");
throw error;
return undefined;
}
};
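
Because the catch branch now returns undefined instead of rethrowing, callers should treat a missing size as non-fatal. A hedged caller sketch follows; the import path and the second parameter's name are assumptions, since the hunk does not show the file name or the full signature.

import { getCollectionUsageSizeInKB } from "./getCollectionDataUsageSize"; // assumed file name

const showContainerUsage = async (databaseName: string, containerName: string): Promise<void> => {
  const sizeInKB = await getCollectionUsageSizeInKB(databaseName, containerName);
  // undefined means the metrics call failed; handleError has already logged it, so just skip the display.
  if (sizeInKB !== undefined) {
    console.log(`Container usage: ${sizeInKB} KB`);
  }
};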

View File

@@ -11,10 +11,7 @@ export async function getIndexTransformationProgress(databaseId: string, collect
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string

View File

@@ -0,0 +1,11 @@
import { ConflictDefinition, FeedOptions, QueryIterator, Resource } from "@azure/cosmos";
import { client } from "../CosmosClient";
export const queryConflicts = (
databaseId: string,
containerId: string,
query: string,
options: FeedOptions
): QueryIterator<ConflictDefinition & Resource> => {
return client().database(databaseId).container(containerId).conflicts.query(query, options);
};

View File

@@ -1,5 +1,5 @@
import { getCommonQueryOptions } from "./DataAccessUtilityBase";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import { getCommonQueryOptions } from "./queryDocuments";
import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
describe("getCommonQueryOptions", () => {
it("builds the correct default options objects", () => {

View File

@@ -0,0 +1,31 @@
import { Queries } from "../Constants";
import { FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
import { client } from "../CosmosClient";
export const queryDocuments = (
databaseId: string,
containerId: string,
query: string,
options: FeedOptions
): QueryIterator<ItemDefinition & Resource> => {
options = getCommonQueryOptions(options);
return client().database(databaseId).container(containerId).items.query(query, options);
};
export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {
const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
options = options || {};
options.populateQueryMetrics = true;
options.enableScanInQuery = options.enableScanInQuery || true;
if (!options.partitionKey) {
options.forceQueryPlan = true;
}
options.maxItemCount =
options.maxItemCount ||
(storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
Queries.itemsPerPage;
options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
return options;
};
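
A hedged usage sketch for the new queryDocuments helper. The caller, database/container names, and import path are assumptions; the signature and the defaults applied by getCommonQueryOptions come from the diff above.

import { FeedOptions } from "@azure/cosmos";
import { queryDocuments } from "./queryDocuments"; // assumed path

// Hypothetical caller: getCommonQueryOptions fills in page size, query metrics,
// parallelism, and forceQueryPlan defaults before the SDK query is issued.
const listDocuments = async (): Promise<void> => {
  const options: FeedOptions = { partitionKey: "pk1" };
  const iterator = queryDocuments("myDatabase", "myContainer", "SELECT * FROM c", options);
  const { resources } = await iterator.fetchNext();
  console.log(`Fetched ${resources.length} documents`);
};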

View File

@@ -0,0 +1,26 @@
import { QueryResults } from "../../Contracts/ViewModels";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { MinimalQueryIterator, nextPage } from "../IteratorUtilities";
import { handleError } from "../ErrorHandlingUtils";
import { getEntityName } from "../DocumentUtility";
export const queryDocumentsPage = async (
resourceName: string,
documentsIterator: MinimalQueryIterator,
firstItemIndex: number
): Promise<QueryResults> => {
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
try {
const result: QueryResults = await nextPage(documentsIterator, firstItemIndex);
const itemCount = (result.documents && result.documents.length) || 0;
logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
return result;
} catch (error) {
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
throw error;
} finally {
clearMessage();
}
};
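
A hedged sketch of how the paging helper above might consume the iterator returned by queryDocuments. Paths and names are assumptions, and the QueryIterator is assumed to satisfy MinimalQueryIterator.

import { QueryResults } from "../../Contracts/ViewModels";
import { queryDocuments } from "./queryDocuments"; // assumed path
import { queryDocumentsPage } from "./queryDocumentsPage"; // assumed path

const loadFirstPage = async (): Promise<QueryResults> => {
  const iterator = queryDocuments("myDatabase", "myContainer", "SELECT * FROM c", {});
  // firstItemIndex (0 here) is the absolute index of the first row on the requested page.
  return queryDocumentsPage("myContainer", iterator, 0);
};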

View File

@@ -10,9 +10,9 @@ describe("readCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -23,11 +23,11 @@ describe("readCollection", () => {
return {
container: () => {
return {
read: (): unknown => ({})
read: (): unknown => ({}),
};
}
},
};
}
},
});
await readCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -7,10 +7,7 @@ export async function readCollection(databaseId: string, collectionId: string):
let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read();
const response = await client().database(databaseId).container(collectionId).read();
collection = response.resource as DataModels.Collection;
} catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);

View File

@@ -106,7 +106,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -115,7 +115,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

View File

@@ -12,9 +12,9 @@ describe("readCollections", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -32,12 +32,12 @@ describe("readCollections", () => {
containers: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
};
}
},
});
await readCollections("database");
expect(client).toHaveBeenCalled();

View File

@@ -23,10 +23,7 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
return await readCollectionsWithARM(databaseId);
}
const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
@@ -63,5 +60,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
}

View File

@@ -78,7 +78,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -87,7 +87,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

View File

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -30,10 +30,10 @@ describe("readDatabases", () => {
databases: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
});
await readDatabases();
expect(client).toHaveBeenCalled();

View File

@@ -21,9 +21,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
) {
databases = await readDatabasesWithARM();
} else {
const sdkResponse = await client()
.databases.readAll()
.fetchAll();
const sdkResponse = await client().databases.readAll().fetchAll();
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {
@@ -58,5 +56,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
}

View File

@@ -0,0 +1,27 @@
import { Item } from "@azure/cosmos";
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";
export const readDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<Item> => {
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
try {
const response = await client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), documentId.partitionKeyValue)
.read();
return response?.resource;
} catch (error) {
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
throw error;
} finally {
clearMessage();
}
};

View File

@@ -8,7 +8,7 @@ import { readOffers } from "./readOffers";
export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
if (!offerId) {
const offers = await readOffers();
const offer = offers.find(offer => offer.resource === resourceId);
const offer = offers.find((offer) => offer.resource === resourceId);
if (!offer) {
return undefined;
@@ -18,12 +18,10 @@ export const readOfferWithSDK = async (offerId: string, resourceId: string): Pro
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
[HttpHeaders.populateCollectionThroughputInfo]: true,
},
};
const response = await client()
.offer(offerId)
.read(options);
const response = await client().offer(offerId).read(options);
return parseSDKOfferResponse(response);
};

View File

@@ -7,9 +7,7 @@ export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
const clearMessage = logConsoleProgress(`Querying offers`);
try {
const response = await client()
.offers.readAll()
.fetchAll();
const response = await client().offers.readAll().fetchAll();
return response?.resources;
} catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too.

View File

@@ -25,7 +25,7 @@ export async function readStoredProcedures(
databaseId,
collectionId
);
return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
}
const response = await client()

View File

@@ -25,14 +25,10 @@ export async function readTriggers(
databaseId,
collectionId
);
return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
}
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.readAll()
.fetchAll();
const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);

View File

@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
databaseId,
collectionId
);
return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
}
const response = await client()

View File

@@ -8,22 +8,22 @@ import {
MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource,
SqlContainerCreateUpdateParameters,
SqlContainerResource
SqlContainerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils";
@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
const updateParams: MongoDBCollectionCreateUpdateParameters = {
properties: {
resource: newCollection,
options: updateOptions
}
options: updateOptions,
},
};
const updateResponse = await createUpdateMongoDBCollection(

View File

@@ -0,0 +1,32 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { Item } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";
export const updateDocument = async (
collection: CollectionBase,
documentId: DocumentId,
newDocument: Item
): Promise<Item> => {
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
try {
const response = await client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), documentId.partitionKeyValue)
.replace(newDocument);
logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
return response?.resource;
} catch (error) {
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
throw error;
} finally {
clearMessage();
}
};
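
A hedged read-modify-write sketch combining the readDocument helper added earlier in this diff with the new updateDocument helper. The caller, import paths, and the edited field are illustrative assumptions.

import { CollectionBase } from "../../Contracts/ViewModels";
import DocumentId from "../../Explorer/Tree/DocumentId";
import { readDocument } from "./readDocument"; // assumed path
import { updateDocument } from "./updateDocument"; // assumed path

// Hypothetical flow: fetch the current item, edit a field, then replace it.
const renameDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
  const current = await readDocument(collection, documentId);
  (current as unknown as { name: string }).name = "updated name"; // illustrative edit; any field would do
  await updateDocument(collection, documentId, current);
};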

View File

@@ -17,7 +17,7 @@ import {
migrateSqlDatabaseToManualThroughput,
migrateSqlContainerToAutoscale,
migrateSqlContainerToManualThroughput,
updateSqlContainerThroughput
updateSqlContainerThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
updateCassandraKeyspaceThroughput,
@@ -25,7 +25,7 @@ import {
migrateCassandraKeyspaceToManualThroughput,
migrateCassandraTableToAutoscale,
migrateCassandraTableToManualThroughput,
updateCassandraTableThroughput
updateCassandraTableThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
updateMongoDBDatabaseThroughput,
@@ -33,7 +33,7 @@ import {
migrateMongoDBDatabaseToManualThroughput,
migrateMongoDBCollectionToAutoscale,
migrateMongoDBCollectionToManualThroughput,
updateMongoDBCollectionThroughput
updateMongoDBCollectionThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
updateGremlinDatabaseThroughput,
@@ -41,13 +41,13 @@ import {
migrateGremlinDatabaseToManualThroughput,
migrateGremlinGraphToAutoscale,
migrateGremlinGraphToManualThroughput,
updateGremlinGraphThroughput
updateGremlinGraphThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { userContext } from "../../UserContext";
import {
migrateTableToAutoscale,
migrateTableToManualThroughput,
updateTableThroughput
updateTableThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
@@ -110,7 +110,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
return await readCollectionOffer({
collectionId: params.collectionId,
databaseId: params.databaseId,
offerId: params.currentOffer.id
offerId: params.currentOffer.id,
});
};
@@ -140,7 +140,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
return await readDatabaseOffer({
databaseId: params.databaseId,
offerId: params.currentOffer.id
offerId: params.currentOffer.id,
});
};
@@ -358,13 +358,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
const body: ThroughputSettingsUpdateParameters = {
properties: {
resource: {}
}
resource: {},
},
};
if (params.autopilotThroughput) {
body.properties.resource.autoscaleSettings = {
maxThroughput: params.autopilotThroughput
maxThroughput: params.autopilotThroughput,
};
} else {
body.properties.resource.throughput = params.manualThroughput;
@@ -378,7 +378,7 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const newOffer: SDKOfferDefinition = {
content: {
offerThroughput: undefined,
offerIsRUPerMinuteThroughputEnabled: false
offerIsRUPerMinuteThroughputEnabled: false,
},
_etag: undefined,
_ts: undefined,
@@ -388,12 +388,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
offerResourceId: sdkOfferDefinition.offerResourceId,
offerVersion: sdkOfferDefinition.offerVersion,
offerType: sdkOfferDefinition.offerType,
resource: sdkOfferDefinition.resource
resource: sdkOfferDefinition.resource,
};
if (params.autopilotThroughput) {
newOffer.content.offerAutopilotSettings = {
maxThroughput: params.autopilotThroughput
maxThroughput: params.autopilotThroughput,
};
} else {
newOffer.content.offerThroughput = params.manualThroughput;
@@ -402,12 +402,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const options: RequestOptions = {};
if (params.migrateToAutoPilot) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToAutopilot]: "true"
[HttpHeaders.migrateOfferToAutopilot]: "true",
};
delete newOffer.content.offerAutopilotSettings;
} else if (params.migrateToManual) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToManualThroughput]: "true"
[HttpHeaders.migrateOfferToManualThroughput]: "true",
};
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
}
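
For clarity, a hedged sketch of the two request bodies createUpdateOfferBody produces, based only on the fields visible in this hunk; the import path and throughput values are assumptions.

import { ThroughputSettingsUpdateParameters } from "../../Utils/arm/generatedClients/2020-04-01/types"; // assumed export location

// Autoscale update: only autoscaleSettings is populated on the resource.
const autoscaleBody: ThroughputSettingsUpdateParameters = {
  properties: { resource: { autoscaleSettings: { maxThroughput: 4000 } } },
};

// Manual update: only throughput is populated.
const manualBody: ThroughputSettingsUpdateParameters = {
  properties: { resource: { throughput: 400 } },
};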

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,

View File

@@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { TriggerDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
@@ -36,8 +36,8 @@ export async function updateTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,

View File

@@ -1,10 +1,10 @@
export enum Platform {
Portal = "Portal",
Hosted = "Hosted",
Emulator = "Emulator"
Emulator = "Emulator",
}
interface ConfigContext {
export interface ConfigContext {
platform: Platform;
allowedParentFrameOrigins: string[];
gitSha?: string;
@@ -26,6 +26,7 @@ interface ConfigContext {
GITHUB_CLIENT_SECRET?: string; // No need to inject secret for prod. Juno already knows it.
hostedExplorerURL: string;
armAPIVersion?: string;
ENABLE_GALLERY_PUBLISH?: boolean;
}
// Default configuration
@@ -37,7 +38,7 @@ let configContext: Readonly<ConfigContext> = {
`^https:\\/\\/[\\.\\w]*portal\\.microsoftazure.de$`,
`^https:\\/\\/[\\.\\w]*ext\\.azure\\.(com|cn|us)$`,
`^https:\\/\\/[\\.\\w]*\\.ext\\.microsoftazure\\.de$`,
`^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`
`^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`,
],
// Webpack injects this at build time
gitSha: process.env.GIT_SHA,
@@ -52,7 +53,7 @@ let configContext: Readonly<ConfigContext> = {
ARCADIA_LIVY_ENDPOINT_DNS_ZONE: "dev.azuresynapse.net",
GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/settings/applications/1189306
JUNO_ENDPOINT: "https://tools.cosmos.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
};
export function resetConfigContext(): void {
@@ -73,20 +74,24 @@ if (process.env.NODE_ENV === "development") {
BACKEND_ENDPOINT: "https://localhost:" + port,
MONGO_BACKEND_ENDPOINT: "https://localhost:" + port,
PROXY_PATH: "/proxy",
EMULATOR_ENDPOINT: "https://localhost:8081"
EMULATOR_ENDPOINT: "https://localhost:8081",
});
}
export async function initializeConfiguration(): Promise<ConfigContext> {
try {
const response = await fetch("./config.json");
const response = await fetch("./config.json", {
headers: {
"If-None-Match": "", // disable client side cache
},
});
if (response.status === 200) {
try {
const { allowedParentFrameOrigins, ...externalConfig } = await response.json();
Object.assign(configContext, externalConfig);
if (allowedParentFrameOrigins && allowedParentFrameOrigins.length > 0) {
updateConfigContext({
allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins]
allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins],
});
}
} catch (error) {
@@ -104,7 +109,7 @@ export async function initializeConfiguration(): Promise<ConfigContext> {
const platform = params.get("platform");
switch (platform) {
default:
console.log("Invalid platform query parameter given, ignoring");
console.error(`Invalid platform query parameter: ${platform}`);
break;
case Platform.Portal:
case Platform.Hosted:
@@ -113,7 +118,7 @@ export async function initializeConfiguration(): Promise<ConfigContext> {
}
}
} catch (error) {
console.log("No configuration file found using defaults");
console.error("No configuration file found using defaults");
}
return configContext;
}
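
A hedged bootstrap sketch for the updated initializeConfiguration. The caller and import path are assumptions; the empty If-None-Match header shown in the diff is what the inline comment describes as disabling the client-side cache so config.json is re-fetched on each load.

import { initializeConfiguration } from "./ConfigContext"; // assumed module path

// Hypothetical app bootstrap: resolve external config (config.json plus the platform query
// parameter) before anything reads the resulting ConfigContext.
const bootstrap = async (): Promise<void> => {
  const config = await initializeConfiguration();
  // ENABLE_GALLERY_PUBLISH is the optional flag added to ConfigContext in this diff.
  console.log(`platform=${config.platform} galleryPublish=${config.ENABLE_GALLERY_PUBLISH === true}`);
};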

View File

@@ -7,7 +7,7 @@ export enum TabKind {
TableEntities,
Graph,
SQLQuery,
ScaleSettings
ScaleSettings,
}
/**
@@ -20,7 +20,7 @@ export enum PaneKind {
DeleteDatabase,
GlobalSettings,
AdHocAccess,
SwitchDirectory
SwitchDirectory,
}
/**
@@ -79,5 +79,5 @@ export enum ActionType {
OpenCollectionTab,
OpenPane,
TransmitCachedData,
OpenSampleNotebook
OpenSampleNotebook,
}

View File

@@ -56,7 +56,7 @@ export enum ApiKind {
Table,
Cassandra,
Graph,
MongoDBCompute
MongoDBCompute,
}
export interface GenerateTokenResponse {
@@ -210,9 +210,9 @@ export interface QueryMetrics {
export interface Offer {
id: string;
autoscaleMaxThroughput: number;
manualThroughput: number;
minimumThroughput: number;
autoscaleMaxThroughput: number | undefined;
manualThroughput: number | undefined;
minimumThroughput: number | undefined;
offerDefinition?: SDKOfferDefinition;
offerReplacePending: boolean;
}
@@ -334,7 +334,7 @@ export interface Notification {
export enum ConflictResolutionMode {
Custom = "Custom",
LastWriterWins = "LastWriterWins"
LastWriterWins = "LastWriterWins",
}
/**
@@ -472,7 +472,7 @@ export interface SparkClusterEndpoint {
export enum SparkClusterEndpointKind {
SparkUI = "SparkUI",
HistoryServerUI = "HistoryServerUI",
Livy = "Livy"
Livy = "Livy",
}
export interface RpParameters {
@@ -587,11 +587,3 @@ export interface MemoryUsageInfo {
freeKB: number;
totalKB: number;
}
export interface resourceTokenConnectionStringProperties {
accountEndpoint: string;
collectionId: string;
databaseId: string;
partitionKey?: string;
resourceToken: string;
}

View File

@@ -21,7 +21,7 @@ export enum LogEntryLevel {
/**
* Error level.
*/
Error = 2
Error = 2,
}
/**
* Schema of a log entry.

View File

@@ -33,7 +33,6 @@ export enum MessageTypes {
CreateWorkspace,
CreateSparkPool,
RefreshDatabaseAccount,
InitTestExplorer
}
export { Versions, ActionContracts, Diagnostics };

View File

@@ -3,5 +3,5 @@ export enum SubscriptionType {
EA,
Free,
Internal,
PAYG
PAYG,
}

View File

@@ -3,9 +3,8 @@ import {
Resource,
StoredProcedureDefinition,
TriggerDefinition,
UserDefinedFunctionDefinition
UserDefinedFunctionDefinition,
} from "@azure/cosmos";
import Q from "q";
import { CommandButtonComponentProps } from "../Explorer/Controls/CommandButton/CommandButtonComponent";
import Explorer from "../Explorer/Explorer";
import { ConsoleData } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
@@ -15,6 +14,7 @@ import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import Trigger from "../Explorer/Tree/Trigger";
import UserDefinedFunction from "../Explorer/Tree/UserDefinedFunction";
import { SelfServeType } from "../SelfServe/SelfServeUtils";
import { UploadDetails } from "../workers/upload/definitions";
import * as DataModels from "./DataModels";
import { SubscriptionType } from "./SubscriptionType";
@@ -91,6 +91,7 @@ export interface Database extends TreeNode {
onDeleteDatabaseContextMenuClick(source: Database, event: MouseEvent | KeyboardEvent): void;
onSettingsClick: () => void;
loadOffer(): Promise<void>;
getPendingThroughputSplitNotification(): Promise<DataModels.Notification>;
}
export interface CollectionBase extends TreeNode {
@@ -108,7 +109,7 @@ export interface CollectionBase extends TreeNode {
onDocumentDBDocumentsClick(): void;
onNewQueryClick(source: any, event: MouseEvent, queryText?: string): void;
expandCollection(): Q.Promise<any>;
expandCollection(): void;
collapseCollection(): void;
getDatabase(): Database;
}
@@ -137,7 +138,6 @@ export interface Collection extends CollectionBase {
openTab(): void;
onSettingsClick: () => Promise<void>;
onDeleteCollectionContextMenuClick(source: Collection, event: MouseEvent): void;
onNewGraphClick(): void;
onNewMongoQueryClick(source: any, event: MouseEvent, queryText?: string): void;
@@ -175,9 +175,10 @@ export interface Collection extends CollectionBase {
onDragOver(source: Collection, event: { originalEvent: DragEvent }): void;
onDrop(source: Collection, event: { originalEvent: DragEvent }): void;
uploadFiles(fileList: FileList): Q.Promise<UploadDetails>;
uploadFiles(fileList: FileList): Promise<UploadDetails>;
getLabel(): string;
getPendingThroughputSplitNotification(): Promise<DataModels.Notification>;
}
/**
@@ -195,7 +196,7 @@ export interface PaneOptions {
export enum NeighborType {
SOURCES_ONLY,
TARGETS_ONLY,
BOTH
BOTH,
}
/**
@@ -292,10 +293,6 @@ export interface DocumentsTabOptions extends TabOptions {
resourceTokenPartitionKey?: string;
}
export interface SettingsTabV2Options extends TabOptions {
getPendingNotification: Q.Promise<DataModels.Notification>;
}
export interface ConflictsTabOptions extends TabOptions {
partitionKey: DataModels.PartitionKey;
conflictIds: ko.ObservableArray<ConflictId>;
@@ -325,14 +322,14 @@ export enum DocumentExplorerState {
newDocumentInvalid,
exisitingDocumentNoEdits,
exisitingDocumentDirtyValid,
exisitingDocumentDirtyInvalid
exisitingDocumentDirtyInvalid,
}
export enum IndexingPolicyEditorState {
noCollectionSelected,
noEdits,
dirtyValid,
dirtyInvalid
dirtyInvalid,
}
export enum ScriptEditorState {
@@ -340,7 +337,7 @@ export enum ScriptEditorState {
newValid,
exisitingNoEdits,
exisitingDirtyValid,
exisitingDirtyInvalid
exisitingDirtyInvalid,
}
export enum CollectionTabKind {
@@ -362,13 +359,14 @@ export enum CollectionTabKind {
Gallery = 17,
NotebookViewer = 18,
Schema = 19,
SettingsV2 = 19
CollectionSettingsV2 = 20,
DatabaseSettingsV2 = 21,
}
export enum TerminalKind {
Default = 0,
Mongo = 1,
Cassandra = 2
Cassandra = 2,
}
export interface DataExplorerInputsFrame {
@@ -395,6 +393,7 @@ export interface DataExplorerInputsFrame {
isAuthWithresourceToken?: boolean;
defaultCollectionThroughput?: CollectionCreationDefaults;
flights?: readonly string[];
selfServeType?: SelfServeType;
}
export interface CollectionCreationDefaults {

View File

@@ -6,19 +6,19 @@ describe("The Heatmap Control", () => {
const dataPoints = {
"1": {
"2019-06-19T00:59:10Z": {
"Normalized Throughput": 0.35
"Normalized Throughput": 0.35,
},
"2019-06-19T00:48:10Z": {
"Normalized Throughput": 0.25
}
}
"Normalized Throughput": 0.25,
},
},
};
const chartCaptions = {
chartTitle: "chart title",
yAxisTitle: "YAxisTitle",
tooltipText: "Tooltip text",
timeWindow: 123456789
timeWindow: 123456789,
};
let heatmap: Heatmap;
@@ -75,12 +75,12 @@ describe("The Heatmap Control", () => {
if (dayjs().utcOffset()) {
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).not.toEqual([
"2019-06-19T00:48:10Z",
"2019-06-19T00:59:10Z"
"2019-06-19T00:59:10Z",
]);
} else {
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).toEqual([
"2019-06-19T00:48:10Z",
"2019-06-19T00:59:10Z"
"2019-06-19T00:59:10Z",
]);
}
});
@@ -106,9 +106,9 @@ describe("iframe rendering when there is no data", () => {
data: {
chartData: {},
chartSettings: {},
theme: 4
}
}
theme: 4,
},
},
};
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
@@ -126,9 +126,9 @@ describe("iframe rendering when there is no data", () => {
data: {
chartData: {},
chartSettings: {},
theme: 2
}
}
theme: 2,
},
},
};
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;

Some files were not shown because too many files have changed in this diff.