Compare commits


105 Commits

Author SHA1 Message Date
Balaji Sridharan
5b0a98dce7 Removing TODO comments 2021-02-17 10:43:23 -08:00
Balaji Sridharan
67ebce444f Stylecop changes 2021-02-16 00:21:44 -08:00
Balaji Sridharan
a09bcc7197 Merge branch 'users/fnbalaji/PortalChangesForDGW' of https://github.com/Azure/cosmos-explorer into users/fnbalaji/PortalChangesForDGW 2021-02-15 23:49:54 -08:00
Balaji Sridharan
b2390e23e7 Portal changes for DedicatedGateway. CR feedback 2021-02-15 23:49:14 -08:00
fnbalaji
f922103e5c Merge branch 'master' into users/fnbalaji/PortalChangesForDGW 2021-02-15 23:39:50 -08:00
victor-meng
22d8a7a1be Move database settings tab to react (#386)
Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2021-02-10 16:06:14 -06:00
victor-meng
4210e0752b Move delete collection confirmation pane to react (#417) 2021-02-10 13:44:00 -08:00
Steve Faulkner
b217d4be1b Delete Cassandra tables/keyspaces via ARM (#436) 2021-02-08 18:52:53 -06:00
victor-meng
81fd442fad Make getCollectionDataUsageSize call fail gracefully (#434) 2021-02-08 16:02:02 -08:00
Steve Faulkner
87f7dd2230 Rename Feedback -> Report Issue (#425)
Co-authored-by: victor-meng <56978073+victor-meng@users.noreply.github.com>
2021-02-08 14:23:55 -06:00
Srinath Narayanan
9926fd97a2 Test explorer changes (#420)
* Changes to publish pane

* fixed format errors

* fixed failing test

* added test explorer changes for mongo accounts

* added log for test

* fixed lint errors

* added secrets to ci.yml file

* fixed failing self serve test
2021-02-08 09:42:16 -08:00
Balaji Sridharan
faa98de9e9 Portal changes for DedicatedGateway
Changes to support creation and deletion of DedicatedGateway resource.

Tested locally with various scenarios.
2021-02-08 05:16:10 -08:00
Tanuj Mittal
2a7546e0de Skip SelfServe e2e test (#432) 2021-02-06 02:30:46 +05:30
Tanuj Mittal
4b442dd869 Use GET instead of PATCH for some Juno endpoints (#431) 2021-02-06 02:01:34 +05:30
Tanuj Mittal
f0b4737313 Update gallery colors (#430)
* Update gallery colors

* fix lint error

* Fix test
2021-02-06 01:19:36 +05:30
Srinath Narayanan
8dc5ed590a Added Spinner for public gallery (#427)
* Added more publish changes

* addressed PR comments

* fixed lint errors

Co-authored-by: Tanuj Mittal <tamitta@microsoft.com>
2021-02-05 11:32:26 -08:00
Tanuj Mittal
afaa844d28 Telemetry updates (#429) 2021-02-06 00:26:20 +05:30
Tanuj Mittal
3e5a876ef2 Fix setting isGalleryPublishEnabled when flight is enabled (#428) 2021-02-05 20:55:08 +05:30
Tanuj Mittal
51abf1560a Disable dark overlay for Dialog (#423) 2021-02-05 19:28:57 +05:30
Srinath Narayanan
1c0fed88c0 Changes to cards in notebook gallery (#422)
* added gallery changes

* addressed PR comments
2021-02-05 02:32:55 -08:00
Tanuj Mittal
93cfd52e36 Update Code of Conduct Overlay and other minor changes (#424)
* Use light theme for coc-overlay

* Updates

* Fix vertical height for COC overlay
2021-02-05 14:56:50 +05:30
Tanuj Mittal
3fd014ddad Disable caching for config.json file (#421)
* Disable caching for config.json file

* Disable cache when fetching config.json
2021-02-04 15:25:13 +05:30
Srinath Narayanan
3b6fda4fa5 Changes to notebook publish pane (#419)
* Changes to publish pane

* fixed format errors

* fixed failing test

Co-authored-by: Tanuj Mittal <tamitta@microsoft.com>
2021-02-03 10:46:51 -08:00
Tanuj Mittal
db7c45c9b8 Enable gallery publishing in MPAC (#416)
* Enable gallery publishing in MPAC

* Address feedback

* Use ENABLE_GALLERY_PUBLISH config in standalone gallery

* Fix test
2021-02-03 23:42:11 +05:30
Tanuj Mittal
4f6b75fe79 More gallery updates (#418)
* More gallery updates

* Add PublishContent icon

* Address feedback
2021-02-03 23:24:27 +05:30
Tanuj Mittal
5038a01079 Add telemetry for Notebooks Gallery and other updates (#413)
* Add telemetry for Notebooks Gallery

* More changes

* Address feedback and fix lint error

* Fix margins for My published work
2021-02-03 14:48:50 +05:30
Tanuj Mittal
e0063c76d9 Add support for gallerypublish flight (#412) 2021-02-03 02:05:19 +05:30
Tanuj Mittal
9278654479 Public gallery improvements (#409)
- [x] Don't show extension in name field for publish
- [x] Open "Your published work" tab after publishing
- [x] Continue showing dialog for Report Abuse status
- [x] For showing COC in Public Gallery tab show backdrop of thumbnails
- [x] Liked -> My Favorites & Your published work -> My published work
2021-01-29 17:04:38 +00:00
Tanuj Mittal
59113d7bbf Update Juno endpoints (#405) 2021-01-29 20:28:20 +05:30
Tim Sander
88d8200c14 Check that customer is using Mongo 3.6 before applying index everything policy (#410) 2021-01-28 15:26:47 -06:00
Srinath Narayanan
6aaddd9c60 Added localization for the Self Serve Model (#406)
* added localization for selfserve model

* added comment

* addressed PR comments

* fixed format errors

* Addressed PR comments
2021-01-28 11:17:02 -08:00
Jordi Bunster
f8ede0cc1e Remove Q from ViewModels (#390)
I got cold feet at the thought of merging #324 in one go, so I'm going to split it into smaller chunks and keep rebasing the large one until there's no more Q.
2021-01-28 18:13:26 +00:00
Laurent Nguyen
bddb288a89 Update package versions and package-lock.json (#404)
The file `package-lock.json` is not in sync with `package.json` anymore. This causes build issues when upgrading a package.
This change syncs `package-lock.json` and fixes the build issues.
2021-01-28 08:50:24 +00:00
Steve Faulkner
a14d20a88e Fix applyExplorerBindings call in Portal (#408) 2021-01-27 20:37:14 -06:00
Steve Faulkner
f1db1ed978 Region Select Button (#407) 2021-01-27 15:32:53 -06:00
Laurent Nguyen
86a483c3a4 Fix notebook cell selection bug (#402)
This fixes a bug that prevents getting focus to a text cell (effectively preventing editing) when the window height is small after double-clicking on a neighboring code cell.
The issue is that selecting a text cell is broken, likely because a behavior change in MonacoEditor keeps focus on the code cell. The selection issue will probably be resolved when the text cell is migrated to Monaco (which will acquire and keep focus the same way), but for now this disables the faulty code, which no longer appears to work (presumably the auto-scrolling to the cell).
2021-01-27 09:09:54 +00:00
Tanuj Mittal
263262a040 Update Juno endpoints to pass subscriptionId (#339)
Corresponding [server side change](https://msdata.visualstudio.com/CosmosDB/_git/CosmosDB-portal/pullrequest/464443?_a=overview) has been deployed to Prod so now we can go ahead with DE side changes.
2021-01-27 08:08:58 +00:00
victor-meng
bd4d8da065 Move notification console to react (#400) 2021-01-26 15:32:37 -08:00
Steve Faulkner
59ec18cd9b Add basic static code metrics (#396) 2021-01-26 13:13:13 -06:00
Srinath Narayanan
49bf8c60db Added more Self Serve functionalities (#401)
* added recursion and initialization decorators

* working version

* added todo comment and removed console.log

* Added Recursive add

* removed type requirement

* proper resolution of promises

* added custom element and base class

* Made selfServe standalone page

* Added custom renderer as async type

* Added overall defaults

* added initial open from data explorer

* removed landingpage

* added feature for self serve type

* renamed sqlx->example and added invalid type

* Added comments for Example

* removed unnecessary changes

* Resolved PR comments

Added tests
Moved onSubmit and initialize inside base class
Moved testExplorer to separate folder
made fields of SelfServe Class non static

* fixed lint errors

* fixed compilation errors

* Removed reactbinding changes

* renamed dropdown -> choice

* Added SelfServeComponent

* Addressed PR comments

* added toggle, visibility, text display, commandbar

* added sqlx example

* added onRefresh

* formatting changes

* removed radioswitch display

* updated smartui tests

* Added more tests

* onSubmit -> onSave

* Resolved PR comments
2021-01-26 09:44:14 -08:00
Steve Faulkner
b0b973b21a Refactor explorer config into useKnockoutExplorer hook (#397)
Co-authored-by: Steve Faulkner <stfaul@microsoft.com>
2021-01-25 13:56:15 -06:00
Chris-MS-896
3529e80f0d no message (#398) 2021-01-22 10:02:35 -06:00
Srinath Narayanan
a298fd8389 Added message to indicate compound indexes are not supported in Mongo Index editor (#395)
* mongo message

* Added test and bug fix in Main.tsx

* format changes

* added new formatting

* added null check
2021-01-21 10:56:05 -08:00
Steve Faulkner
1ecc467f60 Remove IE nuget (#394) 2021-01-20 12:46:12 -06:00
Steve Faulkner
b3cafe3468 Add telemetry to Spark+Synapse Pools (#392) 2021-01-20 11:08:29 -06:00
Steve Faulkner
4be53284b5 Prettier 2.0 (#393) 2021-01-20 09:15:01 -06:00
Srinath Narayanan
c1937ca464 Added the Self Serve Data Model (#367)
* added recursion and initialization decorators

* working version

* added todo comment and removed console.log

* Added Recursive add

* removed type requirement

* proper resolution of promises

* added custom element and base class

* Made selfServe standalone page

* Added custom renderer as async type

* Added overall defaults

* added initial open from data explorer

* removed landingpage

* added feature for self serve type

* renamed sqlx->example and added invalid type

* Added comments for Example

* removed unnecessary changes

* Resolved PR comments

Added tests
Moved onSubmit and initialize inside base class
Moved testExplorer to separate folder
made fields of SelfServe Class non static

* fixed lint errors

* fixed compilation errors

* Removed reactbinding changes

* renamed dropdown -> choice

* Added SelfServeComponent

* Addressed PR comments

* merged master

* added selfservetype.none for emulator and hosted experience

* fixed formatting errors

* Removed "any" type

* undid package.json changes
2021-01-19 22:42:45 -08:00
Steve Faulkner
2b2de7c645 Migrated Hosted Explorer to React (#360)
Co-authored-by: Victor Meng <vimeng@microsoft.com>
Co-authored-by: Steve Faulkner <stfaul@microsoft.com>
2021-01-19 16:31:55 -06:00
Deborah Chen
8c40df0fa1 Adding in experimentation for autoscale test (#345)
* Adding autoscale flight info

* Add flight info to cassandra collection pane

* Add telemetry for autoscale toggle on/off in create resource blade and scale/settings

* Run formatting and add expected properties to test file

* removing empty line

* Updating to pass unit tests

Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2021-01-15 17:15:15 -06:00
Steve Faulkner
fcbc9474ea Remove Preview for Synapse Link (#389) 2021-01-15 09:51:14 -06:00
Steve Faulkner
81f861af39 Empty commit to refresh nuget after transient failures 2021-01-14 17:37:24 -06:00
victor-meng
9afa29cdb6 Properly construct the query to delete Cassandra row (#388) 2021-01-14 16:59:31 -06:00
Chris-MS-896
9a1e8b2d87 Add rest of three utils files to Master (#370)
* 'minor change'
2021-01-13 17:49:06 -06:00
Tim Sander
babda4d9cb fix issue where Mongo indexing checkbox stops adding wildcard index (#384) 2021-01-12 18:38:16 -06:00
Steve Faulkner
9d20a13dd4 Warn on SubQuery (#378) 2021-01-12 13:53:15 -06:00
Chris-MS-896
3effbe6991 no message (#372) 2021-01-12 13:09:20 -06:00
Chris-MS-896
af53697ff4 Add file of Terminal to Master (#371)
* "minor changes"
2021-01-12 12:55:47 -06:00
Chris-MS-896
b1ad80480e Add two files of notebook component in Master (#363) 2021-01-12 12:55:21 -06:00
* “minor changes”
2021-01-12 12:55:21 -06:00
Armando Trejo Oliver
9247a6c4a2 A11y fixes - Add a skip link and remove duplicate ids (#381)
* Add a skip link to allow people who navigate sequentially through content more direct access to the primary content of the Data Explorer

Co-authored-by: Chris Cao (Zen3 Infosolutions America Inc) <v-yiqcao@microsoft.com>

* Rename id of partition key field in Add Collection Pane to ensure no elements contain duplicate id attributes.

Co-authored-by: Chris Cao (Zen3 Infosolutions America Inc) <v-yiqcao@microsoft.com>
2021-01-12 09:55:04 -08:00
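For readers unfamiliar with the pattern referenced in #381: a skip link is an anchor rendered first in the DOM that stays visually hidden until it receives keyboard focus, letting sequential-navigation and screen-reader users jump straight to the primary content. A minimal sketch in TSX — the component name, CSS class, and `#mainContent` target are illustrative assumptions, not the actual markup from #381:

```tsx
import * as React from "react";

// Visually hidden until focused; activating it jumps to the element whose id
// matches the href fragment, skipping the surrounding chrome.
export const SkipLink: React.FC = () => (
  <a className="skip-link" href="#mainContent">
    Skip to main content
  </a>
);

// Usage: render <SkipLink /> as the first child of the app shell and give the
// primary content container a matching id, e.g. <main id="mainContent">.
```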
Steve Faulkner
767d46480e Revert TablesEntitiyListViewModel changes (#382) 2021-01-11 16:16:40 -06:00
Chris-MS-896
2d98c5d269 add ArraysByKeyCache.ts (#366)
* 'add ArraysByKeyCache'
* "minor change"
2021-01-08 22:51:50 -06:00
Steve Faulkner
6627172a52 Add Architecture Diagram to README (#380) 2021-01-08 22:20:40 -06:00
Steve Faulkner
19fa5e17a5 Fix JSONEditor bug with undefined value (#379) 2021-01-08 22:20:06 -06:00
Chris-MS-896
a4a367a212 Add all ARM request related files to Master (#373)
* “minor changes”
* 'changes for unit test'
2021-01-08 21:56:29 -06:00
Chris-MS-896
983c9201bb Add two files of GraphExplorer component in Master (#365) 2021-01-08 21:14:53 -06:00
Chris-MS-896
76d7f00a90 Add two files of Table to master (#364) 2021-01-08 20:56:59 -06:00
Chris-MS-896
6490597736 add CollapsiblePanel/CollapsiblePanelComponent.ts and /ErrorDisplayComponent to Master (#357) 2021-01-08 20:29:15 -06:00
Chris-MS-896
229119e697 add file offerUtility to tsconfig (#356) 2021-01-08 20:14:12 -06:00
Steve Faulkner
ceefd7c615 Fix Conflict Resolution path setting (#377)
* Fix Conflict Resolution path setting

* Fix test
2021-01-08 12:36:44 -06:00
Laurent Nguyen
6e619175c6 Fix missing scrollbar in left pane when too many collections/notebooks (#375)
Constrain left pane container to height: 100% so that scrollbar show up when content wants to overflow.
The `main` classname seems too generic, but I left it alone (so I don't break anything), since this part will eventually be ported to React.
2021-01-08 14:00:26 +00:00
victor-meng
08e8bf4bcf Fix two settings tab issues (#374) 2021-01-07 15:38:13 -06:00
Chris-MS-896
89dc0f394b Add Spliter file to Master (#358) 2021-01-06 12:51:42 -06:00
Chris-MS-896
30e0001b7f no message (#359) 2021-01-05 16:45:13 -06:00
Steve Faulkner
4a8f408112 Add UX for Mongo indexing experiment (#368)
Co-authored-by: Tim Sander <tisande@microsoft.com>
2021-01-05 16:04:55 -06:00
Armando Trejo Oliver
e801364800 Remove stale .main class from tree.less (#362)
.main CSS class has a naming conflict with Monaco editor CSS classes, and this is causing A11y issues with the Monaco editor.

This class should no longer be used since we moved to the new tree component in React, so I am removing it. From my testing, this is not affecting anything.

If we find any styling issue later, we should fix without adding back this class.
2021-01-05 10:53:55 -08:00
victor-meng
a55f2d0de9 Free tier improvements in DE (#348)
Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2021-01-04 12:56:55 -08:00
Steve Faulkner
d40b1aa9b5 Remove Empty Query Logging (#361) 2021-01-04 13:58:01 -06:00
Steve Faulkner
cc63cdc1fd Remove dependency on canvas (#354) 2020-12-26 21:56:37 -06:00
Steve Faulkner
c3058ee5a9 Check for undefined query results (#350) 2020-12-18 19:55:32 -06:00
Steve Faulkner
b000631a0c Revert web.config changes (#349) 2020-12-18 19:26:10 -06:00
vchske
e8f4c8f93c Cost Estimate Changes (#342)
* Initial change of estimated cost to table format

* Converted cost estimate to table format and added different data for current vs updated cost estimates.

* lint fixes

* Changed the names of some interfaces

* Refactored a unit call to use an argument interface to avoid future confusion.

* Changed the severity of the save warning

* Format fix

* Fixed test due to styling change

Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2020-12-18 16:15:55 -08:00
Steve Faulkner
16bde97e47 Rewrite URL for IE users (#340) 2020-12-18 16:08:40 -06:00
Steve Faulkner
6da43ee27b Publish IE specific Nuget package (#347)
* Publish IE specific Nuget package

* Require a11y tests to pass
2020-12-17 17:41:38 -06:00
Gahl Levy
ebae484b8f Fix duplicate settings tabs (#343)
Co-authored-by: Steve Faulkner <southpolesteve@gmail.com>
2020-12-17 13:01:36 -08:00
Steve Faulkner
dfb1b50621 Explorer.ts Cleanup (#341)
Co-authored-by: victor-meng <56978073+victor-meng@users.noreply.github.com>
2020-12-16 20:00:39 -06:00
victor-meng
f54e8eb692 Move queryDocuments out of DataAccessUtility (#334) 2020-12-16 15:27:17 -08:00
Steve Faulkner
ea39c1d092 Fix offer update notification for AAD users (#338) 2020-12-11 13:38:57 -06:00
vchske
c21f42159f Updated cost messaging for new db/container pane (#333)
* Adds information text further explaining estimated cost.

* Minor tweak to cost messaging

* Updated unit tests

* Added text and link for capacity planner when choosing manual RUs
2020-12-11 10:06:43 -08:00
victor-meng
31e4b49f11 Only call getCollectionDataUsageSize for AAD users (#337) 2020-12-10 14:13:08 -08:00
Tanuj Mittal
40491ec9c5 Gallery related fixes (#312)
* AVERT fixes

* Remove enableCodeOfConduct feature flag

* Fix reporting abuse

* Add empty screen for Liked and Published tabs in Gallery

* fix build

* Remove unused code

* Fix standalone public gallery
2020-12-10 13:09:18 -08:00
Tanuj Mittal
e133df18dd Record baseUrl for OpenTerminal success/failure telemetry (#335)
This is useful to know which terminal is opening.
2020-12-10 19:54:21 +00:00
Steve Faulkner
0532ed26a2 Remove runner workflow that is no longer functioning (#332) 2020-12-01 10:23:18 -06:00
victor-meng
fd60c9c15e Remove RUPM (#328)
Remove all RUPM code
2020-12-01 07:06:38 +00:00
Chris-MS-896
04ab1f3918 '[Visual Requirement-Data Explorer (iframe)] On the Data Explorer page, luminosity contrast ratio of the borderline button is less than 3:1.' (#331) 2020-11-30 15:32:28 -06:00
Chris-MS-896
b784ac0f86 [967093][Screen Readers- CosmosDB – Notification] Screen reader does not pass the combo-box list information (#329)
* ‘Bug fix: Screen reader does not pass the combo-box list information under notification field.’

* ‘update for comments’

* ‘load path refactor’
2020-11-30 14:33:18 -06:00
Srinath Narayanan
28899f63d7 Fixed bug in fetching 'index transformation progress' header (#330)
* bug fix

* fixed formatting errors
2020-11-24 10:32:18 -08:00
victor-meng
9cbf632577 Get collection usage size with ARM metrics call (#327)
- Removed `readCollectionQuotaInfo` call. The only data we need is the usage size which we can get via the ARM metrics call instead.
- Added `getCollectionUsageSize` which fetches the `DataUsage` and `IndexUsage` metrics, converts them to KB, and returns the sum as the total usage size
2020-11-20 20:21:16 +00:00
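A rough TypeScript sketch of the aggregation described above; the `fetchMetric` helper is a stand-in for the ARM metrics request and this is not the actual implementation from #327:

```typescript
type UsageMetric = "DataUsage" | "IndexUsage";

// Assumes fetchMetric resolves to the latest value of the named metric, in bytes.
export async function getCollectionUsageSizeInKB(
  fetchMetric: (metric: UsageMetric) => Promise<number>
): Promise<number> {
  const [dataUsageInBytes, indexUsageInBytes] = await Promise.all([
    fetchMetric("DataUsage"),
    fetchMetric("IndexUsage"),
  ]);
  // Convert both metrics to KB and return the sum as the total usage size.
  return (dataUsageInBytes + indexUsageInBytes) / 1024;
}
```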
victor-meng
17fd2185dc Move read offer to RP (#326) 2020-11-19 17:13:11 -08:00
Srinath Narayanan
a93c8509cd Added testExplorer and notebooks UI automated tests (#323)
* initial commit for notebooks puppeteer tests

* Added Auth

* added try catch block with screenshot for error

* Addressed PR comments

* renamed params

* renamed param

* fixed formatting error

* Updates mongo spec to remove waitFor on already awaited selector

* added logging statements

* format errors fixed

* added ci env variables

* increased delay for render

* removed logging

* added delay

* fix format error

* removed deletion

* reverted package.json change

Co-authored-by: zfoster <notzachfoster@gmail.com>
2020-11-19 09:29:38 -08:00
Laurent Nguyen
5c93c11bd9 Bug fix: match monaco-editor version with @nteract/monaco-editor (#322)
Opening a notebook (which contains a code cell) and then the "Items" document editor is broken: our JsonEditor component will hang at `monaco.editor.create()`.

Matching the `monaco-editor` version with `@nteract/monaco-editor` fixes it.

I looked into using a single version, but we cannot rely on nteract's, because it does not get loaded if notebooks aren't enabled. Forcing nteract to use ours (if at all possible) isn't a good idea, since their code is tuned to their version.

For now, we'll have to keep the versions in sync.
2020-11-19 14:33:23 +00:00
Srinath Narayanan
85d2378d3a Removed SettingsV1 code paths (#325)
* removed settingsv1 code path in collection.ts

* removed Settingsv1 code

* Moved AAD error message up the chain
2020-11-18 12:11:25 -08:00
Steve Faulkner
84b6075ee8 Use Puppeteer for Emulator Test (#321)
* Use Puppeteer for Emulator Test

* Fix yaml

* more fixes

* Cleanup

* README

Co-authored-by: Steve Faulkner <stfaul@microsoft.com>
2020-11-13 10:58:38 -06:00
Steve Faulkner
d880723be9 React Wrapper Take 2 (#310) 2020-11-13 02:10:59 +00:00
Garrett Ausfeldt
4ce9dcc024 Add analytical store schema POC (#164)
* add schema APIs to JunoClient

* start implementing buildSchemaNode

* finish getSchemaNodes

* finish implementing addSchema

* cleanup

* make schema optional

* handle undefined/null schema and fields. Also don't retry on getting schema failures.

* fix request schema and get schema endpoints

* add feature flag

* try to get most recent schema when refreshed or initialized.

* add tests

* cleanup

* cleanup

* cleanup

* fix merge conflict typos

* fix lint errors

* fix tests and update snapshot

Co-authored-by: REDMOND\gaausfel <gaausfel@microsoft.com>
2020-11-12 13:33:37 -08:00
Steve Faulkner
addcfedd5e MinRU survey for SettingsV2 component (#320)
Adds survey link to remove the RU/GB minimum on an account
2020-11-12 19:35:39 +00:00
640 changed files with 61462 additions and 62522 deletions


@@ -3,7 +3,11 @@ PORTAL_RUNNER_PASSWORD=
 PORTAL_RUNNER_SUBSCRIPTION=
 PORTAL_RUNNER_RESOURCE_GROUP=
 PORTAL_RUNNER_DATABASE_ACCOUNT=
+PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
 PORTAL_RUNNER_CONNECTION_STRING=
+NOTEBOOKS_TEST_RUNNER_TENANT_ID=
+NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
+NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
 CASSANDRA_CONNECTION_STRING=
 MONGO_CONNECTION_STRING=
 TABLES_CONNECTION_STRING=


@@ -14,7 +14,6 @@ src/Common/DataAccessUtilityBase.ts
 src/Common/DeleteFeedback.ts
 src/Common/DocumentClientUtilityBase.ts
 src/Common/EditableUtility.ts
-src/Common/EnvironmentUtility.ts
 src/Common/HashMap.test.ts
 src/Common/HashMap.ts
 src/Common/HeadersUtility.test.ts
@@ -43,7 +42,6 @@ src/Contracts/ViewModels.ts
 src/Controls/Heatmap/Heatmap.test.ts
 src/Controls/Heatmap/Heatmap.ts
 src/Controls/Heatmap/HeatmapDatatypes.ts
-src/Definitions/adal.d.ts
 src/Definitions/datatables.d.ts
 src/Definitions/gif.d.ts
 src/Definitions/globals.d.ts
@@ -89,7 +87,7 @@ src/Explorer/DataSamples/ContainerSampleGenerator.test.ts
 src/Explorer/DataSamples/ContainerSampleGenerator.ts
 src/Explorer/DataSamples/DataSamplesUtil.test.ts
 src/Explorer/DataSamples/DataSamplesUtil.ts
-src/Explorer/Explorer.ts
+src/Explorer/Explorer.tsx
 src/Explorer/Graph/GraphExplorerComponent/ArraysByKeyCache.test.ts
 src/Explorer/Graph/GraphExplorerComponent/ArraysByKeyCache.ts
 src/Explorer/Graph/GraphExplorerComponent/D3ForceGraph.test.ts
@@ -202,8 +200,6 @@ src/Explorer/Tabs/QueryTab.test.ts
 src/Explorer/Tabs/QueryTab.ts
 src/Explorer/Tabs/QueryTablesTab.ts
 src/Explorer/Tabs/ScriptTabBase.ts
-src/Explorer/Tabs/SettingsTab.test.ts
-src/Explorer/Tabs/SettingsTab.ts
 src/Explorer/Tabs/SparkMasterTab.ts
 src/Explorer/Tabs/StoredProcedureTab.ts
 src/Explorer/Tabs/TabComponents.ts
@@ -245,9 +241,6 @@ src/Platform/Hosted/Authorization.ts
 src/Platform/Hosted/DataAccessUtility.ts
 src/Platform/Hosted/ExplorerFactory.ts
 src/Platform/Hosted/Helpers/ConnectionStringParser.test.ts
-src/Platform/Hosted/Helpers/ConnectionStringParser.ts
-src/Platform/Hosted/HostedUtils.test.ts
-src/Platform/Hosted/HostedUtils.ts
 src/Platform/Hosted/Main.ts
 src/Platform/Hosted/Maint.test.ts
 src/Platform/Hosted/NotificationsClient.ts
@@ -290,8 +283,6 @@ src/Utils/DatabaseAccountUtils.ts
 src/Utils/JunoUtils.ts
 src/Utils/MessageValidation.ts
 src/Utils/NotebookConfigurationUtils.ts
-src/Utils/OfferUtils.test.ts
-src/Utils/OfferUtils.ts
 src/Utils/PricingUtils.test.ts
 src/Utils/QueryUtils.test.ts
 src/Utils/QueryUtils.ts
@@ -396,19 +387,5 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
 src/GalleryViewer/Cards/GalleryCardComponent.tsx
 src/GalleryViewer/GalleryViewer.tsx
 src/GalleryViewer/GalleryViewerComponent.tsx
-cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
-cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
-cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
-cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
-cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
-cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
-cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
-cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
-cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
-cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
-cypress/integration/dataexplorer/SQL/addCollection.spec.ts
-cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
-cypress/integration/notebook/newNotebook.spec.ts
-cypress/integration/notebook/resourceTree.spec.ts
 __mocks__/monaco-editor.ts
 src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx


@@ -1,41 +1,39 @@
 module.exports = {
   env: {
     browser: true,
-    es6: true
+    es6: true,
   },
   plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
   extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
   globals: {
     Atomics: "readonly",
-    SharedArrayBuffer: "readonly"
+    SharedArrayBuffer: "readonly",
   },
   parser: "@typescript-eslint/parser",
   parserOptions: {
     ecmaFeatures: {
-      jsx: true
+      jsx: true,
     },
     ecmaVersion: 2018,
-    sourceType: "module"
+    sourceType: "module",
   },
   overrides: [
     {
       files: ["**/*.tsx"],
-      env: {
-        jest: true
-      },
-      extends: ["plugin:react/recommended"],
-      plugins: ["react"]
+      extends: ["plugin:react/recommended"], // TODO: Add react-hooks
+      plugins: ["react"],
     },
     {
       files: ["**/*.{test,spec}.{ts,tsx}"],
       env: {
-        jest: true
+        jest: true,
       },
       extends: ["plugin:jest/recommended"],
-      plugins: ["jest"]
-    }
+      plugins: ["jest"],
+    },
   ],
   rules: {
+    "no-console": ["error", { allow: ["error", "warn", "dir"] }],
     curly: "error",
     "@typescript-eslint/no-unused-vars": "error",
     "@typescript-eslint/no-extraneous-class": "error",
@@ -43,12 +41,13 @@ module.exports = {
     "@typescript-eslint/no-explicit-any": "error",
     "prefer-arrow/prefer-arrow-functions": ["error", { allowStandaloneDeclarations: true }],
     eqeqeq: "error",
+    "react/display-name": "off",
     "no-restricted-syntax": [
      "error",
      {
        selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
-        message: "Do not use JSON.stringify(error). It will print '{}'"
-      }
-    ]
-  }
+        message: "Do not use JSON.stringify(error). It will print '{}'",
+      },
+    ],
+  },
 };

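For context on the `no-restricted-syntax` rule added above: `Error` properties such as `message` and `stack` are non-enumerable, so serializing an error with `JSON.stringify` silently drops them. A small TypeScript illustration (not code from this repo):

```typescript
const error = new Error("request failed");

// JSON.stringify only serializes enumerable own properties, so this logs "{}".
console.error(JSON.stringify(error));

// Log the message (or the Error object itself) instead; console.error is still
// allowed by the new no-console rule.
console.error(error.message); // "request failed"
```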

@@ -9,6 +9,20 @@ on:
     branches:
       - master
 jobs:
+  codemetrics:
+    runs-on: ubuntu-latest
+    name: "Log Code Metrics"
+    if: github.ref == 'refs/heads/master'
+    steps:
+      - uses: actions/checkout@v2
+      - name: Use Node.js 12.x
+        uses: actions/setup-node@v1
+        with:
+          node-version: 12.x
+      - run: npm ci
+      - run: node utils/codeMetrics.js
+        env:
+          CODE_METRICS_APP_ID: ${{ secrets.CODE_METRICS_APP_ID }}
   compile:
     runs-on: ubuntu-latest
     name: "Compile TypeScript"
@@ -79,32 +93,32 @@
           name: dist
           path: dist/
   endtoendemulator:
-    name: "End To End Tests | Emulator | SQL"
+    name: "End To End Emulator Tests"
     needs: [lint, format, compile, unittest]
     runs-on: windows-latest
     steps:
       - uses: actions/checkout@v2
-      - uses: southpolesteve/cosmos-emulator-github-action@v1
       - name: Use Node.js 12.x
         uses: actions/setup-node@v1
         with:
           node-version: 12.x
-      - name: Restore Cypress Binary Cache
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/Cypress
-          key: ${{ runner.os }}-cypress-binary-cache
+      - uses: southpolesteve/cosmos-emulator-github-action@v1
       - name: End to End Tests
         run: |
           npm ci
           npm start &
-          npm ci --prefix ./cypress
-          npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
+          npm run wait-for-server
+          npx jest -c ./jest.config.e2e.js --detectOpenHandles test/sql/container.spec.ts
         shell: bash
         env:
-          EMULATOR_ENDPOINT: https://0.0.0.0:8081/
+          DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
+          PLATFORM: "Emulator"
           NODE_TLS_REJECT_UNAUTHORIZED: 0
-          CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
+      - uses: actions/upload-artifact@v2
+        if: failure()
+        with:
+          name: screenshots
+          path: failed-*
   accessibility:
     name: "Accessibility | Hosted"
     needs: [lint, format, compile, unittest]
@@ -123,13 +137,13 @@
           sudo sysctl -p
           npm ci
           npm start &
           npx wait-on -i 5000 https-get://0.0.0.0:1234/
           node utils/accesibilityCheck.js
         shell: bash
         env:
           NODE_TLS_REJECT_UNAUTHORIZED: 0
-  endtoendpuppeteer:
-    name: "End to end puppeteer tests"
+  endtoendhosted:
+    name: "End to End Hosted Tests"
     needs: [lint, format, compile, unittest]
     runs-on: ubuntu-latest
     steps:
@@ -138,28 +152,39 @@
         uses: actions/setup-node@v1
         with:
           node-version: 12.x
-      - name: End to End Puppeteer Tests
+      - name: End to End Hosted Tests
         run: |
           npm ci
           npm start &
+          node utils/cleanupDBs.js
           npm run wait-for-server
           npm run test:e2e
         shell: bash
         env:
           NODE_TLS_REJECT_UNAUTHORIZED: 0
+          PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
+          PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
+          PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
+          PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
+          PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT }}
+          PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY }}
+          NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
+          NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
+          NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
           PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
           MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
           CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
           TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
           DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
       - uses: actions/upload-artifact@v2
+        if: failure()
         with:
           name: screenshots
           path: failed-*
   nuget:
     name: Publish Nuget
     if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
+    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
     runs-on: ubuntu-latest
     env:
       NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -183,7 +208,7 @@
   nugetmpac:
     name: Publish Nuget MPAC
     if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
+    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
     runs-on: ubuntu-latest
     env:
       NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}


@@ -1,25 +0,0 @@
name: Runners
on:
schedule:
- cron: "0 * 1 * *"
jobs:
sqlcreatecollection:
runs-on: ubuntu-latest
name: "SQL | Create Collection"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- run: npm ci
- run: npm run test:e2e
env:
PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
PORTAL_RUNNER_RESOURCE_GROUP: runners
PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failure.png

.gitignore

@@ -9,9 +9,6 @@ pkg/DataExplorer/*
 test/out/*
 workers/**/*.js
 *.trx
-cypress/videos
-cypress/screenshots
-cypress/fixtures
 notebookapp/*
 Contracts/*
 .DS_Store

.vs/slnx.sqlite (binary file, not shown)


@@ -13,29 +13,18 @@ UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), ht
 ### Watch mode
 
-Run `npm run watch` to start the development server and automatically rebuild on changes
+Run `npm start` to start the development server and automatically rebuild on changes
 
-### Specifying Development Platform
+### Hosted Development (https://cosmos.azure.com)
 
-Setting the environment variable `PLATFORM` during the build process will force the explorer to load the specified platform. By default in development it will run in `Hosted` mode. Valid options:
-
-- Hosted
-- Emulator
-- Portal
-
-`PLATFORM=Emulator npm run watch`
-
-### Hosted Development
-
-The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
-
-To run pure hosted mode, in `webpack.config.js` change index HtmlWebpackPlugin to use hostedExplorer.html and change entry for index to use HostedExplorer.ts.
+- Visit: `https://localhost:1234/hostedExplorer.html`
+- Local sign in via AAD will NOT work. Connection string only in dev mode. Use the Portal if you need AAD auth.
+- The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
 
 ### Emulator Development
 
-In a window environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.
-
-`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
+- Start the Cosmos Emulator
+- Visit: https://localhost:1234/index.html
 
 #### Setting up a Remote Emulator
@@ -55,16 +44,8 @@ The Cosmos emulator currently only runs in Windows environments. You can still d
 ### Portal Development
 
-The Cosmos Portal that consumes this repo is not currently open source. If you have access to this project, `npm run build` will copy the built files over to the portal where they will be loaded by the portal development environment
-
-You can however load a local running instance of data explorer in the production portal.
-
-1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
-2. Allowlist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
-3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
-4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
-
-Live reload will occur, but data explorer will not properly integrate again with the parent iframe. You will have to manually reload the page.
+- Visit: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
+- You may have to manually visit https://localhost:1234/explorer.html first and click through any SSL certificate warnings
 
 ### Testing
@@ -76,17 +57,7 @@ Unit tests are located adjacent to the code under test and run with [Jest](https
 #### End to End CI Tests
 
-[Cypress](https://www.cypress.io/) is used for end to end tests and are contained in `cypress/`. Currently, it operates as sub project with its own typescript config and dependencies. It also only operates against the emulator. To run cypress tests:
-
-1. Ensure the emulator is running
-2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
-3. Move into `cypress/` folder: `cd cypress`
-4. Install dependencies: `npm install`
-5. Run cypress headless(`npm run test`) or in interactive mode(`npm run test:debug`)
-
-#### End to End Production Tests
-
-Jest and Puppeteer are used for end to end production runners and are contained in `test/`. To run these tests locally:
+Jest and Puppeteer are used for end to end browser based tests and are contained in `test/`. To run these tests locally:
 
 1. Copy .env.example to .env
 2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
@@ -98,6 +69,10 @@ Jest and Puppeteer are used for end to end production runners and are contained
 We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
### Architechture
[![](https://mermaid.ink/img/eyJjb2RlIjoiZ3JhcGggTFJcbiAgaG9zdGVkKGh0dHBzOi8vY29zbW9zLmF6dXJlLmNvbSlcbiAgcG9ydGFsKFBvcnRhbClcbiAgZW11bGF0b3IoRW11bGF0b3IpXG4gIGFhZFtBQURdXG4gIHJlc291cmNlVG9rZW5bUmVzb3VyY2UgVG9rZW5dXG4gIGNvbm5lY3Rpb25TdHJpbmdbQ29ubmVjdGlvbiBTdHJpbmddXG4gIHBvcnRhbFRva2VuW0VuY3J5cHRlZCBQb3J0YWwgVG9rZW5dXG4gIG1hc3RlcktleVtNYXN0ZXIgS2V5XVxuICBhcm1bQVJNIFJlc291cmNlIFByb3ZpZGVyXVxuICBkYXRhcGxhbmVbRGF0YSBQbGFuZV1cbiAgcHJveHlbUG9ydGFsIEFQSSBQcm94eV1cbiAgc3FsW1NRTF1cbiAgbW9uZ29bTW9uZ29dXG4gIHRhYmxlc1tUYWJsZXNdXG4gIGNhc3NhbmRyYVtDYXNzYW5kcmFdXG4gIGdyYWZbR3JhcGhdXG5cblxuICBlbXVsYXRvciAtLT4gbWFzdGVyS2V5IC0tLS0-IGRhdGFwbGFuZVxuICBwb3J0YWwgLS0-IGFhZFxuICBob3N0ZWQgLS0-IHBvcnRhbFRva2VuICYgcmVzb3VyY2VUb2tlbiAmIGNvbm5lY3Rpb25TdHJpbmcgJiBhYWRcbiAgYWFkIC0tLT4gYXJtXG4gIGFhZCAtLS0-IGRhdGFwbGFuZVxuICBhYWQgLS0tPiBwcm94eVxuICByZXNvdXJjZVRva2VuIC0tLT4gc3FsIC0tPiBkYXRhcGxhbmVcbiAgcG9ydGFsVG9rZW4gLS0tPiBwcm94eVxuICBwcm94eSAtLT4gZGF0YXBsYW5lXG4gIGNvbm5lY3Rpb25TdHJpbmcgLS0-IHNxbCAmIG1vbmdvICYgY2Fzc2FuZHJhICYgZ3JhZiAmIHRhYmxlc1xuICBzcWwgLS0-IGRhdGFwbGFuZVxuICB0YWJsZXMgLS0-IGRhdGFwbGFuZVxuICBtb25nbyAtLT4gcHJveHlcbiAgY2Fzc2FuZHJhIC0tPiBwcm94eVxuICBncmFmIC0tPiBwcm94eVxuXG5cdFx0IiwibWVybWFpZCI6eyJ0aGVtZSI6ImRlZmF1bHQifSwidXBkYXRlRWRpdG9yIjpmYWxzZX0)](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoiZ3JhcGggTFJcbiAgaG9zdGVkKGh0dHBzOi8vY29zbW9zLmF6dXJlLmNvbSlcbiAgcG9ydGFsKFBvcnRhbClcbiAgZW11bGF0b3IoRW11bGF0b3IpXG4gIGFhZFtBQURdXG4gIHJlc291cmNlVG9rZW5bUmVzb3VyY2UgVG9rZW5dXG4gIGNvbm5lY3Rpb25TdHJpbmdbQ29ubmVjdGlvbiBTdHJpbmddXG4gIHBvcnRhbFRva2VuW0VuY3J5cHRlZCBQb3J0YWwgVG9rZW5dXG4gIG1hc3RlcktleVtNYXN0ZXIgS2V5XVxuICBhcm1bQVJNIFJlc291cmNlIFByb3ZpZGVyXVxuICBkYXRhcGxhbmVbRGF0YSBQbGFuZV1cbiAgcHJveHlbUG9ydGFsIEFQSSBQcm94eV1cbiAgc3FsW1NRTF1cbiAgbW9uZ29bTW9uZ29dXG4gIHRhYmxlc1tUYWJsZXNdXG4gIGNhc3NhbmRyYVtDYXNzYW5kcmFdXG4gIGdyYWZbR3JhcGhdXG5cblxuICBlbXVsYXRvciAtLT4gbWFzdGVyS2V5IC0tLS0-IGRhdGFwbGFuZVxuICBwb3J0YWwgLS0-IGFhZFxuICBob3N0ZWQgLS0-IHBvcnRhbFRva2VuICYgcmVzb3VyY2VUb2tlbiAmIGNvbm5lY3Rpb25TdHJpbmcgJiBhYWRcbiAgYWFkIC0tLT4gYXJtXG4gIGFhZCAtLS0-IGRhdGFwbGFuZVxuICBhYWQgLS0tPiBwcm94eVxuICByZXNvdXJjZVRva2VuIC0tLT4gc3FsIC0tPiBkYXRhcGxhbmVcbiAgcG9ydGFsVG9rZW4gLS0tPiBwcm94eVxuICBwcm94eSAtLT4gZGF0YXBsYW5lXG4gIGNvbm5lY3Rpb25TdHJpbmcgLS0-IHNxbCAmIG1vbmdvICYgY2Fzc2FuZHJhICYgZ3JhZiAmIHRhYmxlc1xuICBzcWwgLS0-IGRhdGFwbGFuZVxuICB0YWJsZXMgLS0-IGRhdGFwbGFuZVxuICBtb25nbyAtLT4gcHJveHlcbiAgY2Fzc2FuZHJhIC0tPiBwcm94eVxuICBncmFmIC0tPiBwcm94eVxuXG5cdFx0IiwibWVybWFpZCI6eyJ0aGVtZSI6ImRlZmF1bHQifSwidXBkYXRlRWRpdG9yIjpmYWxzZX0)
 # Contributing
 
 Please read the [contribution guidelines](./CONTRIBUTING.md).


@@ -1,3 +1,4 @@
 module.exports = {
-  presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
+  presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
+  plugins: [["@babel/plugin-proposal-decorators", { legacy: true }]],
 };

canvas/README.md (new file)

@@ -0,0 +1,7 @@
# Why?
This adds a mock module for `canvas`. Nteract has a ignored require and undeclared dependency on this module. `cavnas` is a server side node module and is not used in browser side code for nteract.
Installing it locally (`npm install canvas`) will resolve the problem, but it is a native module so it is flaky depending on the system, node version, processor arch, etc. This module provides a simpler, more robust solution.
Remove this workaround if [this bug](https://github.com/nteract/any-vega/issues/2) ever gets resolved

canvas/index.js (new file)

@@ -0,0 +1 @@
module.exports = {}

canvas/package.json (new file)

@@ -0,0 +1,11 @@
{
"name": "canvas",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}


@@ -1,3 +1,4 @@
 {
-  "JUNO_ENDPOINT": "https://tools-staging.cosmos.azure.com"
+  "JUNO_ENDPOINT": "https://tools-staging.cosmos.azure.com",
+  "ENABLE_GALLERY_PUBLISH": true
 }

cypress/.gitignore (deleted)

@@ -1,4 +0,0 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos


@@ -1,15 +0,0 @@
{
"integrationFolder": "./integration",
"pluginsFile": false,
"fixturesFolder": false,
"supportFile": "./support/index.js",
"defaultCommandTimeout": 90000,
"chromeWebSecurity": false,
"reporter": "mochawesome",
"reporterOptions": {
"reportDir": "cypress/report",
"json": true,
"overwrite": false,
"html": false
}
}


@@ -1,66 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Cassandra API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
});
it("Create a new table in Cassandra API", () => {
const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
const tableId = `TableId112`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[id="keyspace-id"]')
.should("be.visible")
.type(keyspaceId);
cy.wrap($body)
.find('input[class="textfontclr"]')
.type(tableId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('data-test="addCollection-createCollection"')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", tableId);
});
});
});


@@ -1,81 +0,0 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Graph API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.graph);
});
it("Create a new graph in Graph API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Graph"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.should("be.visible")
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(graphId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(partitionKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", graphId);
});
});
});


@@ -1,80 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// // 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});


@@ -1,96 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in Mongo API - Autopilot", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('div[class="throughputModeContainer"]')
.should("be.visible")
.and(input => {
expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
expect(input.get(1).textContent, "second item").contains("Manual");
});
cy.wrap($body)
.find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
.check();
cy.wrap($body)
.find('select[name="autoPilotTiers"]')
// .eq(1).should('contain', '4,000 RU/s');
// // .select('4,000 RU/s').should('have.value', '1');
.find('option[value="2"]')
.then($element => $element.get(1).setAttribute("selected", "selected"));
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});


@@ -1,67 +0,0 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in existing database in Mongo API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('span[class="nodeLabel"]')
.should("be.visible")
.then($span => {
const dbId1 = $span.text();
cy.log("DBBB", dbId1);
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.type(dbId1);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.click()
.should("contain", collectionId);
});
});
});
});

View File

@@ -1,203 +0,0 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context.skip("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API - Provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab2"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab1"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -1,79 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("SQL API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new container in SQL API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
connectionString.loginUsingConnectionString();
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Container"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId);
});
});
});

View File

@@ -1,60 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Table API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.table);
});
it("Create a new table in Table API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", collectionId);
});
});
});

View File

@@ -1,55 +0,0 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
let crypt = require("crypto");
context("Emulator - createDatabase", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;
it("Create a new collection", () => {
cy.contains("New Container").click();
// cy.contains(collectionIdTitle);
cy.get(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.get('input[data-test="addCollection-createNewDatabase"]').check();
cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);
cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);
cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");
cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
cy.get('input[data-test="addCollection-createCollection"]').click();
cy.get('div[data-test="resourceTreeId"]').should("exist");
cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);
cy.get('div[data-test="databaseList"]').should("contain", collectionId);
});
});

View File

@@ -1,65 +0,0 @@
// 1. Click on "New Database" on the command bar
// 2. a Pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. Whe the checkbox is marked, a new input with a throughput control let's you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have the list item called "Scale", that once clicked, it should show the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permited range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab and reopen it and verify that the input contains the last saved value.%
const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;
context("Emulator - Create database -> container -> item", () => {
beforeEach(async () => {
const { resources } = await client.databases.readAll().fetchAll();
for (const database of resources) {
await client.database(database.id).delete();
}
});
it("creates a new database", () => {
cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
cy.contains("New Container").click();
cy.get("[data-test=addCollection-newDatabaseId]").click();
cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
cy.get("[data-test=addCollection-collectionId]").click();
cy.get("[data-test=addCollection-collectionId]").type(collectionId);
cy.get("[data-test=addCollection-partitionKeyValue]").click();
cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
cy.get('input[name="createCollection"]').click();
cy.get(".dataResourceTree").should("contain", databaseId);
cy.get(".dataResourceTree")
.contains(databaseId)
.click();
cy.get(".dataResourceTree").should("contain", collectionId);
cy.get(".dataResourceTree")
.contains(collectionId)
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("New Item")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("Save")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
});
});

View File

@@ -1,46 +0,0 @@
// 1. Click last database in the resource tree
// 2. Click the last collection within the database
// 3. Select the context menu within the collection
// 4. Select "Delete Container" option in the dropdown
// 5. On Selection, Delete Container pane opens on the right side
// 6. Enter the same collection id that is to be deleted and click ok
// 7. Now, the resource tree refreshes, the deleted collection should not appear under the database
let crypt = require("crypto");
context("Emulator - deleteCollection", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
it("Delete a collection", () => {
cy.get(".databaseId")
.last()
.click();
cy.get(".collectionList")
.last()
.then($id => {
const collectionId = $id.text();
cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");
cy.get('span[data-test="collectionEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="collectionContextMenu"]')
.contains("Delete Container")
.click({ force: true });
cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());
cy.get('input[data-test="deleteCollection"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);
cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
});
});
});

View File

@@ -1,83 +0,0 @@
// 1. Click the last database in the resource tree
// 2. Select the context menu within the database
// 3. Select the "Delete Database" option in the dropdown
// 4. On selection, the Delete Database pane opens on the right side
// 5. Enter the same database id that is to be deleted and click OK
// 6. The resource tree then refreshes, and the deleted database should no longer appear in it
let crypt = require("crypto");
context("Emulator - deleteDatabase", () => {
beforeEach(() => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
let db_rid = "";
const date = new Date().toUTCString();
let authToken = "";
cy.visit("http://localhost:1234/explorer.html");
// Creating auth token for collection creation
cy.request({
method: "GET",
url: "https://localhost:8081/_explorer/authorization/post/dbs/",
headers: {
"x-ms-date": date,
authorization: "-"
}
})
.then(response => {
authToken = response.body.Token; // Getting auth token for collection creation
return new Cypress.Promise((resolve, reject) => {
return resolve();
});
})
.then(() => {
cy.request({
method: "POST",
url: "https://localhost:8081/dbs",
headers: {
"x-ms-date": date,
authorization: authToken,
"x-ms-version": "2018-12-31"
},
body: {
id: dbId
}
}).then(response => {
cy.log("Response", response);
db_rid = response.body._rid;
return new Cypress.Promise((resolve, reject) => {
cy.log("Rid", db_rid);
return resolve();
});
});
});
});
it("Delete a database", () => {
cy.get('span[data-test="refreshTree"]').click();
cy.get(".databaseId")
.last()
.then($id => {
const dbId = $id.text();
cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");
cy.get('span[data-test="databaseEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="databaseContextMenu"]')
.contains("Delete Database")
.click({ force: true });
cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());
cy.get('input[data-test="deleteDatabase"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
});
});
});

View File

@@ -1,35 +0,0 @@
# Notebook end-to-end tests
This describes how to run the tests locally
## Stand up a local notebook container instance:
Instructions on how to build and run the container [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)
## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).
Make sure you can run Data Explorer locally from the web browser.
## Run cypress tests
1. Edit the URL for your DataExplorer in the `.spec.ts` file
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```
To run in Debug mode:
```bash
npm run test:debug
```
This opens the Cypress UI
## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds).
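A minimal sketch of those hover workarounds (the selector is borrowed from the delete-collection spec above and is purely illustrative):
```js
// Option 1: force-show the hidden element with jQuery's .show() via .invoke()
cy.get('span[data-test="collectionEllipsisMenu"]')
  .invoke("show")
  .click();

// Option 2: synthesize the hover by firing the mouseover event directly
cy.get('span[data-test="collectionEllipsisMenu"]').trigger("mouseover");
```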
## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)

View File

@@ -1,93 +0,0 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create a new notebook and run some code", () => {
// Create new notebook
cy.contains("New Notebook").click();
// Check tab name
cy.get("li.tabList .tabNavText").should($span => {
const text = $span.text();
expect(text).to.match(/^Untitled.*\.ipynb$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.click();
// Type in some code
cy.get("@cellContainer").type("2+4");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "6");
});
// Restart kernel
cy.get('[data-test="Run"] button')
.eq(-1)
.click();
cy.get("li")
.contains("Restart Kernel")
.click();
// Wait for python3 | restarting status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*restarting$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.find(".input")
.as("codeInput")
.click();
// Type in some code
cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "9");
});
});
});

View File

@@ -1,172 +0,0 @@
context("Resource tree notebook file manipulation", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
const clickContextMenuAndSelectOption = (nodeLabel, option) => {
cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains(option)
.click();
};
const createFolder = folder => {
clickContextMenuAndSelectOption("My Notebooks/", "New Directory");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]').type(folder);
cy.get("form").submit();
});
};
const deleteItem = nodeName => {
clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create and remove a directory", () => {
const folder = "e2etest_folder1";
createFolder(folder);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
deleteItem(`${folder}/`);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
});
it("Create and rename a directory", () => {
const folder = "e2etest_folder2";
const renamedFolder = "e2etest_folder2_renamed";
createFolder(folder);
// Rename
clickContextMenuAndSelectOption(`${folder}/`, "Rename");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedFolder);
cy.get("form").submit();
});
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
deleteItem(`${renamedFolder}/`);
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
});
it("Create a notebook inside a directory", () => {
const folder = "e2etest_folder3";
const newNotebookName = "Untitled.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Verify tab is open
cy.get(".tabList")
.contains(newNotebookName)
.should("exist");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
// When running from command line, closing the tab is too fast
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
deleteItem(`${folder}/`);
});
it("Create and rename a notebook inside a directory", () => {
const folder = "e2etest_folder4";
const newNotebookName = "Untitled.ipynb";
const renamedNotebookName = "mynotebook.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Rename notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Rename")
.click();
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedNotebookName);
cy.get("form").submit();
});
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
// Give it time to settle
cy.wait(1000);
deleteItem(`${folder}/`);
});
});

cypress/package-lock.json (generated): 3066 changed lines — file diff suppressed because it is too large.

View File

@@ -1,25 +0,0 @@
{
"name": "cosmos-explorer-cypress",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "cypress run",
"wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
"test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
"test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
"test:debug": "cypress open"
},
"devDependencies": {
"cypress": "^4.8.0",
"mocha": "^7.0.1",
"mochawesome": "^4.1.0",
"mochawesome-merge": "^4.0.1",
"mochawesome-report-generator": "^4.1.0",
"typescript": "3.4.3",
"wait-on": "^4.0.2"
},
"dependencies": {
"@microsoft/applicationinsights-web": "^2.5.2"
}
}

View File

@@ -1,23 +0,0 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");
const appInsights = new appInsightsLib.ApplicationInsights({
config: {
instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
/* ...Other Configuration Options... */
}
});
appInsights.loadAppInsights();
Cypress.on("fail", (error, runnable) => {
// App Insights will record the failed tests for Create Collection
let message = JSON.stringify(runnable.title);
appInsights.trackTrace({
message: `${message}`,
properties: {
passed: false,
error: error
}
});
throw error; // throw error to have test still fail
});

View File

@@ -1,11 +0,0 @@
{
"compilerOptions": {
"strict": true,
"noEmit": true,
"module": "commonjs",
"target": "es5",
"lib": ["es5", "dom", "es6"],
"types": ["cypress", "node"]
},
"include": ["**/*.ts", "**/*.spec.ts"]
}

View File

@@ -1,41 +0,0 @@
module.exports = {
loginUsingConnectionString: function() {
const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
const timeout = 15000;
cy.visit(prodUrl);
cy.get('iframe[id="explorerMenu"]').should("be.visible");
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find("#connectExplorer")
.should("exist")
.find("div[class='connectExplorer']")
.should("exist")
.find("p[class='welcomeText']")
.should("exist");
cy.wrap($body.find("div > p.switchConnectTypeText"))
.should("exist")
.last()
.click({ force: true });
const secret = Cypress.env("CONNECTION_STRING");
cy.wrap($body)
.find("input[class='inputToken']")
.should("exist")
.type(secret, {
force: true
});
cy.wrap($body.find("input[value='Connect']"), { timeout })
.first()
.click({ force: true });
cy.wait(15000);
});
}
};

View File

@@ -1,6 +0,0 @@
const { CosmosClient } = require("@azure/cosmos");
module.exports = new CosmosClient({
endpoint: "https://0.0.0.0:8081",
key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});

externals/adal.js (vendored): 1963 changed lines — file diff suppressed because it is too large.

View File

@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.31449 2.01439L4.00103 5.31963L3.26105 4.57965L7.8407 0L12.4203 4.57965L11.6804 5.31963L8.36691 2.01439V12.8428H7.31449V2.01439ZM13.629 12.8428H14.6814V16H1V12.8428H2.05242V14.9476H13.629V12.8428Z" fill="#0078D4"/>
</svg>


View File

@@ -6,6 +6,6 @@ module.exports = {
slowMo: 55,
defaultViewport: null,
ignoreHTTPSErrors: true,
- args: ["--disable-web-security"]
- }
+ args: ["--disable-web-security"],
+ },
};

View File

@@ -1,5 +1,5 @@
module.exports = {
preset: "jest-puppeteer",
testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
- setupFiles: ["dotenv/config"]
+ setupFiles: ["dotenv/config"],
};

View File

@@ -39,11 +39,11 @@ module.exports = {
// An object that configures minimum threshold enforcement for coverage results
coverageThreshold: {
global: {
- branches: 20,
- functions: 24,
- lines: 30,
- statements: 29.0
- }
+ branches: 22,
+ functions: 28,
+ lines: 33,
+ statements: 31,
+ },
},
// Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs",
- "^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
+ "^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs",
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.html?$": "html-loader-jest",
- "^.+\\.[t|j]sx?$": "babel-jest"
+ "^.+\\.[t|j]sx?$": "babel-jest",
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
- transformIgnorePatterns: ["/node_modules/", "/externals/"]
+ transformIgnorePatterns: ["/node_modules/", "/externals/"],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,

View File

@@ -3,8 +3,8 @@
/******************************************************************************/ /******************************************************************************/
@font-face { @font-face {
font-family: wf_segoe-ui_normal; font-family: wf_segoe-ui_normal;
src: url('../../fonts/segoe-ui/west-european/normal/latest.woff'); src: url("../../fonts/segoe-ui/west-european/normal/latest.woff");
} }
@DataExplorerFont: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif; @DataExplorerFont: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif;
@@ -20,26 +20,26 @@
COLORS COLORS
/******************************************************************************/ /******************************************************************************/
@AccentMediumHigh: #0058AD; @AccentMediumHigh: #0058ad;
@AccentMedium: #004E87; @AccentMedium: #004e87;
@AccentHigh: #1EBAED; @AccentHigh: #1ebaed;
@AccentExtraHigh: #55B3FF; @AccentExtraHigh: #55b3ff;
@AccentLow: #EDF6FF; @AccentLow: #edf6ff;
@AccentMediumLow: #DDEEFE; @AccentMediumLow: #ddeefe;
@AccentLight: #EEF7FF; @AccentLight: #eef7ff;
@AccentExtra: #DDF0FF; @AccentExtra: #ddf0ff;
@SelectionHigh: #B91F26; @SelectionHigh: #b91f26;
@BaseLight: #FFFFFF; @BaseLight: #ffffff;
@BaseDark: #000000; @BaseDark: #000000;
@NotificationLow: #FFF4CE; @NotificationLow: #fff4ce;
@NotificationHigh: #F9E9B0; @NotificationHigh: #f9e9b0;
@Purple1: #8A2DA5; @Purple1: #8a2da5;
@Dirty: #9b4f96; @Dirty: #9b4f96;
@BaseLow: #F2F2F2; @BaseLow: #f2f2f2;
@BaseMediumLow: #E6E6E6; @BaseMediumLow: #e6e6e6;
@BaseMedium: #CCCCCC; @BaseMedium: #cccccc;
@BaseMediumHigh: #767676; @BaseMediumHigh: #767676;
@BaseHigh: #393939; @BaseHigh: #393939;
@@ -53,10 +53,17 @@
@ErrorColor: @SelectionHigh; @ErrorColor: @SelectionHigh;
@SelectionColor: #3074B0; @SelectionColor: #3074b0;
@FocusColor: #605e5c; @FocusColor: #605e5c;
@GalleryBackgroundColor: #fdfdfd;
//Icons
@InfoIconColor: #0072c6;
@WarningIconColor: #db7500;
@ErrorIconColor: #b91f26;
/****************************************************************************** /******************************************************************************
METRICS METRICS
/******************************************************************************/ /******************************************************************************/
@@ -80,7 +87,7 @@
@ImgWidth: 14px; @ImgWidth: 14px;
@ImgHeight: 14px; @ImgHeight: 14px;
@toggleFontWeight:700; @toggleFontWeight: 700;
//Resource Tree //Resource Tree
@TreeLineHeight: 17px; @TreeLineHeight: 17px;
@@ -144,16 +151,16 @@
/**********************************************************************************/ /**********************************************************************************/
.flex-display(@display: flex) { .flex-display(@display: flex) {
display: ~"-webkit-@{display}"; display: ~"-webkit-@{display}";
display: ~"-ms-@{display}box"; // IE10 uses -ms-flexbox display: ~"-ms-@{display}box"; // IE10 uses -ms-flexbox
display: ~"-ms-@{display}"; // IE11 display: ~"-ms-@{display}"; // IE11
display: @display; display: @display;
} }
.flex-direction(@direction: column) { .flex-direction(@direction: column) {
-webkit-flex-direction: @direction; -webkit-flex-direction: @direction;
-ms-flex-direction: @direction; -ms-flex-direction: @direction;
flex-direction: @direction; flex-direction: @direction;
} }
/************************************************************************************* /*************************************************************************************
@@ -161,32 +168,31 @@
**************************************************************************************/ **************************************************************************************/
@media all and (-ms-high-contrast: none), (-ms-high-contrast: active) { @media all and (-ms-high-contrast: none), (-ms-high-contrast: active) {
.selectedRadio, .selectedRadio,
.selectedRadio:hover, .selectedRadio:hover,
.selectedRadio:active, .selectedRadio:active,
.selectedRadio.dirty, .selectedRadio.dirty,
.tab [type=radio]:checked ~ label, .tab [type="radio"]:checked ~ label,
.tab [type=radio]:checked ~ label:hover { .tab [type="radio"]:checked ~ label:hover {
-ms-high-contrast-adjust: none; -ms-high-contrast-adjust: none;
-webkit-text-fill-color: HighlightText; -webkit-text-fill-color: HighlightText;
color: HighlightText; color: HighlightText;
border-color: HighlightText; border-color: HighlightText;
background-color: Highlight; background-color: Highlight;
} }
.queryMetricsSummaryTuple { .queryMetricsSummaryTuple {
th,
th, td { td {
&:nth-child(2) {
&:nth-child(2) { width: @IETableDataWidth;
width: @IETableDataWidth; }
}
&:nth-child(3) {
&:nth-child(3) { width: 50%;
width: 50%; }
}
}
} }
}
} }
/******************************************************************************************** /********************************************************************************************
@@ -194,15 +200,15 @@
*********************************************************************************************/ *********************************************************************************************/
.hover() { .hover() {
background-color: @AccentLight; background-color: @AccentLight;
} }
.active() { .active() {
background-color: @AccentExtra; background-color: @AccentExtra;
} }
.focus() { .focus() {
outline: 1px dashed @FocusColor; outline: 1px dashed @FocusColor;
} }
/************************************************************************************************ /************************************************************************************************
@@ -212,63 +218,87 @@
@ToggleWidth: 180px; @ToggleWidth: 180px;
.toggleSwitch() { .toggleSwitch() {
max-width: 100%; max-width: 100%;
margin-bottom: @SmallSpace; margin-bottom: @SmallSpace;
padding: @SmallSpace; padding: @SmallSpace;
cursor: pointer; cursor: pointer;
color: @BaseHigh; color: @BaseHigh;
font-weight: 400; font-weight: 400;
font-size: @mediumFontSize; font-size: @mediumFontSize;
font-family: @DataExplorerFont; font-family: @DataExplorerFont;
} }
.selectedToggle() { .selectedToggle() {
border-bottom: 2px solid @BaseHigh; border-bottom: 2px solid @BaseHigh;
} }
.unselectedToggle() { .unselectedToggle() {
color: @AccentMediumHigh; color: @AccentMediumHigh;
} }
/******************************************************************************************************** /********************************************************************************************************
Common Data Explorer Icons Common Data Explorer Icons
*********************************************************************************************************/ *********************************************************************************************************/
.dataExplorerIcons() { .dataExplorerIcons() {
cursor: pointer; cursor: pointer;
width: @ImgWidth; width: @ImgWidth;
height: @ImgHeight; height: @ImgHeight;
} }
/********************************************************************************************************* /*********************************************************************************************************
Info Tooltip Info Tooltip
**********************************************************************************************************/ **********************************************************************************************************/
.infoTooltip() { .infoTooltip() {
position: relative; position: relative;
display: inline-block; display: inline-block;
} }
.tooltipText(@textColor: @BaseLight, @backgroundColor: @BaseHigh) { .tooltipText(@textColor: @BaseLight, @backgroundColor: @BaseHigh) {
visibility: hidden; visibility: hidden;
background-color: @backgroundColor; background-color: @backgroundColor;
color: @textColor; color: @textColor;
position: absolute; position: absolute;
z-index: 1; z-index: 1;
left: @MediumSpace; left: @MediumSpace;
padding: @MediumSpace; padding: @MediumSpace;
} }
.tooltipTextAfter(@color: @BaseDark) { .tooltipTextAfter(@color: @BaseDark) {
content: ""; content: "";
position: absolute; position: absolute;
right: 100%; right: 100%;
border-style: solid; border-style: solid;
border-color: transparent @color transparent transparent; border-color: transparent @color transparent transparent;
left: 0px; left: 0px;
width: 0; width: 0;
height: 0; height: 0;
border-color: @InfoPointerColor transparent; border-color: @InfoPointerColor transparent;
} }
.tooltipVisible() { .tooltipVisible() {
visibility: visible; visibility: visible;
}
.inputTooltip() {
position: relative;
}
.inputTooltipText(@textColor: @BaseLight, @backgroundColor: @BaseHigh) {
background-color: @backgroundColor;
color: @textColor;
position: absolute;
z-index: 1;
padding: @MediumSpace;
}
.inputTooltipTextAfter(@color: @BaseDark) {
content: "";
position: absolute;
right: 100%;
border-style: solid;
border-color: transparent @color transparent transparent;
left: 10px;
width: 0;
height: 0;
border-color: @InfoPointerColor transparent;
} }

File diff suppressed because it is too large.

View File

@@ -13,6 +13,11 @@
@NavMediumSpace: 10px;
@NavLargeSpace: 15px;
+ .skip-link {
+   position: fixed;
+   top: -200px;
+ }
html {
font-family: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif;
padding: 0px;

View File

@@ -1,20 +1,12 @@
@import "./Common/Constants"; @import "./Common/Constants";
.main {
width: 100%;
float: left;
transition: all .0s ease-in-out;
-ms-transition: all 0s ease-in-out;
-webkit-transition: all 0s ease-in-out;
-moz-transition: all .0s ease-in-out;
height: 100%;
background-color: white;
border-left: 0px solid white;
}
.resourceTree { .resourceTree {
height: 100%; height: 100%;
flex: 0 0 auto; flex: 0 0 auto;
.main {
height: 100%;
}
} }
.resourceTreeScroll { .resourceTreeScroll {

package-lock.json (generated): 6505 changed lines — file diff suppressed because it is too large.

View File

@@ -4,12 +4,16 @@
"description": "Cosmos Explorer", "description": "Cosmos Explorer",
"main": "index.js", "main": "index.js",
"dependencies": { "dependencies": {
"@azure/arm-cosmosdb": "9.1.0",
"@azure/cosmos": "3.9.0", "@azure/cosmos": "3.9.0",
"@azure/cosmos-language-service": "0.0.4", "@azure/cosmos-language-service": "0.0.5",
"@jupyterlab/services": "6.0.0-rc.2", "@azure/identity": "1.2.1",
"@jupyterlab/terminal": "3.0.0-rc.2", "@babel/plugin-proposal-class-properties": "7.12.1",
"@babel/plugin-proposal-decorators": "7.12.12",
"@jupyterlab/services": "6.0.2",
"@jupyterlab/terminal": "3.0.3",
"@microsoft/applicationinsights-web": "2.5.9", "@microsoft/applicationinsights-web": "2.5.9",
"@nteract/commutable": "7.3.2", "@nteract/commutable": "7.4.2",
"@nteract/connected-components": "6.8.2", "@nteract/connected-components": "6.8.2",
"@nteract/core": "15.1.0", "@nteract/core": "15.1.0",
"@nteract/data-explorer": "8.0.3", "@nteract/data-explorer": "8.0.3",
@@ -34,6 +38,7 @@
"@nteract/transform-vega": "7.0.6", "@nteract/transform-vega": "7.0.6",
"@octokit/rest": "17.9.2", "@octokit/rest": "17.9.2",
"@phosphor/widgets": "1.9.3", "@phosphor/widgets": "1.9.3",
"@testing-library/jest-dom": "5.11.9",
"@types/mkdirp": "1.0.1", "@types/mkdirp": "1.0.1",
"@types/node-fetch": "2.5.7", "@types/node-fetch": "2.5.7",
"@uifabric/react-cards": "0.109.110", "@uifabric/react-cards": "0.109.110",
@@ -42,7 +47,7 @@
"applicationinsights": "1.8.0", "applicationinsights": "1.8.0",
"babel-polyfill": "6.26.0", "babel-polyfill": "6.26.0",
"bootstrap": "3.4.1", "bootstrap": "3.4.1",
"canvas": "2.6.1", "canvas": "file:./canvas",
"clean-webpack-plugin": "0.1.19", "clean-webpack-plugin": "0.1.19",
"copy-webpack-plugin": "6.0.2", "copy-webpack-plugin": "6.0.2",
"crossroads": "0.12.2", "crossroads": "0.12.2",
@@ -59,6 +64,9 @@
"eslint-plugin-react": "7.20.0", "eslint-plugin-react": "7.20.0",
"hasher": "1.2.0", "hasher": "1.2.0",
"html2canvas": "1.0.0-rc.5", "html2canvas": "1.0.0-rc.5",
"i18next": "19.8.4",
"i18next-browser-languagedetector": "6.0.1",
"i18next-http-backend": "1.0.23",
"immutable": "4.0.0-rc.12", "immutable": "4.0.0-rc.12",
"is-ci": "2.0.0", "is-ci": "2.0.0",
"jquery": "3.5.1", "jquery": "3.5.1",
@@ -66,7 +74,8 @@
"jquery-ui-dist": "1.12.1", "jquery-ui-dist": "1.12.1",
"knockout": "3.5.1", "knockout": "3.5.1",
"mkdirp": "1.0.4", "mkdirp": "1.0.4",
"monaco-editor": "0.15.6", "monaco-editor": "0.18.1",
"msal": "1.4.4",
"object.entries": "1.1.0", "object.entries": "1.1.0",
"office-ui-fabric-react": "7.134.1", "office-ui-fabric-react": "7.134.1",
"p-retry": "4.2.0", "p-retry": "4.2.0",
@@ -78,14 +87,17 @@
"react-animate-height": "2.0.8", "react-animate-height": "2.0.8",
"react-dnd": "9.4.0", "react-dnd": "9.4.0",
"react-dnd-html5-backend": "9.4.0", "react-dnd-html5-backend": "9.4.0",
"react-dom": "16.9.0", "react-dom": "16.13.1",
"react-hotkeys": "2.0.0", "react-hotkeys": "2.0.0",
"react-i18next": "11.8.5",
"react-notification-system": "0.2.17", "react-notification-system": "0.2.17",
"react-redux": "7.1.3", "react-redux": "7.1.3",
"redux": "4.0.4", "redux": "4.0.4",
"reflect-metadata": "0.1.13",
"rx-jupyter": "5.5.12", "rx-jupyter": "5.5.12",
"rxjs": "6.6.3", "rxjs": "6.6.3",
"styled-components": "4.3.2", "styled-components": "4.3.2",
"swr": "0.4.0",
"text-encoding": "0.7.0", "text-encoding": "0.7.0",
"underscore": "1.9.1", "underscore": "1.9.1",
"url-polyfill": "1.1.7", "url-polyfill": "1.1.7",
@@ -99,6 +111,7 @@
"@babel/preset-env": "7.9.0", "@babel/preset-env": "7.9.0",
"@babel/preset-react": "7.9.4", "@babel/preset-react": "7.9.4",
"@babel/preset-typescript": "7.9.0", "@babel/preset-typescript": "7.9.0",
"@testing-library/react": "11.2.3",
"@types/applicationinsights-js": "1.0.7", "@types/applicationinsights-js": "1.0.7",
"@types/codemirror": "0.0.56", "@types/codemirror": "0.0.56",
"@types/crossroads": "0.0.30", "@types/crossroads": "0.0.30",
@@ -107,7 +120,7 @@
"@types/enzyme-adapter-react-16": "1.0.6", "@types/enzyme-adapter-react-16": "1.0.6",
"@types/expect-puppeteer": "4.4.3", "@types/expect-puppeteer": "4.4.3",
"@types/hasher": "0.0.31", "@types/hasher": "0.0.31",
"@types/jest": "23.3.10", "@types/jest": "26.0.20",
"@types/jest-environment-puppeteer": "4.3.2", "@types/jest-environment-puppeteer": "4.3.2",
"@types/memoize-one": "4.1.1", "@types/memoize-one": "4.1.1",
"@types/node": "12.11.1", "@types/node": "12.11.1",
@@ -115,8 +128,8 @@
"@types/prop-types": "15.5.8", "@types/prop-types": "15.5.8",
"@types/puppeteer": "3.0.1", "@types/puppeteer": "3.0.1",
"@types/q": "1.5.1", "@types/q": "1.5.1",
"@types/react": "16.9.49", "@types/react": "17.0.0",
"@types/react-dom": "16.0.7", "@types/react-dom": "17.0.0",
"@types/react-notification-system": "0.2.39", "@types/react-notification-system": "0.2.39",
"@types/react-redux": "7.1.7", "@types/react-redux": "7.1.7",
"@types/sinon": "2.3.3", "@types/sinon": "2.3.3",
@@ -126,7 +139,6 @@
"@types/webfontloader": "1.6.29", "@types/webfontloader": "1.6.29",
"@typescript-eslint/eslint-plugin": "4.0.1", "@typescript-eslint/eslint-plugin": "4.0.1",
"@typescript-eslint/parser": "4.0.1", "@typescript-eslint/parser": "4.0.1",
"adal-angular": "1.0.15",
"axe-puppeteer": "1.1.0", "axe-puppeteer": "1.1.0",
"babel-jest": "24.9.0", "babel-jest": "24.9.0",
"babel-loader": "8.1.0", "babel-loader": "8.1.0",
@@ -141,7 +153,9 @@
"eslint-cli": "1.1.1", "eslint-cli": "1.1.1",
"eslint-plugin-no-null": "1.0.2", "eslint-plugin-no-null": "1.0.2",
"eslint-plugin-prefer-arrow": "1.2.2", "eslint-plugin-prefer-arrow": "1.2.2",
"eslint-plugin-react-hooks": "4.2.0",
"expose-loader": "0.7.5", "expose-loader": "0.7.5",
"fast-glob": "3.2.5",
"file-loader": "2.0.0", "file-loader": "2.0.0",
"fs-extra": "7.0.0", "fs-extra": "7.0.0",
"html-loader": "0.5.5", "html-loader": "0.5.5",
@@ -158,7 +172,7 @@
"mini-css-extract-plugin": "0.4.3", "mini-css-extract-plugin": "0.4.3",
"monaco-editor-webpack-plugin": "1.7.0", "monaco-editor-webpack-plugin": "1.7.0",
"node-fetch": "2.6.1", "node-fetch": "2.6.1",
"prettier": "1.19.1", "prettier": "2.2.1",
"puppeteer": "4.0.0", "puppeteer": "4.0.0",
"raw-loader": "0.5.1", "raw-loader": "0.5.1",
"rimraf": "3.0.0", "rimraf": "3.0.0",
@@ -194,8 +208,8 @@
"compile": "tsc", "compile": "tsc",
"compile:contracts": "tsc -p ./tsconfig.contracts.json", "compile:contracts": "tsc -p ./tsconfig.contracts.json",
"compile:strict": "tsc -p ./tsconfig.strict.json", "compile:strict": "tsc -p ./tsconfig.strict.json",
"format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"", "format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"", "format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"", "lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
"build:contracts": "npm run compile:contracts", "build:contracts": "npm run compile:contracts",
"strictEligibleFiles": "node ./strict-migration-tools/index.js", "strictEligibleFiles": "node ./strict-migration-tools/index.js",

View File

@@ -3,7 +3,6 @@
"offerThroughput": 400, "offerThroughput": 400,
"databaseLevelThroughput": false, "databaseLevelThroughput": false,
"collectionId": "Persons", "collectionId": "Persons",
"rupmEnabled": false,
"partitionKey": { "kind": "Hash", "paths": ["/name"] }, "partitionKey": { "kind": "Hash", "paths": ["/name"] },
"data": [ "data": [
"g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)", "g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)",
@@ -13,4 +12,4 @@
"g.V('1').addE('knows').to(g.V('2')).outV().addE('knows').to(g.V('3'))", "g.V('1').addE('knows').to(g.V('2')).outV().addE('knows').to(g.V('3'))",
"g.V('3').addE('knows').to(g.V('4'))" "g.V('3').addE('knows').to(g.V('4'))"
] ]
} }

View File

@@ -1,6 +1,7 @@
export enum AuthType {
AAD = "aad",
EncryptedToken = "encryptedtoken",
MasterKey = "masterkey",
- ResourceToken = "resourcetoken"
+ ResourceToken = "resourcetoken",
+ ConnectionString = "connectionstring",
}

View File

@@ -1,21 +1,22 @@
import * as ko from "knockout"; import * as ko from "knockout";
import * as ReactBindingHandler from "./ReactBindingHandler"; import * as ReactBindingHandler from "./ReactBindingHandler";
import "../Explorer/Tables/DataTable/DataTableBindingManager";
export class BindingHandlersRegisterer {
public static registerBindingHandlers() { export class BindingHandlersRegisterer {
ko.bindingHandlers.setTemplateReady = { public static registerBindingHandlers() {
init( ko.bindingHandlers.setTemplateReady = {
element: any, init(
wrappedValueAccessor: () => any, element: any,
allBindings?: ko.AllBindings, wrappedValueAccessor: () => any,
viewModel?: any, allBindings?: ko.AllBindings,
bindingContext?: ko.BindingContext viewModel?: any,
) { bindingContext?: ko.BindingContext
const value = ko.unwrap(wrappedValueAccessor()); ) {
bindingContext?.$data.isTemplateReady(value); const value = ko.unwrap(wrappedValueAccessor());
} bindingContext?.$data.isTemplateReady(value);
} as ko.BindingHandler; },
} as ko.BindingHandler;
ReactBindingHandler.Registerer.register();
} ReactBindingHandler.Registerer.register();
} }
}

View File

@@ -42,7 +42,7 @@ export class Registerer {
// Initial rendering at mount point
ReactDOM.render(adapter.renderComponent(), element);
- }
+ },
} as ko.BindingHandler;
}
}

View File

@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key);
if (values) {
- values.forEach(value => iteratorFct(value));
+ values.forEach((value) => iteratorFct(value));
}
}

View File

@@ -1,439 +1,417 @@
-import { HashMap } from "./HashMap";
-
-export class AuthorizationEndpoints {
-  public static arm: string = "https://management.core.windows.net/";
-  public static common: string = "https://login.windows.net/";
-}
-
export class CodeOfConductEndpoints {
  public static privacyStatement: string = "https://aka.ms/ms-privacy-policy";
  public static codeOfConduct: string = "https://aka.ms/cosmos-code-of-conduct";
  public static termsOfUse: string = "https://aka.ms/ms-terms-of-use";
}

export class EndpointsRegex {
  public static readonly cassandra = [
    "AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
    "HostName=(.*).cassandra.cosmos.azure.com",
  ];
  public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
  public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
  public static readonly sql = "AccountEndpoint=https://(.*).documents.azure.com";
  public static readonly table = "TableEndpoint=https://(.*).table.cosmosdb.azure.com";
}

export class ApiEndpoints {
  public static runtimeProxy: string = "/api/RuntimeProxy";
  public static guestRuntimeProxy: string = "/api/guest/RuntimeProxy";
}

export class ServerIds {
  public static localhost: string = "localhost";
  public static blackforest: string = "blackforest";
  public static fairfax: string = "fairfax";
  public static mooncake: string = "mooncake";
  public static productionPortal: string = "prod";
  public static dev: string = "dev";
}

export class ArmApiVersions {
  public static readonly documentDB: string = "2015-11-06";
  public static readonly arcadia: string = "2019-06-01-preview";
  public static readonly arcadiaLivy: string = "2019-11-01-preview";
  public static readonly arm: string = "2015-11-01";
  public static readonly armFeatures: string = "2014-08-01-preview";
  public static readonly publicVersion = "2020-04-01";
}

export class ArmResourceTypes {
  public static readonly notebookWorkspaces = "Microsoft.DocumentDB/databaseAccounts/notebookWorkspaces";
  public static readonly synapseWorkspaces = "Microsoft.Synapse/workspaces";
}

export class BackendDefaults {
  public static partitionKeyKind: string = "Hash";
  public static singlePartitionStorageInGb: string = "10";
  public static multiPartitionStorageInGb: string = "100";
  public static maxChangeFeedRetentionDuration: number = 10;
  public static partitionKeyVersion = 2;
}

export class ClientDefaults {
  public static requestTimeoutMs: number = 60000;
  public static portalCacheTimeoutMs: number = 10000;
  public static errorNotificationTimeoutMs: number = 5000;
  public static copyHelperTimeoutMs: number = 2000;
  public static waitForDOMElementMs: number = 500;
  public static cacheBustingTimeoutMs: number =
    10 /** minutes **/ * 60 /** to seconds **/ * 1000 /** to milliseconds **/;
  public static databaseThroughputIncreaseFactor: number = 100;
  public static readonly arcadiaTokenRefreshInterval: number =
    20 /** minutes **/ * 60 /** to seconds **/ * 1000 /** to milliseconds **/;
  public static readonly arcadiaTokenRefreshIntervalPaddingMs: number = 2000;
}

export class AccountKind {
  public static DocumentDB: string = "DocumentDB";
  public static MongoDB: string = "MongoDB";
  public static Parse: string = "Parse";
  public static GlobalDocumentDB: string = "GlobalDocumentDB";
  public static Default: string = AccountKind.DocumentDB;
}

export class CorrelationBackend {
  public static Url: string = "https://aka.ms/cosmosdbanalytics";
}

export class DefaultAccountExperience {
  public static DocumentDB: string = "DocumentDB";
  public static Graph: string = "Graph";
  public static MongoDB: string = "MongoDB";
  public static ApiForMongoDB: string = "Azure Cosmos DB for MongoDB API";
  public static Table: string = "Table";
  public static Cassandra: string = "Cassandra";
  public static Default: string = DefaultAccountExperience.DocumentDB;
}

export class CapabilityNames {
  public static EnableTable: string = "EnableTable";
  public static EnableGremlin: string = "EnableGremlin";
  public static EnableCassandra: string = "EnableCassandra";
  public static EnableAutoScale: string = "EnableAutoScale";
  public static readonly EnableNotebooks: string = "EnableNotebooks";
  public static readonly EnableStorageAnalytics: string = "EnableStorageAnalytics";
  public static readonly EnableMongo: string = "EnableMongo";
  public static readonly EnableServerless: string = "EnableServerless";
}

export class Features {
  public static readonly cosmosdb = "cosmosdb";
  public static readonly enableChangeFeedPolicy = "enablechangefeedpolicy";
-  public static readonly enableRupm = "enablerupm";
  public static readonly executeSproc = "dataexplorerexecutesproc";
  public static readonly hostedDataExplorer = "hosteddataexplorerenabled";
  public static readonly enableTtl = "enablettl";
  public static readonly enableNotebooks = "enablenotebooks";
  public static readonly enableGalleryPublish = "enablegallerypublish";
-  public static readonly enableCodeOfConduct = "enablecodeofconduct";
  public static readonly enableLinkInjection = "enablelinkinjection";
  public static readonly enableSpark = "enablespark";
  public static readonly livyEndpoint = "livyendpoint";
  public static readonly notebookServerUrl = "notebookserverurl";
  public static readonly notebookServerToken = "notebookservertoken";
  public static readonly notebookBasePath = "notebookbasepath";
  public static readonly canExceedMaximumValue = "canexceedmaximumvalue";
  public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
  public static readonly ttl90Days = "ttl90days";
  public static readonly enableRightPanelV2 = "enablerightpanelv2";
+  public static readonly enableSchema = "enableschema";
  public static readonly enableSDKoperations = "enablesdkoperations";
+  public static readonly showMinRUSurvey = "showminrusurvey";
+  public static readonly enableDatabaseSettingsTabV1 = "enabledbsettingsv1";
+  public static readonly selfServeType = "selfservetype";
+  public static readonly enableKOPanel = "enablekopanel";
}

// flight names returned from the portal are always lowercase
export class Flights {
  public static readonly SettingsV2 = "settingsv2";
  public static readonly MongoIndexEditor = "mongoindexeditor";
+  public static readonly MongoIndexing = "mongoindexing";
+  public static readonly AutoscaleTest = "autoscaletest";
+  public static readonly GalleryPublish = "gallerypublish";
}

export class AfecFeatures {
  public static readonly Spark = "spark-public-preview";
  public static readonly Notebooks = "sparknotebooks-public-preview";
  public static readonly StorageAnalytics = "storageanalytics-public-preview";
}

-export class Spark {
-  public static readonly MaxWorkerCount = 10;
-  public static readonly SKUs: HashMap<string> = new HashMap({
-    "Cosmos.Spark.D1s": "D1s / 1 core / 4GB RAM",
-    "Cosmos.Spark.D2s": "D2s / 2 cores / 8GB RAM",
-    "Cosmos.Spark.D4s": "D4s / 4 cores / 16GB RAM",
-    "Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
-    "Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
-    "Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
-    "Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
-  });
-}
-
export class TagNames {
  public static defaultExperience: string = "defaultExperience";
}

export class MongoDBAccounts {
  public static protocol: string = "https";
  public static defaultPort: string = "10255";
}

export enum MongoBackendEndpointType {
  local,
  remote,
}

// TODO: 435619 Add default endpoints per cloud and use regional only when available
export class CassandraBackend {
  public static readonly createOrDeleteApi: string = "api/cassandra/createordelete";
  public static readonly guestCreateOrDeleteApi: string = "api/guest/cassandra/createordelete";
  public static readonly queryApi: string = "api/cassandra";
  public static readonly guestQueryApi: string = "api/guest/cassandra";
  public static readonly keysApi: string = "api/cassandra/keys";
  public static readonly guestKeysApi: string = "api/guest/cassandra/keys";
  public static readonly schemaApi: string = "api/cassandra/schema";
  public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
}

-export class RUPMStates {
-  public static on: string = "on";
-  public static off: string = "off";
-}
-
export class Queries {
  public static CustomPageOption: string = "custom";
  public static UnlimitedPageOption: string = "unlimited";
  public static itemsPerPage: number = 100;
  public static unlimitedItemsPerPage: number = 100; // TODO: Figure out appropriate value so it works for accounts with a large number of partitions

  public static QueryEditorMinHeightRatio: number = 0.1;
  public static QueryEditorMaxHeightRatio: number = 0.4;
  public static readonly DefaultMaxDegreeOfParallelism = 6;
}

export class SavedQueries {
  public static readonly CollectionName: string = "___Query";
  public static readonly DatabaseName: string = "___Cosmos";
  public static readonly OfferThroughput: number = 400;
  public static readonly PartitionKeyProperty: string = "id";
}

export class DocumentsGridMetrics {
  public static DocumentsPerPage: number = 100;
  public static IndividualRowHeight: number = 34;
  public static BufferHeight: number = 28;
  public static SplitterMinWidth: number = 200;
  public static SplitterMaxWidth: number = 360;

  public static DocumentEditorMinWidthRatio: number = 0.2;
  public static DocumentEditorMaxWidthRatio: number = 0.4;
}

export class ExplorerMetrics {
  public static SplitterMinWidth: number = 240;
  public static SplitterMaxWidth: number = 400;
  public static CollapsedResourceTreeWidth: number = 36;
}

export class SplitterMetrics {
  public static CollapsedPositionLeft: number = ExplorerMetrics.CollapsedResourceTreeWidth;
}

export class Areas {
  public static ResourceTree: string = "Resource Tree";
  public static ContextualPane: string = "Contextual Pane";
  public static Tab: string = "Tab";
  public static ShareDialog: string = "Share Access Dialog";
  public static Notebook: string = "Notebook";
}

export class HttpHeaders {
  public static activityId: string = "x-ms-activity-id";
  public static apiType: string = "x-ms-cosmos-apitype";
  public static authorization: string = "authorization";
  public static collectionIndexTransformationProgress: string =
    "x-ms-documentdb-collection-index-transformation-progress";
  public static continuation: string = "x-ms-continuation";
  public static correlationRequestId: string = "x-ms-correlation-request-id";
  public static enableScriptLogging: string = "x-ms-documentdb-script-enable-logging";
  public static guestAccessToken: string = "x-ms-encrypted-auth-token";
  public static getReadOnlyKey: string = "x-ms-get-read-only-key";
  public static connectionString: string = "x-ms-connection-string";
  public static msDate: string = "x-ms-date";
  public static location: string = "Location";
  public static contentType: string = "Content-Type";
  public static offerReplacePending: string = "x-ms-offer-replace-pending";
  public static user: string = "x-ms-user";
  public static populatePartitionStatistics: string = "x-ms-documentdb-populatepartitionstatistics";
  public static queryMetrics: string = "x-ms-documentdb-query-metrics";
  public static requestCharge: string = "x-ms-request-charge";
  public static resourceQuota: string = "x-ms-resource-quota";
  public static resourceUsage: string = "x-ms-resource-usage";
  public static retryAfterMs: string = "x-ms-retry-after-ms";
  public static scriptLogResults: string = "x-ms-documentdb-script-log-results";
  public static populateCollectionThroughputInfo = "x-ms-documentdb-populatecollectionthroughputinfo";
  public static supportSpatialLegacyCoordinates = "x-ms-documentdb-supportspatiallegacycoordinates";
  public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere";
  public static autoPilotThroughput = "autoscaleSettings";
  public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings";
  public static partitionKey: string = "x-ms-documentdb-partitionkey";
  public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput";
  public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot";
}

export class ApiType {
  // Mapped to hexadecimal values in the backend
  public static readonly MongoDB: number = 1;
  public static readonly Gremlin: number = 2;
  public static readonly Cassandra: number = 4;
  public static readonly Table: number = 8;
  public static readonly SQL: number = 16;
}

export class HttpStatusCodes {
  public static readonly OK: number = 200;
  public static readonly Created: number = 201;
  public static readonly Accepted: number = 202;
  public static readonly NoContent: number = 204;
  public static readonly NotModified: number = 304;
  public static readonly Unauthorized: number = 401;
  public static readonly Forbidden: number = 403;
  public static readonly NotFound: number = 404;
  public static readonly TooManyRequests: number = 429;
  public static readonly Conflict: number = 409;

  public static readonly InternalServerError: number = 500;
  public static readonly BadGateway: number = 502;
  public static readonly ServiceUnavailable: number = 503;
  public static readonly GatewayTimeout: number = 504;

  public static readonly RetryableStatusCodes: number[] = [
    HttpStatusCodes.TooManyRequests,
    HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
    HttpStatusCodes.BadGateway,
    HttpStatusCodes.ServiceUnavailable,
    HttpStatusCodes.GatewayTimeout,
  ];
}

export class Urls {
  public static feedbackEmail = "https://aka.ms/cosmosdbfeedback?subject=Cosmos%20DB%20Data%20Explorer%20Feedback";
  public static autoscaleMigration = "https://aka.ms/cosmos-autoscale-migration";
  public static freeTierInformation = "https://aka.ms/cosmos-free-tier";
  public static cosmosPricing = "https://aka.ms/azure-cosmos-db-pricing";
}

export class HashRoutePrefixes {
  public static databases: string = "/dbs/{db_id}";
  public static collections: string = "/dbs/{db_id}/colls/{coll_id}";
  public static sprocHash: string = "/sprocs/";
  public static sprocs: string = HashRoutePrefixes.collections + HashRoutePrefixes.sprocHash + "{sproc_id}";
  public static docs: string = HashRoutePrefixes.collections + "/docs/{doc_id}/";
  public static conflicts: string = HashRoutePrefixes.collections + "/conflicts";

  public static databasesWithId(databaseId: string): string {
    return this.databases.replace("{db_id}", databaseId).replace("/", ""); // strip the first slash since hasher adds it
  }

  public static collectionsWithIds(databaseId: string, collectionId: string): string {
    const transformedDatabasePrefix: string = this.collections.replace("{db_id}", databaseId);

    return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("/", ""); // strip the first slash since hasher adds it
  }

  public static sprocWithIds(
    databaseId: string,
    collectionId: string,
    sprocId: string,
    stripFirstSlash: boolean = true
  ): string {
    const transformedDatabasePrefix: string = this.sprocs.replace("{db_id}", databaseId);

    const transformedSprocRoute: string = transformedDatabasePrefix
      .replace("{coll_id}", collectionId)
      .replace("{sproc_id}", sprocId);
    if (!!stripFirstSlash) {
      return transformedSprocRoute.replace("/", ""); // strip the first slash since hasher adds it
    }

    return transformedSprocRoute;
  }

  public static conflictsWithIds(databaseId: string, collectionId: string) {
    const transformedDatabasePrefix: string = this.conflicts.replace("{db_id}", databaseId);

    return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("/", ""); // strip the first slash since hasher adds it;
  }

  public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
    const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
    return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
  }
}

export class ConfigurationOverridesValues {
  public static IsBsonSchemaV2: string = "true";
}

export class KeyCodes {
  public static Space: number = 32;
  public static Enter: number = 13;
  public static Escape: number = 27;
  public static UpArrow: number = 38;
  public static DownArrow: number = 40;
  public static LeftArrow: number = 37;
  public static RightArrow: number = 39;
  public static Tab: number = 9;
}

// Normalized per: https://www.w3.org/TR/uievents-key/#named-key-attribute-values
export class NormalizedEventKey {
  public static readonly Space = " ";
  public static readonly Enter = "Enter";
  public static readonly Escape = "Escape";
  public static readonly UpArrow = "ArrowUp";
  public static readonly DownArrow = "ArrowDown";
  public static readonly LeftArrow = "ArrowLeft";
  public static readonly RightArrow = "ArrowRight";
}

export class TryCosmosExperience {
  public static extendUrl: string = "https://trycosmosdb.azure.com/api/resource/extendportal?userId={0}";
  public static deleteUrl: string = "https://trycosmosdb.azure.com/api/resource/deleteportal?userId={0}";
  public static collectionsPerAccount: number = 3;
  public static maxRU: number = 5000;
  public static defaultRU: number = 3000;
}

export class OfferVersions {
  public static V1: string = "V1";
  public static V2: string = "V2";
}

export enum ConflictOperationType {
  Replace = "replace",
  Create = "create",
  Delete = "delete",
}

export const EmulatorMasterKey =
  //[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")]
  "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";

// A variable @MyVariable defined in Constants.less is accessible as StyleConstants.MyVariable
export const StyleConstants = require("less-vars-loader!../../less/Common/Constants.less");

export class Notebook {
  public static readonly defaultBasePath = "./notebooks";
  public static readonly heartbeatDelayMs = 5000;
  public static readonly kernelRestartInitialDelayMs = 1000;
  public static readonly kernelRestartMaxDelayMs = 20000;
  public static readonly autoSaveIntervalMs = 120000;
}

export class SparkLibrary {
  public static readonly nameMinLength = 3;
  public static readonly nameMaxLength = 63;
}

export class AnalyticalStorageTtl {
  public static readonly Days90: number = 7776000;
  public static readonly Infinite: number = -1;
  public static readonly Disabled: number = 0;
}

export class TerminalQueryParams {
  public static readonly Terminal = "terminal";
  public static readonly Server = "server";
  public static readonly Token = "token";
  public static readonly SubscriptionId = "subscriptionId";
  public static readonly TerminalEndpoint = "terminalEndpoint";
}
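Illustrative usage (not part of the change above): a minimal sketch of how the HashRoutePrefixes helpers resolve their route templates, using hypothetical ids.

// Hypothetical ids, shown only to illustrate the template substitution and leading-slash stripping above.
HashRoutePrefixes.databasesWithId("db1"); // "dbs/db1"
HashRoutePrefixes.collectionsWithIds("db1", "coll1"); // "dbs/db1/colls/coll1"
HashRoutePrefixes.sprocWithIds("db1", "coll1", "sproc1", false); // "/dbs/db1/colls/coll1/sprocs/sproc1" (leading slash kept)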

View File

@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
resourceId: "", resourceId: "",
resourceType: "dbs" as ResourceType, resourceType: "dbs" as ResourceType,
headers: {}, headers: {},
getAuthorizationTokenUsingMasterKey: () => "" getAuthorizationTokenUsingMasterKey: () => "",
}; };
beforeEach(() => { beforeEach(() => {
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
window.fetch = jest.fn().mockImplementation(() => { window.fetch = jest.fn().mockImplementation(() => {
return { return {
json: () => "{}", json: () => "{}",
headers: new Map() headers: new Map(),
}; };
}); });
}); });
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
it("does not call the auth service if a master key is set", async () => { it("does not call the auth service if a master key is set", async () => {
updateUserContext({ updateUserContext({
masterKey: "foo" masterKey: "foo",
}); });
await tokenProvider(options); await tokenProvider(options);
expect((window.fetch as any).mock.calls.length).toBe(0); expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
window.fetch = jest.fn().mockImplementation(() => { window.fetch = jest.fn().mockImplementation(() => {
return { return {
json: () => "{}", json: () => "{}",
headers: new Map() headers: new Map(),
}; };
}); });
}); });
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in production", () => { it("builds the correct URL in production", () => {
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
getTokenFromAuthService("GET", "dbs", "foo"); getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith( expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in dev", () => { it("builds the correct URL in dev", () => {
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://localhost:1234" BACKEND_ENDPOINT: "https://localhost:1234",
}); });
getTokenFromAuthService("GET", "dbs", "foo"); getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith( expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
documentEndpoint: "bar", documentEndpoint: "bar",
gremlinEndpoint: "foo", gremlinEndpoint: "foo",
tableEndpoint: "foo", tableEndpoint: "foo",
cassandraEndpoint: "foo" cassandraEndpoint: "foo",
} },
} },
}); });
expect(endpoint()).toEqual("bar"); expect(endpoint()).toEqual("bar");
}); });
it("uses _endpoint if set", () => { it("uses _endpoint if set", () => {
updateUserContext({ updateUserContext({
endpoint: "baz" endpoint: "baz",
}); });
expect(endpoint()).toEqual("baz"); expect(endpoint()).toEqual("baz");
}); });
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
updateConfigContext({ updateConfigContext({
platform: Platform.Hosted, platform: Platform.Hosted,
BACKEND_ENDPOINT: "https://localhost:1234", BACKEND_ENDPOINT: "https://localhost:1234",
PROXY_PATH: "/proxy" PROXY_PATH: "/proxy",
}); });
const headers = {}; const headers = {};
const endpoint = "https://docs.azure.com"; const endpoint = "https://docs.azure.com";

View File

@@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
method: "POST", method: "POST",
headers: { headers: {
"content-type": "application/json", "content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken "x-ms-encrypted-auth-token": userContext.accessToken,
}, },
body: JSON.stringify({ body: JSON.stringify({
verb, verb,
resourceType, resourceType,
resourceId resourceId,
}) }),
}); });
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json() //TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json()); const result = JSON.parse(await response.json());
@@ -77,13 +77,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
export function client(): Cosmos.CosmosClient { export function client(): Cosmos.CosmosClient {
const options: Cosmos.CosmosClientOptions = { const options: Cosmos.CosmosClientOptions = {
endpoint: endpoint() || " ", // CosmosClient gets upset if we pass a falsy value here endpoint: endpoint() || "https://cosmos.azure.com", // CosmosClient gets upset if we pass a bad URL. This should never actually get called
key: userContext.masterKey, key: userContext.masterKey,
tokenProvider, tokenProvider,
connectionPolicy: { connectionPolicy: {
enableEndpointDiscovery: false enableEndpointDiscovery: false,
}, },
userAgentSuffix: "Azure Portal" userAgentSuffix: "Azure Portal",
}; };
if (configContext.PROXY_PATH !== undefined) { if (configContext.PROXY_PATH !== undefined) {

View File

@@ -1,182 +0,0 @@
import {
ConflictDefinition,
FeedOptions,
ItemDefinition,
OfferDefinition,
QueryIterator,
Resource
} from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import Q from "q";
import { configContext, Platform } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import { OfferUtils } from "../Utils/OfferUtils";
import * as Constants from "./Constants";
import { client } from "./CosmosClient";
import * as HeadersUtility from "./HeadersUtility";
import { sendCachedDataMessage } from "./MessageHandler";
export function getCommonQueryOptions(options: FeedOptions): any {
const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
options = options || {};
options.populateQueryMetrics = true;
options.enableScanInQuery = options.enableScanInQuery || true;
if (!options.partitionKey) {
options.forceQueryPlan = true;
}
options.maxItemCount =
options.maxItemCount ||
(storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
Constants.Queries.itemsPerPage;
options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
return options;
}
export function queryDocuments(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
options = getCommonQueryOptions(options);
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.items.query(query, options);
return Q(documentsIterator);
}
export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
const partitionKeyValue: any = conflictId.partitionKeyValue;
return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
}
export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
if (!partitionKeyDefinition) {
return undefined;
}
if (partitionKeyValue === undefined) {
return [{}];
}
return [partitionKeyValue];
}
export function updateDocument(
collection: ViewModels.CollectionBase,
documentId: DocumentId,
newDocument: any
): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.replace(newDocument)
.then(response => response.resource)
);
}
export function executeStoredProcedure(
collection: ViewModels.Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: any,
params: any[]
): Q.Promise<any> {
// TODO remove this deferred. Kept it because of timeout code at bottom of function
const deferred = Q.defer<any>();
client()
.database(collection.databaseId)
.container(collection.id())
.scripts.storedProcedure(storedProcedure.id())
.execute(partitionKeyValue, params, { enableScriptLogging: true })
.then(response =>
deferred.resolve({
result: response.resource,
scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
})
)
.catch(error => deferred.reject(error));
return deferred.promise.timeout(
Constants.ClientDefaults.requestTimeoutMs,
`Request timed out while executing stored procedure ${storedProcedure.id()}`
);
}
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.items.create(newDocument)
.then(response => response.resource)
);
}
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.read()
.then(response => response.resource)
);
}
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.delete()
);
}
export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options: any = {}
): Q.Promise<any> {
options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options)
);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return Q(documentsIterator);
}

View File

@@ -1,217 +0,0 @@
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import Q from "q";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
import * as Constants from "./Constants";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
import { handleError } from "./ErrorHandlingUtils";
// TODO: Log all promise resolutions and errors with verbosity levels
export function queryDocuments(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
}
export function getEntityName() {
const defaultExperience =
window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
return "document";
}
return "item";
}
export function executeStoredProcedure(
collection: ViewModels.Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: any,
params: any[]
): Q.Promise<any> {
var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
.then(
(response: any) => {
deferred.resolve(response);
logConsoleInfo(
`Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
},
(error: any) => {
handleError(
error,
"ExecuteStoredProcedure",
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function queryDocumentsPage(
resourceName: string,
documentsIterator: MinimalQueryIterator,
firstItemIndex: number,
options: any
): Q.Promise<ViewModels.QueryResults> {
var deferred = Q.defer<ViewModels.QueryResults>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
Q(nextPage(documentsIterator, firstItemIndex))
.then(
(result: ViewModels.QueryResults) => {
const itemCount = (result.documents && result.documents.length) || 0;
logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
deferred.resolve(result);
},
(error: any) => {
handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.readDocument(collection, documentId)
.then(
(document: any) => {
deferred.resolve(document);
},
(error: any) => {
handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function updateDocument(
collection: ViewModels.CollectionBase,
documentId: DocumentId,
newDocument: any
): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
.then(
(updatedDocument: any) => {
logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
deferred.resolve(updatedDocument);
},
(error: any) => {
handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
DataAccessUtilityBase.createDocument(collection, newDocument)
.then(
(savedDocument: any) => {
logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
deferred.resolve(savedDocument);
},
(error: any) => {
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.deleteDocument(collection, documentId)
.then(
(response: any) => {
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
deferred.resolve(response);
},
(error: any) => {
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options?: any
): Q.Promise<any> {
var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
.then(
(response: any) => {
logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
deferred.resolve(response);
},
(error: any) => {
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}

View File

@@ -0,0 +1,10 @@
import { DefaultAccountExperienceType } from "../DefaultAccountExperienceType";
import { userContext } from "../UserContext";
export const getEntityName = (): string => {
if (userContext.defaultExperience === DefaultAccountExperienceType.MongoDB) {
return "document";
}
return "item";
};

View File

@@ -1,94 +1,94 @@
import * as ko from "knockout";
import * as ViewModels from "../Contracts/ViewModels";

export default class EditableUtility {
  public static observable<T>(initialValue?: T): ViewModels.Editable<T> {
    var observable: ViewModels.Editable<T> = <ViewModels.Editable<T>>ko.observable<T>(initialValue);
    observable.edits = ko.observableArray<T>([initialValue]);
    observable.validations = ko.observableArray<(value: T) => boolean>([]);

    observable.setBaseline = (baseline: T) => {
      observable(baseline);
      observable.edits([baseline]);
    };

    observable.getEditableCurrentValue = ko.computed<T>(() => {
      const edits = (observable.edits && observable.edits()) || [];
      if (edits.length === 0) {
        return undefined;
      }
      return edits[edits.length - 1];
    });

    observable.getEditableOriginalValue = ko.computed<T>(() => {
      const edits = (observable.edits && observable.edits()) || [];
      if (edits.length === 0) {
        return undefined;
      }
      return edits[0];
    });

    observable.editableIsDirty = ko.computed<boolean>(() => {
      const edits = (observable.edits && observable.edits()) || [];
      if (edits.length <= 1) {
        return false;
      }

      let current: any = observable.getEditableCurrentValue();
      let original: any = observable.getEditableOriginalValue();
      switch (typeof current) {
        case "string":
        case "undefined":
        case "number":
        case "boolean":
          current = current && current.toString();
          break;
        default:
          current = JSON.stringify(current);
          break;
      }
      switch (typeof original) {
        case "string":
        case "undefined":
        case "number":
        case "boolean":
          original = original && original.toString();
          break;
        default:
          original = JSON.stringify(original);
          break;
      }

      if (current !== original) {
        return true;
      }
      return false;
    });

-    observable.subscribe(edit => {
+    observable.subscribe((edit) => {
      var edits = observable.edits && observable.edits();
      if (!edits) {
        return;
      }
      edits.push(edit);
      observable.edits(edits);
    });

    observable.editableIsValid = ko.observable<boolean>(true);
-    observable.subscribe(value => {
+    observable.subscribe((value) => {
      const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
-      const isValid = validations.every(validate => validate(value));
+      const isValid = validations.every((validate) => validate(value));
      observable.editableIsValid(isValid);
    });

    return observable;
  }
}
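Illustrative usage (not part of the change above): a minimal sketch of the editable observable contract implemented here, with a hypothetical numeric setting.

// Hypothetical consumer; values are for illustration only.
const throughput = EditableUtility.observable<number>(400);
throughput.editableIsDirty(); // false — only the baseline is in the edit history
throughput(1000); // each write is pushed onto observable.edits
throughput.editableIsDirty(); // true — current value (1000) differs from the original (400)
throughput.setBaseline(1000); // resets the edit history to the new baseline
throughput.editableIsDirty(); // false again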

View File

@@ -1,8 +1,6 @@
-export default class EnvironmentUtility {
-  public static normalizeArmEndpointUri(uri: string): string {
-    if (uri && uri.slice(-1) !== "/") {
-      return `${uri}/`;
-    }
-    return uri;
-  }
-}
+export function normalizeArmEndpoint(uri: string): string {
+  if (uri && uri.slice(-1) !== "/") {
+    return `${uri}/`;
+  }
+  return uri;
+}
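Illustrative usage (not part of the change above): the helper only appends a trailing slash when one is missing.

normalizeArmEndpoint("https://management.azure.com"); // "https://management.azure.com/"
normalizeArmEndpoint("https://management.azure.com/"); // returned unchanged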

View File

@@ -1,7 +1,7 @@
import { ARMError } from "../Utils/arm/request"; import { ARMError } from "../Utils/arm/request";
import { HttpStatusCodes } from "./Constants"; import { HttpStatusCodes } from "./Constants";
import { MessageTypes } from "../Contracts/ExplorerContracts"; import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/ViewModels"; import { SubscriptionType } from "../Contracts/SubscriptionType";
import { logConsoleError } from "../Utils/NotificationConsoleUtils"; import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { logError } from "./Logger"; import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler"; import { sendMessage } from "./MessageHandler";
@@ -21,7 +21,7 @@ export const handleError = (error: string | ARMError | Error, area: string, cons
sendNotificationForError(errorMessage, errorCode); sendNotificationForError(errorMessage, errorCode);
}; };
export const getErrorMessage = (error: string | Error): string => { export const getErrorMessage = (error: string | Error = ""): string => {
const errorMessage = typeof error === "string" ? error : error.message; const errorMessage = typeof error === "string" ? error : error.message;
return replaceKnownError(errorMessage); return replaceKnownError(errorMessage);
}; };
@@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
} }
sendMessage({ sendMessage({
type: MessageTypes.ForbiddenError, type: MessageTypes.ForbiddenError,
reason: errorMessage reason: errorMessage,
}); });
} }
}; };
@@ -45,10 +45,10 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
const replaceKnownError = (errorMessage: string): string => { const replaceKnownError = (errorMessage: string): string => {
if ( if (
window.dataExplorer?.subscriptionType() === SubscriptionType.Internal && window.dataExplorer?.subscriptionType() === SubscriptionType.Internal &&
errorMessage.indexOf("SharedOffer is Disabled for your account") >= 0 errorMessage?.indexOf("SharedOffer is Disabled for your account") >= 0
) { ) {
return "Database throughput is not supported for internal subscriptions."; return "Database throughput is not supported for internal subscriptions.";
} else if (errorMessage.indexOf("Partition key paths must contain only valid") >= 0) { } else if (errorMessage?.indexOf("Partition key paths must contain only valid") >= 0) {
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character."; return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
} }
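Two behavioural notes on this hunk: `getErrorMessage` now defaults its argument to an empty string, and `replaceKnownError` uses optional chaining, so passing `undefined` no longer throws while reading `.message` or calling `.indexOf`. A small sketch of the resulting behaviour (the tail of `replaceKnownError`, which presumably returns the message unchanged when nothing matches, falls outside the hunk shown):

    getErrorMessage();                  // => "" – the default parameter avoids a crash on undefined
    getErrorMessage(new Error("boom")); // => "boom"
    getErrorMessage("Partition key paths must contain only valid characters...");
    // => the friendlier replacement message produced by replaceKnownError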


@@ -1,25 +1,25 @@
import * as HeadersUtility from "./HeadersUtility"; import * as HeadersUtility from "./HeadersUtility";
import { ExplorerSettings } from "../Shared/ExplorerSettings"; import { ExplorerSettings } from "../Shared/ExplorerSettings";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility"; import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
describe("Headers Utility", () => { describe("Headers Utility", () => {
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => { describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
beforeEach(() => { beforeEach(() => {
ExplorerSettings.createDefaultSettings(); ExplorerSettings.createDefaultSettings();
}); });
it("should return true by default", () => { it("should return true by default", () => {
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true); expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true);
}); });
it("should return false if the enable cross partition key feed option is false", () => { it("should return false if the enable cross partition key feed option is false", () => {
LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "false"); LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "false");
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(false); expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(false);
}); });
it("should return true if the enable cross partition key feed option is true", () => { it("should return true if the enable cross partition key feed option is true", () => {
LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "true"); LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "true");
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true); expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true);
}); });
}); });
}); });


@@ -1,28 +1,28 @@
import * as Constants from "./Constants"; import * as Constants from "./Constants";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility"; import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
// x-ms-resource-quota: databases = 100; collections = 5000; users = 500000; permissions = 2000000; // x-ms-resource-quota: databases = 100; collections = 5000; users = 500000; permissions = 2000000;
export function getQuota(responseHeaders: any): any { export function getQuota(responseHeaders: any): any {
return responseHeaders && responseHeaders[Constants.HttpHeaders.resourceQuota] return responseHeaders && responseHeaders[Constants.HttpHeaders.resourceQuota]
? parseStringIntoObject(responseHeaders[Constants.HttpHeaders.resourceQuota]) ? parseStringIntoObject(responseHeaders[Constants.HttpHeaders.resourceQuota])
: null; : null;
} }
export function shouldEnableCrossPartitionKey(): boolean { export function shouldEnableCrossPartitionKey(): boolean {
return LocalStorageUtility.getEntryString(StorageKey.IsCrossPartitionQueryEnabled) === "true"; return LocalStorageUtility.getEntryString(StorageKey.IsCrossPartitionQueryEnabled) === "true";
} }
function parseStringIntoObject(resourceString: string) { function parseStringIntoObject(resourceString: string) {
var entityObject: any = {}; var entityObject: any = {};
if (resourceString) { if (resourceString) {
var entitiesArray: string[] = resourceString.split(";"); var entitiesArray: string[] = resourceString.split(";");
for (var i: any = 0; i < entitiesArray.length; i++) { for (var i: any = 0; i < entitiesArray.length; i++) {
var entity: string[] = entitiesArray[i].split("="); var entity: string[] = entitiesArray[i].split("=");
entityObject[entity[0]] = entity[1]; entityObject[entity[0]] = entity[1];
} }
} }
return entityObject; return entityObject;
} }
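`getQuota` parses the semicolon-delimited `x-ms-resource-quota` header (see the comment at the top of the file) into a plain object of string values; note that the parser does not trim whitespace around `=` or `;`. A sketch, assuming `Constants.HttpHeaders.resourceQuota` resolves to the header name shown in that comment:

    const responseHeaders = {
      "x-ms-resource-quota": "databases=100;collections=5000;users=500000;permissions=2000000",
    };

    getQuota(responseHeaders);
    // => { databases: "100", collections: "5000", users: "500000", permissions: "2000000" }
    getQuota({}); // => null – the quota header is missing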


@@ -11,8 +11,8 @@ describe("nextPage", () => {
queryMetrics: {}, queryMetrics: {},
requestCharge: 1, requestCharge: 1,
headers: {}, headers: {},
activityId: "foo" activityId: "foo",
}) }),
}; };
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot(); expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();


@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
// Pick<QueryIterator<any>, "fetchNext">; // Pick<QueryIterator<any>, "fetchNext">;
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> { export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
return documentsIterator.fetchNext().then(response => { return documentsIterator.fetchNext().then((response) => {
const documents = response.resources; const documents = response.resources;
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
const itemCount = (documents && documents.length) || 0; const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
lastItemIndex: Number(firstItemIndex) + Number(itemCount), lastItemIndex: Number(firstItemIndex) + Number(itemCount),
headers, headers,
activityId: response.activityId, activityId: response.activityId,
requestCharge: response.requestCharge requestCharge: response.requestCharge,
}; };
}); });
} }
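Because `nextPage` depends only on the `fetchNext` slice of the SDK iterator (`MinimalQueryIterator`), it can be driven by a plain stub, which is exactly what the test above does. A sketch of the paging arithmetic, inside an async context:

    const fakeIterator: MinimalQueryIterator = {
      fetchNext: () =>
        Promise.resolve({
          resources: [{ id: "1" }, { id: "2" }],
          requestCharge: 1,
          activityId: "foo",
          headers: {},
        } as any), // cast because the SDK's FeedResponse carries more fields than this stub
    };

    const page = await nextPage(fakeIterator, 10);
    // page.lastItemIndex === 12 – Number(firstItemIndex) + Number(itemCount), i.e. 10 + 2
    // page.requestCharge === 1, page.activityId === "foo"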


@@ -1,26 +1,26 @@
jest.mock("./MessageHandler"); jest.mock("./MessageHandler");
import { LogEntryLevel } from "../Contracts/Diagnostics"; import { LogEntryLevel } from "../Contracts/Diagnostics";
import * as Logger from "./Logger"; import * as Logger from "./Logger";
import { MessageTypes } from "../Contracts/ExplorerContracts"; import { MessageTypes } from "../Contracts/ExplorerContracts";
import { sendMessage } from "./MessageHandler"; import { sendMessage } from "./MessageHandler";
describe("Logger", () => { describe("Logger", () => {
beforeEach(() => { beforeEach(() => {
jest.resetAllMocks(); jest.resetAllMocks();
}); });
it("should log info messages", () => { it("should log info messages", () => {
Logger.logInfo("Test info", "DocDB"); Logger.logInfo("Test info", "DocDB");
expect(sendMessage).toBeCalled(); expect(sendMessage).toBeCalled();
}); });
it("should log error messages", () => { it("should log error messages", () => {
Logger.logError("Test error", "DocDB"); Logger.logError("Test error", "DocDB");
expect(sendMessage).toBeCalled(); expect(sendMessage).toBeCalled();
}); });
it("should log warnings", () => { it("should log warnings", () => {
Logger.logWarning("Test warning", "DocDB"); Logger.logWarning("Test warning", "DocDB");
expect(sendMessage).toBeCalled(); expect(sendMessage).toBeCalled();
}); });
}); });


@@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
function _logEntry(entry: Diagnostics.LogEntry): void { function _logEntry(entry: Diagnostics.LogEntry): void {
sendMessage({ sendMessage({
type: MessageTypes.LogInfo, type: MessageTypes.LogInfo,
data: JSON.stringify(entry) data: JSON.stringify(entry),
}); });
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => { const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -60,6 +60,6 @@ function _generateLogEntry(
level, level,
message, message,
area, area,
code code,
}; };
} }


@@ -1,28 +1,28 @@
import Q from "q"; import Q from "q";
import * as MessageHandler from "./MessageHandler"; import * as MessageHandler from "./MessageHandler";
describe("Message Handler", () => { describe("Message Handler", () => {
it("should handle cached message", async () => { it("should handle cached message", async () => {
let mockPromise = { let mockPromise = {
id: "123", id: "123",
startTime: new Date(), startTime: new Date(),
deferred: Q.defer<any>() deferred: Q.defer<any>(),
}; };
let mockMessage = { message: { id: "123", data: "{}" } }; let mockMessage = { message: { id: "123", data: "{}" } };
MessageHandler.RequestMap[mockPromise.id] = mockPromise; MessageHandler.RequestMap[mockPromise.id] = mockPromise;
MessageHandler.handleCachedDataMessage(mockMessage); MessageHandler.handleCachedDataMessage(mockMessage);
expect(mockPromise.deferred.promise.isFulfilled()).toBe(true); expect(mockPromise.deferred.promise.isFulfilled()).toBe(true);
}); });
it("should delete fulfilled promises on running the garbage collector", async () => { it("should delete fulfilled promises on running the garbage collector", async () => {
let message = { let message = {
id: "123", id: "123",
startTime: new Date(), startTime: new Date(),
deferred: Q.defer<any>() deferred: Q.defer<any>(),
}; };
MessageHandler.handleCachedDataMessage(message); MessageHandler.handleCachedDataMessage(message);
MessageHandler.runGarbageCollector(); MessageHandler.runGarbageCollector();
expect(MessageHandler.RequestMap["123"]).toBeUndefined(); expect(MessageHandler.RequestMap["123"]).toBeUndefined();
}); });
}); });


@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = { let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
deferred: Q.defer<TResponseDataModel>(), deferred: Q.defer<TResponseDataModel>(),
startTime: new Date(), startTime: new Date(),
id: _.uniqueId() id: _.uniqueId(),
}; };
RequestMap[cachedDataPromise.id] = cachedDataPromise; RequestMap[cachedDataPromise.id] = cachedDataPromise;
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id }); sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
portalChildWindow.parent.postMessage( portalChildWindow.parent.postMessage(
{ {
signature: "pcIframe", signature: "pcIframe",
data: data data: data,
}, },
portalChildWindow.document.referrer portalChildWindow.document.referrer
); );
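Taken together with the test file above, these helpers form a small request/response bridge over `postMessage`: `sendCachedDataMessage` parks a Q deferred in `RequestMap` under a unique id and posts the request to the parent frame, `handleCachedDataMessage` resolves the entry whose id matches the reply, and `runGarbageCollector` evicts settled entries. Restated as a sketch using the shapes from the test:

    // What sendCachedDataMessage does internally: register a pending deferred keyed by id.
    const pending = { id: "123", startTime: new Date(), deferred: Q.defer<any>() };
    MessageHandler.RequestMap[pending.id] = pending;

    // A reply from the parent frame with the matching id settles it...
    MessageHandler.handleCachedDataMessage({ message: { id: "123", data: "{}" } });
    // pending.deferred.promise.isFulfilled() === true

    // ...and the garbage collector later drops settled entries from the map.
    MessageHandler.runGarbageCollector();
    // MessageHandler.RequestMap["123"] === undefined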


@@ -14,7 +14,7 @@ const fetchMock = () => {
ok: true, ok: true,
text: () => "{}", text: () => "{}",
json: () => "{}", json: () => "{}",
headers: new Map() headers: new Map(),
}); });
}; };
@@ -27,8 +27,8 @@ const collection = {
partitionKey: { partitionKey: {
paths: ["/pk"], paths: ["/pk"],
kind: "Hash", kind: "Hash",
version: 1 version: 1,
} },
} as Collection; } as Collection;
const documentId = ({ const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
partitionKey: { partitionKey: {
paths: ["/pk"], paths: ["/pk"],
kind: "Hash", kind: "Hash",
version: 1 version: 1,
} },
} as unknown) as DocumentId; } as unknown) as DocumentId;
const databaseAccount = { const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
documentEndpoint: "bar", documentEndpoint: "bar",
gremlinEndpoint: "foo", gremlinEndpoint: "foo",
tableEndpoint: "foo", tableEndpoint: "foo",
cassandraEndpoint: "foo" cassandraEndpoint: "foo",
} },
} as DatabaseAccount; } as DatabaseAccount;
describe("MongoProxyClient", () => { describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount databaseAccount,
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount databaseAccount,
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount databaseAccount,
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount databaseAccount,
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => { beforeEach(() => {
resetConfigContext(); resetConfigContext();
updateUserContext({ updateUserContext({
databaseAccount databaseAccount,
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
window.fetch = jest.fn().mockImplementation(fetchMock); window.fetch = jest.fn().mockImplementation(fetchMock);
}); });
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
resetConfigContext(); resetConfigContext();
delete window.authType; delete window.authType;
updateUserContext({ updateUserContext({
databaseAccount databaseAccount,
}); });
updateConfigContext({ updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com" BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
}); });
}); });


@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
const defaultHeaders = { const defaultHeaders = {
[HttpHeaders.apiType]: ApiType.MongoDB.toString(), [HttpHeaders.apiType]: ApiType.MongoDB.toString(),
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100", [CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15" [CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
}; };
function authHeaders() { function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
let continuationToken: string; let continuationToken: string;
return { return {
fetchNext: () => { fetchNext: () => {
return queryDocuments(databaseId, collection, false, query).then(response => { return queryDocuments(databaseId, collection, false, query).then((response) => {
continuationToken = response.continuationToken; continuationToken = response.continuationToken;
const headers: { [key: string]: string | number } = {}; const headers: { [key: string]: string | number } = {};
response.headers.forEach((value, key) => { response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
headers, headers,
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]), requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]), activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
hasMoreResults: !!continuationToken hasMoreResults: !!continuationToken,
}; };
}); });
} },
}; };
} }
@@ -74,7 +74,9 @@ export function queryDocuments(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : "" collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint() || ""; const endpoint = getEndpoint() || "";
@@ -87,7 +89,7 @@ export function queryDocuments(
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true", [CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true", [CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true", [CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json" [HttpHeaders.contentType]: "application/query+json",
}; };
if (continuationToken) { if (continuationToken) {
@@ -100,14 +102,14 @@ export function queryDocuments(
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, { .fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST", method: "POST",
body: JSON.stringify({ query }), body: JSON.stringify({ query }),
headers headers,
}) })
.then(async response => { .then(async (response) => {
if (response.ok) { if (response.ok) {
return { return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation), continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[], documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers headers: response.headers,
}; };
} }
errorHandling(response, "querying documents", params); errorHandling(response, "querying documents", params);
@@ -135,7 +137,9 @@ export function readDocument(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : "" documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -147,10 +151,10 @@ export function readDocument(
...authHeaders(), ...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent( [CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader()) JSON.stringify(documentId.partitionKeyHeader())
) ),
} },
}) })
.then(response => { .then((response) => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
@@ -175,7 +179,7 @@ export function createDocument(
sid: userContext.subscriptionId, sid: userContext.subscriptionId,
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "" pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -186,10 +190,10 @@ export function createDocument(
body: JSON.stringify(documentContent), body: JSON.stringify(documentContent),
headers: { headers: {
...defaultHeaders, ...defaultHeaders,
...authHeaders() ...authHeaders(),
} },
}) })
.then(response => { .then((response) => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
@@ -218,7 +222,9 @@ export function updateDocument(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : "" documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -230,10 +236,10 @@ export function updateDocument(
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders(),
[HttpHeaders.contentType]: "application/json", [HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()) [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
} },
}) })
.then(response => { .then((response) => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
@@ -257,7 +263,9 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
pk: pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : "" documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -268,10 +276,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders(),
[HttpHeaders.contentType]: "application/json", [HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()) [CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
} },
}) })
.then(response => { .then((response) => {
if (response.ok) { if (response.ok) {
return undefined; return undefined;
} }
@@ -299,7 +307,7 @@ export function createMongoCollectionWithProxy(
rg: userContext.resourceGroup, rg: userContext.resourceGroup,
dba: databaseAccount.name, dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput, isAutoPilot: !!params.autoPilotMaxThroughput,
autoPilotThroughput: params.autoPilotMaxThroughput?.toString() autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
}; };
const endpoint = getEndpoint(); const endpoint = getEndpoint();
@@ -314,11 +322,11 @@ export function createMongoCollectionWithProxy(
headers: { headers: {
...defaultHeaders, ...defaultHeaders,
...authHeaders(), ...authHeaders(),
[HttpHeaders.contentType]: "application/json" [HttpHeaders.contentType]: "application/json",
} },
} }
) )
.then(response => { .then((response) => {
if (response.ok) { if (response.ok) {
return response.json(); return response.json();
} }
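All of these MongoProxyClient helpers share one pattern: build the `params` block (subscription, resource group, account name, partition key), call the proxy endpoint with the default Cosmos headers plus auth headers, and unwrap the JSON body. `queryIterator` adapts that into the `fetchNext` shape consumed by `nextPage` earlier in this comparison. A usage sketch inside an async context; the `collection` value is assumed to be an existing `Collection` instance carrying the fields the client reads (`partitionKey`, `partitionKeyProperty`):

    const iterator = queryIterator("testDatabase", collection, "{}");

    const firstPage = await iterator.fetchNext();
    // firstPage.requestCharge  – parsed from the request-charge response header
    // firstPage.activityId     – parsed from the activity-id response header
    // firstPage.hasMoreResults – true while the proxy returns a continuation token

    if (firstPage.hasMoreResults) {
      await iterator.fetchNext(); // the continuation token is carried over internally
    }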


@@ -1,168 +1,168 @@
/* Copyright 2013 10gen Inc. /* Copyright 2013 10gen Inc.
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
export default class MongoUtility { export default class MongoUtility {
public static tojson = function(x: any, indent: string, nolint: boolean) { public static tojson = function (x: any, indent: string, nolint: boolean) {
if (x === null || x === undefined) { if (x === null || x === undefined) {
return String(x); return String(x);
} }
indent = indent || ""; indent = indent || "";
switch (typeof x) { switch (typeof x) {
case "string": case "string":
var out = new Array(x.length + 1); var out = new Array(x.length + 1);
out[0] = '"'; out[0] = '"';
for (var i = 0; i < x.length; i++) { for (var i = 0; i < x.length; i++) {
if (x[i] === '"') { if (x[i] === '"') {
out[out.length] = '\\"'; out[out.length] = '\\"';
} else if (x[i] === "\\") { } else if (x[i] === "\\") {
out[out.length] = "\\\\"; out[out.length] = "\\\\";
} else if (x[i] === "\b") { } else if (x[i] === "\b") {
out[out.length] = "\\b"; out[out.length] = "\\b";
} else if (x[i] === "\f") { } else if (x[i] === "\f") {
out[out.length] = "\\f"; out[out.length] = "\\f";
} else if (x[i] === "\n") { } else if (x[i] === "\n") {
out[out.length] = "\\n"; out[out.length] = "\\n";
} else if (x[i] === "\r") { } else if (x[i] === "\r") {
out[out.length] = "\\r"; out[out.length] = "\\r";
} else if (x[i] === "\t") { } else if (x[i] === "\t") {
out[out.length] = "\\t"; out[out.length] = "\\t";
} else { } else {
var code = x.charCodeAt(i); var code = x.charCodeAt(i);
if (code < 0x20) { if (code < 0x20) {
out[out.length] = (code < 0x10 ? "\\u000" : "\\u00") + code.toString(16); out[out.length] = (code < 0x10 ? "\\u000" : "\\u00") + code.toString(16);
} else { } else {
out[out.length] = x[i]; out[out.length] = x[i];
} }
} }
} }
return out.join("") + '"'; return out.join("") + '"';
case "number": case "number":
/* falls through */ /* falls through */
case "boolean": case "boolean":
return "" + x; return "" + x;
case "object": case "object":
var func = $.isArray(x) ? MongoUtility.tojsonArray : MongoUtility.tojsonObject; var func = $.isArray(x) ? MongoUtility.tojsonArray : MongoUtility.tojsonObject;
var s = func(x, indent, nolint); var s = func(x, indent, nolint);
if ( if (
(nolint === null || nolint === undefined || nolint === true) && (nolint === null || nolint === undefined || nolint === true) &&
s.length < 80 && s.length < 80 &&
(indent === null || indent.length === 0) (indent === null || indent.length === 0)
) { ) {
s = s.replace(/[\t\r\n]+/gm, " "); s = s.replace(/[\t\r\n]+/gm, " ");
} }
return s; return s;
case "function": case "function":
return x.toString(); return x.toString();
default: default:
throw new Error("tojson can't handle type " + typeof x); throw new Error("tojson can't handle type " + typeof x);
} }
}; };
private static tojsonObject = function(x: any, indent: string, nolint: boolean) { private static tojsonObject = function (x: any, indent: string, nolint: boolean) {
var lineEnding = nolint ? " " : "\n"; var lineEnding = nolint ? " " : "\n";
var tabSpace = nolint ? "" : "\t"; var tabSpace = nolint ? "" : "\t";
indent = indent || ""; indent = indent || "";
if (typeof x.tojson === "function" && x.tojson !== MongoUtility.tojson) { if (typeof x.tojson === "function" && x.tojson !== MongoUtility.tojson) {
return x.tojson(indent, nolint); return x.tojson(indent, nolint);
} }
if (x.constructor && typeof x.constructor.tojson === "function" && x.constructor.tojson !== MongoUtility.tojson) { if (x.constructor && typeof x.constructor.tojson === "function" && x.constructor.tojson !== MongoUtility.tojson) {
return x.constructor.tojson(x, indent, nolint); return x.constructor.tojson(x, indent, nolint);
} }
if (MongoUtility.hasDefinedProperty(x, "toString") && !$.isArray(x)) { if (MongoUtility.hasDefinedProperty(x, "toString") && !$.isArray(x)) {
return x.toString(); return x.toString();
} }
if (x instanceof Error) { if (x instanceof Error) {
return x.toString(); return x.toString();
} }
if (MongoUtility.isObjectId(x)) { if (MongoUtility.isObjectId(x)) {
return 'ObjectId("' + x.$oid + '")'; return 'ObjectId("' + x.$oid + '")';
} }
// push one level of indent // push one level of indent
indent += tabSpace; indent += tabSpace;
var s = "{"; var s = "{";
var pairs = []; var pairs = [];
for (var k in x) { for (var k in x) {
if (x.hasOwnProperty(k)) { if (x.hasOwnProperty(k)) {
var val = x[k]; var val = x[k];
var pair = '"' + k + '" : ' + MongoUtility.tojson(val, indent, nolint); var pair = '"' + k + '" : ' + MongoUtility.tojson(val, indent, nolint);
if (k === "_id") { if (k === "_id") {
pairs.unshift(pair); pairs.unshift(pair);
} else { } else {
pairs.push(pair); pairs.push(pair);
} }
} }
} }
// Add proper line endings, indents, and commas to each line // Add proper line endings, indents, and commas to each line
s += $.map(pairs, function(pair) { s += $.map(pairs, function (pair) {
return lineEnding + indent + pair; return lineEnding + indent + pair;
}).join(","); }).join(",");
s += lineEnding; s += lineEnding;
// pop one level of indent // pop one level of indent
indent = indent.substring(1); indent = indent.substring(1);
return s + indent + "}"; return s + indent + "}";
}; };
private static tojsonArray = function(a: any, indent: string, nolint: boolean) { private static tojsonArray = function (a: any, indent: string, nolint: boolean) {
if (a.length === 0) { if (a.length === 0) {
return "[ ]"; return "[ ]";
} }
var lineEnding = nolint ? " " : "\n"; var lineEnding = nolint ? " " : "\n";
if (!indent || nolint) { if (!indent || nolint) {
indent = ""; indent = "";
} }
var s = "[" + lineEnding; var s = "[" + lineEnding;
indent += "\t"; indent += "\t";
for (var i = 0; i < a.length; i++) { for (var i = 0; i < a.length; i++) {
s += indent + MongoUtility.tojson(a[i], indent, nolint); s += indent + MongoUtility.tojson(a[i], indent, nolint);
if (i < a.length - 1) { if (i < a.length - 1) {
s += "," + lineEnding; s += "," + lineEnding;
} }
} }
if (a.length === 0) { if (a.length === 0) {
s += indent; s += indent;
} }
indent = indent.substring(1); indent = indent.substring(1);
s += lineEnding + indent + "]"; s += lineEnding + indent + "]";
return s; return s;
}; };
private static hasDefinedProperty = function(obj: any, prop: string): boolean { private static hasDefinedProperty = function (obj: any, prop: string): boolean {
if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) { if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) {
return false; return false;
} else if (obj.hasOwnProperty(prop)) { } else if (obj.hasOwnProperty(prop)) {
return true; return true;
} else { } else {
return MongoUtility.hasDefinedProperty(Object.getPrototypeOf(obj), prop); return MongoUtility.hasDefinedProperty(Object.getPrototypeOf(obj), prop);
} }
}; };
private static isObjectId(obj: any): boolean { private static isObjectId(obj: any): boolean {
var keys = Object.keys(obj); var keys = Object.keys(obj);
return keys.length === 1 && keys[0] === "$oid" && typeof obj.$oid === "string" && /^[0-9a-f]{24}$/.test(obj.$oid); return keys.length === 1 && keys[0] === "$oid" && typeof obj.$oid === "string" && /^[0-9a-f]{24}$/.test(obj.$oid);
} }
} }
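`MongoUtility.tojson` is a Mongo-shell-style pretty printer (the `$.isArray`/`$.map` calls mean jQuery is expected to be in scope) that hoists `_id` to the front and renders extended-JSON ObjectIds. A sketch of the compact output with `nolint` set:

    const doc = {
      name: "pk-test",
      _id: { $oid: "507f1f77bcf86cd799439011" }, // 24 hex chars → recognised by isObjectId
    };

    MongoUtility.tojson(doc, "", true);
    // => '{ "_id" : ObjectId("507f1f77bcf86cd799439011"), "name" : "pk-test" }'
    //    _id is unshifted to the front by tojsonObject; nolint keeps the result on one line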


@@ -0,0 +1,64 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
describe("parseSDKOfferResponse", () => {
it("manual throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: 500,
autoscaleMaxThroughput: undefined,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
it("autoscale throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
maxThroughput: 5000,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: undefined,
autoscaleMaxThroughput: 5000,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
});


@@ -0,0 +1,37 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
import { HttpHeaders } from "./Constants";
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | undefined => {
const offerDefinition: SDKOfferDefinition | undefined = offerResponse?.resource;
if (!offerDefinition) {
return undefined;
}
const offerContent = offerDefinition.content;
if (!offerContent) {
return undefined;
}
const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
const autopilotSettings = offerContent.offerAutopilotSettings;
if (autopilotSettings && autopilotSettings.maxThroughput && minimumThroughput) {
return {
id: offerDefinition.id,
autoscaleMaxThroughput: autopilotSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
}
return {
id: offerDefinition.id,
autoscaleMaxThroughput: undefined,
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
};
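`parseSDKOfferResponse` normalizes the SDK offer into the Data Explorer's `Offer` shape: autoscale offers are detected via `offerAutopilotSettings.maxThroughput`, everything else is reported as manual throughput, and `offerReplacePending` is read from the response header. A consumption sketch; `offerResponse` is assumed to be an `OfferResponse` obtained elsewhere from the SDK:

    const offer = parseSDKOfferResponse(offerResponse);

    if (!offer) {
      // the response carried no offer resource or no offer content
    } else if (offer.autoscaleMaxThroughput !== undefined) {
      // autoscale: max RU/s in autoscaleMaxThroughput; manualThroughput is undefined
    } else {
      // manual: provisioned RU/s in manualThroughput
    }
    // offer.minimumThroughput and offer.offerReplacePending are filled in either branch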


@@ -16,7 +16,7 @@ const notificationsPath = () => {
}; };
export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => { export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
if (configContext.platform === Platform.Emulator) { if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
return []; return [];
} }
@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
const headers = { [authorizationHeader.header]: authorizationHeader.token }; const headers = { [authorizationHeader.header]: authorizationHeader.token };
const response = await window.fetch(url, { const response = await window.fetch(url, {
headers headers,
}); });
if (!response.ok) { if (!response.ok) {


@@ -1,253 +1,219 @@
import { ItemDefinition, QueryIterator, Resource } from "@azure/cosmos"; import { ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import * as _ from "underscore"; import * as _ from "underscore";
import * as DataModels from "../Contracts/DataModels"; import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels"; import * as ViewModels from "../Contracts/ViewModels";
import Explorer from "../Explorer/Explorer"; import Explorer from "../Explorer/Explorer";
import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent"; import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
import DocumentsTab from "../Explorer/Tabs/DocumentsTab"; import DocumentId from "../Explorer/Tree/DocumentId";
import DocumentId from "../Explorer/Tree/DocumentId"; import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils"; import { QueryUtils } from "../Utils/QueryUtils";
import { QueryUtils } from "../Utils/QueryUtils"; import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants"; import { userContext } from "../UserContext";
import { userContext } from "../UserContext"; import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase"; import { createCollection } from "./dataAccess/createCollection";
import { createCollection } from "./dataAccess/createCollection"; import { handleError } from "./ErrorHandlingUtils";
import { handleError } from "./ErrorHandlingUtils"; import { createDocument } from "./dataAccess/createDocument";
import { deleteDocument } from "./dataAccess/deleteDocument";
export class QueriesClient { import { queryDocuments } from "./dataAccess/queryDocuments";
private static readonly PartitionKey: DataModels.PartitionKey = {
paths: [`/${SavedQueries.PartitionKeyProperty}`], export class QueriesClient {
kind: BackendDefaults.partitionKeyKind, private static readonly PartitionKey: DataModels.PartitionKey = {
version: BackendDefaults.partitionKeyVersion paths: [`/${SavedQueries.PartitionKeyProperty}`],
}; kind: BackendDefaults.partitionKeyKind,
private static readonly FetchQuery: string = "SELECT * FROM c"; version: BackendDefaults.partitionKeyVersion,
private static readonly FetchMongoQuery: string = "{}"; };
private static readonly FetchQuery: string = "SELECT * FROM c";
public constructor(private container: Explorer) {} private static readonly FetchMongoQuery: string = "{}";
public async setupQueriesCollection(): Promise<DataModels.Collection> { public constructor(private container: Explorer) {}
const queriesCollection: ViewModels.Collection = this.findQueriesCollection();
if (queriesCollection) { public async setupQueriesCollection(): Promise<DataModels.Collection> {
return Promise.resolve(queriesCollection.rawDataModel); const queriesCollection: ViewModels.Collection = this.findQueriesCollection();
} if (queriesCollection) {
return Promise.resolve(queriesCollection.rawDataModel);
const id = NotificationConsoleUtils.logConsoleMessage( }
ConsoleDataType.InProgress,
"Setting up account for saving queries" const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
); return createCollection({
return createCollection({ collectionId: SavedQueries.CollectionName,
collectionId: SavedQueries.CollectionName, createNewDatabase: true,
createNewDatabase: true, databaseId: SavedQueries.DatabaseName,
databaseId: SavedQueries.DatabaseName, partitionKey: QueriesClient.PartitionKey,
partitionKey: QueriesClient.PartitionKey, offerThroughput: SavedQueries.OfferThroughput,
offerThroughput: SavedQueries.OfferThroughput, databaseLevelThroughput: false,
databaseLevelThroughput: false })
}) .then(
.then( (collection: DataModels.Collection) => {
(collection: DataModels.Collection) => { NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
NotificationConsoleUtils.logConsoleMessage( return Promise.resolve(collection);
ConsoleDataType.Info, },
"Successfully set up account for saving queries" (error: any) => {
); handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
return Promise.resolve(collection); return Promise.reject(error);
}, }
(error: any) => { )
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries"); .finally(() => clearMessage());
return Promise.reject(error); }
}
) public async saveQuery(query: DataModels.Query): Promise<void> {
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); const queriesCollection = this.findQueriesCollection();
} if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
public async saveQuery(query: DataModels.Query): Promise<void> { NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
const queriesCollection = this.findQueriesCollection(); return Promise.reject(errorMessage);
if (!queriesCollection) { }
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage( try {
ConsoleDataType.Error, this.validateQuery(query);
`Failed to save query ${query.queryName}: ${errorMessage}` } catch (error) {
); const errorMessage: string = "Invalid query specified";
return Promise.reject(errorMessage); NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
} return Promise.reject(errorMessage);
}
try {
this.validateQuery(query); const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
} catch (error) { query.id = query.queryName;
const errorMessage: string = "Invalid query specified"; return createDocument(queriesCollection, query)
NotificationConsoleUtils.logConsoleMessage( .then(
ConsoleDataType.Error, (savedQuery: DataModels.Query) => {
`Failed to save query ${query.queryName}: ${errorMessage}` NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
); return Promise.resolve();
return Promise.reject(errorMessage); },
} (error: any) => {
if (error.code === HttpStatusCodes.Conflict.toString()) {
const id = NotificationConsoleUtils.logConsoleMessage( error = `Query ${query.queryName} already exists`;
ConsoleDataType.InProgress, }
`Saving query ${query.queryName}` handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
); return Promise.reject(error);
query.id = query.queryName; }
return createDocument(queriesCollection, query) )
.then( .finally(() => clearMessage());
(savedQuery: DataModels.Query) => { }
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info, public async getQueries(): Promise<DataModels.Query[]> {
`Successfully saved query ${query.queryName}` const queriesCollection = this.findQueriesCollection();
); if (!queriesCollection) {
return Promise.resolve(); const errorMessage: string = "Account not set up to perform saved query operations";
}, NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
(error: any) => { return Promise.reject(errorMessage);
if (error.code === HttpStatusCodes.Conflict.toString()) { }
error = `Query ${query.queryName} already exists`;
} const options: any = { enableCrossPartitionQuery: true };
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`); const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
return Promise.reject(error); const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
} SavedQueries.DatabaseName,
) SavedQueries.CollectionName,
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); this.fetchQueriesQuery(),
} options
);
public async getQueries(): Promise<DataModels.Query[]> { const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
const queriesCollection = this.findQueriesCollection(); await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
if (!queriesCollection) { return QueryUtils.queryAllPages(fetchQueries)
const errorMessage: string = "Account not set up to perform saved query operations"; .then(
NotificationConsoleUtils.logConsoleMessage( (results: ViewModels.QueryResults) => {
ConsoleDataType.Error, let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
`Failed to fetch saved queries: ${errorMessage}` if (!document) {
); return undefined;
return Promise.reject(errorMessage); }
} const { id, resourceId, query, queryName } = document;
const parsedQuery: DataModels.Query = {
const options: any = { enableCrossPartitionQuery: true }; resourceId: resourceId,
const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries"); queryName: queryName,
return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options) query: query,
.then( id: id,
(queryIterator: QueryIterator<ItemDefinition & Resource>) => { };
const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> => try {
queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options); this.validateQuery(parsedQuery);
return QueryUtils.queryAllPages(fetchQueries).then( return parsedQuery;
(results: ViewModels.QueryResults) => { } catch (error) {
let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => { return undefined;
if (!document) { }
return undefined; });
} queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
const { id, resourceId, query, queryName } = document; NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
const parsedQuery: DataModels.Query = { return Promise.resolve(queries);
resourceId: resourceId, },
queryName: queryName, (error: any) => {
query: query, handleError(error, "getSavedQueries", "Failed to fetch saved queries");
id: id return Promise.reject(error);
}; }
try { )
this.validateQuery(parsedQuery); .finally(() => clearMessage());
return parsedQuery; }
} catch (error) {
return undefined; public async deleteQuery(query: DataModels.Query): Promise<void> {
} const queriesCollection = this.findQueriesCollection();
}); if (!queriesCollection) {
queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery); const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries"); NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
return Promise.resolve(queries); return Promise.reject(errorMessage);
}, }
(error: any) => {
handleError(error, "getSavedQueries", "Failed to fetch saved queries"); try {
return Promise.reject(error); this.validateQuery(query);
} } catch (error) {
); const errorMessage: string = "Invalid query specified";
}, NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
(error: any) => { }
// should never get into this state but we handle this regardless
handleError(error, "getSavedQueries", "Failed to fetch saved queries"); const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
return Promise.reject(error); query.id = query.queryName;
} const documentId = new DocumentId(
) {
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); partitionKey: QueriesClient.PartitionKey,
} partitionKeyProperty: "id",
} as DocumentsTab,
public async deleteQuery(query: DataModels.Query): Promise<void> { query,
const queriesCollection = this.findQueriesCollection(); query.queryName
if (!queriesCollection) { ); // TODO: Remove DocumentId's dependency on DocumentsTab
const errorMessage: string = "Account not set up to perform saved query operations"; const options: any = { partitionKey: query.resourceId };
NotificationConsoleUtils.logConsoleMessage( return deleteDocument(queriesCollection, documentId)
ConsoleDataType.Error, .then(
`Failed to fetch saved queries: ${errorMessage}` () => {
); NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
return Promise.reject(errorMessage); return Promise.resolve();
} },
(error: any) => {
try { handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
this.validateQuery(query); return Promise.reject(error);
} catch (error) { }
const errorMessage: string = "Invalid query specified"; )
NotificationConsoleUtils.logConsoleMessage( .finally(() => clearMessage());
ConsoleDataType.Error, }
`Failed to delete query ${query.queryName}: ${errorMessage}`
); public getResourceId(): string {
} const databaseAccount = userContext.databaseAccount;
const databaseAccountName = (databaseAccount && databaseAccount.name) || "";
const id = NotificationConsoleUtils.logConsoleMessage( const subscriptionId = userContext.subscriptionId || "";
ConsoleDataType.InProgress, const resourceGroup = userContext.resourceGroup || "";
`Deleting query ${query.queryName}`
); return `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDb/databaseAccounts/${databaseAccountName}`;
query.id = query.queryName; }
const documentId = new DocumentId(
{ private findQueriesCollection(): ViewModels.Collection {
partitionKey: QueriesClient.PartitionKey, const queriesDatabase: ViewModels.Database = _.find(
partitionKeyProperty: "id" this.container.databases(),
} as DocumentsTab, (database: ViewModels.Database) => database.id() === SavedQueries.DatabaseName
query, );
query.queryName if (!queriesDatabase) {
); // TODO: Remove DocumentId's dependency on DocumentsTab return undefined;
const options: any = { partitionKey: query.resourceId }; }
return deleteDocument(queriesCollection, documentId) return _.find(
.then( queriesDatabase.collections(),
() => { (collection: ViewModels.Collection) => collection.id() === SavedQueries.CollectionName
NotificationConsoleUtils.logConsoleMessage( );
ConsoleDataType.Info, }
`Successfully deleted query ${query.queryName}`
); private validateQuery(query: DataModels.Query): void {
return Promise.resolve(); if (!query || query.queryName == null || query.query == null || query.resourceId == null) {
}, throw new Error("Invalid query specified");
(error: any) => { }
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`); }
return Promise.reject(error);
} private fetchQueriesQuery(): string {
) if (this.container.isPreferredApiMongoDB()) {
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id)); return QueriesClient.FetchMongoQuery;
} }
return QueriesClient.FetchQuery;
public getResourceId(): string { }
const databaseAccount = userContext.databaseAccount; }
const databaseAccountName = (databaseAccount && databaseAccount.name) || "";
const subscriptionId = userContext.subscriptionId || "";
const resourceGroup = userContext.resourceGroup || "";
return `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDb/databaseAccounts/${databaseAccountName}`;
}
private findQueriesCollection(): ViewModels.Collection {
const queriesDatabase: ViewModels.Database = _.find(
this.container.databases(),
(database: ViewModels.Database) => database.id() === SavedQueries.DatabaseName
);
if (!queriesDatabase) {
return undefined;
}
return _.find(
queriesDatabase.collections(),
(collection: ViewModels.Collection) => collection.id() === SavedQueries.CollectionName
);
}
private validateQuery(query: DataModels.Query): void {
if (!query || query.queryName == null || query.query == null || query.resourceId == null) {
throw new Error("Invalid query specified");
}
}
private fetchQueriesQuery(): string {
if (this.container.isPreferredApiMongoDB()) {
return QueriesClient.FetchMongoQuery;
}
return QueriesClient.FetchQuery;
}
}


@@ -1,108 +1,107 @@
import * as ko from "knockout"; import * as ko from "knockout";
import { SplitterMetrics } from "./Constants"; import { SplitterMetrics } from "./Constants";
export enum SplitterDirection { export enum SplitterDirection {
Horizontal = "horizontal", Horizontal = "horizontal",
Vertical = "vertical" Vertical = "vertical",
} }
export interface SplitterBounds { export interface SplitterBounds {
max: number; max: number;
min: number; min: number;
} }
export interface SplitterOptions { export interface SplitterOptions {
splitterId: string; splitterId: string;
leftId: string; leftId: string;
bounds: SplitterBounds; bounds: SplitterBounds;
direction: SplitterDirection; direction: SplitterDirection;
} }
export class Splitter { export class Splitter {
public splitterId: string; public splitterId: string;
public leftSideId: string; public leftSideId: string;
public splitter: HTMLElement; public splitter!: HTMLElement;
public leftSide: HTMLElement; public leftSide!: HTMLElement;
public lastX: number; public lastX!: number;
public lastWidth: number; public lastWidth!: number;
private isCollapsed: ko.Observable<boolean>; private isCollapsed: ko.Observable<boolean>;
private bounds: SplitterBounds; private bounds: SplitterBounds;
private direction: SplitterDirection; private direction: SplitterDirection;
constructor(options: SplitterOptions) { constructor(options: SplitterOptions) {
this.splitterId = options.splitterId; this.splitterId = options.splitterId;
this.leftSideId = options.leftId; this.leftSideId = options.leftId;
this.isCollapsed = ko.observable<boolean>(false); this.isCollapsed = ko.observable<boolean>(false);
this.bounds = options.bounds; this.bounds = options.bounds;
this.direction = options.direction; this.direction = options.direction;
this.initialize(); this.initialize();
} }
public initialize() { public initialize() {
this.splitter = document.getElementById(this.splitterId); if (document.getElementById(this.splitterId) !== null && document.getElementById(this.leftSideId) != null) {
this.leftSide = document.getElementById(this.leftSideId); this.splitter = <HTMLElement>document.getElementById(this.splitterId);
    this.leftSide = <HTMLElement>document.getElementById(this.leftSideId);
    const isVerticalSplitter: boolean = this.direction === SplitterDirection.Vertical;
    const splitterOptions: JQueryUI.ResizableOptions = {
      animate: true,
      animateDuration: "fast",
      start: this.onResizeStart,
-     stop: this.onResizeStop
+     stop: this.onResizeStop,
    };

    if (isVerticalSplitter) {
      $(this.leftSide).css("width", this.bounds.min);
      $(this.splitter).css("height", "100%");

      splitterOptions.maxWidth = this.bounds.max;
      splitterOptions.minWidth = this.bounds.min;
      splitterOptions.handles = { e: "#" + this.splitterId };
    } else {
      $(this.leftSide).css("height", this.bounds.min);
      $(this.splitter).css("width", "100%");

      splitterOptions.maxHeight = this.bounds.max;
      splitterOptions.minHeight = this.bounds.min;
      splitterOptions.handles = { s: "#" + this.splitterId };
    }

    $(this.leftSide).resizable(splitterOptions);
  }

  private onResizeStart: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => {
    if (this.direction === SplitterDirection.Vertical) {
      $(".ui-resizable-helper").height("100%");
    } else {
      $(".ui-resizable-helper").width("100%");
    }
    $("iframe").css("pointer-events", "none");
  };

  private onResizeStop: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => {
    $("iframe").css("pointer-events", "auto");
  };

  public collapseLeft() {
    this.lastX = $(this.splitter).position().left;
    this.lastWidth = $(this.leftSide).width();
    $(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
    $(this.leftSide).css("width", "");
-   $(this.leftSide)
-     .resizable("option", "disabled", true)
-     .removeClass("ui-resizable-disabled"); // remove class so splitter is visible
+   $(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
    $(this.splitter).removeClass("ui-resizable-e");
    this.isCollapsed(true);
  }

  public expandLeft() {
    $(this.splitter).addClass("ui-resizable-e");
    $(this.leftSide).css("width", this.lastWidth);
    $(this.splitter).css("left", this.lastX);
    $(this.splitter).css("left", ""); // this ensures the splitter's position is not fixed and enables movement during resizing
    $(this.leftSide).resizable("enable");
    this.isCollapsed(false);
  }
}


@@ -32,8 +32,8 @@ export default class UrlUtility {
      type: type,
      objectBody: {
        id: id,
-       self: resourcePath
-     }
+       self: resourcePath,
+     },
    };
    return result;


@@ -1,6 +1,5 @@
jest.mock("../../Utils/arm/request");
jest.mock("../CosmosClient");
- jest.mock("../DataAccessUtilityBase");
import { AuthType } from "../../AuthType";
import { CreateCollectionParams, DatabaseAccount } from "../../Contracts/DataModels";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
@@ -15,15 +14,15 @@ describe("createCollection", () => {
    collectionId: "testContainer",
    databaseId: "testDatabase",
    databaseLevelThroughput: true,
-   offerThroughput: 400
+   offerThroughput: 400,
  };

  beforeAll(() => {
    updateUserContext({
      databaseAccount: {
-       name: "test"
+       name: "test",
      } as DatabaseAccount,
-     defaultExperience: DefaultAccountExperienceType.DocumentDB
+     defaultExperience: DefaultAccountExperienceType.DocumentDB,
    });
  });
@@ -41,12 +40,12 @@ describe("createCollection", () => {
        return {
          database: {
            containers: {
-             create: () => ({})
-           }
-         }
+             create: () => ({}),
+           },
+         },
        };
-     }
-   }
+     },
+   },
  });
  await createCollection(createCollectionParams);
  expect(client).toHaveBeenCalled();
@@ -60,7 +59,7 @@ describe("createCollection", () => {
    collectionId: "testContainer",
    databaseId: "testDatabase",
    databaseLevelThroughput: false,
-   offerThroughput: 400
+   offerThroughput: 400,
  };
  expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -70,12 +69,12 @@ describe("createCollection", () => {
    databaseId: "testDatabase",
    databaseLevelThroughput: false,
    offerThroughput: 400,
-   autoPilotMaxThroughput: 4000
+   autoPilotMaxThroughput: 4000,
  };
  expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
    autoscaleSettings: {
-     maxThroughput: 4000
-   }
+     maxThroughput: 4000,
+   },
  });
});
});


@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
  createUpdateCassandraTable,
-  getCassandraTable
+  getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
  createUpdateMongoDBCollection,
-  getMongoDBCollection
+  getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
  createUpdateGremlinGraph,
-  getGremlinGraph
+  getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
      autoPilotMaxThroughput: params.autoPilotMaxThroughput,
      databaseId: params.databaseId,
      databaseLevelThroughput: params.databaseLevelThroughput,
-     offerThroughput: params.offerThroughput
+     offerThroughput: params.offerThroughput,
    };
    await createDatabase(createDatabaseParams);
  }
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
  const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
  const resource: ARMTypes.SqlContainerResource = {
-   id: params.collectionId
+   id: params.collectionId,
  };
  if (params.analyticalStorageTtl) {
    resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
  const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
    properties: {
      resource,
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
  const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
  const resource: ARMTypes.MongoDBCollectionResource = {
-   id: params.collectionId
+   id: params.collectionId,
  };
  if (params.analyticalStorageTtl) {
    resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
  const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
    properties: {
      resource,
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
  if (params.createMongoWildcardIndex) {
    TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
-     message: "Mongo Collection created with wildcard index on all fields."
+     message: "Mongo Collection created with wildcard index on all fields.",
    });
  }
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
  const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
  const resource: ARMTypes.CassandraTableResource = {
-   id: params.collectionId
+   id: params.collectionId,
  };
  if (params.analyticalStorageTtl) {
    resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
  const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
    properties: {
      resource,
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
  const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
  const resource: ARMTypes.GremlinGraphResource = {
-   id: params.collectionId
+   id: params.collectionId,
  };
  if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
  const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
    properties: {
      resource,
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
  const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
  const resource: ARMTypes.TableResource = {
-   id: params.collectionId
+   id: params.collectionId,
  };
  const rpPayload: ARMTypes.TableCreateUpdateParameters = {
    properties: {
      resource,
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
  if (params.autoPilotMaxThroughput) {
    return {
      autoscaleSettings: {
-       maxThroughput: params.autoPilotMaxThroughput
-     }
+       maxThroughput: params.autoPilotMaxThroughput,
+     },
    };
  }
  return {
-   throughput: params.offerThroughput
+   throughput: params.offerThroughput,
  };
};
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
    partitionKey: params.partitionKey || undefined,
    indexingPolicy: params.indexingPolicy || undefined,
    uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
-   analyticalStorageTtl: params.analyticalStorageTtl
+   analyticalStorageTtl: params.analyticalStorageTtl,
  } as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
  const collectionOptions: RequestOptions = {};
  const createDatabaseBody: DatabaseRequest = { id: params.databaseId };


@@ -8,21 +8,21 @@ import {
  GremlinDatabaseCreateUpdateParameters,
  MongoDBDatabaseCreateUpdateParameters,
  SqlDatabaseCreateUpdateParameters,
-  CreateUpdateOptions
+  CreateUpdateOptions,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
  createUpdateCassandraKeyspace,
-  getCassandraKeyspace
+  getCassandraKeyspace,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
  createUpdateMongoDBDatabase,
-  getMongoDBDatabase
+  getMongoDBDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
  createUpdateGremlinDatabase,
-  getGremlinDatabase
+  getGremlinDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
  const rpPayload: SqlDatabaseCreateUpdateParameters = {
    properties: {
      resource: {
-       id: params.databaseId
+       id: params.databaseId,
      },
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateSqlDatabase(
    userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
  const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
    properties: {
      resource: {
-       id: params.databaseId
+       id: params.databaseId,
      },
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateMongoDBDatabase(
    userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
  const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
    properties: {
      resource: {
-       id: params.databaseId
+       id: params.databaseId,
      },
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateCassandraKeyspace(
    userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
  const rpPayload: GremlinDatabaseCreateUpdateParameters = {
    properties: {
      resource: {
-       id: params.databaseId
+       id: params.databaseId,
      },
-     options
-   }
+     options,
+   },
  };
  const createResponse = await createUpdateGremlinDatabase(
    userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
  if (params.autoPilotMaxThroughput) {
    return {
      autoscaleSettings: {
-       maxThroughput: params.autoPilotMaxThroughput
-     }
+       maxThroughput: params.autoPilotMaxThroughput,
+     },
    };
  }
  return {
-   throughput: params.offerThroughput
+   throughput: params.offerThroughput,
  };
}


@@ -0,0 +1,25 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

export const createDocument = async (collection: CollectionBase, newDocument: unknown): Promise<unknown> => {
  const entityName = getEntityName();
  const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .items.create(newDocument);
    logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
    return response?.resource;
  } catch (error) {
    handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
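A minimal usage sketch for the new helper (not part of the diff; `selectedCollection` is a hypothetical CollectionBase taken from the explorer tree, and createDocument/CollectionBase are assumed to be imported as above):

// Hypothetical example: create one item in the selected container and log the returned resource.
const addSampleDocument = async (selectedCollection: CollectionBase): Promise<void> => {
  const created = await createDocument(selectedCollection, { id: "sample-id", note: "example" });
  console.log("Created resource:", created);
};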


@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
  SqlStoredProcedureCreateUpdateParameters,
-  SqlStoredProcedureResource
+  SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
  createUpdateSqlStoredProcedure,
-  getSqlStoredProcedure
+  getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
  const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
    properties: {
      resource: storedProcedure as SqlStoredProcedureResource,
-     options: {}
-   }
+     options: {},
+   },
  };
  const rpResponse = await createUpdateSqlStoredProcedure(
    userContext.subscriptionId,


@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos";
import {
  SqlTriggerCreateUpdateParameters,
-  SqlTriggerResource
+  SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
@@ -44,8 +44,8 @@ export async function createTrigger(
  const createTriggerParams: SqlTriggerCreateUpdateParameters = {
    properties: {
      resource: trigger as SqlTriggerResource,
-     options: {}
-   }
+     options: {},
+   },
  };
  const rpResponse = await createUpdateSqlTrigger(
    userContext.subscriptionId,
@@ -59,10 +59,7 @@ export async function createTrigger(
      return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
    }
-   const response = await client()
-     .database(databaseId)
-     .container(collectionId)
-     .scripts.triggers.create(trigger);
+   const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
    return response.resource;
  } catch (error) {
    handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);


@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
  SqlUserDefinedFunctionCreateUpdateParameters,
-  SqlUserDefinedFunctionResource
+  SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
  createUpdateSqlUserDefinedFunction,
-  getSqlUserDefinedFunction
+  getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
  const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
    properties: {
      resource: userDefinedFunction as SqlUserDefinedFunctionResource,
-     options: {}
-   }
+     options: {},
+   },
  };
  const rpResponse = await createUpdateSqlUserDefinedFunction(
    userContext.subscriptionId,


@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
  beforeAll(() => {
    updateUserContext({
      databaseAccount: {
-       name: "test"
+       name: "test",
      } as DatabaseAccount,
-     defaultExperience: DefaultAccountExperienceType.DocumentDB
+     defaultExperience: DefaultAccountExperienceType.DocumentDB,
    });
  });
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
      return {
        container: () => {
          return {
-           delete: (): unknown => undefined
+           delete: (): unknown => undefined,
          };
-       }
+       },
      };
-   }
+   },
  });
  await deleteCollection("database", "collection");
  expect(client).toHaveBeenCalled();


@@ -16,10 +16,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
      await deleteCollectionWithARM(databaseId, collectionId);
    } else {
-     await client()
-       .database(databaseId)
-       .container(collectionId)
-       .delete();
+     await client().database(databaseId).container(collectionId).delete();
    }
    logConsoleInfo(`Successfully deleted container ${collectionId}`);
  } catch (error) {


@@ -0,0 +1,36 @@
import ConflictId from "../../Explorer/Tree/ConflictId";
import { CollectionBase } from "../../Contracts/ViewModels";
import { RequestOptions } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";

export const deleteConflict = async (collection: CollectionBase, conflictId: ConflictId): Promise<void> => {
  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);

  try {
    const options = {
      partitionKey: getPartitionKeyHeaderForConflict(conflictId),
    };

    await client()
      .database(collection.databaseId)
      .container(collection.id())
      .conflict(conflictId.id())
      .delete(options as RequestOptions);
    logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
  } catch (error) {
    handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};

const getPartitionKeyHeaderForConflict = (conflictId: ConflictId): unknown => {
  if (!conflictId.partitionKey) {
    return undefined;
  }

  return conflictId.partitionKeyValue === undefined ? [{}] : [conflictId.partitionKeyValue];
};


@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
  beforeAll(() => {
    updateUserContext({
      databaseAccount: {
-       name: "test"
+       name: "test",
      } as DatabaseAccount,
-     defaultExperience: DefaultAccountExperienceType.DocumentDB
+     defaultExperience: DefaultAccountExperienceType.DocumentDB,
    });
  });
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
  (client as jest.Mock).mockReturnValue({
    database: () => {
      return {
-       delete: (): unknown => undefined
+       delete: (): unknown => undefined,
      };
-   }
+   },
  });
  await deleteDatabase("database");
  expect(client).toHaveBeenCalled();


@@ -19,9 +19,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
    if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
      await deleteDatabaseWithARM(databaseId);
    } else {
-     await client()
-       .database(databaseId)
-       .delete();
+     await client().database(databaseId).delete();
    }
    logConsoleInfo(`Successfully deleted database ${databaseId}`);
  } catch (error) {


@@ -0,0 +1,25 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";

export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
  const entityName: string = getEntityName();
  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);

  try {
    await client()
      .database(collection.databaseId)
      .container(collection.id())
      .item(documentId.id(), documentId.partitionKeyValue)
      .delete();
    logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
  } catch (error) {
    handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
    throw error;
  } finally {
    clearMessage();
  }
};
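A minimal usage sketch (not part of the diff; the CollectionBase and DocumentId values are hypothetical nodes picked from the Documents tab):

// Hypothetical example: delete the document behind a selected tree node.
const removeSelectedDocument = async (selectedCollection: CollectionBase, selectedId: DocumentId): Promise<void> => {
  await deleteDocument(selectedCollection, selectedId);
};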


@@ -27,11 +27,7 @@ export async function deleteStoredProcedure(
        storedProcedureId
      );
    } else {
-     await client()
-       .database(databaseId)
-       .container(collectionId)
-       .scripts.storedProcedure(storedProcedureId)
-       .delete();
+     await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
    }
  } catch (error) {
    handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);


@@ -23,11 +23,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
        triggerId
      );
    } else {
-     await client()
-       .database(databaseId)
-       .container(collectionId)
-       .scripts.trigger(triggerId)
-       .delete();
+     await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
    }
  } catch (error) {
    handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);


@@ -23,11 +23,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
        id
      );
    } else {
-     await client()
-       .database(databaseId)
-       .container(collectionId)
-       .scripts.userDefinedFunction(id)
-       .delete();
+     await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
    }
  } catch (error) {
    handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);


@@ -0,0 +1,48 @@
import { Collection } from "../../Contracts/ViewModels";
import { ClientDefaults, HttpHeaders } from "../Constants";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
import StoredProcedure from "../../Explorer/Tree/StoredProcedure";

export interface ExecuteSprocResult {
  result: StoredProcedure;
  scriptLogs: string;
}

export const executeStoredProcedure = async (
  collection: Collection,
  storedProcedure: StoredProcedure,
  partitionKeyValue: string,
  params: string[]
): Promise<ExecuteSprocResult> => {
  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
  const timeout = setTimeout(() => {
    throw Error(`Request timed out while executing stored procedure ${storedProcedure.id()}`);
  }, ClientDefaults.requestTimeoutMs);

  try {
    const response = await client()
      .database(collection.databaseId)
      .container(collection.id())
      .scripts.storedProcedure(storedProcedure.id())
      .execute(partitionKeyValue, params, { enableScriptLogging: true });
    clearTimeout(timeout);
    logConsoleInfo(
      `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
    );
    return {
      result: response.resource,
      scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string,
    };
  } catch (error) {
    handleError(
      error,
      "ExecuteStoredProcedure",
      `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
    );
    throw error;
  } finally {
    clearMessage();
  }
};
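A minimal usage sketch (not part of the diff; the Collection and StoredProcedure values are hypothetical explorer-tree objects, and the partition key value and arguments are placeholders):

// Hypothetical example: execute a sproc against one partition and surface its script logs.
const runSproc = async (collection: Collection, sproc: StoredProcedure): Promise<void> => {
  const { result, scriptLogs } = await executeStoredProcedure(collection, sproc, "partitionKeyValue", ["arg1"]);
  console.log(result, scriptLogs);
};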


@@ -0,0 +1,92 @@
import { AuthType } from "../../AuthType";
import { armRequest } from "../../Utils/arm/request";
import { configContext } from "../../ConfigContext";
import { handleError } from "../ErrorHandlingUtils";
import { userContext } from "../../UserContext";

interface TimeSeriesData {
  data: {
    timeStamp: string;
    total: number;
  }[];
  metadatavalues: {
    name: {
      localizedValue: string;
      value: string;
    };
    value: string;
  };
}

interface MetricsData {
  displayDescription: string;
  errorCode: string;
  id: string;
  name: {
    value: string;
    localizedValue: string;
  };
  timeseries: TimeSeriesData[];
  type: string;
  unit: string;
}

interface MetricsResponse {
  cost: number;
  interval: string;
  namespace: string;
  resourceregion: string;
  timespan: string;
  value: MetricsData[];
}

export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
  if (window.authType !== AuthType.AAD) {
    return undefined;
  }

  const subscriptionId = userContext.subscriptionId;
  const resourceGroup = userContext.resourceGroup;
  const accountName = userContext.databaseAccount.name;
  const filter = `DatabaseName eq '${databaseName}' and CollectionName eq '${containerName}'`;
  const metricNames = "DataUsage,IndexUsage";
  const path = `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDB/databaseAccounts/${accountName}/providers/microsoft.insights/metrics`;

  try {
    const metricsResponse: MetricsResponse = await armRequest({
      host: configContext.ARM_ENDPOINT,
      path,
      method: "GET",
      apiVersion: "2018-01-01",
      queryParams: {
        filter,
        metricNames,
      },
    });

    if (metricsResponse?.value?.length !== 2) {
      return undefined;
    }

    const dataUsageData: MetricsData = metricsResponse.value[0];
    const indexUsagedata: MetricsData = metricsResponse.value[1];
    const dataUsageSizeInKb: number = getUsageSizeInKb(dataUsageData);
    const indexUsageSizeInKb: number = getUsageSizeInKb(indexUsagedata);

    return dataUsageSizeInKb + indexUsageSizeInKb;
  } catch (error) {
    handleError(error, "getCollectionUsageSize");
    return undefined;
  }
};

const getUsageSizeInKb = (metricsData: MetricsData): number => {
  if (metricsData?.errorCode !== "Success") {
    throw Error(`Get collection usage size failed: ${metricsData.errorCode}`);
  }

  const timeSeriesData: TimeSeriesData = metricsData?.timeseries?.[0];
  const usageSizeInBytes: number = timeSeriesData?.data?.[0]?.total;

  return usageSizeInBytes ? usageSizeInBytes / 1024 : 0;
};
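A minimal usage sketch (not part of the diff; the database and container names are placeholders):

// Hypothetical example: read the combined data + index usage and tolerate the
// undefined result the helper returns when metrics are unavailable.
const logUsage = async (): Promise<void> => {
  const sizeInKB = await getCollectionUsageSizeInKB("testDatabase", "testContainer");
  console.log(sizeInKB === undefined ? "Usage size unavailable" : `${sizeInKB} KB`);
};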


@@ -11,10 +11,7 @@ export async function getIndexTransformationProgress(databaseId: string, collect
  let indexTransformationPercentage: number;
  const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
  try {
-   const response = await client()
-     .database(databaseId)
-     .container(collectionId)
-     .read({ populateQuotaInfo: true });
+   const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });
    indexTransformationPercentage = parseInt(
      response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string


@@ -0,0 +1,11 @@
import { ConflictDefinition, FeedOptions, QueryIterator, Resource } from "@azure/cosmos";
import { client } from "../CosmosClient";

export const queryConflicts = (
  databaseId: string,
  containerId: string,
  query: string,
  options: FeedOptions
): QueryIterator<ConflictDefinition & Resource> => {
  return client().database(databaseId).container(containerId).conflicts.query(query, options);
};
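A minimal usage sketch (not part of the diff; the ids and query text are placeholders, and fetchNext comes from the SDK's QueryIterator):

// Hypothetical example: page through conflicts for a container, 10 at a time.
const listConflicts = async (): Promise<void> => {
  const iterator = queryConflicts("testDatabase", "testContainer", "SELECT * FROM c", { maxItemCount: 10 });
  const { resources } = await iterator.fetchNext();
  console.log(`Fetched ${resources.length} conflicts`);
};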


@@ -1,13 +1,13 @@
- import { getCommonQueryOptions } from "./DataAccessUtilityBase";
- import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
+ import { getCommonQueryOptions } from "./queryDocuments";
+ import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";

describe("getCommonQueryOptions", () => {
  it("builds the correct default options objects", () => {
    expect(getCommonQueryOptions({})).toMatchSnapshot();
  });
  it("reads from localStorage", () => {
    LocalStorageUtility.setEntryNumber(StorageKey.ActualItemPerPage, 37);
    LocalStorageUtility.setEntryNumber(StorageKey.MaxDegreeOfParellism, 17);
    expect(getCommonQueryOptions({})).toMatchSnapshot();
  });
});

Some files were not shown because too many files have changed in this diff.