Compare commits


2 Commits

Author | SHA1 | Message | Date
Steve Faulkner | a636c70ce8 | Remove old panel component | 2021-02-24 13:07:15 -06:00
Steve Faulkner | 92891a4878 | Remove unused SparkMasterTab | 2020-10-26 22:15:19 -05:00
704 changed files with 70902 additions and 108311 deletions

View File

@@ -1,16 +1,7 @@
# These options are only needed when running end to end tests locally
PORTAL_RUNNER_USERNAME=
PORTAL_RUNNER_PASSWORD=
PORTAL_RUNNER_SUBSCRIPTION=
PORTAL_RUNNER_RESOURCE_GROUP=
PORTAL_RUNNER_DATABASE_ACCOUNT=
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT=
PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY=
PORTAL_RUNNER_CONNECTION_STRING=
NOTEBOOKS_TEST_RUNNER_TENANT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_ID=
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET=
CASSANDRA_CONNECTION_STRING=
MONGO_CONNECTION_STRING=
TABLES_CONNECTION_STRING=
DATA_EXPLORER_ENDPOINT=https://localhost:1234/hostedExplorer.html

View File

@@ -11,9 +11,18 @@ src/Common/CosmosClient.test.ts
src/Common/CosmosClient.ts
src/Common/DataAccessUtilityBase.test.ts
src/Common/DataAccessUtilityBase.ts
src/Common/DeleteFeedback.ts
src/Common/DocumentClientUtilityBase.ts
src/Common/EditableUtility.ts
src/Common/EnvironmentUtility.ts
src/Common/ErrorParserUtility.test.ts
src/Common/ErrorParserUtility.ts
src/Common/HashMap.test.ts
src/Common/HashMap.ts
src/Common/HeadersUtility.test.ts
src/Common/HeadersUtility.ts
src/Common/IteratorUtilities.test.ts
src/Common/IteratorUtilities.ts
src/Common/Logger.test.ts
src/Common/MessageHandler.test.ts
src/Common/MessageHandler.ts
@@ -24,6 +33,8 @@ src/Common/ObjectCache.test.ts
src/Common/ObjectCache.ts
src/Common/QueriesClient.ts
src/Common/Splitter.ts
src/Common/ThemeUtility.ts
src/Common/UrlUtility.ts
src/Config.ts
src/Contracts/ActionContracts.ts
src/Contracts/DataModels.ts
@@ -34,6 +45,7 @@ src/Contracts/ViewModels.ts
src/Controls/Heatmap/Heatmap.test.ts
src/Controls/Heatmap/Heatmap.ts
src/Controls/Heatmap/HeatmapDatatypes.ts
src/Definitions/adal.d.ts
src/Definitions/datatables.d.ts
src/Definitions/gif.d.ts
src/Definitions/globals.d.ts
@@ -50,6 +62,8 @@ src/Explorer/ComponentRegisterer.test.ts
src/Explorer/ComponentRegisterer.ts
src/Explorer/ContextMenuButtonFactory.ts
src/Explorer/Controls/CollapsiblePanel/CollapsiblePanelComponent.ts
src/Explorer/Controls/CommandButton/CommandButton.test.ts
src/Explorer/Controls/CommandButton/CommandButton.ts
src/Explorer/Controls/DiffEditor/DiffEditorComponent.ts
src/Explorer/Controls/DynamicList/DynamicList.test.ts
src/Explorer/Controls/DynamicList/DynamicListComponent.ts
@@ -77,7 +91,7 @@ src/Explorer/DataSamples/ContainerSampleGenerator.test.ts
src/Explorer/DataSamples/ContainerSampleGenerator.ts
src/Explorer/DataSamples/DataSamplesUtil.test.ts
src/Explorer/DataSamples/DataSamplesUtil.ts
src/Explorer/Explorer.tsx
src/Explorer/Explorer.ts
src/Explorer/Graph/GraphExplorerComponent/ArraysByKeyCache.test.ts
src/Explorer/Graph/GraphExplorerComponent/ArraysByKeyCache.ts
src/Explorer/Graph/GraphExplorerComponent/D3ForceGraph.test.ts
@@ -85,6 +99,8 @@ src/Explorer/Graph/GraphExplorerComponent/D3ForceGraph.ts
src/Explorer/Graph/GraphExplorerComponent/EdgeInfoCache.ts
src/Explorer/Graph/GraphExplorerComponent/GraphData.test.ts
src/Explorer/Graph/GraphExplorerComponent/GraphData.ts
src/Explorer/Graph/GraphExplorerComponent/GraphUtil.test.ts
src/Explorer/Graph/GraphExplorerComponent/GraphUtil.ts
src/Explorer/Graph/GraphExplorerComponent/GremlinClient.test.ts
src/Explorer/Graph/GraphExplorerComponent/GremlinClient.ts
src/Explorer/Graph/GraphExplorerComponent/GremlinSimpleClient.test.ts
@@ -97,6 +113,8 @@ src/Explorer/Menus/CommandBar/CommandBarComponentButtonFactory.test.ts
src/Explorer/Menus/CommandBar/CommandBarComponentButtonFactory.ts
src/Explorer/Menus/ContextMenu.ts
src/Explorer/MostRecentActivity/MostRecentActivity.ts
src/Explorer/Notebook/FileSystemUtil.ts
src/Explorer/Notebook/NTeractUtil.ts
src/Explorer/Notebook/NotebookClientV2.ts
src/Explorer/Notebook/NotebookComponent/NotebookContentProvider.ts
src/Explorer/Notebook/NotebookComponent/__mocks__/rx-jupyter.ts
@@ -125,12 +143,15 @@ src/Explorer/Panes/DeleteCollectionConfirmationPane.test.ts
src/Explorer/Panes/DeleteCollectionConfirmationPane.ts
src/Explorer/Panes/DeleteDatabaseConfirmationPane.test.ts
src/Explorer/Panes/DeleteDatabaseConfirmationPane.ts
src/Explorer/Panes/ExecuteSprocParamsPane.ts
src/Explorer/Panes/GraphStylingPane.ts
src/Explorer/Panes/LoadQueryPane.ts
src/Explorer/Panes/NewVertexPane.ts
src/Explorer/Panes/PaneComponents.ts
src/Explorer/Panes/RenewAdHocAccessPane.ts
src/Explorer/Panes/SaveQueryPane.ts
src/Explorer/Panes/SettingsPane.test.ts
src/Explorer/Panes/SettingsPane.ts
src/Explorer/Panes/SetupNotebooksPane.ts
src/Explorer/Panes/StringInputPane.ts
src/Explorer/Panes/SwitchDirectoryPane.ts
@@ -143,7 +164,9 @@ src/Explorer/Panes/Tables/TableEntityPane.ts
src/Explorer/Panes/Tables/Validators/EntityPropertyNameValidator.ts
src/Explorer/Panes/Tables/Validators/EntityPropertyValidationCommon.ts
src/Explorer/Panes/Tables/Validators/EntityPropertyValueValidator.ts
src/Explorer/SplashScreen/SplashScreen.test.ts
src/Explorer/Panes/UploadFilePane.ts
src/Explorer/Panes/UploadItemsPane.ts
src/Explorer/SplashScreen/SplashScreenComponentAdapter.test.ts
src/Explorer/Tables/Constants.ts
src/Explorer/Tables/DataTable/CacheBase.ts
src/Explorer/Tables/DataTable/DataTableBindingManager.ts
@@ -151,6 +174,7 @@ src/Explorer/Tables/DataTable/DataTableBuilder.ts
src/Explorer/Tables/DataTable/DataTableContextMenu.ts
src/Explorer/Tables/DataTable/DataTableOperationManager.ts
src/Explorer/Tables/DataTable/DataTableOperations.ts
src/Explorer/Tables/DataTable/DataTableUtilities.ts
src/Explorer/Tables/DataTable/DataTableViewModel.ts
src/Explorer/Tables/DataTable/TableCommands.ts
src/Explorer/Tables/DataTable/TableEntityCache.ts
@@ -159,6 +183,8 @@ src/Explorer/Tables/Entities.ts
src/Explorer/Tables/QueryBuilder/ClauseGroup.ts
src/Explorer/Tables/QueryBuilder/ClauseGroupViewModel.ts
src/Explorer/Tables/QueryBuilder/CustomTimestampHelper.ts
src/Explorer/Tables/QueryBuilder/DateTimeUtilities.test.ts
src/Explorer/Tables/QueryBuilder/DateTimeUtilities.ts
src/Explorer/Tables/QueryBuilder/QueryBuilderViewModel.ts
src/Explorer/Tables/QueryBuilder/QueryClauseViewModel.ts
src/Explorer/Tables/QueryBuilder/QueryViewModel.ts
@@ -178,6 +204,8 @@ src/Explorer/Tabs/QueryTab.test.ts
src/Explorer/Tabs/QueryTab.ts
src/Explorer/Tabs/QueryTablesTab.ts
src/Explorer/Tabs/ScriptTabBase.ts
src/Explorer/Tabs/SettingsTab.test.ts
src/Explorer/Tabs/SettingsTab.ts
src/Explorer/Tabs/StoredProcedureTab.ts
src/Explorer/Tabs/TabComponents.ts
src/Explorer/Tabs/TabsBase.ts
@@ -218,6 +246,9 @@ src/Platform/Hosted/Authorization.ts
src/Platform/Hosted/DataAccessUtility.ts
src/Platform/Hosted/ExplorerFactory.ts
src/Platform/Hosted/Helpers/ConnectionStringParser.test.ts
src/Platform/Hosted/Helpers/ConnectionStringParser.ts
src/Platform/Hosted/HostedUtils.test.ts
src/Platform/Hosted/HostedUtils.ts
src/Platform/Hosted/Main.ts
src/Platform/Hosted/Maint.test.ts
src/Platform/Hosted/NotificationsClient.ts
@@ -241,6 +272,8 @@ src/Shared/ExplorerSettings.ts
src/Shared/PriceEstimateCalculator.ts
src/Shared/StorageUtility.test.ts
src/Shared/StorageUtility.ts
src/Shared/StringUtility.test.ts
src/Shared/StringUtility.ts
src/Shared/appInsights.ts
src/SparkClusterManager/ArcadiaResourceManager.ts
src/SparkClusterManager/SparkClusterManager.ts
@@ -249,11 +282,27 @@ src/Terminal/NotebookAppContracts.d.ts
src/Terminal/index.ts
src/TokenProviders/PortalTokenProvider.ts
src/TokenProviders/TokenProviderFactory.ts
src/Utils/AuthorizationUtils.test.ts
src/Utils/AuthorizationUtils.ts
src/Utils/AutoPilotUtils.test.ts
src/Utils/AutoPilotUtils.ts
src/Utils/DatabaseAccountUtils.test.ts
src/Utils/DatabaseAccountUtils.ts
src/Utils/JunoUtils.ts
src/Utils/MessageValidation.ts
src/Utils/NotebookConfigurationUtils.ts
src/Utils/OfferUtils.test.ts
src/Utils/OfferUtils.ts
src/Utils/PricingUtils.test.ts
src/Utils/QueryUtils.test.ts
src/Utils/QueryUtils.ts
src/Utils/StringUtils.test.ts
src/Utils/StringUtils.ts
src/applyExplorerBindings.ts
src/global.d.ts
src/quickstart.ts
src/setupTests.ts
src/workers/upload/definitions.ts
src/workers/upload/index.ts
src/Explorer/Controls/AccessibleElement/AccessibleElement.tsx
src/Explorer/Controls/Accordion/AccordionComponent.tsx
@@ -300,7 +349,15 @@ src/Explorer/Graph/GraphExplorerComponent/QueryContainerComponent.tsx
src/Explorer/Graph/GraphExplorerComponent/ReadOnlyNeighborsComponent.tsx
src/Explorer/Graph/GraphExplorerComponent/ReadOnlyNodePropertiesComponent.test.tsx
src/Explorer/Graph/GraphExplorerComponent/ReadOnlyNodePropertiesComponent.tsx
src/Explorer/Menus/CommandBar/CommandBarComponentAdapter.tsx
src/Explorer/Menus/CommandBar/CommandBarUtil.test.tsx
src/Explorer/Menus/CommandBar/CommandBarUtil.tsx
src/Explorer/Menus/CommandBar/MemoryTrackerComponent.tsx
src/Explorer/Menus/NavBar/ControlBarComponent.tsx
src/Explorer/Menus/NavBar/ControlBarComponentAdapter.tsx
src/Explorer/Menus/NavBar/MeControlComponent.test.tsx
src/Explorer/Menus/NavBar/MeControlComponent.tsx
src/Explorer/Menus/NavBar/MeControlComponentAdapter.tsx
src/Explorer/Menus/NotificationConsole/NotificationConsoleComponent.test.tsx
src/Explorer/Menus/NotificationConsole/NotificationConsoleComponent.tsx
src/Explorer/Menus/NotificationConsole/NotificationConsoleComponentAdapter.tsx
@@ -330,7 +387,8 @@ src/Explorer/Notebook/temp/inputs/editor.tsx
src/Explorer/Notebook/temp/markdown-cell.tsx
src/Explorer/Notebook/temp/source.tsx
src/Explorer/Notebook/temp/syntax-highlighter/index.tsx
src/Explorer/SplashScreen/SplashScreen.tsx
src/Explorer/SplashScreen/SplashScreenComponent.tsx
src/Explorer/SplashScreen/SplashScreenComponentApdapter.tsx
src/Explorer/Tabs/GalleryTab.tsx
src/Explorer/Tabs/NotebookViewerTab.tsx
src/Explorer/Tabs/TerminalTab.tsx
@@ -339,5 +397,19 @@ src/Explorer/Tree/ResourceTreeAdapterForResourceToken.tsx
src/GalleryViewer/Cards/GalleryCardComponent.tsx
src/GalleryViewer/GalleryViewer.tsx
src/GalleryViewer/GalleryViewerComponent.tsx
cypress/integration/dataexplorer/CASSANDRA/addCollection.spec.ts
cypress/integration/dataexplorer/GRAPH/addCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/addCollectionPane.spec.ts
cypress/integration/dataexplorer/ci-tests/createDatabase.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteCollection.spec.ts
cypress/integration/dataexplorer/ci-tests/deleteDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/addCollection.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionAutopilot.spec.ts
cypress/integration/dataexplorer/MONGO/addCollectionExistingDatabase.spec.ts
cypress/integration/dataexplorer/MONGO/provisionDatabaseThroughput.spec.ts
cypress/integration/dataexplorer/SQL/addCollection.spec.ts
cypress/integration/dataexplorer/TABLE/addCollection.spec.ts
cypress/integration/notebook/newNotebook.spec.ts
cypress/integration/notebook/resourceTree.spec.ts
__mocks__/monaco-editor.ts
src/Explorer/Tree/ResourceTreeAdapterForResourceToken.test.tsx

View File

@@ -1,39 +1,41 @@
module.exports = {
env: {
browser: true,
es6: true,
es6: true
},
plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
globals: {
Atomics: "readonly",
SharedArrayBuffer: "readonly",
SharedArrayBuffer: "readonly"
},
parser: "@typescript-eslint/parser",
parserOptions: {
ecmaFeatures: {
jsx: true,
jsx: true
},
ecmaVersion: 2018,
sourceType: "module",
sourceType: "module"
},
overrides: [
{
files: ["**/*.tsx"],
extends: ["plugin:react/recommended"], // TODO: Add react-hooks
plugins: ["react"],
env: {
jest: true
},
extends: ["plugin:react/recommended"],
plugins: ["react"]
},
{
files: ["**/*.{test,spec}.{ts,tsx}"],
env: {
jest: true,
jest: true
},
extends: ["plugin:jest/recommended"],
plugins: ["jest"],
},
plugins: ["jest"]
}
],
rules: {
"no-console": ["error", { allow: ["error", "warn", "dir"] }],
curly: "error",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/no-extraneous-class": "error",
@@ -41,13 +43,12 @@ module.exports = {
"@typescript-eslint/no-explicit-any": "error",
"prefer-arrow/prefer-arrow-functions": ["error", { allowStandaloneDeclarations: true }],
eqeqeq: "error",
"react/display-name": "off",
"no-restricted-syntax": [
"error",
{
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'",
},
],
},
message: "Do not use JSON.stringify(error). It will print '{}'"
}
]
}
};
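For context on the `no-restricted-syntax` rule configured above, here is a minimal TypeScript illustration (not part of this diff): `Error` fields such as `message` and `stack` are non-enumerable, so serializing the error object directly loses them.

```ts
// Why the rule bans JSON.stringify(error): Error fields are non-enumerable.
const err = new Error("Request failed");
console.log(JSON.stringify(err)); // prints "{}"

// Serialize the fields you actually want instead.
console.log(JSON.stringify({ message: err.message, stack: err.stack }));
```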

View File

@@ -1,9 +0,0 @@
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"

View File

@@ -9,29 +9,15 @@ on:
branches:
- master
jobs:
codemetrics:
runs-on: ubuntu-latest
name: "Log Code Metrics"
if: github.ref == 'refs/heads/master'
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- run: npm ci
- run: node utils/codeMetrics.js
env:
CODE_METRICS_APP_ID: ${{ secrets.CODE_METRICS_APP_ID }}
compile:
runs-on: ubuntu-latest
name: "Compile TypeScript"
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 12.x
- run: npm ci
- run: npm run compile
- run: npm run compile:strict
@@ -40,10 +26,10 @@ jobs:
name: "Check Format"
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 12.x
- run: npm ci
- run: npm run format:check
lint:
@@ -51,10 +37,10 @@ jobs:
name: "Lint"
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 12.x
- run: npm ci
- run: npm run lint
unittest:
@@ -62,10 +48,10 @@ jobs:
name: "Unit Tests"
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 12.x
- run: npm ci
- run: npm run test
build:
@@ -74,10 +60,10 @@ jobs:
name: "Build"
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 12.x
- run: npm ci
- run: npm run build:contracts
- name: Restore Build Cache
@@ -93,42 +79,42 @@ jobs:
name: dist
path: dist/
endtoendemulator:
name: "End To End Emulator Tests"
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
name: "End To End Tests | Emulator | SQL"
needs: [lint, format, compile, unittest]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- uses: southpolesteve/cosmos-emulator-github-action@v1
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- uses: southpolesteve/cosmos-emulator-github-action@v1
node-version: 12.x
- name: Restore Cypress Binary Cache
uses: actions/cache@v2
with:
path: ~/.cache/Cypress
key: ${{ runner.os }}-cypress-binary-cache
- name: End to End Tests
run: |
npm ci
npm start &
npm run wait-for-server
npx jest -c ./jest.config.e2e.js --detectOpenHandles test/sql/container.spec.ts
npm ci --prefix ./cypress
npm run test:ci --prefix ./cypress -- --spec ./integration/dataexplorer/ci-tests/createDatabase.spec.ts
shell: bash
env:
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/explorer.html?platform=Emulator"
PLATFORM: "Emulator"
EMULATOR_ENDPOINT: https://0.0.0.0:8081/
NODE_TLS_REJECT_UNAUTHORIZED: 0
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failed-*
CYPRESS_CACHE_FOLDER: ~/.cache/Cypress
accessibility:
name: "Accessibility | Hosted"
needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 12.x
- name: Accessibility Check
run: |
# Ubuntu gets mad when webpack runs too many file watchers
@@ -142,73 +128,32 @@ jobs:
shell: bash
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0
endtoendhosted:
name: "End to End Tests"
needs: [cleanupaccounts]
endtoendpuppeteer:
name: "End to end puppeteer tests"
needs: [lint, format, compile, unittest]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: End to End Puppeteer Tests
run: |
npm ci
npm start &
npm run wait-for-server
npm run test:e2e
shell: bash
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0
PORTAL_RUNNER_SUBSCRIPTION: ${{ secrets.PORTAL_RUNNER_SUBSCRIPTION }}
PORTAL_RUNNER_RESOURCE_GROUP: ${{ secrets.PORTAL_RUNNER_RESOURCE_GROUP }}
PORTAL_RUNNER_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT }}
PORTAL_RUNNER_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_DATABASE_ACCOUNT_KEY }}
PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT: ${{ secrets.PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT }}
PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY: ${{ secrets.PORTAL_RUNNER_MONGO_DATABASE_ACCOUNT_KEY }}
NOTEBOOKS_TEST_RUNNER_TENANT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_TENANT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
PORTAL_RUNNER_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_SQL }}
MONGO_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_MONGO }}
CASSANDRA_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_CASSANDRA }}
TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
strategy:
fail-fast: false
matrix:
test-file:
- ./test/cassandra/container.spec.ts
- ./test/mongo/mongoIndexPolicy.spec.ts
- ./test/notebooks/uploadAndOpenNotebook.spec.ts
- ./test/selfServe/selfServeExample.spec.ts
- ./test/sql/container.spec.ts
- ./test/sql/resourceToken.spec.ts
- ./test/tables/container.spec.ts
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- run: npm ci
- run: npm start &
- run: node utils/cleanupDBs.js
- run: npm run wait-for-server
- name: ${{ matrix['test-file'] }}
run: npx jest -c ./jest.config.e2e.js --detectOpenHandles ${{ matrix['test-file'] }}
shell: bash
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failed-*
cleanupaccounts:
name: "Cleanup Test Database Accounts"
runs-on: ubuntu-latest
env:
NOTEBOOKS_TEST_RUNNER_CLIENT_ID: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_ID }}
NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET: ${{ secrets.NOTEBOOKS_TEST_RUNNER_CLIENT_SECRET }}
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- run: npm ci
- run: node utils/cleanupDBs.js
nuget:
name: Publish Nuget
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [build]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -224,7 +169,7 @@ jobs:
- run: cp ./configs/prod.json config.json
- run: nuget sources add -Name "ADO" -Source "$NUGET_SOURCE" -UserName "GitHub" -Password "$AZURE_DEVOPS_PAT"
- run: nuget pack -Version "2.0.0-github-${GITHUB_SHA}"
- run: nuget push -SkipDuplicate -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
- run: nuget push -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
- uses: actions/upload-artifact@v2
name: packages
with:
@@ -232,7 +177,7 @@ jobs:
nugetmpac:
name: Publish Nuget MPAC
if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
needs: [build]
needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendpuppeteer]
runs-on: ubuntu-latest
env:
NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -249,7 +194,7 @@ jobs:
- run: sed -i 's/Azure.Cosmos.DB.Data.Explorer/Azure.Cosmos.DB.Data.Explorer.MPAC/g' DataExplorer.nuspec
- run: nuget sources add -Name "ADO" -Source "$NUGET_SOURCE" -UserName "GitHub" -Password "$AZURE_DEVOPS_PAT"
- run: nuget pack -Version "2.0.0-github-${GITHUB_SHA}"
- run: nuget push -SkipDuplicate -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
- run: nuget push -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
- uses: actions/upload-artifact@v2
name: packages
with:

25
.github/workflows/runners.yml vendored Normal file
View File

@@ -0,0 +1,25 @@
name: Runners
on:
schedule:
- cron: "0 * 1 * *"
jobs:
sqlcreatecollection:
runs-on: ubuntu-latest
name: "SQL | Create Collection"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- run: npm ci
- run: npm run test:e2e
env:
PORTAL_RUNNER_APP_INSIGHTS_KEY: ${{ secrets.PORTAL_RUNNER_APP_INSIGHTS_KEY }}
PORTAL_RUNNER_USERNAME: ${{ secrets.PORTAL_RUNNER_USERNAME }}
PORTAL_RUNNER_PASSWORD: ${{ secrets.PORTAL_RUNNER_PASSWORD }}
PORTAL_RUNNER_SUBSCRIPTION: 69e02f2d-f059-4409-9eac-97e8a276ae2c
PORTAL_RUNNER_RESOURCE_GROUP: runners
PORTAL_RUNNER_DATABASE_ACCOUNT: portal-sql-runner
- uses: actions/upload-artifact@v2
if: failure()
with:
name: screenshots
path: failure.png

3
.gitignore vendored
View File

@@ -9,6 +9,9 @@ pkg/DataExplorer/*
test/out/*
workers/**/*.js
*.trx
cypress/videos
cypress/screenshots
cypress/fixtures
notebookapp/*
Contracts/*
.DS_Store

Binary file not shown.

View File

@@ -17,10 +17,5 @@
"test/out/**": true,
"workers/libs/**": true
},
"typescript.tsdk": "node_modules/typescript/lib",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true,
"source.organizeImports": true
}
"typescript.tsdk": "node_modules/typescript/lib"
}

View File

@@ -1,194 +0,0 @@
# Coding Guidelines and Recommendations
Cosmos Explorer has been under constant development for over 5 years. As a result, there are many different patterns and practices in the codebase. This document serves as a guide to how we write code and helps avoid propagating practices which are no longer preferred. Each requirement in this document is labeled and color-coded to show the relative importance. In order from highest to lowest importance:
✅ DO this. If you feel you need an exception, engage with the project owners _prior_ to implementation.
⛔️ DO NOT do this. If you feel you need an exception, engage with the project owners _prior_ to implementation.
☑️ YOU SHOULD strongly consider this but it is not a requirement. If not following this advice, please comment code with why and proactively begin a discussion as part of the PR process.
⚠️ YOU SHOULD NOT do this; strongly consider avoiding it. If not following this advice, please comment code with why and proactively begin a discussion as part of the PR process.
💭 YOU MAY consider this advice if appropriate to your situation. Other team members may comment on this as part of PR review, but there is no need to be proactive.
## Development Environment
☑️ YOU SHOULD
- Use VSCode and install the following extensions. This setup will catch most linting/formatting/type errors as you develop:
- [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode)
- [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint)
💭 YOU MAY
- Use the [GitHub CLI](https://cli.github.com/). It has helpful workflows for submitting PRs as well as for checking out other team members' PRs.
- Use Windows, Linux (including WSL), or OSX. We have team members developing on all three environments.
✅ DO
- Maintain cross-platform compatibility when modifying any engineering or build systems
## Code Formatting
✅ DO
- Use [Prettier](https://prettier.io/) to format your code
- This will occur automatically if using the recommended editor setup
- `npm run format` will also format code
## Linting
✅ DO
- Use [ESLint](https://eslint.org/) to check for code errors.
- This will occur automatically if using the recommended editor setup
- `npm run lint` will also check for linting errors
💭 YOU MAY
- Consider adding new lint rules.
- If you find yourself repeatedly making "nit" comments as part of PR review, consider adding a lint rule that will automatically catch the error in the future
⚠️ YOU SHOULD NOT
- Disable lint rules
- Lint rules exist as guidance and to catch common mistakes
- You will find places where we disable specific lint rules; however, this should be exceptional.
- If a rule does need to be disabled, prefer disabling the specific line instead of the entire file (see the sketch at the end of this section).
⛔️ DO NOT
- Add [TSLint](https://palantir.github.io/tslint/) rules
- TSLint has been deprecated and is on track to be removed
- Always prefer ESLint rules
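A minimal sketch of line-scoped disabling, using one of the rules configured in `.eslintrc.js` above; the variable is illustrative:

```ts
// Scope the exception to a single line rather than the whole file.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const legacyPayload: any = JSON.parse('{"id": 1}'); // illustrative only
console.log(legacyPayload.id);
```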
## UI Components
☑️ YOU SHOULD
- Write new components using [React](https://reactjs.org/). We are actively migrating Cosmos Explorer off of [Knockout](https://knockoutjs.com/).
- Use [Fluent](https://developer.microsoft.com/en-us/fluentui#/) components.
- Fluent components are designed to be highly accessible and composable
- Using Fluent allows us to build upon the work of the Fluent team and leads to a lower total cost of ownership for UI code
### React
☑️ YOU SHOULD
- Use pure functional components when no state is required (see the sketch at the end of this section)
💭 YOU MAY
- Use functional (hooks) or class components
- The project contains examples of both
- Neither is strongly preferred at this time
⛔️ DO NOT
- Use inheritance for sharing component behavior.
- React documentation covers this topic in detail https://reactjs.org/docs/composition-vs-inheritance.html
- Suffix your file or component name with "Component"
- Even though the code has examples of it, we are ending the practice.
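A hedged sketch of the preferred style: a stateless functional component whose shared behavior is composed in via props and children rather than inherited. The names are illustrative, not existing repo components:

```tsx
import * as React from "react";

interface PanelHeaderProps {
  title: string;
  children?: React.ReactNode;
}

// Pure functional component: no state, output depends only on props.
export const PanelHeader: React.FunctionComponent<PanelHeaderProps> = ({ title, children }) => (
  <header>
    <h2>{title}</h2>
    {children /* shared behavior is composed in, not inherited */}
  </header>
);
```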
## Libraries
⚠️ YOU SHOULD NOT
- Add new libraries to package.json.
- Adding libraries may bring in code that explodes the bundled size or attempts to run NodeJS code in the browser
- Consult with project owners for help with library selection if one is needed
⛔️ DO NOT
- Use underscore.js
- Much of this library is now native to JS and will be automatically transpiled
- Use jQuery
- Much of this library is not native to the DOM.
- We are planning to remove it
## Testing
⛔️ DO NOT
- Decrease test coverage
- Unit/Functional test coverage is checked as part of the CI process
### Unit Tests
✅ DO
- Write unit tests for non-UI and utility code.
- Write your tests using [Jest](https://jestjs.io/)
☑️ YOU SHOULD
- Abstract non-UI and utility code so it can run in either the NodeJS or browser environment
### Functional(Component) Tests
✅ DO
- Write tests for UI components
- Write your tests using [Jest](https://jestjs.io/)
- Use either Enzyme or React Testing Library to perform component tests (see the sketch below).
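A short sketch of a component test with React Testing Library, assuming the hypothetical `PanelHeader` component from the earlier React sketch:

```tsx
import * as React from "react";
import { render, screen } from "@testing-library/react";
import { PanelHeader } from "./PanelHeader"; // hypothetical component under test

test("renders the panel title as a heading", () => {
  render(<PanelHeader title="Settings" />);
  // Query by accessible role and name rather than test-specific attributes.
  expect(screen.getByRole("heading", { name: "Settings" })).toBeDefined();
});
```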
### Mocking
✅ DO
- Use Jest's built-in mocking helpers
☑️ YOU SHOULD
- Write code that does not require mocking
- Build components that do not require mocking extremely large or difficult-to-mock objects (like Explorer.ts). Pass _only_ what you need (see the sketch after this section).
⛔️ DO NOT
- Use sinon.js for mocking
- Sinon has been deprecated and planned for removal
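A hedged sketch of "pass only what you need" with Jest's built-in mock helpers; the interface and function are hypothetical, not existing repo code:

```ts
// Depend on a narrow interface instead of the whole Explorer object.
interface MessageSender {
  send(message: string): void;
}

export function notifyDeleted(sender: MessageSender, id: string): void {
  sender.send(`Deleted ${id}`);
}

test("notifyDeleted sends one message", () => {
  const sender: MessageSender = { send: jest.fn() }; // built-in Jest mock
  notifyDeleted(sender, "db1");
  expect(sender.send).toHaveBeenCalledWith("Deleted db1");
});
```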
### End to End Tests
✅ DO
- Use [Puppeteer](https://developers.google.com/web/tools/puppeteer) and [Jest](https://jestjs.io/)
- Write or modify an existing E2E test that covers the primary use case of any major feature.
- Use caution. Do not try to cover every case. End to End tests can be slow and brittle.
☑️ YOU SHOULD
- Write tests that use accessible attributes to perform actions. Role, Title, Label, etc
- More information https://testing-library.com/docs/queries/about#priority
⚠️ YOU SHOULD NOT
- Add test-specific `data-*` attributes to DOM elements
- This is a common current practice, but one we would like to avoid in the future
- End to end tests need to use semantic HTML and accessible attributes to be truly end to end
- No user or screen reader actually navigates an app using `data-*` attributes
- Add arbitrary time delays to wait for a page to render or an element to be ready.
- All the time delays add up and slow down testing.
- Prefer using the framework's "wait for..." functionality (see the sketch after this section).
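A hedged Puppeteer/Jest sketch of both points: query by an accessible attribute and wait for the element instead of sleeping. The selector, label, and URL are illustrative:

```ts
import puppeteer from "puppeteer";

test("opens the New Container pane", async () => {
  // ignoreHTTPSErrors accommodates the self-signed localhost certificate.
  const browser = await puppeteer.launch({ ignoreHTTPSErrors: true });
  const page = await browser.newPage();
  await page.goto("https://localhost:1234/explorer.html");
  // Wait for the labelled control instead of adding an arbitrary delay.
  await page.waitForSelector('button[aria-label="New Container"]', { visible: true });
  await page.click('button[aria-label="New Container"]');
  await browser.close();
});
```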
### Migrating Knockout to React
✅ DO
- Consult other team members before beginning migration work. There is a significant amount of flux in patterns we are using and it is important we do not propagate incorrect patterns.
- Start by converting HTML to JSX: https://magic.reactjs.net/htmltojsx.htm. Add functionality as a second step (see the sketch after this section).
☑️ YOU SHOULD
- Write React components that require no dependency on Knockout or observables to trigger rendering.
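A hedged before/after sketch of the HTML-to-JSX step; the binding and component are hypothetical:

```tsx
import * as React from "react";

// Knockout markup being migrated:  <span data-bind="text: collectionName"></span>
// JSX equivalent, rendered from props with no observable dependency:
export const CollectionLabel: React.FunctionComponent<{ collectionName: string }> = ({ collectionName }) => (
  <span>{collectionName}</span>
);
```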
## Browser Support
✅ DO
- Support all [browsers supported by the Azure Portal](https://docs.microsoft.com/en-us/azure/azure-portal/azure-portal-supported-browsers-devices)
- Support IE11
- In practice, this should not need to be considered as part of a normal development workflow
- Polyfills and transpilation are already provided by our engineering systems.
- This requirement will be removed on March 30th, 2021 when Azure drops IE11 support.

View File

@@ -13,7 +13,6 @@ For more information see the [Code of Conduct FAQ](https://opensource.microsoft.
contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Microsoft Open Source Code of Conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
Resources:
@@ -21,3 +20,33 @@ Resources:
- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
## Browser support
Please make sure to support all modern browsers as well as Internet Explorer 11.
For IE support, polyfills are preferred over new usage of lodash or underscore. We already polyfill almost everything by importing babel-polyfill at the top of entry points.
## Coding guidelines, conventions and recommendations
### Typescript
* Follow this [typescript style guide](https://github.com/excelmicro/typescript) which is based on [airbnb's style guide](https://github.com/airbnb/javascript).
* Conventions specific to this project (see the sketch after this list):
- Use double quotes for strings
- Don't use `null`; use `undefined`
- Pascal case for private static readonly fields
- Camel case for class names in markup
* Don't use classes unless necessary
* Code related to notebooks should be dynamically imported so that it is loaded from a separate bundle only if the account is notebook-enabled. There are already top-level notebook components which are dynamically imported, and their dependencies can be statically imported from these files.
* Prefer using [Fluent UI controls](https://developer.microsoft.com/en-us/fluentui#/controls/web) over creating your own, in order to maintain consistency and support a11y.
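A small sketch of the conventions above; the module path and function are hypothetical:

```ts
// Double quotes for strings; prefer undefined over null; dynamic import for notebook-only code.
export async function openNotebooksIfEnabled(isNotebookEnabled: boolean): Promise<string | undefined> {
  if (!isNotebookEnabled) {
    return undefined; // not null
  }
  const { NotebookTitle } = await import("./NotebookConstants"); // hypothetical notebook-only module
  return NotebookTitle;
}
```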
### React
* Prefer using React class components over function components and hooks unless you have a simple component that requires no nested functions (see the sketch after this list):
* Nested functions may be harder to test independently
* Switching from a function component to a class component later may be painful
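A minimal sketch of the class component style described above; the component is illustrative:

```tsx
import * as React from "react";

interface CounterState {
  count: number;
}

export class Counter extends React.Component<{}, CounterState> {
  state: CounterState = { count: 0 };

  private increment = (): void => {
    this.setState((prev) => ({ count: prev.count + 1 }));
  };

  render(): JSX.Element {
    return <button onClick={this.increment}>Clicked {this.state.count} times</button>;
  }
}
```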
## Testing
Any PR should not decrease testing coverage.
## Recommended Tools and VS Code extensions
* [Bookmarks](https://github.com/alefragnani/vscode-bookmarks)
* [Bracket pair colorizer](https://github.com/CoenraadS/Bracket-Pair-Colorizer-2)
* [GitHub Pull Requests and Issues](https://github.com/Microsoft/vscode-pull-request-github)

View File

@@ -13,17 +13,29 @@ UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), ht
### Watch mode
Run `npm start` to start the development server and automatically rebuild on changes
Run `npm run watch` to start the development server and automatically rebuild on changes
### Hosted Development (https://cosmos.azure.com)
### Specifying Development Platform
- Visit: `https://localhost:1234/hostedExplorer.html`
- The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
Setting the environment variable `PLATFORM` during the build process will force the explorer to load the specified platform. By default in development it will run in `Hosted` mode. Valid options:
- Hosted
- Emulator
- Portal
`PLATFORM=Emulator npm run watch`
### Hosted Development
The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
To run pure hosted mode, change the index HtmlWebpackPlugin in `webpack.config.js` to use hostedExplorer.html and change the index entry to use HostedExplorer.ts.
### Emulator Development
- Start the Cosmos Emulator
- Visit: https://localhost:1234/index.html
In a Windows environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-Windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.
`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
#### Setting up a Remote Emulator
@@ -43,8 +55,16 @@ The Cosmos emulator currently only runs in Windows environments. You can still d
### Portal Development
- Visit: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
- You may have to manually visit https://localhost:1234/explorer.html first and click through any SSL certificate warnings
The Cosmos Portal that consumes this repo is not currently open source. If you have access to this project, `npm run build` will copy the built files over to the portal where they will be loaded by the portal development environment
You can, however, load a locally running instance of data explorer in the production portal.
1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
2. Whitelist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
Live reload will occur, but data explorer will not properly integrate again with the parent iframe. You will have to manually reload the page.
### Testing
@@ -56,21 +76,24 @@ Unit tests are located adjacent to the code under test and run with [Jest](https
#### End to End CI Tests
Jest and Puppeteer are used for end to end browser-based tests, which are contained in `test/`. To run these tests locally:
[Cypress](https://www.cypress.io/) is used for end to end tests, which are contained in `cypress/`. Currently, it operates as a sub-project with its own TypeScript config and dependencies, and it only operates against the emulator. To run cypress tests:
1. Copy .env.example to .env
2. Update the values in .env including your local data explorer endpoint (ask a teammate/codeowner for help with .env values)
3. Make sure all packages are installed `npm install`
4. Run the server `npm run start` and wait for it to start
5. Run `npm run test:e2e`
1. Ensure the emulator is running
2. Start cosmos explorer in emulator mode: `PLATFORM=Emulator npm run watch`
3. Move into `cypress/` folder: `cd cypress`
4. Install dependencies: `npm install`
5. Run cypress headless(`npm run test`) or in interactive mode(`npm run test:debug`)
#### End to End Production Runners
Jest and Puppeteer are used for end to end production runners, which are contained in `test/`. To run these tests locally:
1. Copy .env.example to .env and fill in all variables
2. Run `npm run test:e2e`
### Releasing
We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
### Architecture
[![](https://mermaid.ink/img/eyJjb2RlIjoiZ3JhcGggTFJcbiAgaG9zdGVkKGh0dHBzOi8vY29zbW9zLmF6dXJlLmNvbSlcbiAgcG9ydGFsKFBvcnRhbClcbiAgZW11bGF0b3IoRW11bGF0b3IpXG4gIGFhZFtBQURdXG4gIHJlc291cmNlVG9rZW5bUmVzb3VyY2UgVG9rZW5dXG4gIGNvbm5lY3Rpb25TdHJpbmdbQ29ubmVjdGlvbiBTdHJpbmddXG4gIHBvcnRhbFRva2VuW0VuY3J5cHRlZCBQb3J0YWwgVG9rZW5dXG4gIG1hc3RlcktleVtNYXN0ZXIgS2V5XVxuICBhcm1bQVJNIFJlc291cmNlIFByb3ZpZGVyXVxuICBkYXRhcGxhbmVbRGF0YSBQbGFuZV1cbiAgcHJveHlbUG9ydGFsIEFQSSBQcm94eV1cbiAgc3FsW1NRTF1cbiAgbW9uZ29bTW9uZ29dXG4gIHRhYmxlc1tUYWJsZXNdXG4gIGNhc3NhbmRyYVtDYXNzYW5kcmFdXG4gIGdyYWZbR3JhcGhdXG5cblxuICBlbXVsYXRvciAtLT4gbWFzdGVyS2V5IC0tLS0-IGRhdGFwbGFuZVxuICBwb3J0YWwgLS0-IGFhZFxuICBob3N0ZWQgLS0-IHBvcnRhbFRva2VuICYgcmVzb3VyY2VUb2tlbiAmIGNvbm5lY3Rpb25TdHJpbmcgJiBhYWRcbiAgYWFkIC0tLT4gYXJtXG4gIGFhZCAtLS0-IGRhdGFwbGFuZVxuICBhYWQgLS0tPiBwcm94eVxuICByZXNvdXJjZVRva2VuIC0tLT4gc3FsIC0tPiBkYXRhcGxhbmVcbiAgcG9ydGFsVG9rZW4gLS0tPiBwcm94eVxuICBwcm94eSAtLT4gZGF0YXBsYW5lXG4gIGNvbm5lY3Rpb25TdHJpbmcgLS0-IHNxbCAmIG1vbmdvICYgY2Fzc2FuZHJhICYgZ3JhZiAmIHRhYmxlc1xuICBzcWwgLS0-IGRhdGFwbGFuZVxuICB0YWJsZXMgLS0-IGRhdGFwbGFuZVxuICBtb25nbyAtLT4gcHJveHlcbiAgY2Fzc2FuZHJhIC0tPiBwcm94eVxuICBncmFmIC0tPiBwcm94eVxuXG5cdFx0IiwibWVybWFpZCI6eyJ0aGVtZSI6ImRlZmF1bHQifSwidXBkYXRlRWRpdG9yIjpmYWxzZX0)](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoiZ3JhcGggTFJcbiAgaG9zdGVkKGh0dHBzOi8vY29zbW9zLmF6dXJlLmNvbSlcbiAgcG9ydGFsKFBvcnRhbClcbiAgZW11bGF0b3IoRW11bGF0b3IpXG4gIGFhZFtBQURdXG4gIHJlc291cmNlVG9rZW5bUmVzb3VyY2UgVG9rZW5dXG4gIGNvbm5lY3Rpb25TdHJpbmdbQ29ubmVjdGlvbiBTdHJpbmddXG4gIHBvcnRhbFRva2VuW0VuY3J5cHRlZCBQb3J0YWwgVG9rZW5dXG4gIG1hc3RlcktleVtNYXN0ZXIgS2V5XVxuICBhcm1bQVJNIFJlc291cmNlIFByb3ZpZGVyXVxuICBkYXRhcGxhbmVbRGF0YSBQbGFuZV1cbiAgcHJveHlbUG9ydGFsIEFQSSBQcm94eV1cbiAgc3FsW1NRTF1cbiAgbW9uZ29bTW9uZ29dXG4gIHRhYmxlc1tUYWJsZXNdXG4gIGNhc3NhbmRyYVtDYXNzYW5kcmFdXG4gIGdyYWZbR3JhcGhdXG5cblxuICBlbXVsYXRvciAtLT4gbWFzdGVyS2V5IC0tLS0-IGRhdGFwbGFuZVxuICBwb3J0YWwgLS0-IGFhZFxuICBob3N0ZWQgLS0-IHBvcnRhbFRva2VuICYgcmVzb3VyY2VUb2tlbiAmIGNvbm5lY3Rpb25TdHJpbmcgJiBhYWRcbiAgYWFkIC0tLT4gYXJtXG4gIGFhZCAtLS0-IGRhdGFwbGFuZVxuICBhYWQgLS0tPiBwcm94eVxuICByZXNvdXJjZVRva2VuIC0tLT4gc3FsIC0tPiBkYXRhcGxhbmVcbiAgcG9ydGFsVG9rZW4gLS0tPiBwcm94eVxuICBwcm94eSAtLT4gZGF0YXBsYW5lXG4gIGNvbm5lY3Rpb25TdHJpbmcgLS0-IHNxbCAmIG1vbmdvICYgY2Fzc2FuZHJhICYgZ3JhZiAmIHRhYmxlc1xuICBzcWwgLS0-IGRhdGFwbGFuZVxuICB0YWJsZXMgLS0-IGRhdGFwbGFuZVxuICBtb25nbyAtLT4gcHJveHlcbiAgY2Fzc2FuZHJhIC0tPiBwcm94eVxuICBncmFmIC0tPiBwcm94eVxuXG5cdFx0IiwibWVybWFpZCI6eyJ0aGVtZSI6ImRlZmF1bHQifSwidXBkYXRlRWRpdG9yIjpmYWxzZX0)
We generally adhere to the release strategy [documented by the Azure SDK Guidelines](https://azure.github.io/azure-sdk/policies_repobranching.html#release-branches). Most releases should happen from the master branch. If master contains commits that cannot be released, you may create a release from a `release/` or `hotfix/` branch. See linked documentation for more details.
# Contributing

View File

@@ -1,4 +1,3 @@
module.exports = {
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
plugins: [["@babel/plugin-proposal-decorators", { legacy: true }]],
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
};

View File

@@ -1,7 +0,0 @@
# Why?
This adds a mock module for `canvas`. Nteract has an ignored require and an undeclared dependency on this module. `canvas` is a server-side Node module and is not used in browser-side code for nteract.
Installing it locally (`npm install canvas`) will resolve the problem, but it is a native module, so it is flaky depending on the system, Node version, processor architecture, etc. This module provides a simpler, more robust solution.
Remove this workaround if [this bug](https://github.com/nteract/any-vega/issues/2) ever gets resolved

View File

@@ -1 +0,0 @@
module.exports = {}

View File

@@ -1,11 +0,0 @@
{
"name": "canvas",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}

4
cypress/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
cypress.env.json
cypress/report
cypress/screenshots
cypress/videos

51
cypress/cleanup.js Normal file
View File

@@ -0,0 +1,51 @@
// Cleans up old databases from previous test runs
const { CosmosClient } = require("@azure/cosmos");
// TODO: Add support for other API connection strings
const mongoRegex = RegExp("mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com");
async function cleanup() {
const connectionString = process.env.CYPRESS_CONNECTION_STRING;
if (!connectionString) {
throw new Error("Connection string not provided");
}
let client;
switch (true) {
case connectionString.includes("mongodb://"): {
const [, key, accountName] = connectionString.match(mongoRegex);
client = new CosmosClient({
key,
endpoint: `https://${accountName}.documents.azure.com:443/`
});
break;
}
// TODO: Add support for other API connection strings
default:
client = new CosmosClient(connectionString);
break;
}
const response = await client.databases.readAll().fetchAll();
return Promise.all(
response.resources.map(async db => {
const dbTimestamp = new Date(db._ts * 1000);
const twentyMinutesAgo = new Date(Date.now() - 1000 * 60 * 20);
if (dbTimestamp < twentyMinutesAgo) {
await client.database(db.id).delete();
console.log(`DELETED: ${db.id} | Timestamp: ${dbTimestamp}`);
} else {
console.log(`SKIPPED: ${db.id} | Timestamp: ${dbTimestamp}`);
}
})
);
}
cleanup()
.then(() => {
process.exit(0);
})
.catch(error => {
console.error(error);
process.exit(1);
});

15
cypress/cypress.json Normal file
View File

@@ -0,0 +1,15 @@
{
"integrationFolder": "./integration",
"pluginsFile": false,
"fixturesFolder": false,
"supportFile": "./support/index.js",
"defaultCommandTimeout": 90000,
"chromeWebSecurity": false,
"reporter": "mochawesome",
"reporterOptions": {
"reportDir": "cypress/report",
"json": true,
"overwrite": false,
"html": false
}
}

View File

@@ -0,0 +1,66 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Cassandra API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.cassandra);
});
it("Create a new table in Cassandra API", () => {
const keyspaceId = `KeyspaceId${crypt.randomBytes(8).toString("hex")}`;
const tableId = `TableId112`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[id="keyspace-id"]')
.should("be.visible")
.type(keyspaceId);
cy.wrap($body)
.find('input[class="textfontclr"]')
.type(tableId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", tableId);
});
});
});

View File

@@ -0,0 +1,81 @@
// 1. Click on "New Graph" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Graph API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.graph);
});
it("Create a new graph in Graph API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const graphId = `TestGraph${crypt.randomBytes(8).toString("hex")}`;
const partitionKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Graph"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.should("be.visible")
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(graphId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(partitionKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", graphId);
});
});
});

View File

@@ -0,0 +1,80 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// // 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test - createDatabase", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,96 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in Mongo API - Autopilot", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('div[class="throughputModeContainer"]')
.should("be.visible")
.and(input => {
expect(input.get(0).textContent, "first item").contains("Autopilot (preview)");
expect(input.get(1).textContent, "second item").contains("Manual");
});
cy.wrap($body)
.find('input[id="newContainer-databaseThroughput-autoPilotRadio"]')
.check();
cy.wrap($body)
.find('select[name="autoPilotTiers"]')
// .eq(1).should('contain', '4,000 RU/s');
// // .select('4,000 RU/s').should('have.value', '1');
.find('option[value="2"]')
.then($element => $element.get(1).setAttribute("selected", "selected"));
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,67 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it.skip("Create a new collection in existing database in Mongo API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('span[class="nodeLabel"]')
.should("be.visible")
.then($span => {
const dbId1 = $span.text();
cy.log("DBBB", dbId1);
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-existingDatabase"]')
.type(dbId1);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.click()
.should("contain", collectionId);
});
});
});
});

View File

@@ -0,0 +1,203 @@
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context.skip("Mongo API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new collection in Mongo API - Provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab2"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
it("Create a new collection - without provision database throughput Fixed Storage Capacity", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Collection"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.uncheck();
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[id="tab1"]')
.check({ force: true });
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId)
.click()
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,79 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("SQL API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString();
});
it("Create a new container in SQL API", () => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const sharedKey = `SharedKey${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Container"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-createNewDatabase"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollectionPane-databaseSharedThroughput"]')
.check();
cy.wrap($body)
.find('input[data-test="addCollection-newDatabaseId"]')
.type(dbId);
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-partitionKeyValue"]')
.type(sharedKey);
cy.wrap($body)
.find("#submitBtnAddCollection")
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", dbId);
});
});
});

View File

@@ -0,0 +1,60 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
const connectionString = require("../../../utilities/connectionString");
let crypt = require("crypto");
context("Table API Test", () => {
beforeEach(() => {
connectionString.loginUsingConnectionString(connectionString.constants.table);
});
it("Create a new table in Table API", () => {
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find('div[class="commandBarContainer"]')
.should("be.visible")
.find('button[data-test="New Table"]')
.should("be.visible")
.click();
cy.wrap($body)
.find('div[class="contextual-pane-in"]')
.should("be.visible")
.find('span[id="containerTitle"]');
cy.wrap($body)
.find('input[data-test="addCollection-collectionId"]')
.type(collectionId);
cy.wrap($body)
.find('input[data-test="databaseThroughputValue"]')
.should("have.value", "400");
cy.wrap($body)
.find('input[data-test="addCollection-createCollection"]')
.click();
cy.wait(10000);
cy.wrap($body)
.find('div[data-test="resourceTreeId"]')
.should("exist")
.find('div[class="treeComponent dataResourceTree"]')
.should("contain", collectionId);
});
});
});

View File

@@ -0,0 +1,55 @@
// 1. Click on "New Container" on the command bar.
// 2. Pane with the title "Add Container" should appear on the right side of the screen
// 3. It includes an input box for the database Id.
// 4. It includes a checkbox called "Create now".
// 5. When the checkbox is marked, enter new database id.
// 3. Create a database WITH "Provision throughput" checked.
// 4. Enter minimum throughput value of 400.
// 5. Enter container id to the container id text box.
// 6. Enter partition key to the partition key text box.
// 7. Click "OK" to create a new container.
// 8. Verify the new container is created along with the database id and should appead in the Data Explorer list in the left side of the screen.
let crypt = require("crypto");
context("Emulator - createDatabase", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
const collectionIdTitle = `Add Collection`;
const partitionKey = `PartitionKey${crypt.randomBytes(8).toString("hex")}`;
it("Create a new collection", () => {
cy.contains("New Container").click();
// cy.contains(collectionIdTitle);
cy.get(".createNewDatabaseOrUseExisting")
.should("have.length", 2)
.and(input => {
expect(input.get(0).textContent, "first item").contains("Create new");
expect(input.get(1).textContent, "second item").contains("Use existing");
});
cy.get('input[data-test="addCollection-createNewDatabase"]').check();
cy.get('input[data-test="addCollection-newDatabaseId"]').type(dbId);
cy.get('input[data-test="addCollection-collectionId"]').type(collectionId);
cy.get('input[data-test="databaseThroughputValue"]').should("have.value", "400");
cy.get('input[data-test="addCollection-partitionKeyValue"]').type(partitionKey);
cy.get('input[data-test="addCollection-createCollection"]').click();
cy.get('div[data-test="resourceTreeId"]').should("exist");
cy.get('div[data-test="resourceTree-collectionsTree"]').should("contain", dbId);
cy.get('div[data-test="databaseList"]').should("contain", collectionId);
});
});

View File

@@ -0,0 +1,65 @@
// 1. Click on "New Database" on the command bar
// 2. a Pane with the title "Add Database" should appear on the right side of the screen
// i. It includes an input box for the database Id.
// ii. It includes a checkbox called "Provision throughput".
// iii. Whe the checkbox is marked, a new input with a throughput control let's you customize RU at the database level
// 3. Create a database WITHOUT "Provision throughput" checked.
// 4. It should appear in the Data Explorer list.
// 5. Repeat steps 1-3 but create a database WITH "Provision throughput" with the default RU value.
// 6. It should appear in the Data Explorer list.
// 7. If expanded, it should have the list item called "Scale", that once clicked, it should show the "Scale" tab.
// 8. Inside that tab, a throughput control will let you change the RU value within the permited range.
// 9. If you change the value, it should enable the "Save" button.
// 10. Click "Save" and verify that the process completes without error.
// 11. Close the tab and reopen it and verify that the input contains the last saved value.%
const crypto = require("crypto");
const client = require("../../../utilities/cosmosClient");
const randomString = crypto.randomBytes(2).toString("hex");
const databaseId = `TestDB-${randomString}`;
const collectionId = `TestColl-${randomString}`;
context("Emulator - Create database -> container -> item", () => {
beforeEach(async () => {
const { resources } = await client.databases.readAll().fetchAll();
for (const database of resources) {
await client.database(database.id).delete();
}
});
it("creates a new database", () => {
cy.visit("https://0.0.0.0:1234/explorer.html?platform=Emulator");
cy.contains("New Container").click();
cy.get("[data-test=addCollection-newDatabaseId]").click();
cy.get("[data-test=addCollection-newDatabaseId]").type(databaseId);
cy.get("[data-test=addCollection-collectionId]").click();
cy.get("[data-test=addCollection-collectionId]").type(collectionId);
cy.get("[data-test=addCollection-partitionKeyValue]").click();
cy.get("[data-test=addCollection-partitionKeyValue]").type("/pk");
cy.get('input[name="createCollection"]').click();
cy.get(".dataResourceTree").should("contain", databaseId);
cy.get(".dataResourceTree")
.contains(databaseId)
.click();
cy.get(".dataResourceTree").should("contain", collectionId);
cy.get(".dataResourceTree")
.contains(collectionId)
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.get(".dataResourceTree")
.contains("Items")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("New Item")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".commandBarContainer")
.contains("Save")
.click();
cy.wait(1000); // React rendering inside KO causes some weird async rendering that makes this test flaky without waiting
cy.get(".documentsGridHeaderContainer").should("contain", "replace_with_new_document_id");
});
});

View File

@@ -0,0 +1,46 @@
// 1. Click the last database in the resource tree.
// 2. Click the last collection within the database.
// 3. Open the context menu on the collection.
// 4. Select the "Delete Container" option in the dropdown.
// 5. On selection, the Delete Container pane opens on the right side.
// 6. Enter the id of the collection to be deleted and click OK.
// 7. The resource tree then refreshes and the deleted collection should no longer appear under the database.
let crypt = require("crypto");
context("Emulator - deleteCollection", () => {
beforeEach(() => {
cy.visit("http://localhost:1234/explorer.html");
});
it("Delete a collection", () => {
cy.get(".databaseId")
.last()
.click();
cy.get(".collectionList")
.last()
.then($id => {
const collectionId = $id.text();
cy.get('span[data-test="collectionEllipsisMenu"]').should("exist");
cy.get('span[data-test="collectionEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="collectionContextMenu"]')
.contains("Delete Container")
.click({ force: true });
cy.get('input[data-test="confirmCollectionId"]').type(collectionId.trim());
cy.get('input[data-test="deleteCollection"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", collectionId);
cy.get('div[data-test="databaseMenu"]').should("not.contain", collectionId);
});
});
});

View File

@@ -0,0 +1,83 @@
// 1. Click the last database in the resource tree.
// 2. Open the context menu on the database.
// 3. Select the "Delete Database" option in the dropdown.
// 4. On selection, the Delete Database pane opens on the right side.
// 5. Enter the id of the database to be deleted and click OK.
// 6. The resource tree then refreshes and the deleted database should no longer appear in the resource tree.
let crypt = require("crypto");
context("Emulator - deleteDatabase", () => {
beforeEach(() => {
const dbId = `TestDatabase${crypt.randomBytes(8).toString("hex")}`;
const collectionId = `TestCollection${crypt.randomBytes(8).toString("hex")}`;
let db_rid = "";
const date = new Date().toUTCString();
let authToken = "";
cy.visit("http://localhost:1234/explorer.html");
// Request an auth token from the emulator, used to create the test database below
cy.request({
method: "GET",
url: "https://localhost:8081/_explorer/authorization/post/dbs/",
headers: {
"x-ms-date": date,
authorization: "-"
}
})
.then(response => {
authToken = response.body.Token; // Store the auth token for the database creation request below
})
.then(() => {
cy.request({
method: "POST",
url: "https://localhost:8081/dbs",
headers: {
"x-ms-date": date,
authorization: authToken,
"x-ms-version": "2018-12-31"
},
body: {
id: dbId
}
}).then(response => {
cy.log("Response", response);
db_rid = response.body._rid;
cy.log("Rid", db_rid);
});
});
});
it("Delete a database", () => {
cy.get('span[data-test="refreshTree"]').click();
cy.get(".databaseId")
.last()
.then($id => {
const dbId = $id.text();
cy.get('span[data-test="databaseEllipsisMenu"]').should("exist");
cy.get('span[data-test="databaseEllipsisMenu"]')
.invoke("show")
.last()
.click();
cy.get('div[data-test="databaseContextMenu"]')
.contains("Delete Database")
.click({ force: true });
cy.get('input[data-test="confirmDatabaseId"]').type(dbId.trim());
cy.get('input[data-test="deleteDatabase"]').click();
cy.get('div[data-test="databaseList"]').should("not.contain", dbId);
});
});
});

View File

@@ -0,0 +1,35 @@
# Notebook end-to-end tests
This describes how to run the notebook end-to-end tests locally.
## Stand up a local notebook container instance
Instructions on how to build and run the container are [here](https://microsoft.sharepoint.com/teams/DocDB/_layouts/OneNote.aspx?id=%2Fteams%2FDocDB%2FSiteAssets%2FDocDB%20Team%20Notebook&wd=target%28Tools%20_%20SDK%2FPortal%2FDevelopment.one%7CF800BE8E-1E31-48FE-90D7-EF698EF88112%2FHow%20to%20build%20notebook%20service%7C4BAA153B-422C-41E2-B997-F3FCE02CD743%2F%29)
## Run a local data explorer
Instructions are in [`DataExplorer/README.md`](https://msdata.visualstudio.com/CosmosDB/_git/cosmosdb-dataexplorer?path=%2FProduct%2FPortal%2FDataExplorer%2FREADME.md&_a=preview).
Make sure you can run Data Explorer locally from the web browser.
## Run cypress tests
1. Edit the URL for your Data Explorer in the `.spec.ts` file (a sketch of the constant to change follows the commands below).
2. Run the test:
```bash
cd DataExplorer/cypress
npm i
npm t -- --spec 'integration/notebook/newNotebook.spec.ts'
```
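For step 1, the notebook specs point Cypress at Data Explorer through a hard-coded `explorerUrl` constant. A minimal sketch of the value to edit is below; the host, port, and `feature.*` query parameters are placeholders for whatever your local setup uses:
```js
// In integration/notebook/newNotebook.spec.ts (and the other notebook specs),
// point this constant at your locally running Data Explorer and notebook server.
const explorerUrl =
  "https://localhost:1234/explorer.html" +
  "?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook" +
  "&feature.notebookServerToken=token" +
  "&feature.enablenotebooks=true";
```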
To run in debug mode:
```bash
npm run test:debug
```
This opens the Cypress UI.
## Troubleshooting
* The tests are recorded in the `videos` folder.
* Cypress does not support hover: workarounds [here](https://docs.cypress.io/api/commands/hover.html#Workarounds). One workaround used in these specs is sketched right after this list.
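As a minimal sketch (the selector is simply the one used by the emulator delete-collection spec in this change), hover-only menus can be forced visible with `invoke("show")` before clicking:
```js
// Work around the missing hover support: force the hover-only ellipsis menu
// to display, then click it.
cy.get('span[data-test="collectionEllipsisMenu"]')
  .invoke("show")
  .last()
  .click();
```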
## References
* [Cypress API](https://docs.cypress.io/api/api/table-of-contents.html)
* [Cypress cookbook](https://docs.cypress.io/faq/questions/using-cypress-faq.html#How-do-I-get-an-element%E2%80%99s-text-contents)
* [Cypress best practices](https://docs.cypress.io/guides/references/best-practices.html#Selecting-Elements)

View File

@@ -0,0 +1,93 @@
// THIS ADDS A NEW NOTEBOOK TO YOUR NOTEBOOKS
context("New Notebook smoke test", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create a new notebook and run some code", () => {
// Create new notebook
cy.contains("New Notebook").click();
// Check tab name
cy.get("li.tabList .tabNavText").should($span => {
const text = $span.text();
expect(text).to.match(/^Untitled.*\.ipynb$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.click();
// Type in some code
cy.get("@cellContainer").type("2+4");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "6");
});
// Restart kernel
cy.get('[data-test="Run"] button')
.eq(-1)
.click();
cy.get("li")
.contains("Restart Kernel")
.click();
// Wait for python3 | restarting status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*restarting$/);
});
// Wait for python3 | idle status
cy.get('[data-test="notebookStatusBar"] [data-test="kernelStatus"]', { timeout }).should($p => {
const text = $p.text();
expect(text).to.match(/^python3.*idle$/);
});
// Click on a cell
cy.get(".cell-container")
.as("cellContainer")
.find(".input")
.as("codeInput")
.click();
// Type in some code
cy.get("@codeInput").type("{backspace}{backspace}{backspace}4+5");
// Execute
cy.get('[data-test="Run"]')
.first()
.click();
// Verify results
cy.get("@cellContainer").within(() => {
cy.get("pre code span").should("contain", "9");
});
});
});

View File

@@ -0,0 +1,172 @@
context("Resource tree notebook file manipulation", () => {
const timeout = 15000; // in ms
const explorerUrl =
"https://localhost:1234/explorer.html?feature.notebookserverurl=https%3A%2F%2Flocalhost%3A10001%2F12345%2Fnotebook&feature.notebookServerToken=token&feature.enablenotebooks=true";
/**
* Wait for UI to be ready
*/
const waitForReady = () => {
cy.get(".splashScreenContainer", { timeout }).should("be.visible");
};
const clickContextMenuAndSelectOption = (nodeLabel, option) => {
cy.get(`.treeNodeHeader[data-test="${nodeLabel}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains(option)
.click();
};
const createFolder = folder => {
clickContextMenuAndSelectOption("My Notebooks/", "New Directory");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]').type(folder);
cy.get("form").submit();
});
};
const deleteItem = nodeName => {
clickContextMenuAndSelectOption(`${nodeName}`, "Delete");
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
};
beforeEach(() => {
cy.visit(explorerUrl);
waitForReady();
});
it("Create and remove a directory", () => {
const folder = "e2etest_folder1";
createFolder(folder);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("exist");
deleteItem(`${folder}/`);
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
});
it("Create and rename a directory", () => {
const folder = "e2etest_folder2";
const renamedFolder = "e2etest_folder2_renamed";
createFolder(folder);
// Rename
clickContextMenuAndSelectOption(`${folder}/`, "Rename");
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedFolder);
cy.get("form").submit();
});
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("exist");
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).should("not.exist");
deleteItem(`${renamedFolder}/`);
cy.get(`.treeNodeHeader[data-test="${renamedFolder}/"]`).should("not.exist");
});
it("Create a notebook inside a directory", () => {
const folder = "e2etest_folder3";
const newNotebookName = "Untitled.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Verify tab is open
cy.get(".tabList")
.contains(newNotebookName)
.should("exist");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
// When running from the command line, closing the tab happens too fast and a confirmation dialog may appear
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
deleteItem(`${folder}/`);
});
it("Create and rename a notebook inside a directory", () => {
const folder = "e2etest_folder4";
const newNotebookName = "Untitled.ipynb";
const renamedNotebookName = "mynotebook.ipynb";
createFolder(folder);
clickContextMenuAndSelectOption(`${folder}/`, "New Notebook");
// Close tab
cy.get(`.tabList[title="notebooks/${folder}/${newNotebookName}"]`)
.find(".cancelButton")
.click();
cy.get("body").then($body => {
if ($body.find(".ms-Dialog-main").length) {
// For some reason, this does not work
// cy.get(".ms-Dialog-main").contains("Close").click();
cy.get(".ms-Dialog-main .ms-Button--primary").click();
}
});
// Expand folder node
cy.get(`.treeNodeHeader[data-test="${folder}/"]`).click();
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("exist");
// Rename notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Rename")
.click();
cy.get("#stringInputPane").within(() => {
cy.get('input[name="collectionIdConfirmation"]')
.clear()
.type(renamedNotebookName);
cy.get("form").submit();
});
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${newNotebookName}"]`).should("not.exist");
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`).should("exist");
// Delete notebook
cy.get(`.nodeChildren[data-test="${folder}/"] .treeNodeHeader[data-test="${renamedNotebookName}"]`)
.find("button.treeMenuEllipsis")
.click();
cy.get('[data-test="treeComponentMenuItemContainer"]')
.contains("Delete")
.click();
// Confirm
cy.get(".ms-Dialog-main")
.contains("Delete")
.click();
// Give it time to settle
cy.wait(1000);
deleteItem(`${folder}/`);
});
});

3066
cypress/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

25
cypress/package.json Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "cosmos-explorer-cypress",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "cypress run",
"wait-for-server": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/",
"test:sql": "cypress run --browser chrome --spec \"./integration/dataexplorer/SQL/*\"",
"test:ci": "wait-on -t 240000 -i 5000 -v https-get://0.0.0.0:1234/ https-get://0.0.0.0:8081/_explorer/index.html && cypress run --browser edge --headless",
"test:debug": "cypress open"
},
"devDependencies": {
"cypress": "^4.8.0",
"mocha": "^7.0.1",
"mochawesome": "^4.1.0",
"mochawesome-merge": "^4.0.1",
"mochawesome-report-generator": "^4.1.0",
"typescript": "3.4.3",
"wait-on": "^4.0.2"
},
"dependencies": {
"@microsoft/applicationinsights-web": "^2.5.2"
}
}

23
cypress/support/index.js Normal file
View File

@@ -0,0 +1,23 @@
let appInsightsLib = require("@microsoft/applicationinsights-web");
const appInsights = new appInsightsLib.ApplicationInsights({
config: {
instrumentationKey: "fe61c39f-7d32-4488-a191-b13621965315"
/* ...Other Configuration Options... */
}
});
appInsights.loadAppInsights();
Cypress.on("fail", (error, runnable) => {
// App Insights will record any failed test (e.g. the Create Collection specs)
let message = JSON.stringify(runnable.title);
appInsights.trackTrace({
message: `${message}`,
properties: {
passed: false,
error: error
}
});
throw error; // throw error to have test still fail
});

11
cypress/tsconfig.json Normal file
View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"strict": true,
"noEmit": true,
"module": "commonjs",
"target": "es5",
"lib": ["es5", "dom", "es6"],
"types": ["cypress", "node"]
},
"include": ["**/*.ts", "**/*.spec.ts"]
}

View File

@@ -0,0 +1,41 @@
module.exports = {
loginUsingConnectionString: function() {
const prodUrl = Cypress.env("TEST_ENDPOINT") || "https://localhost:1234/hostedExplorer.html";
const timeout = 15000;
cy.visit(prodUrl);
cy.get('iframe[id="explorerMenu"]').should("be.visible");
cy.get("iframe").then($element => {
const $body = $element.contents().find("body");
cy.wrap($body)
.find("#connectExplorer")
.should("exist")
.find("div[class='connectExplorer']")
.should("exist")
.find("p[class='welcomeText']")
.should("exist");
cy.wrap($body.find("div > p.switchConnectTypeText"))
.should("exist")
.last()
.click({ force: true });
const secret = Cypress.env("CONNECTION_STRING");
cy.wrap($body)
.find("input[class='inputToken']")
.should("exist")
.type(secret, {
force: true
});
cy.wrap($body.find("input[value='Connect']"), { timeout })
.first()
.click({ force: true });
cy.wait(15000);
});
}
};

View File

@@ -0,0 +1,6 @@
const { CosmosClient } = require("@azure/cosmos");
module.exports = new CosmosClient({
endpoint: "https://0.0.0.0:8081",
key: "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
});

1963
externals/adal.js vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.31449 2.01439L4.00103 5.31963L3.26105 4.57965L7.8407 0L12.4203 4.57965L11.6804 5.31963L8.36691 2.01439V12.8428H7.31449V2.01439ZM13.629 12.8428H14.6814V16H1V12.8428H2.05242V14.9476H13.629V12.8428Z" fill="#0078D4"/>
</svg>

Before

Width:  |  Height:  |  Size: 329 B

View File

@@ -6,7 +6,6 @@ module.exports = {
slowMo: 55,
defaultViewport: null,
ignoreHTTPSErrors: true,
args: ["--disable-web-security"],
exitOnPageError: false,
},
args: ["--disable-web-security"]
}
};

View File

@@ -1,5 +1,5 @@
module.exports = {
preset: "jest-puppeteer",
testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
setupFiles: ["dotenv/config"],
setupFiles: ["dotenv/config"]
};

View File

@@ -21,13 +21,17 @@ module.exports = {
collectCoverage: true,
// An array of glob patterns indicating a set of files for which coverage information should be collected
collectCoverageFrom: ["src/**/*.{js,jsx,ts,tsx}"],
// collectCoverageFrom: [
// "src/Common/Headers*"
// ],
// The directory where Jest should output its coverage files
coverageDirectory: "coverage",
// An array of regexp pattern strings used to skip coverage collection
coveragePathIgnorePatterns: ["/node_modules/"],
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// A list of reporter names that Jest uses when writing coverage reports
coverageReporters: ["json", "text", "cobertura"],
@@ -35,11 +39,11 @@ module.exports = {
// An object that configures minimum threshold enforcement for coverage results
coverageThreshold: {
global: {
branches: 25,
functions: 25,
branches: 20,
functions: 24,
lines: 30,
statements: 30,
},
statements: 29.0
}
},
// Make calling deprecated APIs throw helpful error messages
@@ -67,13 +71,12 @@ module.exports = {
// A map from regular expressions to module names that allow to stub out resources with a single module
moduleNameMapper: {
"^.*[.](svg|png|gif|less|css)$": "<rootDir>/mockModule",
"@nteract/stateful-components/(.*)$": "<rootDir>/mockModule",
"^.*[.](svg|png|gif|less)$": "<rootDir>/mockModule",
"worker-loader": "<rootDir>/mockModule",
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -161,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.html?$": "html-loader-jest",
"^.+\\.[t|j]sx?$": "babel-jest",
"^.+\\.[t|j]sx?$": "babel-jest"
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/", "/externals/"],
transformIgnorePatterns: ["/node_modules/", "/externals/"]
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,

View File

@@ -4,7 +4,7 @@
@font-face {
font-family: wf_segoe-ui_normal;
src: url("../../fonts/segoe-ui/west-european/normal/latest.woff");
src: url('../../fonts/segoe-ui/west-european/normal/latest.woff');
}
@DataExplorerFont: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif;
@@ -20,26 +20,26 @@
COLORS
/******************************************************************************/
@AccentMediumHigh: #0058ad;
@AccentMedium: #004e87;
@AccentHigh: #1ebaed;
@AccentExtraHigh: #55b3ff;
@AccentLow: #edf6ff;
@AccentMediumLow: #ddeefe;
@AccentLight: #eef7ff;
@AccentExtra: #ddf0ff;
@AccentMediumHigh: #0058AD;
@AccentMedium: #004E87;
@AccentHigh: #1EBAED;
@AccentExtraHigh: #55B3FF;
@AccentLow: #EDF6FF;
@AccentMediumLow: #DDEEFE;
@AccentLight: #EEF7FF;
@AccentExtra: #DDF0FF;
@SelectionHigh: #b91f26;
@BaseLight: #ffffff;
@SelectionHigh: #B91F26;
@BaseLight: #FFFFFF;
@BaseDark: #000000;
@NotificationLow: #fff4ce;
@NotificationHigh: #f9e9b0;
@Purple1: #8a2da5;
@NotificationLow: #FFF4CE;
@NotificationHigh: #F9E9B0;
@Purple1: #8A2DA5;
@Dirty: #9b4f96;
@BaseLow: #f2f2f2;
@BaseMediumLow: #e6e6e6;
@BaseMedium: #cccccc;
@BaseLow: #F2F2F2;
@BaseMediumLow: #E6E6E6;
@BaseMedium: #CCCCCC;
@BaseMediumHigh: #767676;
@BaseHigh: #393939;
@@ -53,17 +53,10 @@
@ErrorColor: @SelectionHigh;
@SelectionColor: #3074b0;
@SelectionColor: #3074B0;
@FocusColor: #605e5c;
@GalleryBackgroundColor: #fdfdfd;
//Icons
@InfoIconColor: #0072c6;
@WarningIconColor: #db7500;
@ErrorIconColor: #b91f26;
/******************************************************************************
METRICS
/******************************************************************************/
@@ -172,8 +165,8 @@
.selectedRadio:hover,
.selectedRadio:active,
.selectedRadio.dirty,
.tab [type="radio"]:checked ~ label,
.tab [type="radio"]:checked ~ label:hover {
.tab [type=radio]:checked ~ label,
.tab [type=radio]:checked ~ label:hover {
-ms-high-contrast-adjust: none;
-webkit-text-fill-color: HighlightText;
color: HighlightText;
@@ -182,8 +175,9 @@
}
.queryMetricsSummaryTuple {
th,
td {
th, td {
&:nth-child(2) {
width: @IETableDataWidth;
}
@@ -278,27 +272,3 @@
.tooltipVisible() {
visibility: visible;
}
.inputTooltip() {
position: relative;
}
.inputTooltipText(@textColor: @BaseLight, @backgroundColor: @BaseHigh) {
background-color: @backgroundColor;
color: @textColor;
position: absolute;
z-index: 1;
padding: @MediumSpace;
}
.inputTooltipTextAfter(@color: @BaseDark) {
content: "";
position: absolute;
right: 100%;
border-style: solid;
border-color: transparent @color transparent transparent;
left: 10px;
width: 0;
height: 0;
border-color: @InfoPointerColor transparent;
}

View File

@@ -16,7 +16,7 @@ body {
height: 100%;
:focus {
.focus();
.focus()
}
}
@@ -59,7 +59,7 @@ body {
right: 14px;
border-width: 0 9px 9px;
border-style: solid;
border-color: #fff rgba(0, 0, 0, 0);
border-color: #FFF rgba(0, 0, 0, 0);
display: block;
width: 0;
}
@@ -88,6 +88,7 @@ body {
height: 100%;
width: 100%;
.urlContainer {
margin-left: @DefaultSpace;
@@ -178,8 +179,7 @@ body {
.active();
}
&:focus .urlTokenCopyTooltiptext,
&:focus .urlTokenCopyTooltiptext {
&:focus .urlTokenCopyTooltiptext, &:focus .urlTokenCopyTooltiptext {
.tooltipVisible();
}
@@ -217,7 +217,7 @@ body {
.shareLink {
width: 300px;
background-color: #ffffff;
background-color: #FFFFFF;
border: 1px solid @BaseMedium;
overflow: hidden;
text-overflow: ellipsis;
@@ -718,12 +718,11 @@ execute-sproc-params-pane {
}
}
.stored-procedure-tab {
stored-procedure-tab {
@ToggleHeight: 30px;
@ToggleWidth: 180px;
.results-container,
.errors-container {
.results-container, .errors-container {
padding: @MediumSpace 0px 0px @MediumSpace;
height: 100%;
.flex-display();
@@ -935,19 +934,19 @@ menuQuickStart {
.content {
display: inline-block;
width: 100%;
transition: all 0.4s ease-in-out;
-ms-transition: all 0.4s ease-in-out;
-webkit-transition: all 0.4s ease-in-out;
-moz-transition: all 0.4s ease-in-out;
transition: all .4s ease-in-out;
-ms-transition: all .4s ease-in-out;
-webkit-transition: all .4s ease-in-out;
-moz-transition: all .4s ease-in-out;
height: 100vh;
}
.mini {
width: 0%;
float: left;
transition: all 0.4s ease-in-out;
-webkit-transition: all 0.4s ease-in-out;
-moz-transition: all 0.4s ease-in-out;
transition: all .4s ease-in-out;
-webkit-transition: all .4s ease-in-out;
-moz-transition: all .4s ease-in-out;
height: 100vh;
background-color: white;
}
@@ -1098,7 +1097,7 @@ menuQuickStart {
}
#tbodycontent>tr>td {
border-bottom: 1px solid #cccccc;
border-bottom: 1px solid #CCCCCC;
border-top: none;
padding: 6px;
}
@@ -1206,7 +1205,7 @@ menuQuickStart {
.gridRowSelected:hover {
cursor: default;
.hover();
.hover()
}
.gridRowHighlighted {
@@ -1241,7 +1240,7 @@ menuQuickStart {
border-top: 1px solid #eee;
margin-left: -17px;
width: 100%;
color: 1px solid #53575b;
color: 1px solid #53575B;
}
.partitioning-btn {
@@ -1309,7 +1308,7 @@ menuQuickStart {
.collid-white {
width: 100%;
border: solid 1px #ddd;
border: solid 1px #DDD;
}
.plusimg-but {
@@ -1523,21 +1522,6 @@ p {
.tooltipVisible();
}
.inputTooltip {
.inputTooltip();
}
.inputTooltip .inputTooltipText {
top: -68px;
.inputTooltipText();
}
.inputTooltip .inputTooltipText::after {
border-width: @MediumSpace @MediumSpace 0 @MediumSpace;
top: 55px;
.inputTooltipTextAfter();
}
.infoTooltip a {
color: @AccentHigh;
}
@@ -1647,6 +1631,7 @@ p {
margin-left: -32px;
}
/* Variant of paddingspan3 without the margins */
.contextual-pane .paddingspan3b {
@@ -1659,7 +1644,7 @@ p {
}
.contextual-pane hr {
border: 1px solid #53575b;
border: 1px solid #53575B;
margin-right: 20px;
}
@@ -1709,7 +1694,6 @@ input::-webkit-calendar-picker-indicator {
display: flex;
flex-direction: column;
height: 100%;
max-height: 100vh;
}
.contextual-pane .paneErrorDetailsContainer {
@@ -1834,11 +1818,11 @@ label {
.datalist-arrow:focus:after,
.datalist-arrow:active:after {
background: #1ebbee;
background: #1EBBEE;
}
input::-webkit-calendar-picker-indicator::after {
content: "\276F";
content: '\276F';
right: 0;
top: -8%;
display: block;
@@ -1852,7 +1836,7 @@ input::-webkit-calendar-picker-indicator::after {
}
.datalist-arrow:after:hover {
content: "\276F";
content: '\276F';
position: absolute;
right: 1px;
top: 6%;
@@ -1864,7 +1848,7 @@ input::-webkit-calendar-picker-indicator::after {
color: #fff;
text-align: center;
pointer-events: none;
background-color: #1ebbee;
background-color: #1EBBEE;
}
.Introline3 {
@@ -1928,7 +1912,7 @@ input::-webkit-calendar-picker-indicator::after {
.nav-tabs-margin {
padding-top: 8px;
background-color: #f2f2f2;
background-color: #F2F2F2;
}
.navTabHeight {
@@ -1994,7 +1978,7 @@ input::-webkit-calendar-picker-indicator::after {
.atags {
color: @AccentMediumHigh;
font-weight: 400;
cursor: pointer;
cursor: pointer
}
.qsmenuicons {
@@ -2099,7 +2083,7 @@ a:link {
display: flex;
flex: 1 1 auto;
overflow-x: auto;
overflow-y: auto;
overflow-y: hidden;
height: 100%;
}
@@ -2234,7 +2218,7 @@ a:link {
.documentsGridHeaderContainer {
padding-left: 5px;
width: 100%;
border-bottom: 1px solid #cccccc;
border-bottom: 1px solid #CCCCCC;
}
.documentsGridHeaderContainer>table {
@@ -2250,7 +2234,7 @@ a:link {
position: sticky;
top: 0;
background-color: #fff !important;
border-bottom: 1px solid #cccccc !important;
border-bottom: 1px solid #CCCCCC !important;
}
}
@@ -2399,7 +2383,7 @@ a:link {
color: #393939;
}
.tab [type="radio"] {
.tab [type=radio] {
display: none;
}
@@ -2411,40 +2395,40 @@ a:link {
padding: @MediumSpace 0px;
}
.tab [type="radio"]:checked ~ label {
.tab [type=radio]:checked~label {
border: 1px solid #0072c6;
background-color: @AccentMediumHigh;
color: white;
z-index: 2;
}
.tab [type="radio"]:checked ~ label:hover {
.tab [type=radio]:checked~label:hover {
border: 1px solid @AccentMediumHigh;
background-color: @AccentMediumHigh;
color: white;
z-index: 2;
}
.tab [type="radio"]:checked ~ label:active {
.tab [type=radio]:checked~label:active {
border: 1px solid #0072c6;
background-color: #0072c6;
color: white;
z-index: 2;
}
.tab [type="radio"]:checked ~ label ~ .tabcontent {
.tab [type=radio]:checked~label~.tabcontent {
z-index: 1;
display: initial;
}
.tab [type="radio"]:not(:checked) ~ label:hover {
.tab [type=radio]:not(:checked)~label:hover {
border: 1px solid #969696;
background-color: #969696;
color: white;
cursor: pointer;
}
.tab [type="radio"]:not(:checked) ~ label ~ .tabcontent {
.tab [type=radio]:not(:checked)~label~.tabcontent {
display: none;
}
@@ -2748,7 +2732,7 @@ a:link {
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
flex-grow: 1;
flex-grow: 1
}
.tabIconSection {
@@ -2838,17 +2822,17 @@ a:link {
}
.tabCommandDisabled {
color: #cccccc;
color: #CCCCCC;
cursor: default;
background-color: #ffffff;
background-color: #FFFFFF;
}
.tabCommandDisabled:active {
border: 1px solid #ffffff;
border: 1px solid #FFFFFF;
}
.tabCommandDisabled:hover {
background-color: #ffffff;
background-color: #FFFFFF;
}
#explorerNotificationConsole {
@@ -2956,13 +2940,13 @@ settings-pane {
}
.linkDarkBackground {
color: @AccentExtraHigh;
color: @AccentExtraHigh
}
.linkDarkBackground:hover,
.linkDarkBackground:active,
.linkDarkBackground:focus {
color: @AccentHigh;
color: @AccentHigh
}
.library-add-button {
@@ -2999,7 +2983,7 @@ settings-pane {
}
.enableAnalyticalStorageRadioLabel {
padding: 0px;
padding: 0px
}
}
@@ -3009,18 +2993,18 @@ settings-pane {
}
.button.enabled{
background: #fff;
background: #FFF;
border-radius: 2px;
color: #323130;
padding: 3px 20px;
border: 1px solid #8a8886;
border: 1px solid #8A8886;
}
.button.disabled{
background: #f3f2f1;
border: 0px solid #8a8886;
background: #F3F2F1;
border: 0px solid #8A8886;
border-radius: 2px;
color: #a19f9d;
color: #A19F9D;
padding: 3px 20px;
}
@@ -3033,55 +3017,13 @@ settings-pane {
}
.warningErrorContent a {
color: @AccentMediumHigh;
color: @AccentMediumHigh
}
.infoBoxContent a {
color: @AccentMediumHigh;
color: @AccentMediumHigh
}
.collapsibleSection :hover{
cursor: pointer;
}
.messageBarInfoIcon {
color: @InfoIconColor;
}
.messageBarWarningIcon {
color: @WarningIconColor;
}
.freeTierInfoBanner {
background-color: @BaseLow;
display: inline-flex;
padding: @DefaultSpace;
width: 100%;
.freeTierInfoIcon img {
height: 28px;
width: 28px;
margin-left: 4px;
}
.freeTierInfoMessage {
margin: auto 0;
padding-left: @MediumSpace;
}
}
.freeTierInlineWarning {
display: inline-flex;
padding: 8px 8px 8px 0;
width: 100%;
.freeTierWarningIcon img {
height: 20px;
width: 20px;
}
.freeTierWarningMessage {
margin: auto 0;
padding-left: @SmallSpace;
}
}

View File

@@ -13,11 +13,6 @@
@NavMediumSpace: 10px;
@NavLargeSpace: 15px;
.skip-link {
position: fixed;
top: -200px;
}
html {
font-family: wf_segoe-ui_normal, "Segoe UI", "Segoe WP", Tahoma, Arial, sans-serif;
padding: 0px;

View File

@@ -1,12 +1,20 @@
@import "./Common/Constants";
.main {
width: 100%;
float: left;
transition: all .0s ease-in-out;
-ms-transition: all 0s ease-in-out;
-webkit-transition: all 0s ease-in-out;
-moz-transition: all .0s ease-in-out;
height: 100%;
background-color: white;
border-left: 0px solid white;
}
.resourceTree {
height: 100%;
flex: 0 0 auto;
.main {
height: 100%;
}
}
.resourceTreeScroll {

34519
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -4,17 +4,12 @@
"description": "Cosmos Explorer",
"main": "index.js",
"dependencies": {
"@azure/arm-cosmosdb": "9.1.0",
"@azure/cosmos": "3.9.0",
"@azure/cosmos-language-service": "0.0.5",
"@azure/identity": "1.2.1",
"@azure/ms-rest-nodeauth": "3.0.7",
"@babel/plugin-proposal-class-properties": "7.12.1",
"@babel/plugin-proposal-decorators": "7.12.12",
"@jupyterlab/services": "6.0.2",
"@jupyterlab/terminal": "3.0.3",
"@azure/cosmos-language-service": "0.0.4",
"@jupyterlab/services": "6.0.0-rc.2",
"@jupyterlab/terminal": "3.0.0-rc.2",
"@microsoft/applicationinsights-web": "2.5.9",
"@nteract/commutable": "7.4.2",
"@nteract/commutable": "7.3.2",
"@nteract/connected-components": "6.8.2",
"@nteract/core": "15.1.0",
"@nteract/data-explorer": "8.0.3",
@@ -26,7 +21,7 @@
"@nteract/jupyter-widgets": "2.0.0",
"@nteract/logos": "1.0.0",
"@nteract/markdown": "4.4.0",
"@nteract/monaco-editor": "3.2.2",
"@nteract/monaco-editor": "3.2.0",
"@nteract/octicons": "2.0.0",
"@nteract/outputs": "3.0.9",
"@nteract/presentational-components": "3.0.7",
@@ -39,17 +34,16 @@
"@nteract/transform-vega": "7.0.6",
"@octokit/rest": "17.9.2",
"@phosphor/widgets": "1.9.3",
"@testing-library/jest-dom": "5.11.9",
"@types/mkdirp": "1.0.1",
"@types/node-fetch": "2.5.7",
"@uifabric/react-cards": "0.109.110",
"@uifabric/react-hooks": "7.14.0",
"@uifabric/styling": "7.13.7",
"abort-controller": "3.0.0",
"applicationinsights": "1.8.0",
"babel-polyfill": "6.26.0",
"bootstrap": "3.4.1",
"canvas": "file:./canvas",
"canvas": "2.6.1",
"clean-webpack-plugin": "0.1.19",
"clipboard-copy": "4.0.1",
"copy-webpack-plugin": "6.0.2",
"crossroads": "0.12.2",
"css-element-queries": "1.1.1",
@@ -59,13 +53,12 @@
"date-fns": "1.29.0",
"dayjs": "1.8.19",
"dotenv": "8.2.0",
"es6-object-assign": "1.1.0",
"es6-symbol": "3.1.3",
"eslint-plugin-jest": "23.13.2",
"eslint-plugin-react": "7.20.0",
"hasher": "1.2.0",
"html2canvas": "1.0.0-rc.5",
"i18next": "19.8.4",
"i18next-browser-languagedetector": "6.0.1",
"i18next-http-backend": "1.0.23",
"immutable": "4.0.0-rc.12",
"is-ci": "2.0.0",
"jquery": "3.5.1",
@@ -73,63 +66,67 @@
"jquery-ui-dist": "1.12.1",
"knockout": "3.5.1",
"mkdirp": "1.0.4",
"monaco-editor": "0.18.1",
"ms": "2.1.3",
"msal": "1.4.4",
"office-ui-fabric-react": "7.164.2",
"monaco-editor": "0.15.6",
"object.entries": "1.1.0",
"office-ui-fabric-react": "7.134.1",
"p-retry": "4.2.0",
"plotly.js-cartesian-dist-min": "1.52.3",
"promise-polyfill": "8.1.0",
"promise.prototype.finally": "3.1.0",
"q": "1.5.1",
"react": "16.13.1",
"react-animate-height": "2.0.8",
"react-dnd": "9.4.0",
"react-dnd-html5-backend": "9.4.0",
"react-dom": "16.13.1",
"react-dom": "16.9.0",
"react-hotkeys": "2.0.0",
"react-i18next": "11.8.5",
"react-notification-system": "0.2.17",
"react-redux": "7.1.3",
"redux": "4.0.4",
"reflect-metadata": "0.1.13",
"rx-jupyter": "5.5.12",
"rxjs": "6.6.3",
"styled-components": "4.3.2",
"swr": "0.4.0",
"terser-webpack-plugin": "3.1.0",
"text-encoding": "0.7.0",
"underscore": "1.9.1",
"utility-types": "3.10.0"
"url-polyfill": "1.1.7",
"utility-types": "3.10.0",
"webcrypto-liner": "1.1.4",
"webfontloader": "1.6.28",
"whatwg-fetch": "3.0.0"
},
"devDependencies": {
"@babel/core": "7.9.0",
"@babel/preset-env": "7.9.0",
"@babel/preset-react": "7.9.4",
"@babel/preset-typescript": "7.9.0",
"@testing-library/react": "11.2.3",
"@types/applicationinsights-js": "1.0.7",
"@types/codemirror": "0.0.56",
"@types/crossroads": "0.0.30",
"@types/d3": "5.9.2",
"@types/enzyme": "3.10.7",
"@types/enzyme-adapter-react-16": "1.0.6",
"@types/expect-puppeteer": "4.4.5",
"@types/expect-puppeteer": "4.4.3",
"@types/hasher": "0.0.31",
"@types/jest": "26.0.20",
"@types/jest-environment-puppeteer": "4.4.1",
"@types/jest": "23.3.10",
"@types/jest-environment-puppeteer": "4.3.2",
"@types/memoize-one": "4.1.1",
"@types/node": "12.11.1",
"@types/promise.prototype.finally": "2.0.3",
"@types/prop-types": "15.5.8",
"@types/puppeteer": "5.4.3",
"@types/puppeteer": "3.0.1",
"@types/q": "1.5.1",
"@types/react": "17.0.0",
"@types/react-dom": "17.0.0",
"@types/react": "16.9.49",
"@types/react-dom": "16.0.7",
"@types/react-notification-system": "0.2.39",
"@types/react-redux": "7.1.7",
"@types/sinon": "2.3.3",
"@types/styled-components": "5.1.1",
"@types/text-encoding": "0.0.33",
"@types/underscore": "1.7.36",
"@types/webfontloader": "1.6.29",
"@typescript-eslint/eslint-plugin": "4.0.1",
"@typescript-eslint/parser": "4.0.1",
"adal-angular": "1.0.15",
"axe-puppeteer": "1.1.0",
"babel-jest": "24.9.0",
"babel-loader": "8.1.0",
@@ -144,14 +141,13 @@
"eslint-cli": "1.1.1",
"eslint-plugin-no-null": "1.0.2",
"eslint-plugin-prefer-arrow": "1.2.2",
"eslint-plugin-react-hooks": "4.2.0",
"expose-loader": "0.7.5",
"fast-glob": "3.2.5",
"file-loader": "2.0.0",
"fs-extra": "7.0.0",
"html-loader": "0.5.5",
"html-loader-jest": "0.2.1",
"html-webpack-plugin": "3.2.0",
"inline-css": "2.2.5",
"jest": "25.5.4",
"jest-canvas-mock": "2.1.0",
"jest-puppeteer": "4.4.0",
@@ -162,12 +158,13 @@
"mini-css-extract-plugin": "0.4.3",
"monaco-editor-webpack-plugin": "1.7.0",
"node-fetch": "2.6.1",
"prettier": "2.2.1",
"puppeteer": "8.0.0",
"prettier": "1.19.1",
"puppeteer": "4.0.0",
"raw-loader": "0.5.1",
"rimraf": "3.0.0",
"sinon": "3.2.1",
"style-loader": "0.23.0",
"terser-webpack-plugin": "3.0.5",
"ts-loader": "6.2.2",
"tslint": "5.11.0",
"tslint-microsoft-contrib": "6.0.0",
@@ -197,8 +194,8 @@
"compile": "tsc",
"compile:contracts": "tsc -p ./tsconfig.contracts.json",
"compile:strict": "tsc -p ./tsconfig.strict.json",
"format": "prettier --write \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format:check": "prettier --check \"{src,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format": "prettier --write \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"format:check": "prettier --check \"{src,cypress,test}/**/*.{ts,tsx,html}\" \"*.{js,html}\"",
"lint": "tslint --project tsconfig.json && eslint \"**/*.{ts,tsx}\"",
"build:contracts": "npm run compile:contracts",
"strictEligibleFiles": "node ./strict-migration-tools/index.js",

View File

@@ -3,7 +3,7 @@
"offerThroughput": 400,
"databaseLevelThroughput": false,
"collectionId": "Persons",
"createNewDatabase": true,
"rupmEnabled": false,
"partitionKey": { "kind": "Hash", "paths": ["/name"] },
"data": [
"g.addV('person').property(id, '1').property('name', 'Eva').property('age', 44)",

View File

@@ -2,6 +2,5 @@ export enum AuthType {
AAD = "aad",
EncryptedToken = "encryptedtoken",
MasterKey = "masterkey",
ResourceToken = "resourcetoken",
ConnectionString = "connectionstring",
ResourceToken = "resourcetoken"
}

View File

@@ -1,6 +1,5 @@
import * as ko from "knockout";
import * as ReactBindingHandler from "./ReactBindingHandler";
import "../Explorer/Tables/DataTable/DataTableBindingManager";
export class BindingHandlersRegisterer {
public static registerBindingHandlers() {
@@ -14,7 +13,7 @@ export class BindingHandlersRegisterer {
) {
const value = ko.unwrap(wrappedValueAccessor());
bindingContext?.$data.isTemplateReady(value);
},
}
} as ko.BindingHandler;
ReactBindingHandler.Registerer.register();

View File

@@ -42,7 +42,7 @@ export class Registerer {
// Initial rendering at mount point
ReactDOM.render(adapter.renderComponent(), element);
},
}
} as ko.BindingHandler;
}
}

View File

@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key);
if (values) {
values.forEach((value) => iteratorFct(value));
values.forEach(value => iteratorFct(value));
}
}

View File

@@ -1,3 +1,11 @@
import { AutopilotTier } from "../Contracts/DataModels";
import { HashMap } from "./HashMap";
export class AuthorizationEndpoints {
public static arm: string = "https://management.core.windows.net/";
public static common: string = "https://login.windows.net/";
}
export class CodeOfConductEndpoints {
public static privacyStatement: string = "https://aka.ms/ms-privacy-policy";
public static codeOfConduct: string = "https://aka.ms/cosmos-code-of-conduct";
@@ -7,7 +15,7 @@ export class CodeOfConductEndpoints {
export class EndpointsRegex {
public static readonly cassandra = [
"AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com"
];
public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -98,12 +106,34 @@ export class CapabilityNames {
public static readonly EnableServerless: string = "EnableServerless";
}
export class Features {
public static readonly cosmosdb = "cosmosdb";
public static readonly enableChangeFeedPolicy = "enablechangefeedpolicy";
public static readonly enableRupm = "enablerupm";
public static readonly executeSproc = "dataexplorerexecutesproc";
public static readonly hostedDataExplorer = "hosteddataexplorerenabled";
public static readonly enableTtl = "enablettl";
public static readonly enableNotebooks = "enablenotebooks";
public static readonly enableGalleryPublish = "enablegallerypublish";
public static readonly enableCodeOfConduct = "enablecodeofconduct";
public static readonly enableLinkInjection = "enablelinkinjection";
public static readonly enableSpark = "enablespark";
public static readonly livyEndpoint = "livyendpoint";
public static readonly notebookServerUrl = "notebookserverurl";
public static readonly notebookServerToken = "notebookservertoken";
public static readonly notebookBasePath = "notebookbasepath";
public static readonly canExceedMaximumValue = "canexceedmaximumvalue";
public static readonly enableFixedCollectionWithSharedThroughput = "enablefixedcollectionwithsharedthroughput";
public static readonly enableAutoPilotV2 = "enableautopilotv2";
public static readonly ttl90Days = "ttl90days";
public static readonly enableRightPanelV2 = "enablerightpanelv2";
public static readonly enableSDKoperations = "enablesdkoperations";
}
// flight names returned from the portal are always lowercase
export class Flights {
public static readonly SettingsV2 = "settingsv2";
public static readonly MongoIndexEditor = "mongoindexeditor";
public static readonly MongoIndexing = "mongoindexing";
public static readonly AutoscaleTest = "autoscaletest";
}
export class AfecFeatures {
@@ -112,6 +142,19 @@ export class AfecFeatures {
public static readonly StorageAnalytics = "storageanalytics-public-preview";
}
export class Spark {
public static readonly MaxWorkerCount = 10;
public static readonly SKUs: HashMap<string> = new HashMap({
"Cosmos.Spark.D1s": "D1s / 1 core / 4GB RAM",
"Cosmos.Spark.D2s": "D2s / 2 cores / 8GB RAM",
"Cosmos.Spark.D4s": "D4s / 4 cores / 16GB RAM",
"Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
"Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
"Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
"Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
});
}
export class TagNames {
public static defaultExperience: string = "defaultExperience";
}
@@ -123,7 +166,7 @@ export class MongoDBAccounts {
export enum MongoBackendEndpointType {
local,
remote,
remote
}
// TODO: 435619 Add default endpoints per cloud and use regional only when available
@@ -138,6 +181,11 @@ export class CassandraBackend {
public static readonly guestSchemaApi: string = "api/guest/cassandra/schema";
}
export class RUPMStates {
public static on: string = "on";
public static off: string = "off";
}
export class Queries {
public static CustomPageOption: string = "custom";
public static UnlimitedPageOption: string = "unlimited";
@@ -214,6 +262,7 @@ export class HttpHeaders {
public static usePolygonsSmallerThanAHemisphere = "x-ms-documentdb-usepolygonssmallerthanahemisphere";
public static autoPilotThroughput = "autoscaleSettings";
public static autoPilotThroughputSDK = "x-ms-cosmos-offer-autopilot-settings";
public static autoPilotTier = "x-ms-cosmos-offer-autopilot-tier";
public static partitionKey: string = "x-ms-documentdb-partitionkey";
public static migrateOfferToManualThroughput: string = "x-ms-cosmos-migrate-offer-to-manual-throughput";
public static migrateOfferToAutopilot: string = "x-ms-cosmos-migrate-offer-to-autopilot";
@@ -250,7 +299,7 @@ export class HttpStatusCodes {
HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable,
HttpStatusCodes.GatewayTimeout,
HttpStatusCodes.GatewayTimeout
];
}
@@ -306,7 +355,10 @@ export class HashRoutePrefixes {
public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
return transformedDatabasePrefix
.replace("{coll_id}", collectionId)
.replace("{doc_id}", docId)
.replace("/", ""); // strip the first slash since hasher adds it
}
}
@@ -352,9 +404,57 @@ export class OfferVersions {
export enum ConflictOperationType {
Replace = "replace",
Create = "create",
Delete = "delete",
Delete = "delete"
}
export class AutoPilot {
public static tier1Text: string = "4,000 RU/s";
public static tier2Text: string = "20,000 RU/s";
public static tier3Text: string = "100,000 RU/s";
public static tier4Text: string = "500,000 RU/s";
public static tierText = {
[AutopilotTier.Tier1]: "Tier 1",
[AutopilotTier.Tier2]: "Tier 2",
[AutopilotTier.Tier3]: "Tier 3",
[AutopilotTier.Tier4]: "Tier 4"
};
public static tierMaxRus = {
[AutopilotTier.Tier1]: 2000,
[AutopilotTier.Tier2]: 20000,
[AutopilotTier.Tier3]: 100000,
[AutopilotTier.Tier4]: 500000
};
public static tierMinRus = {
[AutopilotTier.Tier1]: 0,
[AutopilotTier.Tier2]: 0,
[AutopilotTier.Tier3]: 0,
[AutopilotTier.Tier4]: 0
};
public static tierStorageInGB = {
[AutopilotTier.Tier1]: 50,
[AutopilotTier.Tier2]: 200,
[AutopilotTier.Tier3]: 1000,
[AutopilotTier.Tier4]: 5000
};
}
export class DataExplorerVersions {
public static readonly v_1_0_0: string = "1.0.0";
public static readonly v_1_0_1: string = "1.0.1";
}
export class DataExplorerFeatures {
public static offerCache: string = "OfferCache";
}
export const DataExplorerFeaturesVersions: any = {
OfferCache: DataExplorerVersions.v_1_0_1
};
export const EmulatorMasterKey =
//[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Well known public masterKey for emulator")]
"C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";

View File

@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
resourceId: "",
resourceType: "dbs" as ResourceType,
headers: {},
getAuthorizationTokenUsingMasterKey: () => "",
getAuthorizationTokenUsingMasterKey: () => ""
};
beforeEach(() => {
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
headers: new Map(),
headers: new Map()
};
});
});
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
it("does not call the auth service if a master key is set", async () => {
updateUserContext({
masterKey: "foo",
masterKey: "foo"
});
await tokenProvider(options);
expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
headers: new Map(),
headers: new Map()
};
});
});
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in production", () => {
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in dev", () => {
updateConfigContext({
BACKEND_ENDPOINT: "https://localhost:1234",
BACKEND_ENDPOINT: "https://localhost:1234"
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo",
},
},
cassandraEndpoint: "foo"
}
}
});
expect(endpoint()).toEqual("bar");
});
it("uses _endpoint if set", () => {
updateUserContext({
endpoint: "baz",
endpoint: "baz"
});
expect(endpoint()).toEqual("baz");
});
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
updateConfigContext({
platform: Platform.Hosted,
BACKEND_ENDPOINT: "https://localhost:1234",
PROXY_PATH: "/proxy",
PROXY_PATH: "/proxy"
});
const headers = {};
const endpoint = "https://docs.azure.com";

View File

@@ -1,10 +1,9 @@
import * as Cosmos from "@azure/cosmos";
import { RequestInfo, setAuthorizationTokenHeaderUsingMasterKey } from "@azure/cosmos";
import { configContext, Platform } from "../ConfigContext";
import { userContext } from "../UserContext";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { EmulatorMasterKey, HttpHeaders } from "./Constants";
import { getErrorMessage } from "./ErrorHandlingUtils";
import { userContext } from "../UserContext";
const _global = typeof self === "undefined" ? window : self;
@@ -58,32 +57,32 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
method: "POST",
headers: {
"content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken,
"x-ms-encrypted-auth-token": userContext.accessToken
},
body: JSON.stringify({
verb,
resourceType,
resourceId,
}),
resourceId
})
});
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json());
return result;
} catch (error) {
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${getErrorMessage(error)}`);
logConsoleError(`Failed to get authorization headers for ${resourceType}: ${error.message}`);
return Promise.reject(error);
}
}
export function client(): Cosmos.CosmosClient {
const options: Cosmos.CosmosClientOptions = {
endpoint: endpoint() || "https://cosmos.azure.com", // CosmosClient gets upset if we pass a bad URL. This should never actually get called
endpoint: endpoint() || " ", // CosmosClient gets upset if we pass a falsy value here
key: userContext.masterKey,
tokenProvider,
connectionPolicy: {
enableEndpointDiscovery: false,
enableEndpointDiscovery: false
},
userAgentSuffix: "Azure Portal",
userAgentSuffix: "Azure Portal"
};
if (configContext.PROXY_PATH !== undefined) {

View File

@@ -1,5 +1,5 @@
import { getCommonQueryOptions } from "./queryDocuments";
import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
import { getCommonQueryOptions } from "./DataAccessUtilityBase";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
describe("getCommonQueryOptions", () => {
it("builds the correct default options objects", () => {

View File

@@ -0,0 +1,182 @@
import {
ConflictDefinition,
FeedOptions,
ItemDefinition,
OfferDefinition,
QueryIterator,
Resource
} from "@azure/cosmos";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import Q from "q";
import { configContext, Platform } from "../ConfigContext";
import * as DataModels from "../Contracts/DataModels";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
import { OfferUtils } from "../Utils/OfferUtils";
import * as Constants from "./Constants";
import { client } from "./CosmosClient";
import * as HeadersUtility from "./HeadersUtility";
import { sendCachedDataMessage } from "./MessageHandler";
export function getCommonQueryOptions(options: FeedOptions): any {
const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
options = options || {};
options.populateQueryMetrics = true;
options.enableScanInQuery = options.enableScanInQuery || true;
if (!options.partitionKey) {
options.forceQueryPlan = true;
}
options.maxItemCount =
options.maxItemCount ||
(storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
Constants.Queries.itemsPerPage;
options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
return options;
}
export function queryDocuments(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
options = getCommonQueryOptions(options);
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.items.query(query, options);
return Q(documentsIterator);
}
export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
const partitionKeyValue: any = conflictId.partitionKeyValue;
return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
}
export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
if (!partitionKeyDefinition) {
return undefined;
}
if (partitionKeyValue === undefined) {
return [{}];
}
return [partitionKeyValue];
}
export function updateDocument(
collection: ViewModels.CollectionBase,
documentId: DocumentId,
newDocument: any
): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.replace(newDocument)
.then(response => response.resource)
);
}
export function executeStoredProcedure(
collection: ViewModels.Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: any,
params: any[]
): Q.Promise<any> {
// TODO remove this deferred. Kept it because of timeout code at bottom of function
const deferred = Q.defer<any>();
client()
.database(collection.databaseId)
.container(collection.id())
.scripts.storedProcedure(storedProcedure.id())
.execute(partitionKeyValue, params, { enableScriptLogging: true })
.then(response =>
deferred.resolve({
result: response.resource,
scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults]
})
)
.catch(error => deferred.reject(error));
return deferred.promise.timeout(
Constants.ClientDefaults.requestTimeoutMs,
`Request timed out while executing stored procedure ${storedProcedure.id()}`
);
}
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.items.create(newDocument)
.then(response => response.resource)
);
}
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.read()
.then(response => response.resource)
);
}
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
const partitionKey = documentId.partitionKeyValue;
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), partitionKey)
.delete()
);
}
export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options: any = {}
): Q.Promise<any> {
options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
return Q(
client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options)
);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
const documentsIterator = client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return Q(documentsIterator);
}
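
A rough sketch (not part of the diff) of how getCommonQueryOptions above resolves its defaults when called with an empty options object and nothing stored in local storage:

// Illustrative only; Constants.Queries.itemsPerPage is the shared page-size constant.
const resolved = getCommonQueryOptions({} as FeedOptions);
// resolved.populateQueryMetrics === true
// resolved.enableScanInQuery === true
// resolved.forceQueryPlan === true (no partition key was supplied)
// resolved.maxItemCount falls back to Constants.Queries.itemsPerPage
// resolved.maxDegreeOfParallelism is read from LocalStorageUtility (StorageKey.MaxDegreeOfParellism)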

View File

@@ -0,0 +1,217 @@
import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import Q from "q";
import * as ViewModels from "../Contracts/ViewModels";
import ConflictId from "../Explorer/Tree/ConflictId";
import DocumentId from "../Explorer/Tree/DocumentId";
import StoredProcedure from "../Explorer/Tree/StoredProcedure";
import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
import * as Constants from "./Constants";
import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
import { handleError } from "./ErrorHandlingUtils";
// TODO: Log all promise resolutions and errors with verbosity levels
export function queryDocuments(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
}
export function queryConflicts(
databaseId: string,
containerId: string,
query: string,
options: any
): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
}
export function getEntityName() {
const defaultExperience =
window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
return "document";
}
return "item";
}
export function executeStoredProcedure(
collection: ViewModels.Collection,
storedProcedure: StoredProcedure,
partitionKeyValue: any,
params: any[]
): Q.Promise<any> {
var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
.then(
(response: any) => {
deferred.resolve(response);
logConsoleInfo(
`Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
);
},
(error: any) => {
handleError(
error,
`Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`,
"ExecuteStoredProcedure"
);
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function queryDocumentsPage(
resourceName: string,
documentsIterator: MinimalQueryIterator,
firstItemIndex: number,
options: any
): Q.Promise<ViewModels.QueryResults> {
var deferred = Q.defer<ViewModels.QueryResults>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
Q(nextPage(documentsIterator, firstItemIndex))
.then(
(result: ViewModels.QueryResults) => {
const itemCount = (result.documents && result.documents.length) || 0;
logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
deferred.resolve(result);
},
(error: any) => {
handleError(error, `Failed to query ${entityName} for container ${resourceName}`, "QueryDocumentsPage");
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.readDocument(collection, documentId)
.then(
(document: any) => {
deferred.resolve(document);
},
(error: any) => {
handleError(error, `Failed to read ${entityName} ${documentId.id()}`, "ReadDocument");
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function updateDocument(
collection: ViewModels.CollectionBase,
documentId: DocumentId,
newDocument: any
): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
.then(
(updatedDocument: any) => {
logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
deferred.resolve(updatedDocument);
},
(error: any) => {
handleError(error, `Failed to update ${entityName} ${documentId.id()}`, "UpdateDocument");
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
DataAccessUtilityBase.createDocument(collection, newDocument)
.then(
(savedDocument: any) => {
logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
deferred.resolve(savedDocument);
},
(error: any) => {
handleError(error, `Error while creating new ${entityName} for container ${collection.id()}`, "CreateDocument");
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
var deferred = Q.defer<any>();
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
DataAccessUtilityBase.deleteDocument(collection, documentId)
.then(
(response: any) => {
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
deferred.resolve(response);
},
(error: any) => {
handleError(error, `Error while deleting ${entityName} ${documentId.id()}`, "DeleteDocument");
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}
export function deleteConflict(
collection: ViewModels.CollectionBase,
conflictId: ConflictId,
options?: any
): Q.Promise<any> {
var deferred = Q.defer<any>();
const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
.then(
(response: any) => {
logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
deferred.resolve(response);
},
(error: any) => {
handleError(error, `Error while deleting conflict ${conflictId.id()}`, "DeleteConflict");
deferred.reject(error);
}
)
.finally(() => {
clearMessage();
});
return deferred.promise;
}

View File

@@ -1,10 +0,0 @@
import { DefaultAccountExperienceType } from "../DefaultAccountExperienceType";
import { userContext } from "../UserContext";
export const getEntityName = (): string => {
if (userContext.defaultExperience === DefaultAccountExperienceType.MongoDB) {
return "document";
}
return "item";
};

View File

@@ -73,7 +73,7 @@ export default class EditableUtility {
return false;
});
observable.subscribe((edit) => {
observable.subscribe(edit => {
var edits = observable.edits && observable.edits();
if (!edits) {
return;
@@ -83,9 +83,9 @@ export default class EditableUtility {
});
observable.editableIsValid = ko.observable<boolean>(true);
observable.subscribe((value) => {
observable.subscribe(value => {
const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
const isValid = validations.every((validate) => validate(value));
const isValid = validations.every(validate => validate(value));
observable.editableIsValid(isValid);
});

View File

@@ -1,6 +1,8 @@
export function normalizeArmEndpoint(uri: string): string {
export default class EnvironmentUtility {
public static normalizeArmEndpointUri(uri: string): string {
if (uri && uri.slice(-1) !== "/") {
return `${uri}/`;
}
return uri;
}
}
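
For example (illustrative endpoint; behavior is identical whichever of the two signatures above survives this compare):

normalizeArmEndpoint("https://management.azure.com"); // -> "https://management.azure.com/"
normalizeArmEndpoint("https://management.azure.com/"); // -> returned unchanged, trailing slash already present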

View File

@@ -1,57 +1,11 @@
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { SubscriptionType } from "../Contracts/SubscriptionType";
import { userContext } from "../UserContext";
import { ARMError } from "../Utils/arm/request";
import { CosmosError, sendNotificationForError } from "./dataAccess/sendNotificationForError";
import { logConsoleError } from "../Utils/NotificationConsoleUtils";
import { HttpStatusCodes } from "./Constants";
import { logError } from "./Logger";
import { sendMessage } from "./MessageHandler";
import { replaceKnownError } from "./ErrorParserUtility";
export const handleError = (error: string | ARMError | Error, area: string, consoleErrorPrefix?: string): void => {
const errorMessage = getErrorMessage(error);
const errorCode = error instanceof ARMError ? error.code : undefined;
// logs error to data explorer console
const consoleErrorMessage = consoleErrorPrefix ? `${consoleErrorPrefix}:\n ${errorMessage}` : errorMessage;
logConsoleError(consoleErrorMessage);
// logs error to both app insight and kusto
logError(errorMessage, area, errorCode);
// checks for errors caused by firewall and sends them to portal to handle
sendNotificationForError(errorMessage, errorCode);
};
export const getErrorMessage = (error: string | Error = ""): string => {
const errorMessage = typeof error === "string" ? error : error.message;
return replaceKnownError(errorMessage);
};
export const getErrorStack = (error: string | Error): string => {
return typeof error === "string" ? undefined : error.stack;
};
const sendNotificationForError = (errorMessage: string, errorCode: number | string): void => {
if (errorCode === HttpStatusCodes.Forbidden) {
if (errorMessage?.toLowerCase().indexOf("sharedoffer is disabled for your account") > 0) {
return;
}
sendMessage({
type: MessageTypes.ForbiddenError,
reason: errorMessage,
});
}
};
const replaceKnownError = (errorMessage: string): string => {
if (
userContext.subscriptionType === SubscriptionType.Internal &&
errorMessage?.indexOf("SharedOffer is Disabled for your account") >= 0
) {
return "Database throughput is not supported for internal subscriptions.";
} else if (errorMessage?.indexOf("Partition key paths must contain only valid") >= 0) {
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
}
return errorMessage;
export const handleError = (error: CosmosError, consoleErrorPrefix: string, area: string): void => {
const sanitizedErrorMsg = replaceKnownError(error.message);
logConsoleError(`${consoleErrorPrefix}:\n ${sanitizedErrorMsg}`);
logError(sanitizedErrorMsg, area, error.code);
sendNotificationForError(error);
};
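
A small sketch of how getErrorMessage / replaceKnownError above rewrite a known backend message (hypothetical error text):

// Illustrative only: any message containing the known backend fragment is replaced wholesale.
getErrorMessage(
  new Error("The partition key component definition path '/bad path' could not be accepted. Partition key paths must contain only valid characters...")
);
// -> "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character."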

View File

@@ -0,0 +1,24 @@
import * as ErrorParserUtility from "./ErrorParserUtility";
describe("Error Parser Utility", () => {
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
it("should parse a backend error correctly", () => {
// A fake error matching what is thrown by the SDK on a bad collection create request
const innerMessage =
"The partition key component definition path '/asdwqr31 @#$#$WRadf' could not be accepted, failed near position '10'. Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
const message = `Message: {\"Errors\":[\"${innerMessage}\"]}\r\nActivityId: 97b2e684-7505-4921-85f6-2513b9b28220, Request URI: /apps/89fdcf25-2a0b-4d2a-aab6-e161e565b26f/services/54911149-7bb1-4e7d-a1fa-22c8b36a4bb9/partitions/cc2a7a04-5f5a-4709-bcf7-8509b264963f/replicas/132304018743619218p, RequestStats: , SDK: Microsoft.Azure.Documents.Common/2.10.0`;
const err = new Error(message) as any;
err.code = 400;
err.body = {
code: "BadRequest",
message
};
err.headers = {};
err.activityId = "97b2e684-7505-4921-85f6-2513b9b28220";
const parsedError = ErrorParserUtility.parse(err);
expect(parsedError.length).toBe(1);
expect(parsedError[0].message).toBe(innerMessage);
});
});
});

View File

@@ -0,0 +1,69 @@
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
export function replaceKnownError(err: string): string {
if (
window.dataExplorer.subscriptionType() === ViewModels.SubscriptionType.Internal &&
err.indexOf("SharedOffer is Disabled for your account") >= 0
) {
return "Database throughput is not supported for internal subscriptions.";
} else if (err.indexOf("Partition key paths must contain only valid") >= 0) {
return "Partition key paths must contain only valid characters and not contain a trailing slash or wildcard character.";
}
return err;
}
export function parse(err: any): DataModels.ErrorDataModel[] {
try {
return _parse(err);
} catch (e) {
return [<DataModels.ErrorDataModel>{ message: JSON.stringify(err) }];
}
}
function _parse(err: any): DataModels.ErrorDataModel[] {
var normalizedErrors: DataModels.ErrorDataModel[] = [];
if (err.message && !err.code) {
normalizedErrors.push(err);
} else {
const innerErrors: any[] = _getInnerErrors(err.message);
normalizedErrors = innerErrors.map(innerError =>
typeof innerError === "string" ? { message: innerError } : innerError
);
}
return normalizedErrors;
}
function _getInnerErrors(message: string): any[] {
/*
The backend error message has an inner message, which is a stringified object.
For SQL errors, the "errors" property is an array of SqlErrorDataModel.
Example:
"Message: {"Errors":["Resource with specified id or name already exists"]}\r\nActivityId: 80005000008d40b6a, Request URI: /apps/19000c000c0a0005/services/mctestdocdbprod-MasterService-0-00066ab9937/partitions/900005f9000e676fb8/replicas/13000000000955p"
For non-SQL errors, the "Errors" property is an array of strings.
Example:
"Message: {"errors":[{"severity":"Error","location":{"start":7,"end":8},"code":"SC1001","message":"Syntax error, incorrect syntax near '.'."}]}\r\nActivityId: d3300016d4084e310a, Request URI: /apps/12401f9e1df77/services/dc100232b1f44545/partitions/f86f3bc0001a2f78/replicas/13085003638s"
*/
let innerMessage: any = null;
const singleLineMessage = message.replace(/[\r\n]|\r|\n/g, "");
try {
// Multi-Partition error flavor
const regExp = /^(.*)ActivityId: (.*)/g;
const regString = regExp.exec(singleLineMessage);
const innerMessageString = regString[1];
innerMessage = JSON.parse(innerMessageString);
} catch (e) {
// Single-partition error flavor
const regExp = /^Message: (.*)ActivityId: (.*), Request URI: (.*)/g;
const regString = regExp.exec(singleLineMessage);
const innerMessageString = regString[1];
innerMessage = JSON.parse(innerMessageString);
}
return innerMessage.errors ? innerMessage.errors : innerMessage.Errors;
}
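
A rough sketch of what parse above returns for a typical backend error; the error shape mirrors the test case earlier in this diff, with placeholder ids:

// Illustrative only: a backend error whose message wraps a stringified {"Errors":[...]} payload.
const backendError: any = new Error(
  'Message: {"Errors":["Resource with specified id or name already exists"]}\r\n' +
    "ActivityId: 00000000-0000-0000-0000-000000000000, Request URI: /apps/placeholder"
);
backendError.code = 409;
parse(backendError);
// -> [{ message: "Resource with specified id or name already exists" }]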

View File

@@ -1,5 +1,28 @@
import * as Constants from "./Constants";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
// x-ms-resource-quota: databases = 100; collections = 5000; users = 500000; permissions = 2000000;
export function getQuota(responseHeaders: any): any {
return responseHeaders && responseHeaders[Constants.HttpHeaders.resourceQuota]
? parseStringIntoObject(responseHeaders[Constants.HttpHeaders.resourceQuota])
: null;
}
export function shouldEnableCrossPartitionKey(): boolean {
return LocalStorageUtility.getEntryString(StorageKey.IsCrossPartitionQueryEnabled) === "true";
}
function parseStringIntoObject(resourceString: string) {
var entityObject: any = {};
if (resourceString) {
var entitiesArray: string[] = resourceString.split(";");
for (var i: any = 0; i < entitiesArray.length; i++) {
var entity: string[] = entitiesArray[i].split("=");
entityObject[entity[0]] = entity[1];
}
}
return entityObject;
}
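
A worked example of the quota parsing above (hypothetical header value; the real header may include spaces, which would survive into the keys):

// Illustrative only: getQuota splits the semicolon-delimited header into string key/value pairs.
const quota = getQuota({
  [Constants.HttpHeaders.resourceQuota]: "databases=100;collections=5000;users=500000;permissions=2000000"
});
// -> { databases: "100", collections: "5000", users: "500000", permissions: "2000000" } (values stay strings)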

View File

@@ -11,8 +11,8 @@ describe("nextPage", () => {
queryMetrics: {},
requestCharge: 1,
headers: {},
activityId: "foo",
}),
activityId: "foo"
})
};
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();

View File

@@ -1,8 +1,6 @@
import { QueryResults } from "../Contracts/ViewModels";
interface QueryResponse {
// [Todo] remove any
// eslint-disable-next-line @typescript-eslint/no-explicit-any
resources: any[];
hasMoreResults: boolean;
activityId: string;
@@ -16,9 +14,8 @@ export interface MinimalQueryIterator {
// Pick<QueryIterator<any>, "fetchNext">;
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
return documentsIterator.fetchNext().then((response) => {
return documentsIterator.fetchNext().then(response => {
const documents = response.resources;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
const itemCount = (documents && documents.length) || 0;
return {
@@ -29,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
lastItemIndex: Number(firstItemIndex) + Number(itemCount),
headers,
activityId: response.activityId,
requestCharge: response.requestCharge,
requestCharge: response.requestCharge
};
});
}
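
A sketch of how nextPage above turns one fetchNext page into QueryResults (fake iterator, made-up values):

// Illustrative only: two documents fetched starting at index 10 gives lastItemIndex 12.
const fakeIterator: MinimalQueryIterator = {
  fetchNext: () =>
    Promise.resolve({
      resources: [{ id: "a" }, { id: "b" }],
      hasMoreResults: false,
      activityId: "activity-id",
      requestCharge: 2.5
    } as any)
};
nextPage(fakeIterator, 10).then(page => {
  // page.lastItemIndex === 12; activityId and requestCharge are passed straight through
});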

View File

@@ -21,15 +21,21 @@ export function logWarning(message: string, area: string, code?: number): void {
return _logEntry(entry);
}
export function logError(errorMessage: string, area: string, code?: number | string): void {
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, errorMessage, area, code);
export function logError(message: string | Error, area: string, code?: number): void {
let logMessage: string;
if (typeof message === "string") {
logMessage = message;
} else {
logMessage = JSON.stringify(message, Object.getOwnPropertyNames(message));
}
const entry: Diagnostics.LogEntry = _generateLogEntry(Diagnostics.LogEntryLevel.Error, logMessage, area, code);
return _logEntry(entry);
}
function _logEntry(entry: Diagnostics.LogEntry): void {
sendMessage({
type: MessageTypes.LogInfo,
data: JSON.stringify(entry),
data: JSON.stringify(entry)
});
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -53,13 +59,13 @@ function _generateLogEntry(
level: Diagnostics.LogEntryLevel,
message: string,
area: string,
code?: number | string
code?: number
): Diagnostics.LogEntry {
return {
timestamp: new Date().getUTCSeconds(),
level,
message,
area,
code,
code
};
}

View File

@@ -6,7 +6,7 @@ describe("Message Handler", () => {
let mockPromise = {
id: "123",
startTime: new Date(),
deferred: Q.defer<any>(),
deferred: Q.defer<any>()
};
let mockMessage = { message: { id: "123", data: "{}" } };
MessageHandler.RequestMap[mockPromise.id] = mockPromise;
@@ -18,7 +18,7 @@ describe("Message Handler", () => {
let message = {
id: "123",
startTime: new Date(),
deferred: Q.defer<any>(),
deferred: Q.defer<any>()
};
MessageHandler.handleCachedDataMessage(message);

View File

@@ -1,8 +1,8 @@
import { MessageTypes } from "../Contracts/ExplorerContracts";
import Q from "q";
import * as _ from "underscore";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { getDataExplorerWindow } from "../Utils/WindowUtils";
import * as Constants from "./Constants";
import { getDataExplorerWindow } from "../Utils/WindowUtils";
export interface CachedDataPromise<T> {
deferred: Q.Deferred<T>;
@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
deferred: Q.defer<TResponseDataModel>(),
startTime: new Date(),
id: _.uniqueId(),
id: _.uniqueId()
};
RequestMap[cachedDataPromise.id] = cachedDataPromise;
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,22 +54,7 @@ export function sendMessage(data: any): void {
portalChildWindow.parent.postMessage(
{
signature: "pcIframe",
data: data,
},
portalChildWindow.document.referrer
);
}
}
export function sendReadyMessage(): void {
if (canSendMessage()) {
// We try to find the data explorer window first, then fall back to the current window
const portalChildWindow = getDataExplorerWindow(window) || window;
portalChildWindow.parent.postMessage(
{
signature: "pcIframe",
kind: "ready",
data: "ready",
data: data
},
portalChildWindow.document.referrer
);

View File

@@ -14,7 +14,7 @@ const fetchMock = () => {
ok: true,
text: () => "{}",
json: () => "{}",
headers: new Map(),
headers: new Map()
});
};
@@ -27,8 +27,8 @@ const collection = {
partitionKey: {
paths: ["/pk"],
kind: "Hash",
version: 1,
},
version: 1
}
} as Collection;
const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
partitionKey: {
paths: ["/pk"],
kind: "Hash",
version: 1,
},
version: 1
}
} as unknown) as DocumentId;
const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo",
},
cassandraEndpoint: "foo"
}
} as DatabaseAccount;
describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -220,11 +220,12 @@ describe("MongoProxyClient", () => {
describe("getEndpoint", () => {
beforeEach(() => {
resetConfigContext();
delete window.authType;
updateUserContext({
databaseAccount,
databaseAccount
});
updateConfigContext({
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
});
});
@@ -240,9 +241,7 @@ describe("MongoProxyClient", () => {
});
it("returns a guest endpoint", () => {
updateUserContext({
authType: AuthType.EncryptedToken,
});
window.authType = AuthType.EncryptedToken;
const endpoint = getEndpoint();
expect(endpoint).toEqual("https://main.documentdb.ext.azure.com/api/guest/mongo/explorer");
});

View File

@@ -16,11 +16,11 @@ import { sendMessage } from "./MessageHandler";
const defaultHeaders = {
[HttpHeaders.apiType]: ApiType.MongoDB.toString(),
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
};
function authHeaders() {
if (userContext.authType === AuthType.EncryptedToken) {
if (window.authType === AuthType.EncryptedToken) {
return { [HttpHeaders.guestAccessToken]: userContext.accessToken };
} else {
return { [HttpHeaders.authorization]: userContext.authorizationToken };
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
let continuationToken: string;
return {
fetchNext: () => {
return queryDocuments(databaseId, collection, false, query).then((response) => {
return queryDocuments(databaseId, collection, false, query).then(response => {
continuationToken = response.continuationToken;
const headers: { [key: string]: string | number } = {};
response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
headers,
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
hasMoreResults: !!continuationToken,
hasMoreResults: !!continuationToken
};
});
},
}
};
}
@@ -74,9 +74,7 @@ export function queryDocuments(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperty
: "",
collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
};
const endpoint = getEndpoint() || "";
@@ -89,7 +87,7 @@ export function queryDocuments(
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
[HttpHeaders.contentType]: "application/query+json",
[HttpHeaders.contentType]: "application/query+json"
};
if (continuationToken) {
@@ -102,14 +100,14 @@ export function queryDocuments(
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify({ query }),
headers,
headers
})
.then(async (response) => {
.then(async response => {
if (response.ok) {
return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[],
headers: response.headers,
headers: response.headers
};
}
errorHandling(response, "querying documents", params);
@@ -137,9 +135,7 @@ export function readDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -151,10 +147,10 @@ export function readDocument(
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader())
),
},
)
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -179,7 +175,7 @@ export function createDocument(
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -190,10 +186,10 @@ export function createDocument(
body: JSON.stringify(documentContent),
headers: {
...defaultHeaders,
...authHeaders(),
},
...authHeaders()
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -222,9 +218,7 @@ export function updateDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -236,10 +230,10 @@ export function updateDocument(
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -263,9 +257,7 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
};
const endpoint = getEndpoint();
@@ -276,10 +268,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
},
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
}
})
.then((response) => {
.then(response => {
if (response.ok) {
return undefined;
}
@@ -307,7 +299,7 @@ export function createMongoCollectionWithProxy(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput,
autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
};
const endpoint = getEndpoint();
@@ -322,11 +314,11 @@ export function createMongoCollectionWithProxy(
headers: {
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
},
[HttpHeaders.contentType]: "application/json"
}
}
)
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -337,7 +329,7 @@ export function createMongoCollectionWithProxy(
export function getEndpoint(): string {
let url = (configContext.MONGO_BACKEND_ENDPOINT || configContext.BACKEND_ENDPOINT) + "/api/mongo/explorer";
if (userContext.authType === AuthType.EncryptedToken) {
if (window.authType === AuthType.EncryptedToken) {
url = url.replace("api/mongo", "api/guest/mongo");
}
return url;

View File

@@ -1,64 +0,0 @@
import * as OfferUtility from "./OfferUtility";
import { SDKOfferDefinition, Offer } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
describe("parseSDKOfferResponse", () => {
it("manual throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: 500,
autoscaleMaxThroughput: undefined,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
it("autoscale throughput", () => {
const mockOfferDefinition = {
content: {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
maxThroughput: 5000,
},
},
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
manualThroughput: undefined,
autoscaleMaxThroughput: 5000,
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
});
});

View File

@@ -1,37 +0,0 @@
import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
import { OfferResponse } from "@azure/cosmos";
import { HttpHeaders } from "./Constants";
export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | undefined => {
const offerDefinition: SDKOfferDefinition | undefined = offerResponse?.resource;
if (!offerDefinition) {
return undefined;
}
const offerContent = offerDefinition.content;
if (!offerContent) {
return undefined;
}
const minimumThroughput = offerContent.collectionThroughputInfo?.minimumRUForCollection;
const autopilotSettings = offerContent.offerAutopilotSettings;
if (autopilotSettings && autopilotSettings.maxThroughput && minimumThroughput) {
return {
id: offerDefinition.id,
autoscaleMaxThroughput: autopilotSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
}
return {
id: offerDefinition.id,
autoscaleMaxThroughput: undefined,
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
};

View File

@@ -16,7 +16,7 @@ const notificationsPath = () => {
};
export const fetchPortalNotifications = async (): Promise<DataModels.Notification[]> => {
if (configContext.platform === Platform.Emulator || configContext.platform === Platform.Hosted) {
if (configContext.platform === Platform.Emulator) {
return [];
}
@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
const headers = { [authorizationHeader.header]: authorizationHeader.token };
const response = await window.fetch(url, {
headers,
headers
});
if (!response.ok) {

View File

@@ -3,24 +3,23 @@ import * as _ from "underscore";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import Explorer from "../Explorer/Explorer";
import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
import DocumentId from "../Explorer/Tree/DocumentId";
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
import * as QueryUtils from "../Utils/QueryUtils";
import { QueryUtils } from "../Utils/QueryUtils";
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext";
import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
import { createCollection } from "./dataAccess/createCollection";
import { handleError } from "./ErrorHandlingUtils";
import { createDocument } from "./dataAccess/createDocument";
import { deleteDocument } from "./dataAccess/deleteDocument";
import { queryDocuments } from "./dataAccess/queryDocuments";
import * as ErrorParserUtility from "./ErrorParserUtility";
import * as Logger from "./Logger";
export class QueriesClient {
private static readonly PartitionKey: DataModels.PartitionKey = {
paths: [`/${SavedQueries.PartitionKeyProperty}`],
kind: BackendDefaults.partitionKeyKind,
version: BackendDefaults.partitionKeyVersion,
version: BackendDefaults.partitionKeyVersion
};
private static readonly FetchQuery: string = "SELECT * FROM c";
private static readonly FetchMongoQuery: string = "{}";
@@ -33,33 +32,47 @@ export class QueriesClient {
return Promise.resolve(queriesCollection.rawDataModel);
}
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress,
"Setting up account for saving queries"
);
return createCollection({
collectionId: SavedQueries.CollectionName,
createNewDatabase: true,
databaseId: SavedQueries.DatabaseName,
partitionKey: QueriesClient.PartitionKey,
offerThroughput: SavedQueries.OfferThroughput,
databaseLevelThroughput: false,
databaseLevelThroughput: false
})
.then(
(collection: DataModels.Collection) => {
NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info,
"Successfully set up account for saving queries"
);
return Promise.resolve(collection);
},
(error: any) => {
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to set up account for saving queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "setupQueriesCollection");
return Promise.reject(stringifiedError);
}
)
.finally(() => clearMessage());
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
}
public async saveQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
return Promise.reject(errorMessage);
}
@@ -67,49 +80,65 @@ export class QueriesClient {
this.validateQuery(query);
} catch (error) {
const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
return Promise.reject(errorMessage);
}
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress,
`Saving query ${query.queryName}`
);
query.id = query.queryName;
return createDocument(queriesCollection, query)
.then(
(savedQuery: DataModels.Query) => {
NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info,
`Successfully saved query ${query.queryName}`
);
return Promise.resolve();
},
(error: any) => {
if (error.code === HttpStatusCodes.Conflict.toString()) {
error = `Query ${query.queryName} already exists`;
let errorMessage: string;
const parsedError: DataModels.ErrorDataModel = ErrorParserUtility.parse(error)[0];
if (parsedError.code === HttpStatusCodes.Conflict.toString()) {
errorMessage = `Query ${query.queryName} already exists`;
} else {
errorMessage = parsedError.message;
}
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
return Promise.reject(error);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to save query ${query.queryName}: ${errorMessage}`
);
Logger.logError(JSON.stringify(parsedError), "saveQuery");
return Promise.reject(errorMessage);
}
)
.finally(() => clearMessage());
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
}
public async getQueries(): Promise<DataModels.Query[]> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${errorMessage}`
);
return Promise.reject(errorMessage);
}
const options: any = { enableCrossPartitionQuery: true };
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
SavedQueries.DatabaseName,
SavedQueries.CollectionName,
this.fetchQueriesQuery(),
options
);
const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
return QueryUtils.queryAllPages(fetchQueries)
const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries");
return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options)
.then(
(queryIterator: QueryIterator<ItemDefinition & Resource>) => {
const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> =>
queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options);
return QueryUtils.queryAllPages(fetchQueries).then(
(results: ViewModels.QueryResults) => {
let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
if (!document) {
@@ -120,7 +149,7 @@ export class QueriesClient {
resourceId: resourceId,
queryName: queryName,
query: query,
id: id,
id: id
};
try {
this.validateQuery(parsedQuery);
@@ -130,22 +159,42 @@ export class QueriesClient {
}
});
queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries");
return Promise.resolve(queries);
},
(error: any) => {
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "getSavedQueries");
return Promise.reject(stringifiedError);
}
);
},
(error: any) => {
// should never get into this state but we handle this regardless
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${stringifiedError}`
);
Logger.logError(stringifiedError, "getSavedQueries");
return Promise.reject(stringifiedError);
}
)
.finally(() => clearMessage());
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
}
public async deleteQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to fetch saved queries: ${errorMessage}`
);
return Promise.reject(errorMessage);
}
@@ -153,15 +202,21 @@ export class QueriesClient {
this.validateQuery(query);
} catch (error) {
const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to delete query ${query.queryName}: ${errorMessage}`
);
}
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
const id = NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.InProgress,
`Deleting query ${query.queryName}`
);
query.id = query.queryName;
const documentId = new DocumentId(
{
partitionKey: QueriesClient.PartitionKey,
partitionKeyProperty: "id",
partitionKeyProperty: "id"
} as DocumentsTab,
query,
query.queryName
@@ -170,15 +225,23 @@ export class QueriesClient {
return deleteDocument(queriesCollection, documentId)
.then(
() => {
NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Info,
`Successfully deleted query ${query.queryName}`
);
return Promise.resolve();
},
(error: any) => {
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
return Promise.reject(error);
const stringifiedError: string = error.message;
NotificationConsoleUtils.logConsoleMessage(
ConsoleDataType.Error,
`Failed to delete query ${query.queryName}: ${stringifiedError}`
);
Logger.logError(stringifiedError, "deleteQuery");
return Promise.reject(stringifiedError);
}
)
.finally(() => clearMessage());
.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
}
public getResourceId(): string {

View File

@@ -4,7 +4,7 @@ import { SplitterMetrics } from "./Constants";
export enum SplitterDirection {
Horizontal = "horizontal",
Vertical = "vertical",
Vertical = "vertical"
}
export interface SplitterBounds {
@@ -23,10 +23,10 @@ export class Splitter {
public splitterId: string;
public leftSideId: string;
public splitter!: HTMLElement;
public leftSide!: HTMLElement;
public lastX!: number;
public lastWidth!: number;
public splitter: HTMLElement;
public leftSide: HTMLElement;
public lastX: number;
public lastWidth: number;
private isCollapsed: ko.Observable<boolean>;
private bounds: SplitterBounds;
@@ -42,16 +42,15 @@ export class Splitter {
}
public initialize() {
if (document.getElementById(this.splitterId) !== null && document.getElementById(this.leftSideId) != null) {
this.splitter = <HTMLElement>document.getElementById(this.splitterId);
this.leftSide = <HTMLElement>document.getElementById(this.leftSideId);
}
this.splitter = document.getElementById(this.splitterId);
this.leftSide = document.getElementById(this.leftSideId);
const isVerticalSplitter: boolean = this.direction === SplitterDirection.Vertical;
const splitterOptions: JQueryUI.ResizableOptions = {
animate: true,
animateDuration: "fast",
start: this.onResizeStart,
stop: this.onResizeStop,
stop: this.onResizeStop
};
if (isVerticalSplitter) {
@@ -91,7 +90,9 @@ export class Splitter {
this.lastWidth = $(this.leftSide).width();
$(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
$(this.leftSide).css("width", "");
$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.leftSide)
.resizable("option", "disabled", true)
.removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.splitter).removeClass("ui-resizable-e");
this.isCollapsed(true);
}

View File

@@ -2,7 +2,8 @@
* Copyright (C) Microsoft Corporation. All rights reserved.
*----------------------------------------------------------*/
export function getMonacoTheme(theme: string): string {
export default class ThemeUtility {
public static getMonacoTheme(theme: string): string {
switch (theme) {
case "default":
case "hc-white":
@@ -15,3 +16,4 @@ export function getMonacoTheme(theme: string): string {
return "vs";
}
}
}

View File

@@ -1,24 +0,0 @@
import { useId } from "@uifabric/react-hooks";
import { ITooltipHostStyles, TooltipHost } from "office-ui-fabric-react/lib/Tooltip";
import * as React from "react";
import InfoBubble from "../../../images/info-bubble.svg";
const calloutProps = { gapSpace: 0 };
const hostStyles: Partial<ITooltipHostStyles> = { root: { display: "inline-block" } };
export interface TooltipProps {
children: string;
}
export const Tooltip: React.FunctionComponent = ({ children }: TooltipProps) => {
const tooltipId = useId("tooltip");
return children ? (
<span>
<TooltipHost content={children} id={tooltipId} calloutProps={calloutProps} styles={hostStyles}>
<img className="infoImg" src={InfoBubble} alt="More information" />
</TooltipHost>
</span>
) : (
<></>
);
};

View File

@@ -1,75 +0,0 @@
import { Image, Stack, TextField } from "office-ui-fabric-react";
import React, { ChangeEvent, FunctionComponent, KeyboardEvent, useRef, useState } from "react";
import FolderIcon from "../../../images/folder_16x16.svg";
import * as Constants from "../../Common/Constants";
import { Tooltip } from "../Tooltip";
interface UploadProps {
label: string;
accept?: string;
tooltip?: string;
multiple?: boolean;
tabIndex?: number;
onUpload: (event: ChangeEvent<HTMLInputElement>) => void;
}
export const Upload: FunctionComponent<UploadProps> = ({
label,
accept,
tooltip,
multiple,
tabIndex,
...props
}: UploadProps) => {
const [selectedFilesTitle, setSelectedFilesTitle] = useState<string[]>([]);
const fileRef = useRef<HTMLInputElement>();
const onImportLinkKeyPress = (event: KeyboardEvent<HTMLAnchorElement>): void => {
if (event.keyCode === Constants.KeyCodes.Enter || event.keyCode === Constants.KeyCodes.Space) {
onImportLinkClick();
}
};
const onImportLinkClick = (): void => {
fileRef?.current?.click();
};
const onUpload = (event: ChangeEvent<HTMLInputElement>): void => {
const { files } = event.target;
const newFileList = [];
for (let i = 0; i < files.length; i++) {
newFileList.push(files.item(i).name);
}
if (newFileList) {
setSelectedFilesTitle(newFileList);
props.onUpload(event);
}
};
const title = label + " to upload";
return (
<div>
<span className="renewUploadItemsHeader">{label}</span>
<Tooltip>{tooltip}</Tooltip>
<Stack horizontal>
<TextField styles={{ fieldGroup: { width: 300 } }} readOnly value={selectedFilesTitle.toString()} />
<input
type="file"
id="importFileInput"
style={{ display: "none" }}
ref={fileRef}
accept={accept}
tabIndex={tabIndex}
multiple={multiple}
title="Upload Icon"
onChange={onUpload}
role="button"
/>
<a href="#" id="fileImportLinkNotebook" onClick={onImportLinkClick} onKeyPress={onImportLinkKeyPress}>
<Image className="fileImportImg" src={FolderIcon} alt={title} title={title} />
</a>
</Stack>
</div>
);
};
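A usage sketch for the Upload component removed above; the handler, label, and accept filter are hypothetical.

import React, { ChangeEvent, FunctionComponent } from "react";
import { Upload } from "./Upload"; // illustrative path

export const NotebookImport: FunctionComponent = () => {
  const onUpload = (event: ChangeEvent<HTMLInputElement>): void => {
    // The component has already written the selected file names into its read-only TextField at this point.
    console.log(`Selected ${event.target.files?.length ?? 0} file(s)`);
  };

  return (
    <Upload label="Import notebook" accept=".ipynb" multiple tooltip="Select one or more .ipynb files" onUpload={onUpload} />
  );
};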

View File

@@ -1,12 +1,5 @@
interface Result {
type?: string;
objectBody?: {
id: string;
self: string;
};
}
export function parseDocumentsPath(resourcePath: string): Result {
export default class UrlUtility {
public static parseDocumentsPath(resourcePath: string): any {
if (typeof resourcePath !== "string") {
return {};
}
@@ -23,9 +16,9 @@ export function parseDocumentsPath(resourcePath: string): Result {
resourcePath = "/" + resourcePath;
}
let id: string;
let type: string;
const pathParts = resourcePath.split("/");
var id: string;
var type: string;
var pathParts = resourcePath.split("/");
if (pathParts.length % 2 === 0) {
id = pathParts[pathParts.length - 2];
@@ -35,27 +28,28 @@ export function parseDocumentsPath(resourcePath: string): Result {
type = pathParts[pathParts.length - 2];
}
const result = {
var result = {
type: type,
objectBody: {
id: id,
self: resourcePath,
},
self: resourcePath
}
};
return result;
}
export function createUri(baseUri: string, relativeUri: string): string {
public static createUri(baseUri: string, relativeUri: string): string {
if (!baseUri) {
throw new Error("baseUri is null or empty");
}
const slashAtEndOfUriRegex = /\/$/,
var slashAtEndOfUriRegex = /\/$/,
slashAtStartOfUriRegEx = /^\//;
const normalizedBaseUri = baseUri.replace(slashAtEndOfUriRegex, "") + "/",
var normalizedBaseUri = baseUri.replace(slashAtEndOfUriRegex, "") + "/",
normalizedRelativeUri = (relativeUri && relativeUri.replace(slashAtStartOfUriRegEx, "")) || "";
return normalizedBaseUri + normalizedRelativeUri;
}
}
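A worked example against the function-export side of this diff; the leading/trailing slash normalization happens in lines elided from the hunk, so the exact self value is indicative only.

const parsed = parseDocumentsPath("dbs/testdb/colls/testcoll/docs/testdoc");
// parsed.type       -> "docs"
// parsed.objectBody -> { id: "testdoc", self: <the normalized resource path> }

const uri = createUri("https://localhost:1234/", "/explorer.html");
// -> "https://localhost:1234/explorer.html" (exactly one "/" between base and relative parts)

// The static-class side is invoked as UrlUtility.parseDocumentsPath(...) / UrlUtility.createUri(...).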

View File

@@ -1,5 +1,6 @@
jest.mock("../../Utils/arm/request");
jest.mock("../CosmosClient");
jest.mock("../DataAccessUtilityBase");
import { AuthType } from "../../AuthType";
import { CreateCollectionParams, DatabaseAccount } from "../../Contracts/DataModels";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
@@ -14,42 +15,38 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: true,
offerThroughput: 400,
offerThroughput: 400
};
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
it("should call ARM if logged in with AAD", async () => {
updateUserContext({
authType: AuthType.AAD,
});
window.authType = AuthType.AAD;
await createCollection(createCollectionParams);
expect(armRequest).toHaveBeenCalled();
});
it("should call SDK if not logged in with non-AAD method", async () => {
updateUserContext({
authType: AuthType.MasterKey,
});
window.authType = AuthType.MasterKey;
(client as jest.Mock).mockReturnValue({
databases: {
createIfNotExists: () => {
return {
database: {
containers: {
create: () => ({}),
},
},
create: () => ({})
}
}
};
},
},
}
}
});
await createCollection(createCollectionParams);
expect(client).toHaveBeenCalled();
@@ -63,7 +60,7 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
offerThroughput: 400
};
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -73,12 +70,12 @@ describe("createCollection", () => {
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
autoPilotMaxThroughput: 4000,
autoPilotMaxThroughput: 4000
};
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
autoscaleSettings: {
maxThroughput: 4000,
},
maxThroughput: 4000
}
});
});
});

View File

@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable,
getCassandraTable
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection,
getMongoDBCollection
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph,
getGremlinGraph
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -35,13 +35,13 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
);
try {
let collection: DataModels.Collection;
if (userContext.authType === AuthType.AAD && !userContext.useSDKOperations) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
if (params.createNewDatabase) {
const createDatabaseParams: DataModels.CreateDatabaseParams = {
autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput,
offerThroughput: params.offerThroughput
};
await createDatabase(createDatabaseParams);
}
@@ -55,7 +55,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
logConsoleInfo(`Successfully created container ${params.collectionId}`);
return collection;
} catch (error) {
handleError(error, "CreateCollection", `Error while creating container ${params.collectionId}`);
handleError(error, `Error while creating container ${params.collectionId}`, "CreateCollection");
throw error;
} finally {
clearMessage();
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields.",
message: "Mongo Collection created with wildcard index on all fields."
});
}
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId,
id: params.collectionId
};
if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = {
id: params.collectionId,
id: params.collectionId
};
const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: {
resource,
options,
},
options
}
};
const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput,
},
maxThroughput: params.autoPilotMaxThroughput
}
};
}
return {
throughput: params.offerThroughput,
throughput: params.offerThroughput
};
};
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl,
analyticalStorageTtl: params.analyticalStorageTtl
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId };
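A call sketch for createCollection, mirroring the parameters used in the test file above; whether it goes through the ARM generated clients or createCollectionWithSDK depends on the AAD/useSDKOperations branch shown in the hunk.

async function provisionContainer(): Promise<DataModels.Collection> {
  // Values are illustrative; field names come from CreateCollectionParams as used in the test above.
  return createCollection({
    collectionId: "testContainer",
    databaseId: "testDatabase",
    databaseLevelThroughput: true,
    offerThroughput: 400,
  });
}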

View File

@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions,
CreateUpdateOptions
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraKeyspace,
getCassandraKeyspace,
getCassandraKeyspace
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBDatabase,
getMongoDBDatabase,
getMongoDBDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinDatabase,
getGremlinDatabase,
getGremlinDatabase
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -34,14 +34,14 @@ export async function createDatabase(params: DataModels.CreateDatabaseParams): P
if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
throw new Error("Creating database resources is not allowed for tables accounts");
}
const database: DataModels.Database = await (userContext.authType === AuthType.AAD && !userContext.useSDKOperations
const database: DataModels.Database = await (window.authType === AuthType.AAD && !userContext.useSDKOperations
? createDatabaseWithARM(params)
: createDatabaseWithSDK(params));
logConsoleInfo(`Successfully created database ${params.databaseId}`);
return database;
} catch (error) {
handleError(error, "CreateDatabase", `Error while creating database ${params.databaseId}`);
handleError(error, `Error while creating database ${params.databaseId}`, "CreateDatabase");
throw error;
} finally {
clearMessage();
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
},
options,
id: params.databaseId
},
options
}
};
const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
},
options,
id: params.databaseId
},
options
}
};
const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
},
options,
id: params.databaseId
},
options
}
};
const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId,
},
options,
id: params.databaseId
},
options
}
};
const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput,
},
maxThroughput: params.autoPilotMaxThroughput
}
};
}
return {
throughput: params.offerThroughput,
throughput: params.offerThroughput
};
}
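A call sketch for createDatabase; the fields mirror the CreateDatabaseParams object built in createCollection above, and tables accounts throw before any request is made.

async function provisionDatabase(): Promise<DataModels.Database> {
  // Illustrative values; a shared-throughput database at 400 RU/s.
  return createDatabase({
    databaseId: "testDatabase",
    databaseLevelThroughput: true,
    offerThroughput: 400,
  });
}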

View File

@@ -1,25 +0,0 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
export const createDocument = async (collection: CollectionBase, newDocument: unknown): Promise<unknown> => {
const entityName = getEntityName();
const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
try {
const response = await client()
.database(collection.databaseId)
.container(collection.id())
.items.create(newDocument);
logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
return response?.resource;
} catch (error) {
handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
throw error;
} finally {
clearMessage();
}
};
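A usage sketch for the removed createDocument helper; the document body is illustrative and the collection is whatever CollectionBase the explorer tree hands over.

async function addSampleItem(collection: CollectionBase): Promise<unknown> {
  // Inserts a new item via the Cosmos SDK; logging and error handling happen inside the helper.
  return createDocument(collection, { id: "item1", category: "sample" });
}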

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource,
SqlStoredProcedureResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure,
getSqlStoredProcedure
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -22,7 +22,7 @@ export async function createStoredProcedure(
const clearMessage = logConsoleProgress(`Creating stored procedure ${storedProcedure.id}`);
try {
if (
userContext.authType === AuthType.AAD &&
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,
@@ -70,7 +70,7 @@ export async function createStoredProcedure(
.scripts.storedProcedures.create(storedProcedure);
return response?.resource;
} catch (error) {
handleError(error, "CreateStoredProcedure", `Error while creating stored procedure ${storedProcedure.id}`);
handleError(error, `Error while creating stored procedure ${storedProcedure.id}`, "CreateStoredProcedure");
throw error;
} finally {
clearMessage();
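A call sketch for createStoredProcedure; the (databaseId, collectionId, definition) parameter order is assumed from how databaseId and collectionId are used across these helpers, since the signature is cut off in the hunk above, and the stored procedure body is illustrative.

const sproc: StoredProcedureDefinition = {
  id: "helloWorld",
  body: "function () { getContext().getResponse().setBody('Hello'); }",
};

async function addSproc() {
  // Parameter order assumed; routed through ARM or the SDK depending on the auth branch shown above.
  return createStoredProcedure("testDatabase", "testContainer", sproc);
}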

View File

@@ -3,12 +3,13 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource,
SqlTriggerResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logConsoleError, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import { logError } from "../Logger";
import { sendNotificationForError } from "./sendNotificationForError";
import { userContext } from "../../UserContext";
export async function createTrigger(
@@ -19,7 +20,7 @@ export async function createTrigger(
const clearMessage = logConsoleProgress(`Creating trigger ${trigger.id}`);
try {
if (
userContext.authType === AuthType.AAD &&
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
@@ -44,8 +45,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,
@@ -59,10 +60,15 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
}
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
return response.resource;
} catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
logConsoleError(`Error while creating trigger ${trigger.id}:\n ${error.message}`);
logError(error.message, "CreateTrigger", error.code);
sendNotificationForError(error);
throw error;
} finally {
clearMessage();

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource,
SqlUserDefinedFunctionResource
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction,
getSqlUserDefinedFunction
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -22,7 +22,7 @@ export async function createUserDefinedFunction(
const clearMessage = logConsoleProgress(`Creating user defined function ${userDefinedFunction.id}`);
try {
if (
userContext.authType === AuthType.AAD &&
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {},
},
options: {}
}
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,
@@ -72,8 +72,8 @@ export async function createUserDefinedFunction(
} catch (error) {
handleError(
error,
"CreateUserupdateUserDefinedFunction",
`Error while creating user defined function ${userDefinedFunction.id}`
`Error while creating user defined function ${userDefinedFunction.id}`,
"CreateUserupdateUserDefinedFunction"
);
throw error;
} finally {

View File

@@ -13,34 +13,30 @@ describe("deleteCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
it("should call ARM if logged in with AAD", async () => {
updateUserContext({
authType: AuthType.AAD,
});
window.authType = AuthType.AAD;
await deleteCollection("database", "collection");
expect(armRequest).toHaveBeenCalled();
});
it("should call SDK if not logged in with non-AAD method", async () => {
updateUserContext({
authType: AuthType.MasterKey,
});
window.authType = AuthType.MasterKey;
(client as jest.Mock).mockReturnValue({
database: () => {
return {
container: () => {
return {
delete: (): unknown => undefined,
delete: (): unknown => undefined
};
},
}
};
},
}
});
await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -13,14 +13,17 @@ import { client } from "../CosmosClient";
export async function deleteCollection(databaseId: string, collectionId: string): Promise<void> {
const clearMessage = logConsoleProgress(`Deleting container ${collectionId}`);
try {
if (userContext.authType === AuthType.AAD && !userContext.useSDKOperations) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId);
} else {
await client().database(databaseId).container(collectionId).delete();
await client()
.database(databaseId)
.container(collectionId)
.delete();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) {
handleError(error, "DeleteCollection", `Error while deleting container ${collectionId}`);
handleError(error, `Error while deleting container ${collectionId}`, "DeleteCollection");
throw error;
} finally {
clearMessage();

View File

@@ -1,36 +0,0 @@
import ConflictId from "../../Explorer/Tree/ConflictId";
import { CollectionBase } from "../../Contracts/ViewModels";
import { RequestOptions } from "@azure/cosmos";
import { client } from "../CosmosClient";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
export const deleteConflict = async (collection: CollectionBase, conflictId: ConflictId): Promise<void> => {
const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
try {
const options = {
partitionKey: getPartitionKeyHeaderForConflict(conflictId),
};
await client()
.database(collection.databaseId)
.container(collection.id())
.conflict(conflictId.id())
.delete(options as RequestOptions);
logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
} catch (error) {
handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
throw error;
} finally {
clearMessage();
}
};
const getPartitionKeyHeaderForConflict = (conflictId: ConflictId): unknown => {
if (!conflictId.partitionKey) {
return undefined;
}
return conflictId.partitionKeyValue === undefined ? [{}] : [conflictId.partitionKeyValue];
};
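A usage sketch for the removed deleteConflict helper; the three partition key header outcomes follow directly from getPartitionKeyHeaderForConflict above.

async function resolveConflict(collection: CollectionBase, conflictId: ConflictId): Promise<void> {
  // Header derivation, per the helper above:
  //   no partitionKey            -> undefined (no partition key option sent)
  //   partitionKeyValue missing  -> [{}]
  //   partitionKeyValue present  -> [conflictId.partitionKeyValue]
  await deleteConflict(collection, conflictId);
}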

View File

@@ -13,30 +13,26 @@ describe("deleteDatabase", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test",
name: "test"
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB,
defaultExperience: DefaultAccountExperienceType.DocumentDB
});
});
it("should call ARM if logged in with AAD", async () => {
updateUserContext({
authType: AuthType.AAD,
});
window.authType = AuthType.AAD;
await deleteDatabase("database");
expect(armRequest).toHaveBeenCalled();
});
it("should call SDK if not logged in with non-AAD method", async () => {
updateUserContext({
authType: AuthType.MasterKey,
});
window.authType = AuthType.MasterKey;
(client as jest.Mock).mockReturnValue({
database: () => {
return {
delete: (): unknown => undefined,
delete: (): unknown => undefined
};
},
}
});
await deleteDatabase("database");
expect(client).toHaveBeenCalled();

View File

@@ -16,14 +16,16 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (userContext.defaultExperience === DefaultAccountExperienceType.Table) {
throw new Error("Deleting database resources is not allowed for tables accounts");
}
if (userContext.authType === AuthType.AAD && !userContext.useSDKOperations) {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId);
} else {
await client().database(databaseId).delete();
await client()
.database(databaseId)
.delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {
handleError(error, "DeleteDatabase", `Error while deleting database ${databaseId}`);
handleError(error, `Error while deleting database ${databaseId}`, "DeleteDatabase");
throw error;
} finally {
clearMessage();

View File

@@ -1,25 +0,0 @@
import { CollectionBase } from "../../Contracts/ViewModels";
import { client } from "../CosmosClient";
import { getEntityName } from "../DocumentUtility";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
import DocumentId from "../../Explorer/Tree/DocumentId";
export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
const entityName: string = getEntityName();
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
try {
await client()
.database(collection.databaseId)
.container(collection.id())
.item(documentId.id(), documentId.partitionKeyValue)
.delete();
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
} catch (error) {
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
throw error;
} finally {
clearMessage();
}
};

View File

@@ -14,7 +14,7 @@ export async function deleteStoredProcedure(
const clearMessage = logConsoleProgress(`Deleting stored procedure ${storedProcedureId}`);
try {
if (
userContext.authType === AuthType.AAD &&
window.authType === AuthType.AAD &&
!userContext.useSDKOperations &&
userContext.defaultExperience === DefaultAccountExperienceType.DocumentDB
) {
@@ -27,10 +27,14 @@ export async function deleteStoredProcedure(
storedProcedureId
);
} else {
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
}
} catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
handleError(error, `Error while deleting stored procedure ${storedProcedureId}`, "DeleteStoredProcedure");
throw error;
} finally {
clearMessage();

Some files were not shown because too many files have changed in this diff.