Mirror of https://github.com/Azure/cosmos-explorer.git (synced 2025-12-31 06:41:35 +00:00)

Compare commits: hosted-msa...package-up (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | a2568be0fe |  |
```diff
@@ -14,6 +14,7 @@ src/Common/DataAccessUtilityBase.ts
 src/Common/DeleteFeedback.ts
 src/Common/DocumentClientUtilityBase.ts
 src/Common/EditableUtility.ts
+src/Common/EnvironmentUtility.ts
 src/Common/HashMap.test.ts
 src/Common/HashMap.ts
 src/Common/HeadersUtility.test.ts
@@ -42,6 +43,7 @@ src/Contracts/ViewModels.ts
 src/Controls/Heatmap/Heatmap.test.ts
 src/Controls/Heatmap/Heatmap.ts
 src/Controls/Heatmap/HeatmapDatatypes.ts
+src/Definitions/adal.d.ts
 src/Definitions/datatables.d.ts
 src/Definitions/gif.d.ts
 src/Definitions/globals.d.ts
```
26  .eslintrc.js

```diff
@@ -1,39 +1,39 @@
 module.exports = {
   env: {
     browser: true,
-    es6: true
+    es6: true,
   },
   plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
   extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
   globals: {
     Atomics: "readonly",
-    SharedArrayBuffer: "readonly"
+    SharedArrayBuffer: "readonly",
   },
   parser: "@typescript-eslint/parser",
   parserOptions: {
     ecmaFeatures: {
-      jsx: true
+      jsx: true,
     },
     ecmaVersion: 2018,
-    sourceType: "module"
+    sourceType: "module",
   },
   overrides: [
     {
       files: ["**/*.tsx"],
       env: {
-        jest: true
+        jest: true,
       },
       extends: ["plugin:react/recommended"],
-      plugins: ["react"]
+      plugins: ["react"],
     },
     {
       files: ["**/*.{test,spec}.{ts,tsx}"],
       env: {
-        jest: true
+        jest: true,
       },
       extends: ["plugin:jest/recommended"],
-      plugins: ["jest"]
-    }
+      plugins: ["jest"],
+    },
   ],
   rules: {
     curly: "error",
@@ -47,8 +47,8 @@ module.exports = {
       "error",
       {
         selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
-        message: "Do not use JSON.stringify(error). It will print '{}'"
-      }
-    ]
-  }
+        message: "Do not use JSON.stringify(error). It will print '{}'",
+      },
+    ],
+  },
 };
```
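Most of the churn in this file, and in many of the hunks below, is mechanical: trailing commas appear after the last entry of multi-line literals and single arrow-function parameters gain parentheses. That pattern matches the prettier 1.19.1 to 2.2.1 devDependency bump later in this compare, since Prettier 2.0 changed its defaults for `trailingComma` (from `"none"` to `"es5"`) and `arrowParens` (from `"avoid"` to `"always"`). A small illustrative sketch, not code from this repository:

```ts
// Illustration only: shows the Prettier 2.x default behavior behind this diff's churn.
import * as prettier from "prettier";

const source = `
const endpoints = [
  "AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
  "HostName=(.*).cassandra.cosmos.azure.com"
];
values.forEach(value => iteratorFct(value));
`;

// With no options given, Prettier 2.x applies trailingComma: "es5" and
// arrowParens: "always"; Prettier 1.x defaulted to "none" and "avoid".
console.log(prettier.format(source, { parser: "babel" }));
// The last array entry gains a trailing comma and the arrow parameter gains
// parentheses, which is exactly the pattern repeated throughout this compare.
```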
31  .github/workflows/ci.yml  (vendored)

```diff
@@ -101,7 +101,6 @@ jobs:
           PLATFORM: "Emulator"
           NODE_TLS_REJECT_UNAUTHORIZED: 0
       - uses: actions/upload-artifact@v2
-        if: failure()
         with:
           name: screenshots
           path: failed-*
@@ -160,14 +159,13 @@ jobs:
           TABLES_CONNECTION_STRING: ${{ secrets.CONNECTION_STRING_TABLE }}
           DATA_EXPLORER_ENDPOINT: "https://localhost:1234/hostedExplorer.html"
       - uses: actions/upload-artifact@v2
-        if: failure()
         with:
           name: screenshots
           path: failed-*
   nuget:
     name: Publish Nuget
     if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
+    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
     runs-on: ubuntu-latest
     env:
       NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -191,7 +189,7 @@ jobs:
   nugetmpac:
     name: Publish Nuget MPAC
     if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
+    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted]
     runs-on: ubuntu-latest
     env:
       NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
@@ -213,28 +211,3 @@ jobs:
         name: packages
         with:
           path: "*.nupkg"
-  nugetie:
-    name: Publish Nuget IE
-    if: github.ref == 'refs/heads/master' || contains(github.ref, 'hotfix/') || contains(github.ref, 'release/')
-    needs: [lint, format, compile, build, unittest, endtoendemulator, endtoendhosted, accessibility]
-    runs-on: ubuntu-latest
-    env:
-      NUGET_SOURCE: ${{ secrets.NUGET_SOURCE }}
-      AZURE_DEVOPS_PAT: ${{ secrets.AZURE_DEVOPS_PAT }}
-    steps:
-      - uses: nuget/setup-nuget@v1
-        with:
-          nuget-api-key: ${{ secrets.NUGET_API_KEY }}
-      - name: Download Dist Folder
-        uses: actions/download-artifact@v2
-        with:
-          name: dist
-      - run: cp ./configs/prod.json config.json
-      - run: sed -i 's/Azure.Cosmos.DB.Data.Explorer/Azure.Cosmos.DB.Data.Explorer.IE/g' DataExplorer.nuspec
-      - run: nuget sources add -Name "ADO" -Source "$NUGET_SOURCE" -UserName "GitHub" -Password "$AZURE_DEVOPS_PAT"
-      - run: nuget pack -Version "2.0.0-github-${GITHUB_SHA}"
-      - run: nuget push -Source "$NUGET_SOURCE" -ApiKey Az *.nupkg
-      - uses: actions/upload-artifact@v2
-        name: packages
-        with:
-          path: "*.nupkg"
```
BIN  .vs/slnx.sqlite  (binary file not shown)
37  README.md

```diff
@@ -13,18 +13,29 @@ UI for Azure Cosmos DB. Powers the [Azure Portal](https://portal.azure.com/), ht
 
 ### Watch mode
 
-Run `npm start` to start the development server and automatically rebuild on changes
+Run `npm run watch` to start the development server and automatically rebuild on changes
 
-### Hosted Development (https://cosmos.azure.com)
+### Specifying Development Platform
 
-- Visit: `https://localhost:1234/hostedExplorer.html`
-- Local sign in via AAD will NOT work. Connection string only in dev mode. Use the Portal if you need AAD auth.
-- The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
+Setting the environment variable `PLATFORM` during the build process will force the explorer to load the specified platform. By default in development it will run in `Hosted` mode. Valid options:
+
+- Hosted
+- Emulator
+- Portal
+
+`PLATFORM=Emulator npm run watch`
+
+### Hosted Development
+
+The default webpack dev server configuration will proxy requests to the production portal backend: `https://main.documentdb.ext.azure.com`. This will allow you to use production connection strings on your local machine.
+
+To run pure hosted mode, in `webpack.config.js` change index HtmlWebpackPlugin to use hostedExplorer.html and change entry for index to use HostedExplorer.ts.
 
 ### Emulator Development
 
-- Start the Cosmos Emulator
-- Visit: https://localhost:1234/index.html
+In a window environment, running `npm run build` will automatically copy the built files from `/dist` over to the default emulator install paths. In a non-windows environment you can specify an alternate endpoint using `EMULATOR_ENDPOINT` and webpack dev server will proxy requests for you.
+
+`PLATFORM=Emulator EMULATOR_ENDPOINT=https://my-vm.azure.com:8081 npm run watch`
 
 #### Setting up a Remote Emulator
 
@@ -44,8 +55,16 @@ The Cosmos emulator currently only runs in Windows environments. You can still d
 
 ### Portal Development
 
-- Visit: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
-- You may have to manually visit https://localhost:1234/explorer.html first and click through any SSL certificate warnings
+The Cosmos Portal that consumes this repo is not currently open source. If you have access to this project, `npm run build` will copy the built files over to the portal where they will be loaded by the portal development environment
+
+You can however load a local running instance of data explorer in the production portal.
+
+1. Turn off browser SSL validation for localhost: chrome://flags/#allow-insecure-localhost OR Install valid SSL certs for localhost (on IE, follow these [instructions](https://www.technipages.com/ie-bypass-problem-with-this-websites-security-certificate) to install the localhost certificate in the right place)
+2. Allowlist `https://localhost:1234` domain for CORS in the Azure Cosmos DB portal
+3. Start the project in portal mode: `PLATFORM=Portal npm run watch`
+4. Load the portal using the following link: https://ms.portal.azure.com/?dataExplorerSource=https%3A%2F%2Flocalhost%3A1234%2Fexplorer.html
+
+Live reload will occur, but data explorer will not properly integrate again with the parent iframe. You will have to manually reload the page.
 
 ### Testing
 
```
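The rewritten README describes `PLATFORM` as a build-time switch between Hosted, Emulator, and Portal modes. As a rough, hypothetical sketch of how such a switch is commonly surfaced to application code through webpack (the variable name comes from the README; the plugin wiring below is an assumption, not this repository's actual webpack.config.js):

```ts
// Hypothetical sketch only: one common way to expose a PLATFORM env var to the bundle.
import webpack from "webpack";

const platform = process.env.PLATFORM || "Hosted"; // README: Hosted is the dev default

export const plugins = [
  new webpack.DefinePlugin({
    "process.env.PLATFORM": JSON.stringify(platform),
  }),
];
// Invoked as in the README, e.g. `PLATFORM=Emulator npm run watch`.
```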
```diff
@@ -1,3 +1,3 @@
 module.exports = {
-  presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"]
+  presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
 };
```
1963  externals/adal.js  (vendored, new file; diff suppressed because it is too large)
```diff
@@ -6,6 +6,6 @@ module.exports = {
   slowMo: 55,
   defaultViewport: null,
   ignoreHTTPSErrors: true,
-  args: ["--disable-web-security"]
-}
+  args: ["--disable-web-security"],
+},
 };
```
```diff
@@ -1,5 +1,5 @@
 module.exports = {
   preset: "jest-puppeteer",
   testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
-  setupFiles: ["dotenv/config"]
+  setupFiles: ["dotenv/config"],
 };
```
```diff
@@ -42,8 +42,8 @@ module.exports = {
       branches: 20,
       functions: 24,
       lines: 30,
-      statements: 29.0
-    }
+      statements: 29.0,
+    },
   },
 
   // Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
     "office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
     "^dnd-core$": "dnd-core/dist/cjs",
     "^react-dnd$": "react-dnd/dist/cjs",
-    "^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
+    "^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs",
   },
 
   // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
   // A map from regular expressions to paths to transformers
   transform: {
     "^.+\\.html?$": "html-loader-jest",
-    "^.+\\.[t|j]sx?$": "babel-jest"
+    "^.+\\.[t|j]sx?$": "babel-jest",
   },
 
   // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
-  transformIgnorePatterns: ["/node_modules/", "/externals/"]
+  transformIgnorePatterns: ["/node_modules/", "/externals/"],
 
   // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
   // unmockedModulePathPatterns: undefined,
```
39  package-lock.json  (generated)

```diff
@@ -275,27 +275,6 @@
         }
       }
     },
-    "@azure/msal-browser": {
-      "version": "2.8.0",
-      "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-2.8.0.tgz",
-      "integrity": "sha512-I6n7EQnwsZXgKPOLlS5X48jhzUNUFwMVm180wDBA/pwEkUy8ei6zWiPMBfWaMSxz9uNx9WHaEhgAyhJy0ze3AQ==",
-      "requires": {
-        "@azure/msal-common": "^2.0.0"
-      }
-    },
-    "@azure/msal-common": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-2.0.0.tgz",
-      "integrity": "sha512-d1RNcJb+P1EGzMHtgbZoVlHLQWjlVfr504jywNk9YEfoq8Hw3BxJ0wepu+1w0hc64D8zG0wljcvHaIH1jTn2SA==",
-      "requires": {
-        "debug": "^4.1.1"
-      }
-    },
-    "@azure/msal-react": {
-      "version": "1.0.0-alpha.1",
-      "resolved": "https://registry.npmjs.org/@azure/msal-react/-/msal-react-1.0.0-alpha.1.tgz",
-      "integrity": "sha512-8BftMP1DyXf7/Fa7mxi14/fmHBdDGDUONmE8sm1T6w7ERJyY1RN7PZgdnUOcYcqj2xMnxfz9++8HsrzMrtMc0Q=="
-    },
     "@babel/code-frame": {
       "version": "7.10.4",
       "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz",
@@ -5469,6 +5448,12 @@
       "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.2.0.tgz",
       "integrity": "sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA=="
     },
+    "adal-angular": {
+      "version": "1.0.15",
+      "resolved": "https://registry.npmjs.org/adal-angular/-/adal-angular-1.0.15.tgz",
+      "integrity": "sha1-8qnvgvNYxEToMUKs5l0yJ6RBBDs=",
+      "dev": true
+    },
     "agent-base": {
       "version": "6.0.2",
       "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
@@ -17280,9 +17265,9 @@
       "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ="
     },
     "prettier": {
-      "version": "1.19.1",
-      "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
-      "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
+      "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
+      "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
       "dev": true
     },
     "pretty-error": {
@@ -20743,9 +20728,9 @@
       }
     },
     "typescript": {
-      "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.2.tgz",
-      "integrity": "sha512-e4ERvRV2wb+rRZ/IQeb3jm2VxBsirQLpQhdxplZ2MEzGvDkkMmPglecnNDfSUBivMjP93vRbngYYDQqQ/78bcQ==",
+      "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.2.tgz",
+      "integrity": "sha512-thGloWsGH3SOxv1SoY7QojKi0tc+8FnOmiarEGMbd/lar7QOEd3hvlx3Fp5y6FlDUGl9L+pd4n2e+oToGMmhRQ==",
      "dev": true
     },
     "typestyle": {
```
```diff
@@ -6,10 +6,8 @@
   "dependencies": {
     "@azure/arm-cosmosdb": "9.1.0",
     "@azure/cosmos": "3.9.0",
-    "@azure/cosmos-language-service": "0.0.5",
     "@azure/identity": "1.1.0",
-    "@azure/msal-browser": "2.8.0",
-    "@azure/msal-react": "1.0.0-alpha.1",
+    "@azure/cosmos-language-service": "0.0.5",
     "@jupyterlab/services": "6.0.0-rc.2",
     "@jupyterlab/terminal": "3.0.0-rc.2",
     "@microsoft/applicationinsights-web": "2.5.9",
@@ -130,6 +128,7 @@
     "@types/webfontloader": "1.6.29",
     "@typescript-eslint/eslint-plugin": "4.0.1",
     "@typescript-eslint/parser": "4.0.1",
+    "adal-angular": "1.0.15",
     "axe-puppeteer": "1.1.0",
     "babel-jest": "24.9.0",
     "babel-loader": "8.1.0",
@@ -161,7 +160,7 @@
     "mini-css-extract-plugin": "0.4.3",
     "monaco-editor-webpack-plugin": "1.7.0",
     "node-fetch": "2.6.1",
-    "prettier": "1.19.1",
+    "prettier": "2.2.1",
     "puppeteer": "4.0.0",
     "raw-loader": "0.5.1",
     "rimraf": "3.0.0",
@@ -171,7 +170,7 @@
     "ts-loader": "6.2.2",
     "tslint": "5.11.0",
     "tslint-microsoft-contrib": "6.0.0",
-    "typescript": "4.0.2",
+    "typescript": "4.1.2",
     "url-loader": "1.1.1",
     "wait-on": "4.0.2",
     "webpack": "4.43.0",
```
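Relative to `hosted-msa`, the `package-up` side drops the runtime `@azure/msal-browser` and `@azure/msal-react` dependencies and adds `adal-angular` 1.0.15 as a devDependency, which lines up with the vendored `externals/adal.js` and the new `src/Definitions/adal.d.ts` elsewhere in this compare. For orientation only, a heavily hedged sketch of the classic ADAL.js surface this points back to; the import shape assumes the local `adal.d.ts` typings expose `AuthenticationContext` as a default export, and the tenant and client values are placeholders:

```ts
// Hypothetical usage sketch of ADAL.js via adal-angular; not code from this repository.
import AuthenticationContext from "adal-angular";

const authContext = new AuthenticationContext({
  tenant: "common", // placeholder
  clientId: "<aad-application-id>", // placeholder
  cacheLocation: "localStorage",
});

// Redirect-based sign-in, then a token for ARM. ADAL's callback passes an
// error description first and the token second.
authContext.login();
authContext.acquireToken("https://management.azure.com/", (errorDesc, token) => {
  if (!token) {
    console.error("Token acquisition failed:", errorDesc);
    return;
  }
  console.log("Acquired ARM token of length", token.length);
});
```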
```diff
@@ -2,5 +2,5 @@ export enum AuthType {
   AAD = "aad",
   EncryptedToken = "encryptedtoken",
   MasterKey = "masterkey",
-  ResourceToken = "resourcetoken"
+  ResourceToken = "resourcetoken",
 }
```
```diff
@@ -13,7 +13,7 @@ export class BindingHandlersRegisterer {
     ) {
       const value = ko.unwrap(wrappedValueAccessor());
       bindingContext?.$data.isTemplateReady(value);
-    }
+    },
   } as ko.BindingHandler;
 
   ReactBindingHandler.Registerer.register();
```
```diff
@@ -42,7 +42,7 @@ export class Registerer {
 
         // Initial rendering at mount point
         ReactDOM.render(adapter.renderComponent(), element);
-      }
+      },
     } as ko.BindingHandler;
   }
 }
```
```diff
@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
   public forEach(key: string, iteratorFct: (value: T) => void) {
     const values = this.store.get(key);
     if (values) {
-      values.forEach(value => iteratorFct(value));
+      values.forEach((value) => iteratorFct(value));
     }
   }
 
```
```diff
@@ -14,7 +14,7 @@ export class CodeOfConductEndpoints {
 export class EndpointsRegex {
   public static readonly cassandra = [
     "AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
-    "HostName=(.*).cassandra.cosmos.azure.com"
+    "HostName=(.*).cassandra.cosmos.azure.com",
   ];
   public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
   public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -113,6 +113,7 @@ export class Features {
   public static readonly enableTtl = "enablettl";
   public static readonly enableNotebooks = "enablenotebooks";
   public static readonly enableGalleryPublish = "enablegallerypublish";
+  public static readonly enableCodeOfConduct = "enablecodeofconduct";
   public static readonly enableLinkInjection = "enablelinkinjection";
   public static readonly enableSpark = "enablespark";
   public static readonly livyEndpoint = "livyendpoint";
@@ -149,7 +150,7 @@ export class Spark {
     "Cosmos.Spark.D8s": "D8s / 8 cores / 32GB RAM",
     "Cosmos.Spark.D16s": "D16s / 16 cores / 64GB RAM",
     "Cosmos.Spark.D32s": "D32s / 32 cores / 128GB RAM",
-    "Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM"
+    "Cosmos.Spark.D64s": "D64s / 64 cores / 256GB RAM",
   });
 }
 
@@ -164,7 +165,7 @@ export class MongoDBAccounts {
 
 export enum MongoBackendEndpointType {
   local,
-  remote
+  remote,
 }
 
 // TODO: 435619 Add default endpoints per cloud and use regional only when available
@@ -291,7 +292,7 @@ export class HttpStatusCodes {
     HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
     HttpStatusCodes.BadGateway,
     HttpStatusCodes.ServiceUnavailable,
-    HttpStatusCodes.GatewayTimeout
+    HttpStatusCodes.GatewayTimeout,
   ];
 }
 
@@ -347,10 +348,7 @@ export class HashRoutePrefixes {
   public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
     const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
 
-    return transformedDatabasePrefix
-      .replace("{coll_id}", collectionId)
-      .replace("{doc_id}", docId)
-      .replace("/", ""); // strip the first slash since hasher adds it
+    return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
   }
 }
 
@@ -396,7 +394,7 @@ export class OfferVersions {
 export enum ConflictOperationType {
   Replace = "replace",
   Create = "create",
-  Delete = "delete"
+  Delete = "delete",
 }
 
 export const EmulatorMasterKey =
```
```diff
@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
     resourceId: "",
     resourceType: "dbs" as ResourceType,
     headers: {},
-    getAuthorizationTokenUsingMasterKey: () => ""
+    getAuthorizationTokenUsingMasterKey: () => "",
   };
 
   beforeEach(() => {
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
     });
     window.fetch = jest.fn().mockImplementation(() => {
       return {
         json: () => "{}",
-        headers: new Map()
+        headers: new Map(),
       };
     });
   });
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
 
   it("does not call the auth service if a master key is set", async () => {
     updateUserContext({
-      masterKey: "foo"
+      masterKey: "foo",
     });
     await tokenProvider(options);
     expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
     window.fetch = jest.fn().mockImplementation(() => {
       return {
         json: () => "{}",
-        headers: new Map()
+        headers: new Map(),
       };
     });
   });
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
 
   it("builds the correct URL in production", () => {
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+      BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
     });
     getTokenFromAuthService("GET", "dbs", "foo");
     expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
 
   it("builds the correct URL in dev", () => {
     updateConfigContext({
-      BACKEND_ENDPOINT: "https://localhost:1234"
+      BACKEND_ENDPOINT: "https://localhost:1234",
     });
     getTokenFromAuthService("GET", "dbs", "foo");
     expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
           documentEndpoint: "bar",
           gremlinEndpoint: "foo",
           tableEndpoint: "foo",
-          cassandraEndpoint: "foo"
-        }
-      }
+          cassandraEndpoint: "foo",
+        },
+      },
     });
     expect(endpoint()).toEqual("bar");
   });
   it("uses _endpoint if set", () => {
     updateUserContext({
-      endpoint: "baz"
+      endpoint: "baz",
     });
     expect(endpoint()).toEqual("baz");
   });
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
     updateConfigContext({
       platform: Platform.Hosted,
       BACKEND_ENDPOINT: "https://localhost:1234",
-      PROXY_PATH: "/proxy"
+      PROXY_PATH: "/proxy",
     });
     const headers = {};
     const endpoint = "https://docs.azure.com";
```
```diff
@@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
     method: "POST",
     headers: {
       "content-type": "application/json",
-      "x-ms-encrypted-auth-token": userContext.accessToken
+      "x-ms-encrypted-auth-token": userContext.accessToken,
     },
     body: JSON.stringify({
       verb,
       resourceType,
-      resourceId
-    })
+      resourceId,
+    }),
   });
   //TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
   const result = JSON.parse(await response.json());
@@ -81,9 +81,9 @@ export function client(): Cosmos.CosmosClient {
     key: userContext.masterKey,
     tokenProvider,
     connectionPolicy: {
-      enableEndpointDiscovery: false
+      enableEndpointDiscovery: false,
     },
-    userAgentSuffix: "Azure Portal"
+    userAgentSuffix: "Azure Portal",
   };
 
   if (configContext.PROXY_PATH !== undefined) {
```
```diff
@@ -1,5 +1,5 @@
-import { getCommonQueryOptions } from "./queryDocuments";
-import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
+import { getCommonQueryOptions } from "./DataAccessUtilityBase";
+import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
 
 describe("getCommonQueryOptions", () => {
   it("builds the correct default options objects", () => {
```
155  src/Common/DataAccessUtilityBase.ts  (new file)

```diff
@@ -0,0 +1,155 @@
+import { ConflictDefinition, FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
+import Q from "q";
+import * as DataModels from "../Contracts/DataModels";
+import * as ViewModels from "../Contracts/ViewModels";
+import ConflictId from "../Explorer/Tree/ConflictId";
+import DocumentId from "../Explorer/Tree/DocumentId";
+import StoredProcedure from "../Explorer/Tree/StoredProcedure";
+import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
+import * as Constants from "./Constants";
+import { client } from "./CosmosClient";
+
+export function getCommonQueryOptions(options: FeedOptions): any {
+  const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
+  options = options || {};
+  options.populateQueryMetrics = true;
+  options.enableScanInQuery = options.enableScanInQuery || true;
+  if (!options.partitionKey) {
+    options.forceQueryPlan = true;
+  }
+  options.maxItemCount =
+    options.maxItemCount ||
+    (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
+    Constants.Queries.itemsPerPage;
+  options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
+
+  return options;
+}
+
+export function queryDocuments(
+  databaseId: string,
+  containerId: string,
+  query: string,
+  options: any
+): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
+  options = getCommonQueryOptions(options);
+  const documentsIterator = client().database(databaseId).container(containerId).items.query(query, options);
+  return Q(documentsIterator);
+}
+
+export function getPartitionKeyHeaderForConflict(conflictId: ConflictId): Object {
+  const partitionKeyDefinition: DataModels.PartitionKey = conflictId.partitionKey;
+  const partitionKeyValue: any = conflictId.partitionKeyValue;
+
+  return getPartitionKeyHeader(partitionKeyDefinition, partitionKeyValue);
+}
+
+export function getPartitionKeyHeader(partitionKeyDefinition: DataModels.PartitionKey, partitionKeyValue: any): Object {
+  if (!partitionKeyDefinition) {
+    return undefined;
+  }
+
+  if (partitionKeyValue === undefined) {
+    return [{}];
+  }
+
+  return [partitionKeyValue];
+}
+
+export function updateDocument(
+  collection: ViewModels.CollectionBase,
+  documentId: DocumentId,
+  newDocument: any
+): Q.Promise<any> {
+  const partitionKey = documentId.partitionKeyValue;
+
+  return Q(
+    client()
+      .database(collection.databaseId)
+      .container(collection.id())
+      .item(documentId.id(), partitionKey)
+      .replace(newDocument)
+      .then((response) => response.resource)
+  );
+}
+
+export function executeStoredProcedure(
+  collection: ViewModels.Collection,
+  storedProcedure: StoredProcedure,
+  partitionKeyValue: any,
+  params: any[]
+): Q.Promise<any> {
+  // TODO remove this deferred. Kept it because of timeout code at bottom of function
+  const deferred = Q.defer<any>();
+
+  client()
+    .database(collection.databaseId)
+    .container(collection.id())
+    .scripts.storedProcedure(storedProcedure.id())
+    .execute(partitionKeyValue, params, { enableScriptLogging: true })
+    .then((response) =>
+      deferred.resolve({
+        result: response.resource,
+        scriptLogs: response.headers[Constants.HttpHeaders.scriptLogResults],
+      })
+    )
+    .catch((error) => deferred.reject(error));
+
+  return deferred.promise.timeout(
+    Constants.ClientDefaults.requestTimeoutMs,
+    `Request timed out while executing stored procedure ${storedProcedure.id()}`
+  );
+}
+
+export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
+  return Q(
+    client()
+      .database(collection.databaseId)
+      .container(collection.id())
+      .items.create(newDocument)
+      .then((response) => response.resource)
+  );
+}
+
+export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
+  const partitionKey = documentId.partitionKeyValue;
+
+  return Q(
+    client()
+      .database(collection.databaseId)
+      .container(collection.id())
+      .item(documentId.id(), partitionKey)
+      .read()
+      .then((response) => response.resource)
+  );
+}
+
+export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
+  const partitionKey = documentId.partitionKeyValue;
+
+  return Q(
+    client().database(collection.databaseId).container(collection.id()).item(documentId.id(), partitionKey).delete()
+  );
+}
+
+export function deleteConflict(
+  collection: ViewModels.CollectionBase,
+  conflictId: ConflictId,
+  options: any = {}
+): Q.Promise<any> {
+  options.partitionKey = options.partitionKey || getPartitionKeyHeaderForConflict(conflictId);
+
+  return Q(
+    client().database(collection.databaseId).container(collection.id()).conflict(conflictId.id()).delete(options)
+  );
+}
+
+export function queryConflicts(
+  databaseId: string,
+  containerId: string,
+  query: string,
+  options: any
+): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
+  const documentsIterator = client().database(databaseId).container(containerId).conflicts.query(query, options);
+  return Q(documentsIterator);
+}
```
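The new module wraps the `@azure/cosmos` SDK behind Q promises. A short usage sketch based only on the signatures shown above; the database, container, and document values are hypothetical, not taken from the repository:

```ts
// Hypothetical caller of the new DataAccessUtilityBase module.
import * as ViewModels from "../Contracts/ViewModels";
import { createDocument, queryDocuments } from "./DataAccessUtilityBase";

export async function listAndAdd(collection: ViewModels.CollectionBase): Promise<void> {
  // queryDocuments runs the query through getCommonQueryOptions, which fills in
  // page size, query metrics, and parallelism from local storage settings.
  const iterator = await queryDocuments("WebStore", "Orders", "SELECT * FROM c", {});
  const page = await iterator.fetchNext();
  console.log(`Fetched ${page.resources.length} documents`);

  await createDocument(collection, { id: "order-001", total: 42 });
}
```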
217  src/Common/DocumentClientUtilityBase.ts  (new file)

```diff
@@ -0,0 +1,217 @@
+import { ConflictDefinition, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
+import Q from "q";
+import * as ViewModels from "../Contracts/ViewModels";
+import ConflictId from "../Explorer/Tree/ConflictId";
+import DocumentId from "../Explorer/Tree/DocumentId";
+import StoredProcedure from "../Explorer/Tree/StoredProcedure";
+import { logConsoleInfo, logConsoleProgress } from "../Utils/NotificationConsoleUtils";
+import * as Constants from "./Constants";
+import * as DataAccessUtilityBase from "./DataAccessUtilityBase";
+import { MinimalQueryIterator, nextPage } from "./IteratorUtilities";
+import { handleError } from "./ErrorHandlingUtils";
+
+// TODO: Log all promise resolutions and errors with verbosity levels
+export function queryDocuments(
+  databaseId: string,
+  containerId: string,
+  query: string,
+  options: any
+): Q.Promise<QueryIterator<ItemDefinition & Resource>> {
+  return DataAccessUtilityBase.queryDocuments(databaseId, containerId, query, options);
+}
+
+export function queryConflicts(
+  databaseId: string,
+  containerId: string,
+  query: string,
+  options: any
+): Q.Promise<QueryIterator<ConflictDefinition & Resource>> {
+  return DataAccessUtilityBase.queryConflicts(databaseId, containerId, query, options);
+}
+
+export function getEntityName() {
+  const defaultExperience =
+    window.dataExplorer && window.dataExplorer.defaultExperience && window.dataExplorer.defaultExperience();
+  if (defaultExperience === Constants.DefaultAccountExperience.MongoDB) {
+    return "document";
+  }
+  return "item";
+}
+
+export function executeStoredProcedure(
+  collection: ViewModels.Collection,
+  storedProcedure: StoredProcedure,
+  partitionKeyValue: any,
+  params: any[]
+): Q.Promise<any> {
+  var deferred = Q.defer<any>();
+
+  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
+  DataAccessUtilityBase.executeStoredProcedure(collection, storedProcedure, partitionKeyValue, params)
+    .then(
+      (response: any) => {
+        deferred.resolve(response);
+        logConsoleInfo(
+          `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
+        );
+      },
+      (error: any) => {
+        handleError(
+          error,
+          "ExecuteStoredProcedure",
+          `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
+        );
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
+
+export function queryDocumentsPage(
+  resourceName: string,
+  documentsIterator: MinimalQueryIterator,
+  firstItemIndex: number,
+  options: any
+): Q.Promise<ViewModels.QueryResults> {
+  var deferred = Q.defer<ViewModels.QueryResults>();
+  const entityName = getEntityName();
+  const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
+  Q(nextPage(documentsIterator, firstItemIndex))
+    .then(
+      (result: ViewModels.QueryResults) => {
+        const itemCount = (result.documents && result.documents.length) || 0;
+        logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
+        deferred.resolve(result);
+      },
+      (error: any) => {
+        handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
+
+export function readDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
+  var deferred = Q.defer<any>();
+  const entityName = getEntityName();
+  const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
+  DataAccessUtilityBase.readDocument(collection, documentId)
+    .then(
+      (document: any) => {
+        deferred.resolve(document);
+      },
+      (error: any) => {
+        handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
+
+export function updateDocument(
+  collection: ViewModels.CollectionBase,
+  documentId: DocumentId,
+  newDocument: any
+): Q.Promise<any> {
+  var deferred = Q.defer<any>();
+  const entityName = getEntityName();
+  const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
+  DataAccessUtilityBase.updateDocument(collection, documentId, newDocument)
+    .then(
+      (updatedDocument: any) => {
+        logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
+        deferred.resolve(updatedDocument);
+      },
+      (error: any) => {
+        handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
+
+export function createDocument(collection: ViewModels.CollectionBase, newDocument: any): Q.Promise<any> {
+  var deferred = Q.defer<any>();
+  const entityName = getEntityName();
+  const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
+  DataAccessUtilityBase.createDocument(collection, newDocument)
+    .then(
+      (savedDocument: any) => {
+        logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
+        deferred.resolve(savedDocument);
+      },
+      (error: any) => {
+        handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
+
+export function deleteDocument(collection: ViewModels.CollectionBase, documentId: DocumentId): Q.Promise<any> {
+  var deferred = Q.defer<any>();
+  const entityName = getEntityName();
+  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
+  DataAccessUtilityBase.deleteDocument(collection, documentId)
+    .then(
+      (response: any) => {
+        logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
+        deferred.resolve(response);
+      },
+      (error: any) => {
+        handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
+
+export function deleteConflict(
+  collection: ViewModels.CollectionBase,
+  conflictId: ConflictId,
+  options?: any
+): Q.Promise<any> {
+  var deferred = Q.defer<any>();
+
+  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
+  DataAccessUtilityBase.deleteConflict(collection, conflictId, options)
+    .then(
+      (response: any) => {
+        logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
+        deferred.resolve(response);
+      },
+      (error: any) => {
+        handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
+        deferred.reject(error);
+      }
+    )
+    .finally(() => {
+      clearMessage();
+    });
+
+  return deferred.promise;
+}
```
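DocumentClientUtilityBase layers notification-console logging and error handling on top of DataAccessUtilityBase. A sketch of the intended call pattern, using only functions defined above; the caller, collection, and document here are placeholders, not repository code:

```ts
// Hypothetical caller: names and document shape are placeholders, not repo code.
import * as ViewModels from "../Contracts/ViewModels";
import DocumentId from "../Explorer/Tree/DocumentId";
import * as DocumentClientUtilityBase from "./DocumentClientUtilityBase";

export function saveAndReload(
  collection: ViewModels.CollectionBase,
  documentId: DocumentId,
  editedContent: unknown
) {
  // updateDocument logs progress to the notification console and routes
  // failures through handleError, so the caller only handles the happy path.
  return DocumentClientUtilityBase.updateDocument(collection, documentId, editedContent).then(() =>
    // Re-read to pick up server-generated fields (for example _etag).
    DocumentClientUtilityBase.readDocument(collection, documentId)
  );
}
```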
```diff
@@ -1,10 +0,0 @@
-import { DefaultAccountExperienceType } from "../DefaultAccountExperienceType";
-import { userContext } from "../UserContext";
-
-export const getEntityName = (): string => {
-  if (userContext.defaultExperience === DefaultAccountExperienceType.MongoDB) {
-    return "document";
-  }
-
-  return "item";
-};
```
```diff
@@ -73,7 +73,7 @@ export default class EditableUtility {
       return false;
     });
 
-    observable.subscribe(edit => {
+    observable.subscribe((edit) => {
       var edits = observable.edits && observable.edits();
       if (!edits) {
         return;
@@ -83,9 +83,9 @@ export default class EditableUtility {
     });
 
     observable.editableIsValid = ko.observable<boolean>(true);
-    observable.subscribe(value => {
+    observable.subscribe((value) => {
       const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
-      const isValid = validations.every(validate => validate(value));
+      const isValid = validations.every((validate) => validate(value));
       observable.editableIsValid(isValid);
     });
 
```
```diff
@@ -1,6 +1,8 @@
-export function normalizeArmEndpoint(uri: string): string {
+export default class EnvironmentUtility {
+  public static normalizeArmEndpointUri(uri: string): string {
   if (uri && uri.slice(-1) !== "/") {
     return `${uri}/`;
   }
   return uri;
 }
+}
```
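The left-hand side of this hunk exports a bare `normalizeArmEndpoint` function, while the right-hand side wraps the same logic in an `EnvironmentUtility` class as a static `normalizeArmEndpointUri` method. A minimal sketch of the call site implied by the right-hand shape; the URI is a placeholder:

```ts
// Hypothetical call site for the static-method shape shown above.
import EnvironmentUtility from "./EnvironmentUtility";

const armEndpoint = EnvironmentUtility.normalizeArmEndpointUri("https://management.azure.com");
// A trailing slash is appended when missing, so this logs "https://management.azure.com/".
console.log(armEndpoint);
```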
```diff
@@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
     }
     sendMessage({
       type: MessageTypes.ForbiddenError,
-      reason: errorMessage
+      reason: errorMessage,
     });
   }
 };
```
```diff
@@ -11,8 +11,8 @@ describe("nextPage", () => {
         queryMetrics: {},
         requestCharge: 1,
         headers: {},
-        activityId: "foo"
-      })
+        activityId: "foo",
+      }),
     };
 
     expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();
```
```diff
@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
 // Pick<QueryIterator<any>, "fetchNext">;
 
 export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
-  return documentsIterator.fetchNext().then(response => {
+  return documentsIterator.fetchNext().then((response) => {
     const documents = response.resources;
     const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
     const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
       lastItemIndex: Number(firstItemIndex) + Number(itemCount),
       headers,
       activityId: response.activityId,
-      requestCharge: response.requestCharge
+      requestCharge: response.requestCharge,
     };
   });
 }
```
```diff
@@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
 function _logEntry(entry: Diagnostics.LogEntry): void {
   sendMessage({
     type: MessageTypes.LogInfo,
-    data: JSON.stringify(entry)
+    data: JSON.stringify(entry),
   });
 
   const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -60,6 +60,6 @@ function _generateLogEntry(
     level,
     message,
     area,
-    code
+    code,
   };
 }
```
```diff
@@ -6,7 +6,7 @@ describe("Message Handler", () => {
   let mockPromise = {
     id: "123",
     startTime: new Date(),
-    deferred: Q.defer<any>()
+    deferred: Q.defer<any>(),
   };
   let mockMessage = { message: { id: "123", data: "{}" } };
   MessageHandler.RequestMap[mockPromise.id] = mockPromise;
@@ -18,7 +18,7 @@ describe("Message Handler", () => {
     let message = {
       id: "123",
       startTime: new Date(),
-      deferred: Q.defer<any>()
+      deferred: Q.defer<any>(),
     };
 
     MessageHandler.handleCachedDataMessage(message);
```
```diff
@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
   let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
     deferred: Q.defer<TResponseDataModel>(),
     startTime: new Date(),
-    id: _.uniqueId()
+    id: _.uniqueId(),
   };
   RequestMap[cachedDataPromise.id] = cachedDataPromise;
   sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
   portalChildWindow.parent.postMessage(
     {
       signature: "pcIframe",
-      data: data
+      data: data,
     },
     portalChildWindow.document.referrer
   );
```
@@ -14,7 +14,7 @@ const fetchMock = () => {
 ok: true,
 text: () => "{}",
 json: () => "{}",
-headers: new Map()
+headers: new Map(),
 });
 };

@@ -27,8 +27,8 @@ const collection = {
 partitionKey: {
 paths: ["/pk"],
 kind: "Hash",
-version: 1
-}
+version: 1,
+},
 } as Collection;

 const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
 partitionKey: {
 paths: ["/pk"],
 kind: "Hash",
-version: 1
-}
+version: 1,
+},
 } as unknown) as DocumentId;

 const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
 documentEndpoint: "bar",
 gremlinEndpoint: "foo",
 tableEndpoint: "foo",
-cassandraEndpoint: "foo"
-}
+cassandraEndpoint: "foo",
+},
 } as DatabaseAccount;

 describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
 beforeEach(() => {
 resetConfigContext();
 updateUserContext({
-databaseAccount
+databaseAccount,
 });
 updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 });
 window.fetch = jest.fn().mockImplementation(fetchMock);
 });
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
 beforeEach(() => {
 resetConfigContext();
 updateUserContext({
-databaseAccount
+databaseAccount,
 });
 updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 });
 window.fetch = jest.fn().mockImplementation(fetchMock);
 });
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
 beforeEach(() => {
 resetConfigContext();
 updateUserContext({
-databaseAccount
+databaseAccount,
 });
 updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 });
 window.fetch = jest.fn().mockImplementation(fetchMock);
 });
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
 beforeEach(() => {
 resetConfigContext();
 updateUserContext({
-databaseAccount
+databaseAccount,
 });
 updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 });
 window.fetch = jest.fn().mockImplementation(fetchMock);
 });
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
 beforeEach(() => {
 resetConfigContext();
 updateUserContext({
-databaseAccount
+databaseAccount,
 });
 updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 });
 window.fetch = jest.fn().mockImplementation(fetchMock);
 });
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
 resetConfigContext();
 delete window.authType;
 updateUserContext({
-databaseAccount
+databaseAccount,
 });
 updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 });
 });

@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
 const defaultHeaders = {
 [HttpHeaders.apiType]: ApiType.MongoDB.toString(),
 [CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
-[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
+[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
 };

 function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
 let continuationToken: string;
 return {
 fetchNext: () => {
-return queryDocuments(databaseId, collection, false, query).then(response => {
+return queryDocuments(databaseId, collection, false, query).then((response) => {
 continuationToken = response.continuationToken;
 const headers: { [key: string]: string | number } = {};
 response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
 headers,
 requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
 activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
-hasMoreResults: !!continuationToken
+hasMoreResults: !!continuationToken,
 };
 });
-}
+},
 };
 }

@@ -74,7 +74,9 @@ export function queryDocuments(
 rg: userContext.resourceGroup,
 dba: databaseAccount.name,
 pk:
-collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
+collection && collection.partitionKey && !collection.partitionKey.systemKey
+? collection.partitionKeyProperty
+: "",
 };

 const endpoint = getEndpoint() || "";
@@ -87,7 +89,7 @@ export function queryDocuments(
 [CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
 [CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
 [CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
-[HttpHeaders.contentType]: "application/query+json"
+[HttpHeaders.contentType]: "application/query+json",
 };

 if (continuationToken) {
@@ -100,14 +102,14 @@ export function queryDocuments(
 .fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
 method: "POST",
 body: JSON.stringify({ query }),
-headers
+headers,
 })
-.then(async response => {
+.then(async (response) => {
 if (response.ok) {
 return {
 continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
 documents: (await response.json()).Documents as DataModels.DocumentId[],
-headers: response.headers
+headers: response.headers,
 };
 }
 errorHandling(response, "querying documents", params);
@@ -135,7 +137,9 @@ export function readDocument(
 rg: userContext.resourceGroup,
 dba: databaseAccount.name,
 pk:
-documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
+documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+? documentId.partitionKeyProperty
+: "",
 };

 const endpoint = getEndpoint();
@@ -147,10 +151,10 @@ export function readDocument(
 ...authHeaders(),
 [CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
 JSON.stringify(documentId.partitionKeyHeader())
-)
-}
+),
+},
 })
-.then(response => {
+.then((response) => {
 if (response.ok) {
 return response.json();
 }
@@ -175,7 +179,7 @@ export function createDocument(
 sid: userContext.subscriptionId,
 rg: userContext.resourceGroup,
 dba: databaseAccount.name,
-pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
+pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
 };

 const endpoint = getEndpoint();
@@ -186,10 +190,10 @@ export function createDocument(
 body: JSON.stringify(documentContent),
 headers: {
 ...defaultHeaders,
-...authHeaders()
-}
+...authHeaders(),
+},
 })
-.then(response => {
+.then((response) => {
 if (response.ok) {
 return response.json();
 }
@@ -218,7 +222,9 @@ export function updateDocument(
 rg: userContext.resourceGroup,
 dba: databaseAccount.name,
 pk:
-documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
+documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+? documentId.partitionKeyProperty
+: "",
 };
 const endpoint = getEndpoint();

@@ -230,10 +236,10 @@ export function updateDocument(
 ...defaultHeaders,
 ...authHeaders(),
 [HttpHeaders.contentType]: "application/json",
-[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
-}
+[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
+},
 })
-.then(response => {
+.then((response) => {
 if (response.ok) {
 return response.json();
 }
@@ -257,7 +263,9 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
 rg: userContext.resourceGroup,
 dba: databaseAccount.name,
 pk:
-documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
+documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
+? documentId.partitionKeyProperty
+: "",
 };
 const endpoint = getEndpoint();

@@ -268,10 +276,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
 ...defaultHeaders,
 ...authHeaders(),
 [HttpHeaders.contentType]: "application/json",
-[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
-}
+[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
+},
 })
-.then(response => {
+.then((response) => {
 if (response.ok) {
 return undefined;
 }
@@ -299,7 +307,7 @@ export function createMongoCollectionWithProxy(
 rg: userContext.resourceGroup,
 dba: databaseAccount.name,
 isAutoPilot: !!params.autoPilotMaxThroughput,
-autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
+autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
 };

 const endpoint = getEndpoint();
@@ -314,11 +322,11 @@ export function createMongoCollectionWithProxy(
 headers: {
 ...defaultHeaders,
 ...authHeaders(),
-[HttpHeaders.contentType]: "application/json"
-}
+[HttpHeaders.contentType]: "application/json",
+},
 }
 )
-.then(response => {
+.then((response) => {
 if (response.ok) {
 return response.json();
 }
@@ -9,14 +9,14 @@ describe("parseSDKOfferResponse", () => {
 offerThroughput: 500,
 collectionThroughputInfo: {
 minimumRUForCollection: 400,
-numPhysicalPartitions: 1
-}
+numPhysicalPartitions: 1,
 },
-id: "test"
+},
+id: "test",
 } as SDKOfferDefinition;

 const mockResponse = {
-resource: mockOfferDefinition
+resource: mockOfferDefinition,
 } as OfferResponse;

 const expectedResult: Offer = {
@@ -25,7 +25,6 @@ describe("parseSDKOfferResponse", () => {
 minimumThroughput: 400,
 id: "test",
 offerDefinition: mockOfferDefinition,
-offerReplacePending: false
 };

 expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
@@ -37,17 +36,17 @@ describe("parseSDKOfferResponse", () => {
 offerThroughput: 400,
 collectionThroughputInfo: {
 minimumRUForCollection: 400,
-numPhysicalPartitions: 1
+numPhysicalPartitions: 1,
 },
 offerAutopilotSettings: {
-maxThroughput: 5000
-}
+maxThroughput: 5000,
 },
-id: "test"
+},
+id: "test",
 } as SDKOfferDefinition;

 const mockResponse = {
-resource: mockOfferDefinition
+resource: mockOfferDefinition,
 } as OfferResponse;

 const expectedResult: Offer = {
@@ -56,7 +55,6 @@ describe("parseSDKOfferResponse", () => {
 minimumThroughput: 400,
 id: "test",
 offerDefinition: mockOfferDefinition,
-offerReplacePending: false
 };

 expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
@@ -1,6 +1,5 @@
 import { Offer, SDKOfferDefinition } from "../Contracts/DataModels";
 import { OfferResponse } from "@azure/cosmos";
-import { HttpHeaders } from "./Constants";

 export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
 const offerDefinition: SDKOfferDefinition = offerResponse?.resource;
@@ -19,7 +18,7 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
 manualThroughput: undefined,
 minimumThroughput,
 offerDefinition,
-offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
+headers: offerResponse.headers,
 };
 }

@@ -29,6 +28,6 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer => {
 manualThroughput: offerContent.offerThroughput,
 minimumThroughput,
 offerDefinition,
-offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
+headers: offerResponse.headers,
 };
 };
@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
 const headers = { [authorizationHeader.header]: authorizationHeader.token };

 const response = await window.fetch(url, {
-headers
+headers,
 });

 if (!response.ok) {
@@ -3,24 +3,22 @@ import * as _ from "underscore";
 import * as DataModels from "../Contracts/DataModels";
 import * as ViewModels from "../Contracts/ViewModels";
 import Explorer from "../Explorer/Explorer";
+import { ConsoleDataType } from "../Explorer/Menus/NotificationConsole/NotificationConsoleComponent";
 import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
 import DocumentId from "../Explorer/Tree/DocumentId";
 import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
 import { QueryUtils } from "../Utils/QueryUtils";
 import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
 import { userContext } from "../UserContext";
-import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
+import { createDocument, deleteDocument, queryDocuments, queryDocumentsPage } from "./DocumentClientUtilityBase";
 import { createCollection } from "./dataAccess/createCollection";
 import { handleError } from "./ErrorHandlingUtils";
-import { createDocument } from "./dataAccess/createDocument";
-import { deleteDocument } from "./dataAccess/deleteDocument";
-import { queryDocuments } from "./dataAccess/queryDocuments";

 export class QueriesClient {
 private static readonly PartitionKey: DataModels.PartitionKey = {
 paths: [`/${SavedQueries.PartitionKeyProperty}`],
 kind: BackendDefaults.partitionKeyKind,
-version: BackendDefaults.partitionKeyVersion
+version: BackendDefaults.partitionKeyVersion,
 };
 private static readonly FetchQuery: string = "SELECT * FROM c";
 private static readonly FetchMongoQuery: string = "{}";
@@ -33,18 +31,24 @@ export class QueriesClient {
 return Promise.resolve(queriesCollection.rawDataModel);
 }

-const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
+const id = NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.InProgress,
+"Setting up account for saving queries"
+);
 return createCollection({
 collectionId: SavedQueries.CollectionName,
 createNewDatabase: true,
 databaseId: SavedQueries.DatabaseName,
 partitionKey: QueriesClient.PartitionKey,
 offerThroughput: SavedQueries.OfferThroughput,
-databaseLevelThroughput: false
+databaseLevelThroughput: false,
 })
 .then(
 (collection: DataModels.Collection) => {
-NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Info,
+"Successfully set up account for saving queries"
+);
 return Promise.resolve(collection);
 },
 (error: any) => {
@@ -52,14 +56,17 @@ export class QueriesClient {
 return Promise.reject(error);
 }
 )
-.finally(() => clearMessage());
+.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
 }

 public async saveQuery(query: DataModels.Query): Promise<void> {
 const queriesCollection = this.findQueriesCollection();
 if (!queriesCollection) {
 const errorMessage: string = "Account not set up to perform saved query operations";
-NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Error,
+`Failed to save query ${query.queryName}: ${errorMessage}`
+);
 return Promise.reject(errorMessage);
 }

@@ -67,16 +74,25 @@ export class QueriesClient {
 this.validateQuery(query);
 } catch (error) {
 const errorMessage: string = "Invalid query specified";
-NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Error,
+`Failed to save query ${query.queryName}: ${errorMessage}`
+);
 return Promise.reject(errorMessage);
 }

-const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
+const id = NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.InProgress,
+`Saving query ${query.queryName}`
+);
 query.id = query.queryName;
 return createDocument(queriesCollection, query)
 .then(
 (savedQuery: DataModels.Query) => {
-NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Info,
+`Successfully saved query ${query.queryName}`
+);
 return Promise.resolve();
 },
 (error: any) => {
@@ -87,29 +103,28 @@ export class QueriesClient {
 return Promise.reject(error);
 }
 )
-.finally(() => clearMessage());
+.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
 }

 public async getQueries(): Promise<DataModels.Query[]> {
 const queriesCollection = this.findQueriesCollection();
 if (!queriesCollection) {
 const errorMessage: string = "Account not set up to perform saved query operations";
-NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Error,
+`Failed to fetch saved queries: ${errorMessage}`
+);
 return Promise.reject(errorMessage);
 }

 const options: any = { enableCrossPartitionQuery: true };
-const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
-const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
-SavedQueries.DatabaseName,
-SavedQueries.CollectionName,
-this.fetchQueriesQuery(),
-options
-);
-const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
-await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
-return QueryUtils.queryAllPages(fetchQueries)
+const id = NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.InProgress, "Fetching saved queries");
+return queryDocuments(SavedQueries.DatabaseName, SavedQueries.CollectionName, this.fetchQueriesQuery(), options)
 .then(
+(queryIterator: QueryIterator<ItemDefinition & Resource>) => {
+const fetchQueries = (firstItemIndex: number): Q.Promise<ViewModels.QueryResults> =>
+queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex, options);
+return QueryUtils.queryAllPages(fetchQueries).then(
 (results: ViewModels.QueryResults) => {
 let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
 if (!document) {
@@ -120,7 +135,7 @@ export class QueriesClient {
 resourceId: resourceId,
 queryName: queryName,
 query: query,
-id: id
+id: id,
 };
 try {
 this.validateQuery(parsedQuery);
@@ -130,22 +145,32 @@ export class QueriesClient {
 }
 });
 queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
-NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
+NotificationConsoleUtils.logConsoleMessage(ConsoleDataType.Info, "Successfully fetched saved queries");
 return Promise.resolve(queries);
 },
 (error: any) => {
 handleError(error, "getSavedQueries", "Failed to fetch saved queries");
 return Promise.reject(error);
 }
+);
+},
+(error: any) => {
+// should never get into this state but we handle this regardless
+handleError(error, "getSavedQueries", "Failed to fetch saved queries");
+return Promise.reject(error);
+}
 )
-.finally(() => clearMessage());
+.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
 }

 public async deleteQuery(query: DataModels.Query): Promise<void> {
 const queriesCollection = this.findQueriesCollection();
 if (!queriesCollection) {
 const errorMessage: string = "Account not set up to perform saved query operations";
-NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Error,
+`Failed to fetch saved queries: ${errorMessage}`
+);
 return Promise.reject(errorMessage);
 }

@@ -153,15 +178,21 @@ export class QueriesClient {
 this.validateQuery(query);
 } catch (error) {
 const errorMessage: string = "Invalid query specified";
-NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Error,
+`Failed to delete query ${query.queryName}: ${errorMessage}`
+);
 }

-const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
+const id = NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.InProgress,
+`Deleting query ${query.queryName}`
+);
 query.id = query.queryName;
 const documentId = new DocumentId(
 {
 partitionKey: QueriesClient.PartitionKey,
-partitionKeyProperty: "id"
+partitionKeyProperty: "id",
 } as DocumentsTab,
 query,
 query.queryName
@@ -170,7 +201,10 @@ export class QueriesClient {
 return deleteDocument(queriesCollection, documentId)
 .then(
 () => {
-NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
+NotificationConsoleUtils.logConsoleMessage(
+ConsoleDataType.Info,
+`Successfully deleted query ${query.queryName}`
+);
 return Promise.resolve();
 },
 (error: any) => {
@@ -178,7 +212,7 @@ export class QueriesClient {
 return Promise.reject(error);
 }
 )
-.finally(() => clearMessage());
+.finally(() => NotificationConsoleUtils.clearInProgressMessageWithId(id));
 }

 public getResourceId(): string {
@@ -4,7 +4,7 @@ import { SplitterMetrics } from "./Constants";

 export enum SplitterDirection {
 Horizontal = "horizontal",
-Vertical = "vertical"
+Vertical = "vertical",
 }

 export interface SplitterBounds {
@@ -50,7 +50,7 @@ export class Splitter {
 animate: true,
 animateDuration: "fast",
 start: this.onResizeStart,
-stop: this.onResizeStop
+stop: this.onResizeStop,
 };

 if (isVerticalSplitter) {
@@ -90,9 +90,7 @@ export class Splitter {
 this.lastWidth = $(this.leftSide).width();
 $(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
 $(this.leftSide).css("width", "");
-$(this.leftSide)
-.resizable("option", "disabled", true)
-.removeClass("ui-resizable-disabled"); // remove class so splitter is visible
+$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
 $(this.splitter).removeClass("ui-resizable-e");
 this.isCollapsed(true);
 }
@@ -32,8 +32,8 @@ export default class UrlUtility {
 type: type,
 objectBody: {
 id: id,
-self: resourcePath
-}
+self: resourcePath,
+},
 };

 return result;
@@ -1,5 +1,6 @@
 jest.mock("../../Utils/arm/request");
 jest.mock("../CosmosClient");
+jest.mock("../DataAccessUtilityBase");
 import { AuthType } from "../../AuthType";
 import { CreateCollectionParams, DatabaseAccount } from "../../Contracts/DataModels";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
@@ -14,15 +15,15 @@ describe("createCollection", () => {
 collectionId: "testContainer",
 databaseId: "testDatabase",
 databaseLevelThroughput: true,
-offerThroughput: 400
+offerThroughput: 400,
 };

 beforeAll(() => {
 updateUserContext({
 databaseAccount: {
-name: "test"
+name: "test",
 } as DatabaseAccount,
-defaultExperience: DefaultAccountExperienceType.DocumentDB
+defaultExperience: DefaultAccountExperienceType.DocumentDB,
 });
 });

@@ -40,12 +41,12 @@ describe("createCollection", () => {
 return {
 database: {
 containers: {
-create: () => ({})
-}
-}
+create: () => ({}),
+},
+},
 };
-}
-}
+},
+},
 });
 await createCollection(createCollectionParams);
 expect(client).toHaveBeenCalled();
@@ -59,7 +60,7 @@ describe("createCollection", () => {
 collectionId: "testContainer",
 databaseId: "testDatabase",
 databaseLevelThroughput: false,
-offerThroughput: 400
+offerThroughput: 400,
 };
 expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });

@@ -69,12 +70,12 @@ describe("createCollection", () => {
 databaseId: "testDatabase",
 databaseLevelThroughput: false,
 offerThroughput: 400,
-autoPilotMaxThroughput: 4000
+autoPilotMaxThroughput: 4000,
 };
 expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
 autoscaleSettings: {
-maxThroughput: 4000
-}
+maxThroughput: 4000,
+},
 });
 });
 });
@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
 import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import {
 createUpdateCassandraTable,
-getCassandraTable
+getCassandraTable,
 } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import {
 createUpdateMongoDBCollection,
-getMongoDBCollection
+getMongoDBCollection,
 } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import {
 createUpdateGremlinGraph,
-getGremlinGraph
+getGremlinGraph,
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
 import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
 autoPilotMaxThroughput: params.autoPilotMaxThroughput,
 databaseId: params.databaseId,
 databaseLevelThroughput: params.databaseLevelThroughput,
-offerThroughput: params.offerThroughput
+offerThroughput: params.offerThroughput,
 };
 await createDatabase(createDatabaseParams);
 }
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr

 const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
 const resource: ARMTypes.SqlContainerResource = {
-id: params.collectionId
+id: params.collectionId,
 };
 if (params.analyticalStorageTtl) {
 resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
 const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
 properties: {
 resource,
-options
-}
+options,
+},
 };

 const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):

 const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
 const resource: ARMTypes.MongoDBCollectionResource = {
-id: params.collectionId
+id: params.collectionId,
 };
 if (params.analyticalStorageTtl) {
 resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
 const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
 properties: {
 resource,
-options
-}
+options,
+},
 };

 const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):

 if (params.createMongoWildcardIndex) {
 TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
-message: "Mongo Collection created with wildcard index on all fields."
+message: "Mongo Collection created with wildcard index on all fields.",
 });
 }

@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):

 const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
 const resource: ARMTypes.CassandraTableResource = {
-id: params.collectionId
+id: params.collectionId,
 };
 if (params.analyticalStorageTtl) {
 resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
 const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
 properties: {
 resource,
-options
-}
+options,
+},
 };

 const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D

 const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
 const resource: ARMTypes.GremlinGraphResource = {
-id: params.collectionId
+id: params.collectionId,
 };

 if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
 const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
 properties: {
 resource,
-options
-}
+options,
+},
 };

 const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D

 const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
 const resource: ARMTypes.TableResource = {
-id: params.collectionId
+id: params.collectionId,
 };

 const rpPayload: ARMTypes.TableCreateUpdateParameters = {
 properties: {
 resource,
-options
-}
+options,
+},
 };

 const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
 if (params.autoPilotMaxThroughput) {
 return {
 autoscaleSettings: {
-maxThroughput: params.autoPilotMaxThroughput
-}
+maxThroughput: params.autoPilotMaxThroughput,
+},
 };
 }

 return {
-throughput: params.offerThroughput
+throughput: params.offerThroughput,
 };
 };

@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
 partitionKey: params.partitionKey || undefined,
 indexingPolicy: params.indexingPolicy || undefined,
 uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
-analyticalStorageTtl: params.analyticalStorageTtl
+analyticalStorageTtl: params.analyticalStorageTtl,
 } as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
 const collectionOptions: RequestOptions = {};
 const createDatabaseBody: DatabaseRequest = { id: params.databaseId };
@@ -8,21 +8,21 @@ import {
 GremlinDatabaseCreateUpdateParameters,
 MongoDBDatabaseCreateUpdateParameters,
 SqlDatabaseCreateUpdateParameters,
-CreateUpdateOptions
+CreateUpdateOptions,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import {
 createUpdateCassandraKeyspace,
-getCassandraKeyspace
+getCassandraKeyspace,
 } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import {
 createUpdateMongoDBDatabase,
-getMongoDBDatabase
+getMongoDBDatabase,
 } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import {
 createUpdateGremlinDatabase,
-getGremlinDatabase
+getGremlinDatabase,
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
 const rpPayload: SqlDatabaseCreateUpdateParameters = {
 properties: {
 resource: {
-id: params.databaseId
+id: params.databaseId,
+},
+options,
 },
-options
-}
 };
 const createResponse = await createUpdateSqlDatabase(
 userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
 const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
 properties: {
 resource: {
-id: params.databaseId
+id: params.databaseId,
+},
+options,
 },
-options
-}
 };
 const createResponse = await createUpdateMongoDBDatabase(
 userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
 const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
 properties: {
 resource: {
-id: params.databaseId
+id: params.databaseId,
+},
+options,
 },
-options
-}
 };
 const createResponse = await createUpdateCassandraKeyspace(
 userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
 const rpPayload: GremlinDatabaseCreateUpdateParameters = {
 properties: {
 resource: {
-id: params.databaseId
+id: params.databaseId,
+},
+options,
 },
-options
-}
 };
 const createResponse = await createUpdateGremlinDatabase(
 userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
 if (params.autoPilotMaxThroughput) {
 return {
 autoscaleSettings: {
-maxThroughput: params.autoPilotMaxThroughput
-}
+maxThroughput: params.autoPilotMaxThroughput,
+},
 };
 }

 return {
-throughput: params.offerThroughput
+throughput: params.offerThroughput,
 };
 }
@@ -1,25 +0,0 @@
-import { CollectionBase } from "../../Contracts/ViewModels";
-import { client } from "../CosmosClient";
-import { getEntityName } from "../DocumentUtility";
-import { handleError } from "../ErrorHandlingUtils";
-import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-
-export const createDocument = async (collection: CollectionBase, newDocument: unknown): Promise<unknown> => {
-const entityName = getEntityName();
-const clearMessage = logConsoleProgress(`Creating new ${entityName} for container ${collection.id()}`);
-
-try {
-const response = await client()
-.database(collection.databaseId)
-.container(collection.id())
-.items.create(newDocument);
-
-logConsoleInfo(`Successfully created new ${entityName} for container ${collection.id()}`);
-return response?.resource;
-} catch (error) {
-handleError(error, "CreateDocument", `Error while creating new ${entityName} for container ${collection.id()}`);
-throw error;
-} finally {
-clearMessage();
-}
-};
@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
 import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
 import {
 SqlStoredProcedureCreateUpdateParameters,
-SqlStoredProcedureResource
+SqlStoredProcedureResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import {
 createUpdateSqlStoredProcedure,
-getSqlStoredProcedure
+getSqlStoredProcedure,
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
 const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
 properties: {
 resource: storedProcedure as SqlStoredProcedureResource,
-options: {}
-}
+options: {},
+},
 };
 const rpResponse = await createUpdateSqlStoredProcedure(
 userContext.subscriptionId,
@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
 import { Resource, TriggerDefinition } from "@azure/cosmos";
 import {
   SqlTriggerCreateUpdateParameters,
-  SqlTriggerResource
+  SqlTriggerResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";

@@ -44,8 +44,8 @@ export async function createTrigger(
   const createTriggerParams: SqlTriggerCreateUpdateParameters = {
     properties: {
       resource: trigger as SqlTriggerResource,
-      options: {}
-    }
+      options: {},
+    },
   };
   const rpResponse = await createUpdateSqlTrigger(
     userContext.subscriptionId,

@@ -59,10 +59,7 @@ export async function createTrigger(
       return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
     }

-    const response = await client()
-      .database(databaseId)
-      .container(collectionId)
-      .scripts.triggers.create(trigger);
+    const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
     return response.resource;
   } catch (error) {
     handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
 import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
 import {
   SqlUserDefinedFunctionCreateUpdateParameters,
-  SqlUserDefinedFunctionResource
+  SqlUserDefinedFunctionResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import {
   createUpdateSqlUserDefinedFunction,
-  getSqlUserDefinedFunction
+  getSqlUserDefinedFunction,
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
   const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
     properties: {
       resource: userDefinedFunction as SqlUserDefinedFunctionResource,
-      options: {}
-    }
+      options: {},
+    },
   };
   const rpResponse = await createUpdateSqlUserDefinedFunction(
     userContext.subscriptionId,
@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test"
+        name: "test",
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB
+      defaultExperience: DefaultAccountExperienceType.DocumentDB,
     });
   });

@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
       return {
         container: () => {
           return {
-            delete: (): unknown => undefined
+            delete: (): unknown => undefined,
           };
-        }
+        },
       };
-    }
+    },
   });
   await deleteCollection("database", "collection");
   expect(client).toHaveBeenCalled();
@@ -16,10 +16,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
     if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
       await deleteCollectionWithARM(databaseId, collectionId);
     } else {
-      await client()
-        .database(databaseId)
-        .container(collectionId)
-        .delete();
+      await client().database(databaseId).container(collectionId).delete();
     }
     logConsoleInfo(`Successfully deleted container ${collectionId}`);
   } catch (error) {
@@ -1,36 +0,0 @@
-import ConflictId from "../../Explorer/Tree/ConflictId";
-import { CollectionBase } from "../../Contracts/ViewModels";
-import { RequestOptions } from "@azure/cosmos";
-import { client } from "../CosmosClient";
-import { handleError } from "../ErrorHandlingUtils";
-import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
-
-export const deleteConflict = async (collection: CollectionBase, conflictId: ConflictId): Promise<void> => {
-  const clearMessage = logConsoleProgress(`Deleting conflict ${conflictId.id()}`);
-
-  try {
-    const options = {
-      partitionKey: getPartitionKeyHeaderForConflict(conflictId)
-    };
-
-    await client()
-      .database(collection.databaseId)
-      .container(collection.id())
-      .conflict(conflictId.id())
-      .delete(options as RequestOptions);
-    logConsoleInfo(`Successfully deleted conflict ${conflictId.id()}`);
-  } catch (error) {
-    handleError(error, "DeleteConflict", `Error while deleting conflict ${conflictId.id()}`);
-    throw error;
-  } finally {
-    clearMessage();
-  }
-};
-
-const getPartitionKeyHeaderForConflict = (conflictId: ConflictId): unknown => {
-  if (!conflictId.partitionKey) {
-    return undefined;
-  }
-
-  return conflictId.partitionKeyValue === undefined ? [{}] : [conflictId.partitionKeyValue];
-};
@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test"
+        name: "test",
      } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB
+      defaultExperience: DefaultAccountExperienceType.DocumentDB,
     });
   });

@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
     (client as jest.Mock).mockReturnValue({
       database: () => {
         return {
-          delete: (): unknown => undefined
+          delete: (): unknown => undefined,
         };
-      }
+      },
     });
     await deleteDatabase("database");
     expect(client).toHaveBeenCalled();
@@ -19,9 +19,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
     if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
       await deleteDatabaseWithARM(databaseId);
     } else {
-      await client()
-        .database(databaseId)
-        .delete();
+      await client().database(databaseId).delete();
     }
     logConsoleInfo(`Successfully deleted database ${databaseId}`);
   } catch (error) {
@@ -1,25 +0,0 @@
-import { CollectionBase } from "../../Contracts/ViewModels";
-import { client } from "../CosmosClient";
-import { getEntityName } from "../DocumentUtility";
-import { handleError } from "../ErrorHandlingUtils";
-import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import DocumentId from "../../Explorer/Tree/DocumentId";
-
-export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
-  const entityName: string = getEntityName();
-  const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
-
-  try {
-    await client()
-      .database(collection.databaseId)
-      .container(collection.id())
-      .item(documentId.id(), documentId.partitionKeyValue)
-      .delete();
-    logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
-  } catch (error) {
-    handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
-    throw error;
-  } finally {
-    clearMessage();
-  }
-};
@@ -27,11 +27,7 @@ export async function deleteStoredProcedure(
       storedProcedureId
     );
   } else {
-    await client()
-      .database(databaseId)
-      .container(collectionId)
-      .scripts.storedProcedure(storedProcedureId)
-      .delete();
+    await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
   }
 } catch (error) {
   handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
@@ -23,11 +23,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
       triggerId
     );
   } else {
-    await client()
-      .database(databaseId)
-      .container(collectionId)
-      .scripts.trigger(triggerId)
-      .delete();
+    await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
   }
 } catch (error) {
   handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
@@ -23,11 +23,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
       id
     );
   } else {
-    await client()
-      .database(databaseId)
-      .container(collectionId)
-      .scripts.userDefinedFunction(id)
-      .delete();
+    await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
   }
 } catch (error) {
   handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
@@ -1,48 +0,0 @@
-import { Collection } from "../../Contracts/ViewModels";
-import { ClientDefaults, HttpHeaders } from "../Constants";
-import { client } from "../CosmosClient";
-import { handleError } from "../ErrorHandlingUtils";
-import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
-import StoredProcedure from "../../Explorer/Tree/StoredProcedure";
-
-export interface ExecuteSprocResult {
-  result: StoredProcedure;
-  scriptLogs: string;
-}
-
-export const executeStoredProcedure = async (
-  collection: Collection,
-  storedProcedure: StoredProcedure,
-  partitionKeyValue: string,
-  params: string[]
-): Promise<ExecuteSprocResult> => {
-  const clearMessage = logConsoleProgress(`Executing stored procedure ${storedProcedure.id()}`);
-  const timeout = setTimeout(() => {
-    throw Error(`Request timed out while executing stored procedure ${storedProcedure.id()}`);
-  }, ClientDefaults.requestTimeoutMs);
-
-  try {
-    const response = await client()
-      .database(collection.databaseId)
-      .container(collection.id())
-      .scripts.storedProcedure(storedProcedure.id())
-      .execute(partitionKeyValue, params, { enableScriptLogging: true });
-    clearTimeout(timeout);
-    logConsoleInfo(
-      `Finished executing stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
-    );
-    return {
-      result: response.resource,
-      scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string
-    };
-  } catch (error) {
-    handleError(
-      error,
-      "ExecuteStoredProcedure",
-      `Failed to execute stored procedure ${storedProcedure.id()} for container ${storedProcedure.collection.id()}`
-    );
-    throw error;
-  } finally {
-    clearMessage();
-  }
-};
@@ -1,4 +1,3 @@
-import { AuthType } from "../../AuthType";
 import { armRequest } from "../../Utils/arm/request";
 import { configContext } from "../../ConfigContext";
 import { handleError } from "../ErrorHandlingUtils";

@@ -41,10 +40,6 @@ interface MetricsResponse {
 }

 export const getCollectionUsageSizeInKB = async (databaseName: string, containerName: string): Promise<number> => {
-  if (window.authType !== AuthType.AAD) {
-    return undefined;
-  }
-
   const subscriptionId = userContext.subscriptionId;
   const resourceGroup = userContext.resourceGroup;
   const accountName = userContext.databaseAccount.name;

@@ -60,8 +55,8 @@ export const getCollectionUsageSizeInKB = async (databaseName: string, container
     apiVersion: "2018-01-01",
     queryParams: {
       filter,
-      metricNames
-    }
+      metricNames,
+    },
   });

   if (metricsResponse?.value?.length !== 2) {
@@ -11,10 +11,7 @@ export async function getIndexTransformationProgress(databaseId: string, collect
   let indexTransformationPercentage: number;
   const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
   try {
-    const response = await client()
-      .database(databaseId)
-      .container(collectionId)
-      .read({ populateQuotaInfo: true });
+    const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });

     indexTransformationPercentage = parseInt(
       response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
@@ -1,14 +0,0 @@
-import { ConflictDefinition, FeedOptions, QueryIterator, Resource } from "@azure/cosmos";
-import { client } from "../CosmosClient";
-
-export const queryConflicts = (
-  databaseId: string,
-  containerId: string,
-  query: string,
-  options: FeedOptions
-): QueryIterator<ConflictDefinition & Resource> => {
-  return client()
-    .database(databaseId)
-    .container(containerId)
-    .conflicts.query(query, options);
-};
@@ -1,34 +0,0 @@
-import { Queries } from "../Constants";
-import { FeedOptions, ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
-import { LocalStorageUtility, StorageKey } from "../../Shared/StorageUtility";
-import { client } from "../CosmosClient";
-
-export const queryDocuments = (
-  databaseId: string,
-  containerId: string,
-  query: string,
-  options: FeedOptions
-): QueryIterator<ItemDefinition & Resource> => {
-  options = getCommonQueryOptions(options);
-  return client()
-    .database(databaseId)
-    .container(containerId)
-    .items.query(query, options);
-};
-
-export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {
-  const storedItemPerPageSetting: number = LocalStorageUtility.getEntryNumber(StorageKey.ActualItemPerPage);
-  options = options || {};
-  options.populateQueryMetrics = true;
-  options.enableScanInQuery = options.enableScanInQuery || true;
-  if (!options.partitionKey) {
-    options.forceQueryPlan = true;
-  }
-  options.maxItemCount =
-    options.maxItemCount ||
-    (storedItemPerPageSetting !== undefined && storedItemPerPageSetting) ||
-    Queries.itemsPerPage;
-  options.maxDegreeOfParallelism = LocalStorageUtility.getEntryNumber(StorageKey.MaxDegreeOfParellism);
-
-  return options;
-};
@@ -1,26 +0,0 @@
-import { QueryResults } from "../../Contracts/ViewModels";
-import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import { MinimalQueryIterator, nextPage } from "../IteratorUtilities";
-import { handleError } from "../ErrorHandlingUtils";
-import { getEntityName } from "../DocumentUtility";
-
-export const queryDocumentsPage = async (
-  resourceName: string,
-  documentsIterator: MinimalQueryIterator,
-  firstItemIndex: number
-): Promise<QueryResults> => {
-  const entityName = getEntityName();
-  const clearMessage = logConsoleProgress(`Querying ${entityName} for container ${resourceName}`);
-
-  try {
-    const result: QueryResults = await nextPage(documentsIterator, firstItemIndex);
-    const itemCount = (result.documents && result.documents.length) || 0;
-    logConsoleInfo(`Successfully fetched ${itemCount} ${entityName} for container ${resourceName}`);
-    return result;
-  } catch (error) {
-    handleError(error, "QueryDocumentsPage", `Failed to query ${entityName} for container ${resourceName}`);
-    throw error;
-  } finally {
-    clearMessage();
-  }
-};
@@ -10,9 +10,9 @@ describe("readCollection", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test"
+        name: "test",
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB
+      defaultExperience: DefaultAccountExperienceType.DocumentDB,
     });
   });

@@ -23,11 +23,11 @@ describe("readCollection", () => {
       return {
         container: () => {
           return {
-            read: (): unknown => ({})
+            read: (): unknown => ({}),
           };
-        }
+        },
       };
-    }
+    },
   });
   await readCollection("database", "collection");
   expect(client).toHaveBeenCalled();
@@ -7,10 +7,7 @@ export async function readCollection(databaseId: string, collectionId: string):
   let collection: DataModels.Collection;
   const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
   try {
-    const response = await client()
-      .database(databaseId)
-      .container(collectionId)
-      .read();
+    const response = await client().database(databaseId).container(collectionId).read();
     collection = response.resource as DataModels.Collection;
   } catch (error) {
     handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
@@ -106,7 +106,6 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
       autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
       manualThroughput: undefined,
       minimumThroughput,
-      offerReplacePending: resource.offerReplacePending === "true"
     };
   }

@@ -115,7 +114,6 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
       autoscaleMaxThroughput: undefined,
       manualThroughput: resource.throughput,
       minimumThroughput,
-      offerReplacePending: resource.offerReplacePending === "true"
     };
   }

@@ -12,9 +12,9 @@ describe("readCollections", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test"
+        name: "test",
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB
+      defaultExperience: DefaultAccountExperienceType.DocumentDB,
     });
   });

@@ -32,12 +32,12 @@ describe("readCollections", () => {
         containers: {
           readAll: () => {
             return {
-              fetchAll: (): unknown => []
+              fetchAll: (): unknown => [],
             };
-          }
-        }
+          },
+        },
       };
-    }
+    },
   });
   await readCollections("database");
   expect(client).toHaveBeenCalled();
@@ -23,10 +23,7 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
       return await readCollectionsWithARM(databaseId);
     }

-    const sdkResponse = await client()
-      .database(databaseId)
-      .containers.readAll()
-      .fetchAll();
+    const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
     return sdkResponse.resources as DataModels.Collection[];
   } catch (error) {
     handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);

@@ -63,5 +60,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
     throw new Error(`Unsupported default experience type: ${defaultExperience}`);
   }

-  return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
+  return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
 }
@@ -78,7 +78,6 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
       autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
       manualThroughput: undefined,
       minimumThroughput,
-      offerReplacePending: resource.offerReplacePending === "true"
     };
   }

@@ -87,7 +86,6 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
      autoscaleMaxThroughput: undefined,
      manualThroughput: resource.throughput,
      minimumThroughput,
-     offerReplacePending: resource.offerReplacePending === "true"
     };
   }

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
   beforeAll(() => {
     updateUserContext({
       databaseAccount: {
-        name: "test"
+        name: "test",
       } as DatabaseAccount,
-      defaultExperience: DefaultAccountExperienceType.DocumentDB
+      defaultExperience: DefaultAccountExperienceType.DocumentDB,
     });
   });

@@ -30,10 +30,10 @@ describe("readDatabases", () => {
       databases: {
         readAll: () => {
           return {
-            fetchAll: (): unknown => []
+            fetchAll: (): unknown => [],
           };
-        }
-      }
+        },
+      },
   });
   await readDatabases();
   expect(client).toHaveBeenCalled();
@@ -21,9 +21,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
   ) {
     databases = await readDatabasesWithARM();
   } else {
-    const sdkResponse = await client()
-      .databases.readAll()
-      .fetchAll();
+    const sdkResponse = await client().databases.readAll().fetchAll();
     databases = sdkResponse.resources as DataModels.Database[];
   }
 } catch (error) {

@@ -58,5 +56,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
     throw new Error(`Unsupported default experience type: ${defaultExperience}`);
   }

-  return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
+  return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
 }
@@ -1,27 +0,0 @@
-import { Item } from "@azure/cosmos";
-import { CollectionBase } from "../../Contracts/ViewModels";
-import { client } from "../CosmosClient";
-import { getEntityName } from "../DocumentUtility";
-import { handleError } from "../ErrorHandlingUtils";
-import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import DocumentId from "../../Explorer/Tree/DocumentId";
-
-export const readDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<Item> => {
-  const entityName = getEntityName();
-  const clearMessage = logConsoleProgress(`Reading ${entityName} ${documentId.id()}`);
-
-  try {
-    const response = await client()
-      .database(collection.databaseId)
-      .container(collection.id())
-      .item(documentId.id(), documentId.partitionKeyValue)
-      .read();
-
-    return response?.resource;
-  } catch (error) {
-    handleError(error, "ReadDocument", `Failed to read ${entityName} ${documentId.id()}`);
-    throw error;
-  } finally {
-    clearMessage();
-  }
-};
@@ -8,7 +8,7 @@ import { readOffers } from "./readOffers";
 export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
   if (!offerId) {
     const offers = await readOffers();
-    const offer = offers.find(offer => offer.resource === resourceId);
+    const offer = offers.find((offer) => offer.resource === resourceId);

     if (!offer) {
       return undefined;

@@ -18,12 +18,10 @@ export const readOfferWithSDK = async (offerId: string, resourceId: string): Pro

   const options: RequestOptions = {
     initialHeaders: {
-      [HttpHeaders.populateCollectionThroughputInfo]: true
-    }
+      [HttpHeaders.populateCollectionThroughputInfo]: true,
+    },
   };
-  const response = await client()
-    .offer(offerId)
-    .read(options);
+  const response = await client().offer(offerId).read(options);

   return parseSDKOfferResponse(response);
 };
@@ -7,9 +7,7 @@ export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
   const clearMessage = logConsoleProgress(`Querying offers`);

   try {
-    const response = await client()
-      .offers.readAll()
-      .fetchAll();
+    const response = await client().offers.readAll().fetchAll();
     return response?.resources;
   } catch (error) {
     // This should be removed when we can correctly identify if an account is serverless when connected using connection string too.
@@ -25,7 +25,7 @@ export async function readStoredProcedures(
       databaseId,
       collectionId
     );
-    return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
+    return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
   }

   const response = await client()
@@ -25,14 +25,10 @@ export async function readTriggers(
       databaseId,
       collectionId
     );
-    return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
+    return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
   }

-  const response = await client()
-    .database(databaseId)
-    .container(collectionId)
-    .scripts.triggers.readAll()
-    .fetchAll();
+  const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
   return response?.resources;
 } catch (error) {
   handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
       databaseId,
       collectionId
     );
-    return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
+    return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
   }

   const response = await client()
@@ -8,22 +8,22 @@ import {
   MongoDBCollectionCreateUpdateParameters,
   MongoDBCollectionResource,
   SqlContainerCreateUpdateParameters,
-  SqlContainerResource
+  SqlContainerResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { RequestOptions } from "@azure/cosmos/dist-esm";
 import { client } from "../CosmosClient";
 import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import {
   createUpdateCassandraTable,
-  getCassandraTable
+  getCassandraTable,
 } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import {
   createUpdateMongoDBCollection,
-  getMongoDBCollection
+  getMongoDBCollection,
 } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import {
   createUpdateGremlinGraph,
-  getGremlinGraph
+  getGremlinGraph,
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
 import { handleError } from "../ErrorHandlingUtils";

@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
   const updateParams: MongoDBCollectionCreateUpdateParameters = {
     properties: {
       resource: newCollection,
-      options: updateOptions
-    }
+      options: updateOptions,
+    },
   };

   const updateResponse = await createUpdateMongoDBCollection(
@@ -1,32 +0,0 @@
-import { CollectionBase } from "../../Contracts/ViewModels";
-import { Item } from "@azure/cosmos";
-import { client } from "../CosmosClient";
-import { getEntityName } from "../DocumentUtility";
-import { handleError } from "../ErrorHandlingUtils";
-import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
-import DocumentId from "../../Explorer/Tree/DocumentId";
-
-export const updateDocument = async (
-  collection: CollectionBase,
-  documentId: DocumentId,
-  newDocument: Item
-): Promise<Item> => {
-  const entityName = getEntityName();
-  const clearMessage = logConsoleProgress(`Updating ${entityName} ${documentId.id()}`);
-
-  try {
-    const response = await client()
-      .database(collection.databaseId)
-      .container(collection.id())
-      .item(documentId.id(), documentId.partitionKeyValue)
-      .replace(newDocument);
-
-    logConsoleInfo(`Successfully updated ${entityName} ${documentId.id()}`);
-    return response?.resource;
-  } catch (error) {
-    handleError(error, "UpdateDocument", `Failed to update ${entityName} ${documentId.id()}`);
-    throw error;
-  } finally {
-    clearMessage();
-  }
-};
@@ -17,7 +17,7 @@ import {
   migrateSqlDatabaseToManualThroughput,
   migrateSqlContainerToAutoscale,
   migrateSqlContainerToManualThroughput,
-  updateSqlContainerThroughput
+  updateSqlContainerThroughput,
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import {
   updateCassandraKeyspaceThroughput,

@@ -25,7 +25,7 @@ import {
   migrateCassandraKeyspaceToManualThroughput,
   migrateCassandraTableToAutoscale,
   migrateCassandraTableToManualThroughput,
-  updateCassandraTableThroughput
+  updateCassandraTableThroughput,
 } from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
 import {
   updateMongoDBDatabaseThroughput,

@@ -33,7 +33,7 @@ import {
   migrateMongoDBDatabaseToManualThroughput,
   migrateMongoDBCollectionToAutoscale,
   migrateMongoDBCollectionToManualThroughput,
-  updateMongoDBCollectionThroughput
+  updateMongoDBCollectionThroughput,
 } from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
 import {
   updateGremlinDatabaseThroughput,

@@ -41,13 +41,13 @@ import {
   migrateGremlinDatabaseToManualThroughput,
   migrateGremlinGraphToAutoscale,
   migrateGremlinGraphToManualThroughput,
-  updateGremlinGraphThroughput
+  updateGremlinGraphThroughput,
 } from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
 import { userContext } from "../../UserContext";
 import {
   migrateTableToAutoscale,
   migrateTableToManualThroughput,
-  updateTableThroughput
+  updateTableThroughput,
 } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";

 export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {

@@ -110,7 +110,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
   return await readCollectionOffer({
     collectionId: params.collectionId,
     databaseId: params.databaseId,
-    offerId: params.currentOffer.id
+    offerId: params.currentOffer.id,
   });
 };

@@ -140,7 +140,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of

   return await readDatabaseOffer({
     databaseId: params.databaseId,
-    offerId: params.currentOffer.id
+    offerId: params.currentOffer.id,
   });
 };

@@ -358,13 +358,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
 const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
   const body: ThroughputSettingsUpdateParameters = {
     properties: {
-      resource: {}
-    }
+      resource: {},
+    },
   };

   if (params.autopilotThroughput) {
     body.properties.resource.autoscaleSettings = {
-      maxThroughput: params.autopilotThroughput
+      maxThroughput: params.autopilotThroughput,
     };
   } else {
     body.properties.resource.throughput = params.manualThroughput;

@@ -378,7 +378,7 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
   const newOffer: SDKOfferDefinition = {
     content: {
       offerThroughput: undefined,
-      offerIsRUPerMinuteThroughputEnabled: false
+      offerIsRUPerMinuteThroughputEnabled: false,
     },
     _etag: undefined,
     _ts: undefined,

@@ -388,12 +388,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
     offerResourceId: sdkOfferDefinition.offerResourceId,
     offerVersion: sdkOfferDefinition.offerVersion,
     offerType: sdkOfferDefinition.offerType,
-    resource: sdkOfferDefinition.resource
+    resource: sdkOfferDefinition.resource,
   };

   if (params.autopilotThroughput) {
     newOffer.content.offerAutopilotSettings = {
-      maxThroughput: params.autopilotThroughput
+      maxThroughput: params.autopilotThroughput,
     };
   } else {
     newOffer.content.offerThroughput = params.manualThroughput;

@@ -402,12 +402,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
   const options: RequestOptions = {};
   if (params.migrateToAutoPilot) {
     options.initialHeaders = {
-      [HttpHeaders.migrateOfferToAutopilot]: "true"
+      [HttpHeaders.migrateOfferToAutopilot]: "true",
     };
     delete newOffer.content.offerAutopilotSettings;
   } else if (params.migrateToManual) {
     options.initialHeaders = {
-      [HttpHeaders.migrateOfferToManualThroughput]: "true"
+      [HttpHeaders.migrateOfferToManualThroughput]: "true",
     };
     newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
   }
@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
 import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
 import {
   SqlStoredProcedureCreateUpdateParameters,
-  SqlStoredProcedureResource
+  SqlStoredProcedureResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import {
   createUpdateSqlStoredProcedure,
-  getSqlStoredProcedure
+  getSqlStoredProcedure,
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
   const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
     properties: {
       resource: storedProcedure as SqlStoredProcedureResource,
-      options: {}
-    }
+      options: {},
+    },
   };
   const rpResponse = await createUpdateSqlStoredProcedure(
     userContext.subscriptionId,
@@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
 import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
 import {
   SqlTriggerCreateUpdateParameters,
-  SqlTriggerResource
+  SqlTriggerResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { TriggerDefinition } from "@azure/cosmos";
 import { client } from "../CosmosClient";

@@ -36,8 +36,8 @@ export async function updateTrigger(
   const createTriggerParams: SqlTriggerCreateUpdateParameters = {
     properties: {
       resource: trigger as SqlTriggerResource,
-      options: {}
-    }
+      options: {},
+    },
   };
   const rpResponse = await createUpdateSqlTrigger(
     userContext.subscriptionId,
@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
 import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
 import {
   SqlUserDefinedFunctionCreateUpdateParameters,
-  SqlUserDefinedFunctionResource
+  SqlUserDefinedFunctionResource,
 } from "../../Utils/arm/generatedClients/2020-04-01/types";
 import { client } from "../CosmosClient";
 import {
   createUpdateSqlUserDefinedFunction,
-  getSqlUserDefinedFunction
+  getSqlUserDefinedFunction,
 } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
 import { handleError } from "../ErrorHandlingUtils";
 import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";

@@ -39,8 +39,8 @@ export async function updateUserDefinedFunction(
   const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
     properties: {
       resource: userDefinedFunction as SqlUserDefinedFunctionResource,
-      options: {}
-    }
+      options: {},
+    },
   };
   const rpResponse = await createUpdateSqlUserDefinedFunction(
     userContext.subscriptionId,
@@ -1,7 +1,7 @@
 export enum Platform {
   Portal = "Portal",
   Hosted = "Hosted",
-  Emulator = "Emulator"
+  Emulator = "Emulator",
 }

 interface ConfigContext {

@@ -37,7 +37,7 @@ let configContext: Readonly<ConfigContext> = {
     `^https:\\/\\/[\\.\\w]*portal\\.microsoftazure.de$`,
     `^https:\\/\\/[\\.\\w]*ext\\.azure\\.(com|cn|us)$`,
     `^https:\\/\\/[\\.\\w]*\\.ext\\.microsoftazure\\.de$`,
-    `^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`
+    `^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`,
   ],
   // Webpack injects this at build time
   gitSha: process.env.GIT_SHA,

@@ -52,7 +52,7 @@ let configContext: Readonly<ConfigContext> = {
   ARCADIA_LIVY_ENDPOINT_DNS_ZONE: "dev.azuresynapse.net",
   GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/settings/applications/1189306
   JUNO_ENDPOINT: "https://tools.cosmos.azure.com",
-  BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+  BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
 };

 export function resetConfigContext(): void {

@@ -73,7 +73,7 @@ if (process.env.NODE_ENV === "development") {
     BACKEND_ENDPOINT: "https://localhost:" + port,
     MONGO_BACKEND_ENDPOINT: "https://localhost:" + port,
     PROXY_PATH: "/proxy",
-    EMULATOR_ENDPOINT: "https://localhost:8081"
+    EMULATOR_ENDPOINT: "https://localhost:8081",
   });
 }

@@ -86,7 +86,7 @@ export async function initializeConfiguration(): Promise<ConfigContext> {
     Object.assign(configContext, externalConfig);
     if (allowedParentFrameOrigins && allowedParentFrameOrigins.length > 0) {
       updateConfigContext({
-        allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins]
+        allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins],
       });
     }
   } catch (error) {
@@ -7,7 +7,7 @@ export enum TabKind {
   TableEntities,
   Graph,
   SQLQuery,
-  ScaleSettings
+  ScaleSettings,
 }

 /**

@@ -20,7 +20,7 @@ export enum PaneKind {
   DeleteDatabase,
   GlobalSettings,
   AdHocAccess,
-  SwitchDirectory
+  SwitchDirectory,
 }

 /**

@@ -79,5 +79,5 @@ export enum ActionType {
   OpenCollectionTab,
   OpenPane,
   TransmitCachedData,
-  OpenSampleNotebook
+  OpenSampleNotebook,
 }
@@ -56,7 +56,7 @@ export enum ApiKind {
|
|||||||
Table,
|
Table,
|
||||||
Cassandra,
|
Cassandra,
|
||||||
Graph,
|
Graph,
|
||||||
MongoDBCompute
|
MongoDBCompute,
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface GenerateTokenResponse {
|
export interface GenerateTokenResponse {
|
||||||
@@ -214,7 +214,7 @@ export interface Offer {
|
|||||||
manualThroughput: number;
|
manualThroughput: number;
|
||||||
minimumThroughput: number;
|
minimumThroughput: number;
|
||||||
offerDefinition?: SDKOfferDefinition;
|
offerDefinition?: SDKOfferDefinition;
|
||||||
offerReplacePending: boolean;
|
headers?: any;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface SDKOfferDefinition extends Resource {
|
export interface SDKOfferDefinition extends Resource {
|
||||||
@@ -334,7 +334,7 @@ export interface Notification {
|
|||||||
|
|
||||||
export enum ConflictResolutionMode {
|
export enum ConflictResolutionMode {
|
||||||
Custom = "Custom",
|
Custom = "Custom",
|
||||||
LastWriterWins = "LastWriterWins"
|
LastWriterWins = "LastWriterWins",
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -472,7 +472,7 @@ export interface SparkClusterEndpoint {
|
|||||||
export enum SparkClusterEndpointKind {
|
export enum SparkClusterEndpointKind {
|
||||||
SparkUI = "SparkUI",
|
SparkUI = "SparkUI",
|
||||||
HistoryServerUI = "HistoryServerUI",
|
HistoryServerUI = "HistoryServerUI",
|
||||||
Livy = "Livy"
|
Livy = "Livy",
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface RpParameters {
|
export interface RpParameters {
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ export enum LogEntryLevel {
|
|||||||
/**
|
/**
|
||||||
* Error level.
|
* Error level.
|
||||||
*/
|
*/
|
||||||
Error = 2
|
Error = 2,
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* Schema of a log entry.
|
* Schema of a log entry.
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ export enum MessageTypes {
|
|||||||
CreateWorkspace,
|
CreateWorkspace,
|
||||||
CreateSparkPool,
|
CreateSparkPool,
|
||||||
RefreshDatabaseAccount,
|
RefreshDatabaseAccount,
|
||||||
InitTestExplorer
|
InitTestExplorer,
|
||||||
}
|
}
|
||||||
|
|
||||||
export { Versions, ActionContracts, Diagnostics };
|
export { Versions, ActionContracts, Diagnostics };
|
||||||
|
|||||||
@@ -3,5 +3,5 @@ export enum SubscriptionType {
|
|||||||
EA,
|
EA,
|
||||||
Free,
|
Free,
|
||||||
Internal,
|
Internal,
|
||||||
PAYG
|
PAYG,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import {
|
|||||||
Resource,
|
Resource,
|
||||||
StoredProcedureDefinition,
|
StoredProcedureDefinition,
|
||||||
TriggerDefinition,
|
TriggerDefinition,
|
||||||
UserDefinedFunctionDefinition
|
UserDefinedFunctionDefinition,
|
||||||
} from "@azure/cosmos";
|
} from "@azure/cosmos";
|
||||||
import Q from "q";
|
import Q from "q";
|
||||||
import { CommandButtonComponentProps } from "../Explorer/Controls/CommandButton/CommandButtonComponent";
|
import { CommandButtonComponentProps } from "../Explorer/Controls/CommandButton/CommandButtonComponent";
|
||||||
@@ -195,7 +195,7 @@ export interface PaneOptions {
|
|||||||
export enum NeighborType {
|
export enum NeighborType {
|
||||||
SOURCES_ONLY,
|
SOURCES_ONLY,
|
||||||
TARGETS_ONLY,
|
TARGETS_ONLY,
|
||||||
BOTH
|
BOTH,
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -325,14 +325,14 @@ export enum DocumentExplorerState {
|
|||||||
newDocumentInvalid,
|
newDocumentInvalid,
|
||||||
exisitingDocumentNoEdits,
|
exisitingDocumentNoEdits,
|
||||||
exisitingDocumentDirtyValid,
|
exisitingDocumentDirtyValid,
|
||||||
exisitingDocumentDirtyInvalid
|
exisitingDocumentDirtyInvalid,
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum IndexingPolicyEditorState {
|
export enum IndexingPolicyEditorState {
|
||||||
noCollectionSelected,
|
noCollectionSelected,
|
||||||
noEdits,
|
noEdits,
|
||||||
dirtyValid,
|
dirtyValid,
|
||||||
dirtyInvalid
|
dirtyInvalid,
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum ScriptEditorState {
|
export enum ScriptEditorState {
|
||||||
@@ -340,7 +340,7 @@ export enum ScriptEditorState {
|
|||||||
newValid,
|
newValid,
|
||||||
exisitingNoEdits,
|
exisitingNoEdits,
|
||||||
exisitingDirtyValid,
|
exisitingDirtyValid,
|
||||||
exisitingDirtyInvalid
|
exisitingDirtyInvalid,
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum CollectionTabKind {
|
export enum CollectionTabKind {
|
||||||
@@ -362,13 +362,13 @@ export enum CollectionTabKind {
|
|||||||
Gallery = 17,
|
Gallery = 17,
|
||||||
NotebookViewer = 18,
|
NotebookViewer = 18,
|
||||||
Schema = 19,
|
Schema = 19,
|
||||||
SettingsV2 = 20
|
SettingsV2 = 19,
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum TerminalKind {
|
export enum TerminalKind {
|
||||||
Default = 0,
|
Default = 0,
|
||||||
Mongo = 1,
|
Mongo = 1,
|
||||||
Cassandra = 2
|
Cassandra = 2,
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface DataExplorerInputsFrame {
|
export interface DataExplorerInputsFrame {
|
||||||
|
|||||||
@@ -6,19 +6,19 @@ describe("The Heatmap Control", () => {
|
|||||||
const dataPoints = {
|
const dataPoints = {
|
||||||
"1": {
|
"1": {
|
||||||
"2019-06-19T00:59:10Z": {
|
"2019-06-19T00:59:10Z": {
|
||||||
"Normalized Throughput": 0.35
|
"Normalized Throughput": 0.35,
|
||||||
},
|
},
|
||||||
"2019-06-19T00:48:10Z": {
|
"2019-06-19T00:48:10Z": {
|
||||||
"Normalized Throughput": 0.25
|
"Normalized Throughput": 0.25,
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
const chartCaptions = {
|
const chartCaptions = {
|
||||||
chartTitle: "chart title",
|
chartTitle: "chart title",
|
||||||
yAxisTitle: "YAxisTitle",
|
yAxisTitle: "YAxisTitle",
|
||||||
tooltipText: "Tooltip text",
|
tooltipText: "Tooltip text",
|
||||||
timeWindow: 123456789
|
timeWindow: 123456789,
|
||||||
};
|
};
|
||||||
|
|
||||||
let heatmap: Heatmap;
|
let heatmap: Heatmap;
|
||||||
@@ -75,12 +75,12 @@ describe("The Heatmap Control", () => {
|
|||||||
if (dayjs().utcOffset()) {
|
if (dayjs().utcOffset()) {
|
||||||
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).not.toEqual([
|
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).not.toEqual([
|
||||||
"2019-06-19T00:48:10Z",
|
"2019-06-19T00:48:10Z",
|
||||||
"2019-06-19T00:59:10Z"
|
"2019-06-19T00:59:10Z",
|
||||||
]);
|
]);
|
||||||
} else {
|
} else {
|
||||||
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).toEqual([
|
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).toEqual([
|
||||||
"2019-06-19T00:48:10Z",
|
"2019-06-19T00:48:10Z",
|
||||||
"2019-06-19T00:59:10Z"
|
"2019-06-19T00:59:10Z",
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -106,9 +106,9 @@ describe("iframe rendering when there is no data", () => {
|
|||||||
data: {
|
data: {
|
||||||
chartData: {},
|
chartData: {},
|
||||||
chartSettings: {},
|
chartSettings: {},
|
||||||
theme: 4
|
theme: 4,
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
|
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
|
||||||
@@ -126,9 +126,9 @@ describe("iframe rendering when there is no data", () => {
|
|||||||
data: {
|
data: {
|
||||||
chartData: {},
|
chartData: {},
|
||||||
chartSettings: {},
|
chartSettings: {},
|
||||||
theme: 2
|
theme: 2,
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
|
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
HeatmapData,
|
HeatmapData,
|
||||||
LayoutSettings,
|
LayoutSettings,
|
||||||
PartitionTimeStampToData,
|
PartitionTimeStampToData,
|
||||||
PortalTheme
|
PortalTheme,
|
||||||
} from "./HeatmapDatatypes";
|
} from "./HeatmapDatatypes";
|
||||||
import { isInvalidParentFrameOrigin } from "../../Utils/MessageValidation";
|
import { isInvalidParentFrameOrigin } from "../../Utils/MessageValidation";
|
||||||
import { sendCachedDataMessage, sendMessage } from "../../Common/MessageHandler";
|
import { sendCachedDataMessage, sendMessage } from "../../Common/MessageHandler";
|
||||||
@@ -43,7 +43,7 @@ export class Heatmap {
|
|||||||
return {
|
return {
|
||||||
family: StyleConstants.DataExplorerFont,
|
family: StyleConstants.DataExplorerFont,
|
||||||
size,
|
size,
|
||||||
color
|
color,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -73,7 +73,7 @@ export class Heatmap {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
}),
|
||||||
};
|
};
|
||||||
// go thru all rows and create 2d matrix for heatmap...
|
// go thru all rows and create 2d matrix for heatmap...
|
||||||
for (let i = 0; i < rows.length; i++) {
|
for (let i = 0; i < rows.length; i++) {
|
||||||
@@ -115,7 +115,7 @@ export class Heatmap {
|
|||||||
[0.7, "#E46612"],
|
[0.7, "#E46612"],
|
||||||
[0.8, "#E64914"],
|
[0.8, "#E64914"],
|
||||||
[0.9, "#B80016"],
|
[0.9, "#B80016"],
|
||||||
[1.0, "#B80016"]
|
[1.0, "#B80016"],
|
||||||
],
|
],
|
||||||
name: "",
|
name: "",
|
||||||
hovertemplate: this._heatmapCaptions.tooltipText,
|
hovertemplate: this._heatmapCaptions.tooltipText,
|
||||||
@@ -123,11 +123,11 @@ export class Heatmap {
|
|||||||
thickness: 15,
|
thickness: 15,
|
||||||
outlinewidth: 0,
|
outlinewidth: 0,
|
||||||
tickcolor: StyleConstants.BaseDark,
|
tickcolor: StyleConstants.BaseDark,
|
||||||
tickfont: this._getFontStyles(10, this._defaultFontColor)
|
tickfont: this._getFontStyles(10, this._defaultFontColor),
|
||||||
},
|
},
|
||||||
y: this._chartData.yAxisPoints,
|
y: this._chartData.yAxisPoints,
|
||||||
x: this._chartData.xAxisPoints
|
x: this._chartData.xAxisPoints,
|
||||||
}
|
},
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -138,7 +138,7 @@ export class Heatmap {
|
|||||||
r: 10,
|
r: 10,
|
||||||
b: 35,
|
b: 35,
|
||||||
t: 30,
|
t: 30,
|
||||||
pad: 0
|
pad: 0,
|
||||||
},
|
},
|
||||||
paper_bgcolor: "transparent",
|
paper_bgcolor: "transparent",
|
||||||
plot_bgcolor: "transparent",
|
plot_bgcolor: "transparent",
|
||||||
@@ -154,7 +154,7 @@ export class Heatmap {
|
|||||||
autotick: true,
|
autotick: true,
|
||||||
fixedrange: true,
|
fixedrange: true,
|
||||||
ticks: "",
|
ticks: "",
|
||||||
showticklabels: false
|
showticklabels: false,
|
||||||
},
|
},
|
||||||
xaxis: {
|
xaxis: {
|
||||||
fixedrange: true,
|
fixedrange: true,
|
||||||
@@ -167,13 +167,13 @@ export class Heatmap {
|
|||||||
autotick: true,
|
autotick: true,
|
||||||
tickformat: this._heatmapCaptions.timeWindow > 7 ? "%I:%M %p" : "%b %e",
|
tickformat: this._heatmapCaptions.timeWindow > 7 ? "%I:%M %p" : "%b %e",
|
||||||
showticklabels: true,
|
showticklabels: true,
|
||||||
tickfont: this._getFontStyles(10)
|
tickfont: this._getFontStyles(10),
|
||||||
},
|
},
|
||||||
title: {
|
title: {
|
||||||
text: this._heatmapCaptions.chartTitle,
|
text: this._heatmapCaptions.chartTitle,
|
||||||
x: 0.01,
|
x: 0.01,
|
||||||
font: this._getFontStyles(13, this._defaultFontColor)
|
font: this._getFontStyles(13, this._defaultFontColor),
|
||||||
}
|
},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -181,7 +181,7 @@ export class Heatmap {
|
|||||||
return {
|
return {
|
||||||
/* heatmap can be fully responsive however the min-height needed in that case is greater than the iframe portal height, hence explicit width + height have been set in _getLayoutSettings
|
/* heatmap can be fully responsive however the min-height needed in that case is greater than the iframe portal height, hence explicit width + height have been set in _getLayoutSettings
|
||||||
responsive: true,*/
|
responsive: true,*/
|
||||||
displayModeBar: false
|
displayModeBar: false,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ export enum PortalTheme {
|
|||||||
blue = 1,
|
blue = 1,
|
||||||
azure,
|
azure,
|
||||||
light,
|
light,
|
||||||
dark
|
dark,
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface HeatmapData {
|
export interface HeatmapData {
|
||||||
|
|||||||
@@ -4,5 +4,5 @@ export enum DefaultAccountExperienceType {
|
|||||||
MongoDB = "MongoDB",
|
MongoDB = "MongoDB",
|
||||||
Table = "Table",
|
Table = "Table",
|
||||||
Cassandra = "Cassandra",
|
Cassandra = "Cassandra",
|
||||||
ApiForMongoDB = "Azure Cosmos DB for MongoDB API"
|
ApiForMongoDB = "Azure Cosmos DB for MongoDB API",
|
||||||
}
|
}
|
||||||
|
|||||||
src/Definitions/adal.d.ts (new vendored file, 383 lines)
@@ -0,0 +1,383 @@
// Type definitions for adal-angular 1.0.1.1
// Project: https://github.com/AzureAD/azure-activedirectory-library-for-js#readme
// Definitions by: Daniel Perez Alvarez <https://github.com/unindented>
// Anthony Ciccarello <https://github.com/aciccarello>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

//This is a customized version of adal on top of version 1.0.1 which does not support multi tenant
// Customized version add tenantId to stored tokens so when tenant change, adal will refetch instead of read from sessionStorage

// In module contexts the class constructor function is the exported object
// export = AuthenticationContext;

// This class is defined globally in not in a module context
declare class AuthenticationContext {
instance: string;
config: AuthenticationContext.Options;
callback: AuthenticationContext.TokenCallback;
popUp: boolean;
isAngular: boolean;

/**
* Enum for request type
*/
REQUEST_TYPE: AuthenticationContext.RequestType;
RESPONSE_TYPE: AuthenticationContext.ResponseType;
CONSTANTS: AuthenticationContext.Constants;

constructor(options: AuthenticationContext.Options);
/**
* Initiates the login process by redirecting the user to Azure AD authorization endpoint.
*/
login(): void;
/**
* Returns whether a login is in progress.
*/
loginInProgress(): boolean;
/**
* Gets token for the specified resource from the cache.
* @param resource A URI that identifies the resource for which the token is requested.
* @param tenantId tenant Id.
*/
getCachedToken(resource: string, tenantId: string): string;
/**
* If user object exists, returns it. Else creates a new user object by decoding `id_token` from the cache.
*/
getCachedUser(): AuthenticationContext.UserInfo;
/**
* Adds the passed callback to the array of callbacks for the specified resource.
* @param resource A URI that identifies the resource for which the token is requested.
* @param expectedState A unique identifier (guid).
* @param callback The callback provided by the caller. It will be called with token or error.
*/
registerCallback(
expectedState: string,
resource: string,
callback: AuthenticationContext.TokenCallback,
tenantId: string
): void;
/**
* Acquires token from the cache if it is not expired. Otherwise sends request to AAD to obtain a new token.
* @param resource Resource URI identifying the target resource.
* @param callback The callback provided by the caller. It will be called with token or error.
*/
acquireToken(resource: string, tenantId: string, callback: AuthenticationContext.TokenCallback): void;
/**
* Acquires token (interactive flow using a popup window) by sending request to AAD to obtain a new token.
* @param resource Resource URI identifying the target resource.
* @param extraQueryParameters Query parameters to add to the authentication request.
* @param claims Claims to add to the authentication request.
* @param callback The callback provided by the caller. It will be called with token or error.
*/
acquireTokenPopup(
resource: string,
tenantId: string,
extraQueryParameters: string | null | undefined,
claims: string | null | undefined,
callback: AuthenticationContext.TokenCallback
): void;
/**
* Acquires token (interactive flow using a redirect) by sending request to AAD to obtain a new token. In this case the callback passed in the authentication request constructor will be called.
* @param resource Resource URI identifying the target resource.
* @param extraQueryParameters Query parameters to add to the authentication request.
* @param claims Claims to add to the authentication request.
*/
acquireTokenRedirect(
resource: string,
tenantId: string,
extraQueryParameters?: string | null,
claims?: string | null
): void;
/**
* Redirects the browser to Azure AD authorization endpoint.
* @param urlNavigate URL of the authorization endpoint.
*/
promptUser(urlNavigate: string): void;
/**
* Clears cache items.
*/
clearCache(): void;
/**
* Clears cache items for a given resource.
* @param resource Resource URI identifying the target resource.
*/
clearCacheForResource(resource: string): void;
/**
* Redirects user to logout endpoint. After logout, it will redirect to `postLogoutRedirectUri` if added as a property on the config object.
*/
logOut(): void;
/**
* Calls the passed in callback with the user object or error message related to the user.
* @param callback The callback provided by the caller. It will be called with user or error.
*/
getUser(callback: AuthenticationContext.UserCallback): void;
/**
* Checks if the URL fragment contains access token, id token or error description.
* @param hash Hash passed from redirect page.
*/
isCallback(hash: string): boolean;
/**
* Gets login error.
*/
getLoginError(): string;
/**
* Creates a request info object from the URL fragment and returns it.
*/
getRequestInfo(hash: string): AuthenticationContext.RequestInfo;
/**
* Saves token or error received in the response from AAD in the cache. In case of `id_token`, it also creates the user object.
*/
saveTokenFromHash(requestInfo: AuthenticationContext.RequestInfo): void;
/**
* Gets resource for given endpoint if mapping is provided with config.
* @param endpoint Resource URI identifying the target resource.
*/
getResourceForEndpoint(resource: string): string;
/**
* This method must be called for processing the response received from AAD. It extracts the hash, processes the token or error, saves it in the cache and calls the callbacks with the result.
* @param hash Hash fragment of URL. Defaults to `window.location.hash`.
*/
handleWindowCallback(hash?: string): void;

/**
* Checks the logging Level, constructs the log message and logs it. Users need to implement/override this method to turn on logging.
* @param level Level can be set 0, 1, 2 and 3 which turns on 'error', 'warning', 'info' or 'verbose' level logging respectively.
* @param message Message to log.
* @param error Error to log.
*/
log(level: AuthenticationContext.LoggingLevel, message: string, error: any): void;
/**
* Logs messages when logging level is set to 0.
* @param message Message to log.
* @param error Error to log.
*/
error(message: string, error: any): void;
/**
* Logs messages when logging level is set to 1.
* @param message Message to log.
*/
warn(message: string): void;
/**
* Logs messages when logging level is set to 2.
* @param message Message to log.
*/
info(message: string): void;
/**
* Logs messages when logging level is set to 3.
* @param message Message to log.
*/
verbose(message: string): void;

/**
* Logs Pii messages when Logging Level is set to 0 and window.piiLoggingEnabled is set to true.
* @param message Message to log.
* @param error Error to log.
*/
errorPii(message: string, error: any): void;

/**
* Logs Pii messages when Logging Level is set to 1 and window.piiLoggingEnabled is set to true.
* @param message Message to log.
*/
warnPii(message: string): void;

/**
* Logs messages when Logging Level is set to 2 and window.piiLoggingEnabled is set to true.
* @param message Message to log.
*/
infoPii(message: string): void;

/**
* Logs messages when Logging Level is set to 3 and window.piiLoggingEnabled is set to true.
* @param message Message to log.
*/
verbosePii(message: string): void;
}

declare namespace AuthenticationContext {
function inject(config: Options): AuthenticationContext;

type LoggingLevel = 0 | 1 | 2 | 3;

type RequestType = "LOGIN" | "RENEW_TOKEN" | "UNKNOWN";

type ResponseType = "id_token token" | "token";

interface RequestInfo {
/**
* Object comprising of fields such as id_token/error, session_state, state, e.t.c.
*/
parameters: any;
/**
* Request type.
*/
requestType: RequestType;
/**
* Whether state is valid.
*/
stateMatch: boolean;
/**
* Unique guid used to match the response with the request.
*/
stateResponse: string;
/**
* Whether `requestType` contains `id_token`, `access_token` or error.
*/
valid: boolean;
}

interface UserInfo {
/**
* Username assigned from UPN or email.
*/
userName: string;
/**
* Properties parsed from `id_token`.
*/
profile: any;
}

type TokenCallback = (errorDesc: string | null, token: string | null, error: any) => void;

type UserCallback = (errorDesc: string | null, user: UserInfo | null) => void;

/**
* Configuration options for Authentication Context
*/
interface Options {
/**
* Client ID assigned to your app by Azure Active Directory.
*/
clientId: string;
/**
* Endpoint at which you expect to receive tokens.Defaults to `window.location.href`.
*/
redirectUri?: string;
/**
* Azure Active Directory instance. Defaults to `https://login.microsoftonline.com/`.
*/
instance?: string;
/**
* Your target tenant. Defaults to `common`.
*/
tenant?: string;
/**
* Query parameters to add to the authentication request.
*/
extraQueryParameter?: string;
/**
* Unique identifier used to map the request with the response. Defaults to RFC4122 version 4 guid (128 bits).
*/
correlationId?: string;
/**
* User defined function of handling the navigation to Azure AD authorization endpoint in case of login.
*/
displayCall?: (url: string) => void;
/**
* Set this to true to enable login in a popup winodow instead of a full redirect. Defaults to `false`.
*/
popUp?: boolean;
/**
* Set this to the resource to request on login. Defaults to `clientId`.
*/
loginResource?: string;
/**
* Set this to redirect the user to a custom login page.
*/
localLoginUrl?: string;
/**
* Redirects to start page after login. Defaults to `true`.
*/
navigateToLoginRequestUrl?: boolean;
/**
* Set this to redirect the user to a custom logout page.
*/
logOutUri?: string;
/**
* Redirects the user to postLogoutRedirectUri after logout. Defaults to `redirectUri`.
*/
postLogoutRedirectUri?: string;
/**
* Sets browser storage to either 'localStorage' or sessionStorage'. Defaults to `sessionStorage`.
*/
cacheLocation?: "localStorage" | "sessionStorage";
/**
* Array of keywords or URIs. Adal will attach a token to outgoing requests that have these keywords or URIs.
*/
endpoints?: { [resource: string]: string };
/**
* Array of keywords or URIs. Adal will not attach a token to outgoing requests that have these keywords or URIs.
*/
anonymousEndpoints?: string[];
/**
* If the cached token is about to be expired in the expireOffsetSeconds (in seconds), Adal will renew the token instead of using the cached token. Defaults to 300 seconds.
*/
expireOffsetSeconds?: number;
/**
* The number of milliseconds of inactivity before a token renewal response from AAD should be considered timed out. Defaults to 6 seconds.
*/
loadFrameTimeout?: number;
/**
* Callback to be invoked when a token is acquired.
*/
callback?: TokenCallback;
}

interface LoggingConfig {
level: LoggingLevel;
log: (message: string) => void;
piiLoggingEnabled: boolean;
}

/**
* Enum for storage constants
*/
interface Constants {
ACCESS_TOKEN: "access_token";
EXPIRES_IN: "expires_in";
ID_TOKEN: "id_token";
ERROR_DESCRIPTION: "error_description";
SESSION_STATE: "session_state";
STORAGE: {
TOKEN_KEYS: "adal.token.keys";
ACCESS_TOKEN_KEY: "adal.access.token.key";
EXPIRATION_KEY: "adal.expiration.key";
STATE_LOGIN: "adal.state.login";
STATE_RENEW: "adal.state.renew";
NONCE_IDTOKEN: "adal.nonce.idtoken";
SESSION_STATE: "adal.session.state";
USERNAME: "adal.username";
IDTOKEN: "adal.idtoken";
ERROR: "adal.error";
ERROR_DESCRIPTION: "adal.error.description";
LOGIN_REQUEST: "adal.login.request";
LOGIN_ERROR: "adal.login.error";
RENEW_STATUS: "adal.token.renew.status";
};
RESOURCE_DELIMETER: "|";
LOADFRAME_TIMEOUT: "6000";
TOKEN_RENEW_STATUS_CANCELED: "Canceled";
TOKEN_RENEW_STATUS_COMPLETED: "Completed";
TOKEN_RENEW_STATUS_IN_PROGRESS: "In Progress";
LOGGING_LEVEL: {
ERROR: 0;
WARN: 1;
INFO: 2;
VERBOSE: 3;
};
LEVEL_STRING_MAP: {
0: "ERROR:";
1: "WARNING:";
2: "INFO:";
3: "VERBOSE:";
};
POPUP_WIDTH: 483;
POPUP_HEIGHT: 600;
}
}

// declare global {
// interface Window {
// Logging: AuthenticationContext.LoggingConfig;
// }
// }
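The declaration above only describes the API surface of the vendored, tenant-aware ADAL build. For orientation, here is a minimal usage sketch written against it. This is not code from the commit: the client id, tenant, and resource URI below are placeholder values, and the sketch simply assumes the declaration is loaded globally, as the declare class form implies.

// Hypothetical usage of the customized ADAL declaration (placeholder values throughout).
const authContext = new AuthenticationContext({
  clientId: "00000000-0000-0000-0000-000000000000", // placeholder AAD application id
  tenant: "common", // placeholder tenant
  cacheLocation: "sessionStorage",
});

// If the current URL fragment carries a token or an error, let ADAL process it.
if (authContext.isCallback(window.location.hash)) {
  authContext.handleWindowCallback();
}

if (!authContext.getCachedUser()) {
  // No cached identity: start the redirect-based login flow.
  authContext.login();
} else {
  // The customized build threads a tenantId through acquireToken, so switching
  // tenants forces a refetch instead of reusing the sessionStorage copy.
  authContext.acquireToken(
    "https://management.core.windows.net/", // placeholder resource URI
    "common", // placeholder tenant id
    (errorDesc, token, error) => {
      if (errorDesc || error) {
        console.error("Token acquisition failed", errorDesc || error);
      } else {
        console.log("Acquired token", token);
      }
    }
  );
}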
@@ -36,8 +36,8 @@ export class ResourceTreeContextMenuButtonFactory {
 {
 iconSrc: AddCollectionIcon,
 onClick: () => container.onNewCollectionClicked(),
-label: container.addCollectionText()
+label: container.addCollectionText(),
-}
+},
 ];

 if (userContext.defaultExperience !== DefaultAccountExperienceType.Table) {

@@ -45,7 +45,7 @@ export class ResourceTreeContextMenuButtonFactory {
 iconSrc: DeleteDatabaseIcon,
 onClick: () => container.deleteDatabaseConfirmationPane.open(),
 label: container.deleteDatabaseText(),
-styleClass: "deleteDatabaseMenuItem"
+styleClass: "deleteDatabaseMenuItem",
 });
 }
 return items;

@@ -60,7 +60,7 @@ export class ResourceTreeContextMenuButtonFactory {
 items.push({
 iconSrc: AddSqlQueryIcon,
 onClick: () => selectedCollection && selectedCollection.onNewQueryClick(selectedCollection, null),
-label: "New SQL Query"
+label: "New SQL Query",
 });
 }

@@ -68,7 +68,7 @@ export class ResourceTreeContextMenuButtonFactory {
 items.push({
 iconSrc: AddSqlQueryIcon,
 onClick: () => selectedCollection && selectedCollection.onNewMongoQueryClick(selectedCollection, null),
-label: "New Query"
+label: "New Query",
 });

 items.push({

@@ -77,7 +77,7 @@ export class ResourceTreeContextMenuButtonFactory {
 const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
 selectedCollection && selectedCollection.onNewMongoShellClick();
 },
-label: "New Shell"
+label: "New Shell",
 });
 }

@@ -88,7 +88,7 @@ export class ResourceTreeContextMenuButtonFactory {
 const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
 selectedCollection && selectedCollection.onNewStoredProcedureClick(selectedCollection, null);
 },
-label: "New Stored Procedure"
+label: "New Stored Procedure",
 });

 items.push({

@@ -97,7 +97,7 @@ export class ResourceTreeContextMenuButtonFactory {
 const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
 selectedCollection && selectedCollection.onNewUserDefinedFunctionClick(selectedCollection, null);
 },
-label: "New UDF"
+label: "New UDF",
 });

 items.push({

@@ -106,7 +106,7 @@ export class ResourceTreeContextMenuButtonFactory {
 const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
 selectedCollection && selectedCollection.onNewTriggerClick(selectedCollection, null);
 },
-label: "New Trigger"
+label: "New Trigger",
 });
 }

@@ -117,7 +117,7 @@ export class ResourceTreeContextMenuButtonFactory {
 selectedCollection && selectedCollection.onDeleteCollectionContextMenuClick(selectedCollection, null);
 },
 label: container.deleteCollectionText(),
-styleClass: "deleteCollectionMenuItem"
+styleClass: "deleteCollectionMenuItem",
 });

 return items;

@@ -135,8 +135,8 @@ export class ResourceTreeContextMenuButtonFactory {
 {
 iconSrc: DeleteSprocIcon,
 onClick: () => storedProcedure.delete(),
-label: "Delete Store Procedure"
+label: "Delete Store Procedure",
-}
+},
 ];
 }

@@ -149,8 +149,8 @@ export class ResourceTreeContextMenuButtonFactory {
 {
 iconSrc: DeleteTriggerIcon,
 onClick: () => trigger.delete(),
-label: "Delete Trigger"
+label: "Delete Trigger",
-}
+},
 ];
 }

@@ -166,8 +166,8 @@ export class ResourceTreeContextMenuButtonFactory {
 {
 iconSrc: DeleteUDFIcon,
 onClick: () => userDefinedFunction.delete(),
-label: "Delete User Defined Function"
+label: "Delete User Defined Function",
-}
+},
 ];
 }
 }

@@ -31,7 +31,7 @@ export class AccessibleElement extends React.Component<AccessibleElementProps> {
 ...elementProps,
 onKeyPress: this.onKeyPress,
 onClick: this.props.onActivated,
-tabIndex
+tabIndex,
 });
 }
 }

@@ -38,7 +38,7 @@ export class AccordionItemComponent extends React.Component<AccordionItemCompone
 super(props);
 this.isExpanded = props.isExpanded;
 this.state = {
-isExpanded: true
+isExpanded: true,
 };
 }

@@ -46,7 +46,7 @@ export class AccordionItemComponent extends React.Component<AccordionItemCompone
 if (this.props.isExpanded !== this.isExpanded) {
 this.isExpanded = this.props.isExpanded;
 this.setState({
-isExpanded: this.props.isExpanded
+isExpanded: this.props.isExpanded,
 });
 }
 }

@@ -16,7 +16,7 @@ const createBlankProps = (): AccountSwitchComponentProps => {
 subscriptions: [],
 selectedSubscriptionId: null,
 isLoadingSubscriptions: false,
-onSubscriptionChange: jest.fn()
+onSubscriptionChange: jest.fn(),
 };
 };

@@ -28,7 +28,7 @@ const createBlankAccount = (): DatabaseAccount => {
 properties: null,
 location: "",
 tags: null,
-type: ""
+type: "",
 };
 };

@@ -40,7 +40,7 @@ const createBlankSubscription = (): Subscription => {
 state: "",
 subscriptionPolicies: null,
 tenantId: "",
-uniqueDisplayName: ""
+uniqueDisplayName: "",
 };
 };

@@ -34,13 +34,13 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone
 items: [
 {
 key: "switchSubscription",
-onRender: this._renderSubscriptionDropdown.bind(this)
+onRender: this._renderSubscriptionDropdown.bind(this),
 },
 {
 key: "switchAccount",
-onRender: this._renderAccountDropDown.bind(this)
+onRender: this._renderAccountDropDown.bind(this),
-}
+},
-]
+],
 };

 const buttonStyles: IButtonStyles = {

@@ -51,27 +51,27 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone
 paddingLeft: 10,
 marginRight: 5,
 backgroundColor: StyleConstants.BaseDark,
-color: StyleConstants.BaseLight
+color: StyleConstants.BaseLight,
 },
 rootHovered: {
 backgroundColor: StyleConstants.BaseHigh,
-color: StyleConstants.BaseLight
+color: StyleConstants.BaseLight,
 },
 rootFocused: {
 backgroundColor: StyleConstants.BaseHigh,
-color: StyleConstants.BaseLight
+color: StyleConstants.BaseLight,
 },
 rootPressed: {
 backgroundColor: StyleConstants.BaseHigh,
-color: StyleConstants.BaseLight
+color: StyleConstants.BaseLight,
 },
 rootExpanded: {
 backgroundColor: StyleConstants.BaseHigh,
-color: StyleConstants.BaseLight
+color: StyleConstants.BaseLight,
 },
 textContainer: {
-flexGrow: "initial"
+flexGrow: "initial",
-}
+},
 };

 const buttonProps: IButtonProps = {

@@ -79,7 +79,7 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone
 menuProps: menuProps,
 styles: buttonStyles,
 className: "accountSwitchButton",
-id: "accountSwitchButton"
+id: "accountSwitchButton",
 };

 return <DefaultButton {...buttonProps} />;

@@ -87,11 +87,11 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone

 private _renderSubscriptionDropdown(): JSX.Element {
 const { subscriptions, selectedSubscriptionId, isLoadingSubscriptions } = this.props;
-const options: IDropdownOption[] = subscriptions.map(sub => {
+const options: IDropdownOption[] = subscriptions.map((sub) => {
 return {
 key: sub.subscriptionId,
 text: sub.displayName,
-data: sub
+data: sub,
 };
 });

@@ -109,8 +109,8 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone
 defaultSelectedKey: selectedSubscriptionId,
 placeholder: placeHolderText,
 styles: {
-callout: "accountSwitchSubscriptionDropdownMenu"
+callout: "accountSwitchSubscriptionDropdownMenu",
-}
+},
 };

 return <Dropdown {...dropdownProps} />;

@@ -126,11 +126,11 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone

 private _renderAccountDropDown(): JSX.Element {
 const { accounts, selectedAccountName, isLoadingAccounts } = this.props;
-const options: IDropdownOption[] = accounts.map(account => {
+const options: IDropdownOption[] = accounts.map((account) => {
 return {
 key: account.name,
 text: account.name,
-data: account
+data: account,
 };
 });
 // Fabric UI will also try to select the first non-disabled option from dropdown.

@@ -138,7 +138,7 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone
 options.unshift({
 key: "select from list",
 text: "Select Cosmos DB account from list",
-data: undefined
+data: undefined,
 });

 const placeHolderText = isLoadingAccounts

@@ -155,8 +155,8 @@ export class AccountSwitchComponent extends React.Component<AccountSwitchCompone
 defaultSelectedKey: selectedAccountName,
 placeholder: placeHolderText,
 styles: {
-callout: "accountSwitchAccountDropdownMenu"
+callout: "accountSwitchAccountDropdownMenu",
-}
+},
 };

 return <Dropdown {...dropdownProps} />;

@@ -30,7 +30,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
 constructor(props: ArcadiaMenuPickerProps) {
 super(props);
 this.state = {
-selectedSparkPool: props.selectedSparkPool
+selectedSparkPool: props.selectedSparkPool,
 };
 }

@@ -41,7 +41,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
 try {
 this.props.onSparkPoolSelect(e, item);
 this.setState({
-selectedSparkPool: item.text
+selectedSparkPool: item.text,
 });
 } catch (error) {
 Logger.logError(getErrorMessage(error), "ArcadiaMenuPicker/_onSparkPoolClicked");

@@ -65,28 +65,28 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A

 public render() {
 const { workspaces } = this.props;
-let workspaceMenuItems: IContextualMenuItem[] = workspaces.map(workspace => {
+let workspaceMenuItems: IContextualMenuItem[] = workspaces.map((workspace) => {
 let sparkPoolsMenuProps: IContextualMenuProps = {
 items: workspace.sparkPools.map(
 (sparkpool): IContextualMenuItem => ({
 key: sparkpool.id,
 text: sparkpool.name,
-onClick: this._onSparkPoolClicked
+onClick: this._onSparkPoolClicked,
 })
-)
+),
 };
 if (!sparkPoolsMenuProps.items.length) {
 sparkPoolsMenuProps.items.push({
 key: workspace.id,
 text: "Create new spark pool",
-onClick: this._onCreateNewSparkPoolClicked
+onClick: this._onCreateNewSparkPoolClicked,
 });
 }

 return {
 key: workspace.id,
 text: workspace.name,
-subMenuProps: this.props.disableSubmenu ? undefined : sparkPoolsMenuProps
+subMenuProps: this.props.disableSubmenu ? undefined : sparkPoolsMenuProps,
 };
 });

@@ -94,7 +94,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
 workspaceMenuItems.push({
 key: "create_workspace",
 text: "Create new workspace",
-onClick: this._onCreateNewWorkspaceClicked
+onClick: this._onCreateNewWorkspaceClicked,
 });
 }

@@ -103,29 +103,29 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
 backgroundColor: "transparent",
 margin: "auto 5px",
 padding: "0",
-border: "0"
+border: "0",
 },
 rootHovered: {
-backgroundColor: "transparent"
+backgroundColor: "transparent",
 },
 rootChecked: {
-backgroundColor: "transparent"
+backgroundColor: "transparent",
 },
 rootFocused: {
-backgroundColor: "transparent"
+backgroundColor: "transparent",
 },
 rootExpanded: {
-backgroundColor: "transparent"
+backgroundColor: "transparent",
 },
 flexContainer: {
 height: "30px",
 border: "1px solid #a6a6a6",
-padding: "0 8px"
+padding: "0 8px",
 },
 label: {
 fontWeight: "400",
-fontSize: "12px"
+fontSize: "12px",
-}
+},
 };

 return (

@@ -134,7 +134,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
 persistMenu={true}
 className="arcadia-menu-picker"
 menuProps={{
-items: workspaceMenuItems
+items: workspaceMenuItems,
 }}
 styles={dropdownStyle}
 />
Some files were not shown because too many files have changed in this diff.