Prettier 2.0 (#393)
parent c1937ca464
commit 4be53284b5
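The bulk of this diff is mechanical: re-running Prettier after the 2.x upgrade reprints every touched file with the new defaults. As a rough sketch of the pattern repeated below (the names here are invented for illustration, not taken from this repository), Prettier 2.0 changed arrowParens from "avoid" to "always" and trailingComma from "none" to "es5":

// Illustrative only — `items`, `ids`, and `config` are hypothetical names.
const items = [{ id: 1 }, { id: 2 }];
const ids = items.map((item) => item.id); // was: items.map(item => item.id)
const config = {
  retries: 3, // was: retries: 3   (no trailing comma before the closing brace)
};
console.log(ids, config);

The collapsed call chains further down (for example client().database(databaseId).delete() replacing the multi-line form) are also Prettier 2.x output: a short member chain is kept on one line when it fits within the print width.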
.eslintrc.js (24)
@@ -1,36 +1,36 @@
module.exports = {
env: {
browser: true,
es6: true
es6: true,
},
plugins: ["@typescript-eslint", "no-null", "prefer-arrow"],
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
globals: {
Atomics: "readonly",
SharedArrayBuffer: "readonly"
SharedArrayBuffer: "readonly",
},
parser: "@typescript-eslint/parser",
parserOptions: {
ecmaFeatures: {
jsx: true
jsx: true,
},
ecmaVersion: 2018,
sourceType: "module"
sourceType: "module",
},
overrides: [
{
files: ["**/*.tsx"],
extends: ["plugin:react/recommended"], // TODO: Add react-hooks
plugins: ["react"]
plugins: ["react"],
},
{
files: ["**/*.{test,spec}.{ts,tsx}"],
env: {
jest: true
jest: true,
},
extends: ["plugin:jest/recommended"],
plugins: ["jest"]
}
plugins: ["jest"],
},
],
rules: {
"no-console": ["error", { allow: ["error", "warn", "dir"] }],
@@ -46,8 +46,8 @@ module.exports = {
"error",
{
selector: "CallExpression[callee.object.name='JSON'][callee.property.name='stringify'] Identifier[name=/$err/]",
message: "Do not use JSON.stringify(error). It will print '{}'"
}
]
}
message: "Do not use JSON.stringify(error). It will print '{}'",
},
],
},
};

@@ -1,4 +1,4 @@
module.exports = {
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-react", "@babel/preset-typescript"],
plugins: [["@babel/plugin-proposal-decorators", { legacy: true }]]
plugins: [["@babel/plugin-proposal-decorators", { legacy: true }]],
};

@@ -6,6 +6,6 @@ module.exports = {
slowMo: 55,
defaultViewport: null,
ignoreHTTPSErrors: true,
args: ["--disable-web-security"]
}
args: ["--disable-web-security"],
},
};

@@ -1,5 +1,5 @@
module.exports = {
preset: "jest-puppeteer",
testMatch: ["<rootDir>/test/**/*.spec.[jt]s?(x)"],
setupFiles: ["dotenv/config"]
setupFiles: ["dotenv/config"],
};

@@ -42,8 +42,8 @@ module.exports = {
branches: 22,
functions: 28,
lines: 33,
statements: 31
}
statements: 31,
},
},

// Make calling deprecated APIs throw helpful error messages
@@ -76,7 +76,7 @@ module.exports = {
"office-ui-fabric-react/lib/(.*)$": "office-ui-fabric-react/lib-commonjs/$1", // https://github.com/OfficeDev/office-ui-fabric-react/wiki/Fabric-6-Release-Notes
"^dnd-core$": "dnd-core/dist/cjs",
"^react-dnd$": "react-dnd/dist/cjs",
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs"
"^react-dnd-html5-backend$": "react-dnd-html5-backend/dist/cjs",
},

// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -164,11 +164,11 @@ module.exports = {
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.html?$": "html-loader-jest",
"^.+\\.[t|j]sx?$": "babel-jest"
"^.+\\.[t|j]sx?$": "babel-jest",
},

// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/", "/externals/"]
transformIgnorePatterns: ["/node_modules/", "/externals/"],

// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,

@@ -17647,9 +17647,9 @@
"integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ="
},
"prettier": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
"integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
"integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
"dev": true
},
"pretty-error": {

@@ -167,7 +167,7 @@
"mini-css-extract-plugin": "0.4.3",
"monaco-editor-webpack-plugin": "1.7.0",
"node-fetch": "2.6.1",
"prettier": "1.19.1",
"prettier": "2.2.1",
"puppeteer": "4.0.0",
"raw-loader": "0.5.1",
"rimraf": "3.0.0",

@@ -3,5 +3,5 @@ export enum AuthType {
EncryptedToken = "encryptedtoken",
MasterKey = "masterkey",
ResourceToken = "resourcetoken",
ConnectionString = "connectionstring"
ConnectionString = "connectionstring",
}

@@ -14,7 +14,7 @@ export class BindingHandlersRegisterer {
) {
const value = ko.unwrap(wrappedValueAccessor());
bindingContext?.$data.isTemplateReady(value);
}
},
} as ko.BindingHandler;

ReactBindingHandler.Registerer.register();

@@ -42,7 +42,7 @@ export class Registerer {

// Initial rendering at mount point
ReactDOM.render(adapter.renderComponent(), element);
}
},
} as ko.BindingHandler;
}
}

@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key);
if (values) {
values.forEach(value => iteratorFct(value));
values.forEach((value) => iteratorFct(value));
}
}

@@ -7,7 +7,7 @@ export class CodeOfConductEndpoints {
export class EndpointsRegex {
public static readonly cassandra = [
"AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com"
"HostName=(.*).cassandra.cosmos.azure.com",
];
public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -147,7 +147,7 @@ export class MongoDBAccounts {

export enum MongoBackendEndpointType {
local,
remote
remote,
}

// TODO: 435619 Add default endpoints per cloud and use regional only when available
@@ -274,7 +274,7 @@ export class HttpStatusCodes {
HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable,
HttpStatusCodes.GatewayTimeout
HttpStatusCodes.GatewayTimeout,
];
}

@@ -330,10 +330,7 @@ export class HashRoutePrefixes {
public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);

return transformedDatabasePrefix
.replace("{coll_id}", collectionId)
.replace("{doc_id}", docId)
.replace("/", ""); // strip the first slash since hasher adds it
return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
}
}

@@ -379,7 +376,7 @@ export class OfferVersions {
export enum ConflictOperationType {
Replace = "replace",
Create = "create",
Delete = "delete"
Delete = "delete",
}

export const EmulatorMasterKey =

@ -10,17 +10,17 @@ describe("tokenProvider", () => {
|
|||
resourceId: "",
|
||||
resourceType: "dbs" as ResourceType,
|
||||
headers: {},
|
||||
getAuthorizationTokenUsingMasterKey: () => ""
|
||||
getAuthorizationTokenUsingMasterKey: () => "",
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
window.fetch = jest.fn().mockImplementation(() => {
|
||||
return {
|
||||
json: () => "{}",
|
||||
headers: new Map()
|
||||
headers: new Map(),
|
||||
};
|
||||
});
|
||||
});
|
||||
|
@ -36,7 +36,7 @@ describe("tokenProvider", () => {
|
|||
|
||||
it("does not call the auth service if a master key is set", async () => {
|
||||
updateUserContext({
|
||||
masterKey: "foo"
|
||||
masterKey: "foo",
|
||||
});
|
||||
await tokenProvider(options);
|
||||
expect((window.fetch as any).mock.calls.length).toBe(0);
|
||||
|
@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
|
|||
window.fetch = jest.fn().mockImplementation(() => {
|
||||
return {
|
||||
json: () => "{}",
|
||||
headers: new Map()
|
||||
headers: new Map(),
|
||||
};
|
||||
});
|
||||
});
|
||||
|
@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
|
|||
|
||||
it("builds the correct URL in production", () => {
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
getTokenFromAuthService("GET", "dbs", "foo");
|
||||
expect(window.fetch).toHaveBeenCalledWith(
|
||||
|
@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
|
|||
|
||||
it("builds the correct URL in dev", () => {
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://localhost:1234"
|
||||
BACKEND_ENDPOINT: "https://localhost:1234",
|
||||
});
|
||||
getTokenFromAuthService("GET", "dbs", "foo");
|
||||
expect(window.fetch).toHaveBeenCalledWith(
|
||||
|
@ -96,15 +96,15 @@ describe("endpoint", () => {
|
|||
documentEndpoint: "bar",
|
||||
gremlinEndpoint: "foo",
|
||||
tableEndpoint: "foo",
|
||||
cassandraEndpoint: "foo"
|
||||
}
|
||||
}
|
||||
cassandraEndpoint: "foo",
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(endpoint()).toEqual("bar");
|
||||
});
|
||||
it("uses _endpoint if set", () => {
|
||||
updateUserContext({
|
||||
endpoint: "baz"
|
||||
endpoint: "baz",
|
||||
});
|
||||
expect(endpoint()).toEqual("baz");
|
||||
});
|
||||
|
@ -121,7 +121,7 @@ describe("requestPlugin", () => {
|
|||
updateConfigContext({
|
||||
platform: Platform.Hosted,
|
||||
BACKEND_ENDPOINT: "https://localhost:1234",
|
||||
PROXY_PATH: "/proxy"
|
||||
PROXY_PATH: "/proxy",
|
||||
});
|
||||
const headers = {};
|
||||
const endpoint = "https://docs.azure.com";
|
||||
|
|
|
@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
|
|||
method: "POST",
|
||||
headers: {
|
||||
"content-type": "application/json",
|
||||
"x-ms-encrypted-auth-token": userContext.accessToken
|
||||
"x-ms-encrypted-auth-token": userContext.accessToken,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
verb,
|
||||
resourceType,
|
||||
resourceId
|
||||
})
|
||||
resourceId,
|
||||
}),
|
||||
});
|
||||
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
|
||||
const result = JSON.parse(await response.json());
|
||||
|
@ -81,9 +81,9 @@ export function client(): Cosmos.CosmosClient {
|
|||
key: userContext.masterKey,
|
||||
tokenProvider,
|
||||
connectionPolicy: {
|
||||
enableEndpointDiscovery: false
|
||||
enableEndpointDiscovery: false,
|
||||
},
|
||||
userAgentSuffix: "Azure Portal"
|
||||
userAgentSuffix: "Azure Portal",
|
||||
};
|
||||
|
||||
if (configContext.PROXY_PATH !== undefined) {
|
||||
|
|
|
@ -73,7 +73,7 @@ export default class EditableUtility {
|
|||
return false;
|
||||
});
|
||||
|
||||
observable.subscribe(edit => {
|
||||
observable.subscribe((edit) => {
|
||||
var edits = observable.edits && observable.edits();
|
||||
if (!edits) {
|
||||
return;
|
||||
|
@ -83,9 +83,9 @@ export default class EditableUtility {
|
|||
});
|
||||
|
||||
observable.editableIsValid = ko.observable<boolean>(true);
|
||||
observable.subscribe(value => {
|
||||
observable.subscribe((value) => {
|
||||
const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
|
||||
const isValid = validations.every(validate => validate(value));
|
||||
const isValid = validations.every((validate) => validate(value));
|
||||
observable.editableIsValid(isValid);
|
||||
});
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
|
|||
}
|
||||
sendMessage({
|
||||
type: MessageTypes.ForbiddenError,
|
||||
reason: errorMessage
|
||||
reason: errorMessage,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
|
|
@ -11,8 +11,8 @@ describe("nextPage", () => {
|
|||
queryMetrics: {},
|
||||
requestCharge: 1,
|
||||
headers: {},
|
||||
activityId: "foo"
|
||||
})
|
||||
activityId: "foo",
|
||||
}),
|
||||
};
|
||||
|
||||
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();
|
||||
|
|
|
@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
|
|||
// Pick<QueryIterator<any>, "fetchNext">;
|
||||
|
||||
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
|
||||
return documentsIterator.fetchNext().then(response => {
|
||||
return documentsIterator.fetchNext().then((response) => {
|
||||
const documents = response.resources;
|
||||
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
|
||||
const itemCount = (documents && documents.length) || 0;
|
||||
|
@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
|
|||
lastItemIndex: Number(firstItemIndex) + Number(itemCount),
|
||||
headers,
|
||||
activityId: response.activityId,
|
||||
requestCharge: response.requestCharge
|
||||
requestCharge: response.requestCharge,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
|
|||
function _logEntry(entry: Diagnostics.LogEntry): void {
|
||||
sendMessage({
|
||||
type: MessageTypes.LogInfo,
|
||||
data: JSON.stringify(entry)
|
||||
data: JSON.stringify(entry),
|
||||
});
|
||||
|
||||
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
|
||||
|
@ -60,6 +60,6 @@ function _generateLogEntry(
|
|||
level,
|
||||
message,
|
||||
area,
|
||||
code
|
||||
code,
|
||||
};
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ describe("Message Handler", () => {
|
|||
let mockPromise = {
|
||||
id: "123",
|
||||
startTime: new Date(),
|
||||
deferred: Q.defer<any>()
|
||||
deferred: Q.defer<any>(),
|
||||
};
|
||||
let mockMessage = { message: { id: "123", data: "{}" } };
|
||||
MessageHandler.RequestMap[mockPromise.id] = mockPromise;
|
||||
|
@ -18,7 +18,7 @@ describe("Message Handler", () => {
|
|||
let message = {
|
||||
id: "123",
|
||||
startTime: new Date(),
|
||||
deferred: Q.defer<any>()
|
||||
deferred: Q.defer<any>(),
|
||||
};
|
||||
|
||||
MessageHandler.handleCachedDataMessage(message);
|
||||
|
|
|
@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
|
|||
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
|
||||
deferred: Q.defer<TResponseDataModel>(),
|
||||
startTime: new Date(),
|
||||
id: _.uniqueId()
|
||||
id: _.uniqueId(),
|
||||
};
|
||||
RequestMap[cachedDataPromise.id] = cachedDataPromise;
|
||||
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
|
||||
|
@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
|
|||
portalChildWindow.parent.postMessage(
|
||||
{
|
||||
signature: "pcIframe",
|
||||
data: data
|
||||
data: data,
|
||||
},
|
||||
portalChildWindow.document.referrer
|
||||
);
|
||||
|
|
|
@ -14,7 +14,7 @@ const fetchMock = () => {
|
|||
ok: true,
|
||||
text: () => "{}",
|
||||
json: () => "{}",
|
||||
headers: new Map()
|
||||
headers: new Map(),
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -27,8 +27,8 @@ const collection = {
|
|||
partitionKey: {
|
||||
paths: ["/pk"],
|
||||
kind: "Hash",
|
||||
version: 1
|
||||
}
|
||||
version: 1,
|
||||
},
|
||||
} as Collection;
|
||||
|
||||
const documentId = ({
|
||||
|
@ -38,8 +38,8 @@ const documentId = ({
|
|||
partitionKey: {
|
||||
paths: ["/pk"],
|
||||
kind: "Hash",
|
||||
version: 1
|
||||
}
|
||||
version: 1,
|
||||
},
|
||||
} as unknown) as DocumentId;
|
||||
|
||||
const databaseAccount = {
|
||||
|
@ -52,8 +52,8 @@ const databaseAccount = {
|
|||
documentEndpoint: "bar",
|
||||
gremlinEndpoint: "foo",
|
||||
tableEndpoint: "foo",
|
||||
cassandraEndpoint: "foo"
|
||||
}
|
||||
cassandraEndpoint: "foo",
|
||||
},
|
||||
} as DatabaseAccount;
|
||||
|
||||
describe("MongoProxyClient", () => {
|
||||
|
@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
|
|||
beforeEach(() => {
|
||||
resetConfigContext();
|
||||
updateUserContext({
|
||||
databaseAccount
|
||||
databaseAccount,
|
||||
});
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
window.fetch = jest.fn().mockImplementation(fetchMock);
|
||||
});
|
||||
|
@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
|
|||
beforeEach(() => {
|
||||
resetConfigContext();
|
||||
updateUserContext({
|
||||
databaseAccount
|
||||
databaseAccount,
|
||||
});
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
window.fetch = jest.fn().mockImplementation(fetchMock);
|
||||
});
|
||||
|
@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
|
|||
beforeEach(() => {
|
||||
resetConfigContext();
|
||||
updateUserContext({
|
||||
databaseAccount
|
||||
databaseAccount,
|
||||
});
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
window.fetch = jest.fn().mockImplementation(fetchMock);
|
||||
});
|
||||
|
@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
|
|||
beforeEach(() => {
|
||||
resetConfigContext();
|
||||
updateUserContext({
|
||||
databaseAccount
|
||||
databaseAccount,
|
||||
});
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
window.fetch = jest.fn().mockImplementation(fetchMock);
|
||||
});
|
||||
|
@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
|
|||
beforeEach(() => {
|
||||
resetConfigContext();
|
||||
updateUserContext({
|
||||
databaseAccount
|
||||
databaseAccount,
|
||||
});
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
window.fetch = jest.fn().mockImplementation(fetchMock);
|
||||
});
|
||||
|
@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
|
|||
resetConfigContext();
|
||||
delete window.authType;
|
||||
updateUserContext({
|
||||
databaseAccount
|
||||
databaseAccount,
|
||||
});
|
||||
updateConfigContext({
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
|
|||
const defaultHeaders = {
|
||||
[HttpHeaders.apiType]: ApiType.MongoDB.toString(),
|
||||
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
|
||||
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
|
||||
[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
|
||||
};
|
||||
|
||||
function authHeaders() {
|
||||
|
@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
|
|||
let continuationToken: string;
|
||||
return {
|
||||
fetchNext: () => {
|
||||
return queryDocuments(databaseId, collection, false, query).then(response => {
|
||||
return queryDocuments(databaseId, collection, false, query).then((response) => {
|
||||
continuationToken = response.continuationToken;
|
||||
const headers: { [key: string]: string | number } = {};
|
||||
response.headers.forEach((value, key) => {
|
||||
|
@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
|
|||
headers,
|
||||
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
|
||||
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
|
||||
hasMoreResults: !!continuationToken
|
||||
hasMoreResults: !!continuationToken,
|
||||
};
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -74,7 +74,9 @@ export function queryDocuments(
|
|||
rg: userContext.resourceGroup,
|
||||
dba: databaseAccount.name,
|
||||
pk:
|
||||
collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
|
||||
collection && collection.partitionKey && !collection.partitionKey.systemKey
|
||||
? collection.partitionKeyProperty
|
||||
: "",
|
||||
};
|
||||
|
||||
const endpoint = getEndpoint() || "";
|
||||
|
@ -87,7 +89,7 @@ export function queryDocuments(
|
|||
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
|
||||
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
|
||||
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
|
||||
[HttpHeaders.contentType]: "application/query+json"
|
||||
[HttpHeaders.contentType]: "application/query+json",
|
||||
};
|
||||
|
||||
if (continuationToken) {
|
||||
|
@ -100,14 +102,14 @@ export function queryDocuments(
|
|||
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ query }),
|
||||
headers
|
||||
headers,
|
||||
})
|
||||
.then(async response => {
|
||||
.then(async (response) => {
|
||||
if (response.ok) {
|
||||
return {
|
||||
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
|
||||
documents: (await response.json()).Documents as DataModels.DocumentId[],
|
||||
headers: response.headers
|
||||
headers: response.headers,
|
||||
};
|
||||
}
|
||||
errorHandling(response, "querying documents", params);
|
||||
|
@ -135,7 +137,9 @@ export function readDocument(
|
|||
rg: userContext.resourceGroup,
|
||||
dba: databaseAccount.name,
|
||||
pk:
|
||||
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
|
||||
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
|
||||
? documentId.partitionKeyProperty
|
||||
: "",
|
||||
};
|
||||
|
||||
const endpoint = getEndpoint();
|
||||
|
@ -147,10 +151,10 @@ export function readDocument(
|
|||
...authHeaders(),
|
||||
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
|
||||
JSON.stringify(documentId.partitionKeyHeader())
|
||||
)
|
||||
}
|
||||
),
|
||||
},
|
||||
})
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
return response.json();
|
||||
}
|
||||
|
@ -175,7 +179,7 @@ export function createDocument(
|
|||
sid: userContext.subscriptionId,
|
||||
rg: userContext.resourceGroup,
|
||||
dba: databaseAccount.name,
|
||||
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
|
||||
pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
|
||||
};
|
||||
|
||||
const endpoint = getEndpoint();
|
||||
|
@ -186,10 +190,10 @@ export function createDocument(
|
|||
body: JSON.stringify(documentContent),
|
||||
headers: {
|
||||
...defaultHeaders,
|
||||
...authHeaders()
|
||||
}
|
||||
...authHeaders(),
|
||||
},
|
||||
})
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
return response.json();
|
||||
}
|
||||
|
@ -218,7 +222,9 @@ export function updateDocument(
|
|||
rg: userContext.resourceGroup,
|
||||
dba: databaseAccount.name,
|
||||
pk:
|
||||
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
|
||||
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
|
||||
? documentId.partitionKeyProperty
|
||||
: "",
|
||||
};
|
||||
const endpoint = getEndpoint();
|
||||
|
||||
|
@ -230,10 +236,10 @@ export function updateDocument(
|
|||
...defaultHeaders,
|
||||
...authHeaders(),
|
||||
[HttpHeaders.contentType]: "application/json",
|
||||
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
|
||||
}
|
||||
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
|
||||
},
|
||||
})
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
return response.json();
|
||||
}
|
||||
|
@ -257,7 +263,9 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
|
|||
rg: userContext.resourceGroup,
|
||||
dba: databaseAccount.name,
|
||||
pk:
|
||||
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
|
||||
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
|
||||
? documentId.partitionKeyProperty
|
||||
: "",
|
||||
};
|
||||
const endpoint = getEndpoint();
|
||||
|
||||
|
@ -268,10 +276,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
|
|||
...defaultHeaders,
|
||||
...authHeaders(),
|
||||
[HttpHeaders.contentType]: "application/json",
|
||||
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
|
||||
}
|
||||
[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
|
||||
},
|
||||
})
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
return undefined;
|
||||
}
|
||||
|
@ -299,7 +307,7 @@ export function createMongoCollectionWithProxy(
|
|||
rg: userContext.resourceGroup,
|
||||
dba: databaseAccount.name,
|
||||
isAutoPilot: !!params.autoPilotMaxThroughput,
|
||||
autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
|
||||
autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
|
||||
};
|
||||
|
||||
const endpoint = getEndpoint();
|
||||
|
@ -314,11 +322,11 @@ export function createMongoCollectionWithProxy(
|
|||
headers: {
|
||||
...defaultHeaders,
|
||||
...authHeaders(),
|
||||
[HttpHeaders.contentType]: "application/json"
|
||||
}
|
||||
[HttpHeaders.contentType]: "application/json",
|
||||
},
|
||||
}
|
||||
)
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
if (response.ok) {
|
||||
return response.json();
|
||||
}
|
||||
|
|
|
@ -9,14 +9,14 @@ describe("parseSDKOfferResponse", () => {
|
|||
offerThroughput: 500,
|
||||
collectionThroughputInfo: {
|
||||
minimumRUForCollection: 400,
|
||||
numPhysicalPartitions: 1
|
||||
}
|
||||
numPhysicalPartitions: 1,
|
||||
},
|
||||
id: "test"
|
||||
},
|
||||
id: "test",
|
||||
} as SDKOfferDefinition;
|
||||
|
||||
const mockResponse = {
|
||||
resource: mockOfferDefinition
|
||||
resource: mockOfferDefinition,
|
||||
} as OfferResponse;
|
||||
|
||||
const expectedResult: Offer = {
|
||||
|
@ -25,7 +25,7 @@ describe("parseSDKOfferResponse", () => {
|
|||
minimumThroughput: 400,
|
||||
id: "test",
|
||||
offerDefinition: mockOfferDefinition,
|
||||
offerReplacePending: false
|
||||
offerReplacePending: false,
|
||||
};
|
||||
|
||||
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
|
||||
|
@ -37,17 +37,17 @@ describe("parseSDKOfferResponse", () => {
|
|||
offerThroughput: 400,
|
||||
collectionThroughputInfo: {
|
||||
minimumRUForCollection: 400,
|
||||
numPhysicalPartitions: 1
|
||||
numPhysicalPartitions: 1,
|
||||
},
|
||||
offerAutopilotSettings: {
|
||||
maxThroughput: 5000
|
||||
}
|
||||
maxThroughput: 5000,
|
||||
},
|
||||
id: "test"
|
||||
},
|
||||
id: "test",
|
||||
} as SDKOfferDefinition;
|
||||
|
||||
const mockResponse = {
|
||||
resource: mockOfferDefinition
|
||||
resource: mockOfferDefinition,
|
||||
} as OfferResponse;
|
||||
|
||||
const expectedResult: Offer = {
|
||||
|
@ -56,7 +56,7 @@ describe("parseSDKOfferResponse", () => {
|
|||
minimumThroughput: 400,
|
||||
id: "test",
|
||||
offerDefinition: mockOfferDefinition,
|
||||
offerReplacePending: false
|
||||
offerReplacePending: false,
|
||||
};
|
||||
|
||||
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
|
||||
|
|
|
@ -22,7 +22,7 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | und
|
|||
manualThroughput: undefined,
|
||||
minimumThroughput,
|
||||
offerDefinition,
|
||||
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
|
||||
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -32,6 +32,6 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | und
|
|||
manualThroughput: offerContent.offerThroughput,
|
||||
minimumThroughput,
|
||||
offerDefinition,
|
||||
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
|
||||
offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
|
||||
};
|
||||
};
|
||||
|
|
|
@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
|
|||
const headers = { [authorizationHeader.header]: authorizationHeader.token };
|
||||
|
||||
const response = await window.fetch(url, {
|
||||
headers
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
|
|
|
@ -20,7 +20,7 @@ export class QueriesClient {
|
|||
private static readonly PartitionKey: DataModels.PartitionKey = {
|
||||
paths: [`/${SavedQueries.PartitionKeyProperty}`],
|
||||
kind: BackendDefaults.partitionKeyKind,
|
||||
version: BackendDefaults.partitionKeyVersion
|
||||
version: BackendDefaults.partitionKeyVersion,
|
||||
};
|
||||
private static readonly FetchQuery: string = "SELECT * FROM c";
|
||||
private static readonly FetchMongoQuery: string = "{}";
|
||||
|
@ -40,7 +40,7 @@ export class QueriesClient {
|
|||
databaseId: SavedQueries.DatabaseName,
|
||||
partitionKey: QueriesClient.PartitionKey,
|
||||
offerThroughput: SavedQueries.OfferThroughput,
|
||||
databaseLevelThroughput: false
|
||||
databaseLevelThroughput: false,
|
||||
})
|
||||
.then(
|
||||
(collection: DataModels.Collection) => {
|
||||
|
@ -120,7 +120,7 @@ export class QueriesClient {
|
|||
resourceId: resourceId,
|
||||
queryName: queryName,
|
||||
query: query,
|
||||
id: id
|
||||
id: id,
|
||||
};
|
||||
try {
|
||||
this.validateQuery(parsedQuery);
|
||||
|
@ -161,7 +161,7 @@ export class QueriesClient {
|
|||
const documentId = new DocumentId(
|
||||
{
|
||||
partitionKey: QueriesClient.PartitionKey,
|
||||
partitionKeyProperty: "id"
|
||||
partitionKeyProperty: "id",
|
||||
} as DocumentsTab,
|
||||
query,
|
||||
query.queryName
|
||||
|
|
|
@ -4,7 +4,7 @@ import { SplitterMetrics } from "./Constants";
|
|||
|
||||
export enum SplitterDirection {
|
||||
Horizontal = "horizontal",
|
||||
Vertical = "vertical"
|
||||
Vertical = "vertical",
|
||||
}
|
||||
|
||||
export interface SplitterBounds {
|
||||
|
@ -51,7 +51,7 @@ export class Splitter {
|
|||
animate: true,
|
||||
animateDuration: "fast",
|
||||
start: this.onResizeStart,
|
||||
stop: this.onResizeStop
|
||||
stop: this.onResizeStop,
|
||||
};
|
||||
|
||||
if (isVerticalSplitter) {
|
||||
|
@ -91,9 +91,7 @@ export class Splitter {
|
|||
this.lastWidth = $(this.leftSide).width();
|
||||
$(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
|
||||
$(this.leftSide).css("width", "");
|
||||
$(this.leftSide)
|
||||
.resizable("option", "disabled", true)
|
||||
.removeClass("ui-resizable-disabled"); // remove class so splitter is visible
|
||||
$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
|
||||
$(this.splitter).removeClass("ui-resizable-e");
|
||||
this.isCollapsed(true);
|
||||
}
|
||||
|
|
|
@ -32,8 +32,8 @@ export default class UrlUtility {
|
|||
type: type,
|
||||
objectBody: {
|
||||
id: id,
|
||||
self: resourcePath
|
||||
}
|
||||
self: resourcePath,
|
||||
},
|
||||
};
|
||||
|
||||
return result;
|
||||
|
|
|
@ -14,15 +14,15 @@ describe("createCollection", () => {
|
|||
collectionId: "testContainer",
|
||||
databaseId: "testDatabase",
|
||||
databaseLevelThroughput: true,
|
||||
offerThroughput: 400
|
||||
offerThroughput: 400,
|
||||
};
|
||||
|
||||
beforeAll(() => {
|
||||
updateUserContext({
|
||||
databaseAccount: {
|
||||
name: "test"
|
||||
name: "test",
|
||||
} as DatabaseAccount,
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB,
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -40,12 +40,12 @@ describe("createCollection", () => {
|
|||
return {
|
||||
database: {
|
||||
containers: {
|
||||
create: () => ({})
|
||||
}
|
||||
}
|
||||
create: () => ({}),
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||
await createCollection(createCollectionParams);
|
||||
expect(client).toHaveBeenCalled();
|
||||
|
@ -59,7 +59,7 @@ describe("createCollection", () => {
|
|||
collectionId: "testContainer",
|
||||
databaseId: "testDatabase",
|
||||
databaseLevelThroughput: false,
|
||||
offerThroughput: 400
|
||||
offerThroughput: 400,
|
||||
};
|
||||
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
|
||||
|
||||
|
@ -69,12 +69,12 @@ describe("createCollection", () => {
|
|||
databaseId: "testDatabase",
|
||||
databaseLevelThroughput: false,
|
||||
offerThroughput: 400,
|
||||
autoPilotMaxThroughput: 4000
|
||||
autoPilotMaxThroughput: 4000,
|
||||
};
|
||||
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
|
||||
autoscaleSettings: {
|
||||
maxThroughput: 4000
|
||||
}
|
||||
maxThroughput: 4000,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
|
|||
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import {
|
||||
createUpdateCassandraTable,
|
||||
getCassandraTable
|
||||
getCassandraTable,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
|
||||
import {
|
||||
createUpdateMongoDBCollection,
|
||||
getMongoDBCollection
|
||||
getMongoDBCollection,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
|
||||
import {
|
||||
createUpdateGremlinGraph,
|
||||
getGremlinGraph
|
||||
getGremlinGraph,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
|
||||
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
|
||||
|
@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
|
|||
autoPilotMaxThroughput: params.autoPilotMaxThroughput,
|
||||
databaseId: params.databaseId,
|
||||
databaseLevelThroughput: params.databaseLevelThroughput,
|
||||
offerThroughput: params.offerThroughput
|
||||
offerThroughput: params.offerThroughput,
|
||||
};
|
||||
await createDatabase(createDatabaseParams);
|
||||
}
|
||||
|
@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
|
|||
|
||||
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
|
||||
const resource: ARMTypes.SqlContainerResource = {
|
||||
id: params.collectionId
|
||||
id: params.collectionId,
|
||||
};
|
||||
if (params.analyticalStorageTtl) {
|
||||
resource.analyticalStorageTtl = params.analyticalStorageTtl;
|
||||
|
@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
|
|||
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource,
|
||||
options
|
||||
}
|
||||
options,
|
||||
},
|
||||
};
|
||||
|
||||
const createResponse = await createUpdateSqlContainer(
|
||||
|
@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
|
|||
|
||||
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
|
||||
const resource: ARMTypes.MongoDBCollectionResource = {
|
||||
id: params.collectionId
|
||||
id: params.collectionId,
|
||||
};
|
||||
if (params.analyticalStorageTtl) {
|
||||
resource.analyticalStorageTtl = params.analyticalStorageTtl;
|
||||
|
@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
|
|||
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource,
|
||||
options
|
||||
}
|
||||
options,
|
||||
},
|
||||
};
|
||||
|
||||
const createResponse = await createUpdateMongoDBCollection(
|
||||
|
@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
|
|||
|
||||
if (params.createMongoWildcardIndex) {
|
||||
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
|
||||
message: "Mongo Collection created with wildcard index on all fields."
|
||||
message: "Mongo Collection created with wildcard index on all fields.",
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
|
|||
|
||||
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
|
||||
const resource: ARMTypes.CassandraTableResource = {
|
||||
id: params.collectionId
|
||||
id: params.collectionId,
|
||||
};
|
||||
if (params.analyticalStorageTtl) {
|
||||
resource.analyticalStorageTtl = params.analyticalStorageTtl;
|
||||
|
@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
|
|||
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource,
|
||||
options
|
||||
}
|
||||
options,
|
||||
},
|
||||
};
|
||||
|
||||
const createResponse = await createUpdateCassandraTable(
|
||||
|
@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
|
|||
|
||||
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
|
||||
const resource: ARMTypes.GremlinGraphResource = {
|
||||
id: params.collectionId
|
||||
id: params.collectionId,
|
||||
};
|
||||
|
||||
if (params.indexingPolicy) {
|
||||
|
@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
|
|||
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource,
|
||||
options
|
||||
}
|
||||
options,
|
||||
},
|
||||
};
|
||||
|
||||
const createResponse = await createUpdateGremlinGraph(
|
||||
|
@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
|
|||
|
||||
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
|
||||
const resource: ARMTypes.TableResource = {
|
||||
id: params.collectionId
|
||||
id: params.collectionId,
|
||||
};
|
||||
|
||||
const rpPayload: ARMTypes.TableCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource,
|
||||
options
|
||||
}
|
||||
options,
|
||||
},
|
||||
};
|
||||
|
||||
const createResponse = await createUpdateTable(
|
||||
|
@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
|
|||
if (params.autoPilotMaxThroughput) {
|
||||
return {
|
||||
autoscaleSettings: {
|
||||
maxThroughput: params.autoPilotMaxThroughput
|
||||
}
|
||||
maxThroughput: params.autoPilotMaxThroughput,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
throughput: params.offerThroughput
|
||||
throughput: params.offerThroughput,
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
|
|||
partitionKey: params.partitionKey || undefined,
|
||||
indexingPolicy: params.indexingPolicy || undefined,
|
||||
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
|
||||
analyticalStorageTtl: params.analyticalStorageTtl
|
||||
analyticalStorageTtl: params.analyticalStorageTtl,
|
||||
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
|
||||
const collectionOptions: RequestOptions = {};
|
||||
const createDatabaseBody: DatabaseRequest = { id: params.databaseId };
|
||||
|
|
|
@ -8,21 +8,21 @@ import {
|
|||
GremlinDatabaseCreateUpdateParameters,
|
||||
MongoDBDatabaseCreateUpdateParameters,
|
||||
SqlDatabaseCreateUpdateParameters,
|
||||
CreateUpdateOptions
|
||||
CreateUpdateOptions,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import {
|
||||
createUpdateCassandraKeyspace,
|
||||
getCassandraKeyspace
|
||||
getCassandraKeyspace,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
|
||||
import {
|
||||
createUpdateMongoDBDatabase,
|
||||
getMongoDBDatabase
|
||||
getMongoDBDatabase,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
|
||||
import {
|
||||
createUpdateGremlinDatabase,
|
||||
getGremlinDatabase
|
||||
getGremlinDatabase,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
|
||||
|
@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
|
|||
const rpPayload: SqlDatabaseCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: {
|
||||
id: params.databaseId
|
||||
id: params.databaseId,
|
||||
},
|
||||
options,
|
||||
},
|
||||
options
|
||||
}
|
||||
};
|
||||
const createResponse = await createUpdateSqlDatabase(
|
||||
userContext.subscriptionId,
|
||||
|
@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
|
|||
const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: {
|
||||
id: params.databaseId
|
||||
id: params.databaseId,
|
||||
},
|
||||
options,
|
||||
},
|
||||
options
|
||||
}
|
||||
};
|
||||
const createResponse = await createUpdateMongoDBDatabase(
|
||||
userContext.subscriptionId,
|
||||
|
@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
|
|||
const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: {
|
||||
id: params.databaseId
|
||||
id: params.databaseId,
|
||||
},
|
||||
options,
|
||||
},
|
||||
options
|
||||
}
|
||||
};
|
||||
const createResponse = await createUpdateCassandraKeyspace(
|
||||
userContext.subscriptionId,
|
||||
|
@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
|
|||
const rpPayload: GremlinDatabaseCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: {
|
||||
id: params.databaseId
|
||||
id: params.databaseId,
|
||||
},
|
||||
options,
|
||||
},
|
||||
options
|
||||
}
|
||||
};
|
||||
const createResponse = await createUpdateGremlinDatabase(
|
||||
userContext.subscriptionId,
|
||||
|
@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
|
|||
if (params.autoPilotMaxThroughput) {
|
||||
return {
|
||||
autoscaleSettings: {
|
||||
maxThroughput: params.autoPilotMaxThroughput
|
||||
}
|
||||
maxThroughput: params.autoPilotMaxThroughput,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
throughput: params.offerThroughput
|
||||
throughput: params.offerThroughput,
|
||||
};
|
||||
}
|
||||
|
|
|
@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
|
|||
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlStoredProcedureCreateUpdateParameters,
|
||||
SqlStoredProcedureResource
|
||||
SqlStoredProcedureResource,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import {
|
||||
createUpdateSqlStoredProcedure,
|
||||
getSqlStoredProcedure
|
||||
getSqlStoredProcedure,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
|
@ -49,8 +49,8 @@ export async function createStoredProcedure(
|
|||
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: storedProcedure as SqlStoredProcedureResource,
|
||||
options: {}
|
||||
}
|
||||
options: {},
|
||||
},
|
||||
};
|
||||
const rpResponse = await createUpdateSqlStoredProcedure(
|
||||
userContext.subscriptionId,
|
||||
|
|
|
@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
|
|||
import { Resource, TriggerDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlTriggerCreateUpdateParameters,
|
||||
SqlTriggerResource
|
||||
SqlTriggerResource,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
|
@ -44,8 +44,8 @@ export async function createTrigger(
|
|||
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: trigger as SqlTriggerResource,
|
||||
options: {}
|
||||
}
|
||||
options: {},
|
||||
},
|
||||
};
|
||||
const rpResponse = await createUpdateSqlTrigger(
|
||||
userContext.subscriptionId,
|
||||
|
@ -59,10 +59,7 @@ export async function createTrigger(
|
|||
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
|
||||
}
|
||||
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.triggers.create(trigger);
|
||||
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
|
||||
return response.resource;
|
||||
} catch (error) {
|
||||
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);
|
||||
|
|
|
@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
|
|||
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlUserDefinedFunctionCreateUpdateParameters,
|
||||
SqlUserDefinedFunctionResource
|
||||
SqlUserDefinedFunctionResource,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import {
|
||||
createUpdateSqlUserDefinedFunction,
|
||||
getSqlUserDefinedFunction
|
||||
getSqlUserDefinedFunction,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
|
@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
|
|||
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
|
||||
options: {}
|
||||
}
|
||||
options: {},
|
||||
},
|
||||
};
|
||||
const rpResponse = await createUpdateSqlUserDefinedFunction(
|
||||
userContext.subscriptionId,
|
||||
|
|
|
@ -13,9 +13,9 @@ describe("deleteCollection", () => {
|
|||
beforeAll(() => {
|
||||
updateUserContext({
|
||||
databaseAccount: {
|
||||
name: "test"
|
||||
name: "test",
|
||||
} as DatabaseAccount,
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB,
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -32,11 +32,11 @@ describe("deleteCollection", () => {
|
|||
return {
|
||||
container: () => {
|
||||
return {
|
||||
delete: (): unknown => undefined
|
||||
delete: (): unknown => undefined,
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
},
|
||||
});
|
||||
await deleteCollection("database", "collection");
|
||||
expect(client).toHaveBeenCalled();
|
||||
|
|
|
@ -16,10 +16,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
|
|||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
await deleteCollectionWithARM(databaseId, collectionId);
|
||||
} else {
|
||||
await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.delete();
|
||||
await client().database(databaseId).container(collectionId).delete();
|
||||
}
|
||||
logConsoleInfo(`Successfully deleted container ${collectionId}`);
|
||||
} catch (error) {
|
||||
|
|
|
@ -10,7 +10,7 @@ export const deleteConflict = async (collection: CollectionBase, conflictId: Con
|
|||
|
||||
try {
|
||||
const options = {
|
||||
partitionKey: getPartitionKeyHeaderForConflict(conflictId)
|
||||
partitionKey: getPartitionKeyHeaderForConflict(conflictId),
|
||||
};
|
||||
|
||||
await client()
|
||||
|
|
|
@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
|
|||
beforeAll(() => {
|
||||
updateUserContext({
|
||||
databaseAccount: {
|
||||
name: "test"
|
||||
name: "test",
|
||||
} as DatabaseAccount,
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB,
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
|
|||
(client as jest.Mock).mockReturnValue({
|
||||
database: () => {
|
||||
return {
|
||||
delete: (): unknown => undefined
|
||||
delete: (): unknown => undefined,
|
||||
};
|
||||
}
|
||||
},
|
||||
});
|
||||
await deleteDatabase("database");
|
||||
expect(client).toHaveBeenCalled();
|
||||
|
|
|
@ -19,9 +19,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
|
|||
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
|
||||
await deleteDatabaseWithARM(databaseId);
|
||||
} else {
|
||||
await client()
|
||||
.database(databaseId)
|
||||
.delete();
|
||||
await client().database(databaseId).delete();
|
||||
}
|
||||
logConsoleInfo(`Successfully deleted database ${databaseId}`);
|
||||
} catch (error) {
|
||||
|
|
|
@ -27,11 +27,7 @@ export async function deleteStoredProcedure(
|
|||
storedProcedureId
|
||||
);
|
||||
} else {
|
||||
await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.storedProcedure(storedProcedureId)
|
||||
.delete();
|
||||
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);
|
||||
|
|
|
@ -23,11 +23,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
|
|||
triggerId
|
||||
);
|
||||
} else {
|
||||
await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.trigger(triggerId)
|
||||
.delete();
|
||||
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);
|
||||
|
|
|
@ -23,11 +23,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
|
|||
id
|
||||
);
|
||||
} else {
|
||||
await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.scripts.userDefinedFunction(id)
|
||||
.delete();
|
||||
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
|
||||
}
|
||||
} catch (error) {
|
||||
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);
|
||||
|
|
|
@ -33,7 +33,7 @@ export const executeStoredProcedure = async (
|
|||
);
|
||||
return {
|
||||
result: response.resource,
|
||||
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string
|
||||
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string,
|
||||
};
|
||||
} catch (error) {
|
||||
handleError(
|
||||
|
|
|
@ -60,8 +60,8 @@ export const getCollectionUsageSizeInKB = async (databaseName: string, container
|
|||
apiVersion: "2018-01-01",
|
||||
queryParams: {
|
||||
filter,
|
||||
metricNames
|
||||
}
|
||||
metricNames,
|
||||
},
|
||||
});
|
||||
|
||||
if (metricsResponse?.value?.length !== 2) {
|
||||
|
|
|
@ -11,10 +11,7 @@ export async function getIndexTransformationProgress(databaseId: string, collect
|
|||
let indexTransformationPercentage: number;
|
||||
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
|
||||
try {
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.read({ populateQuotaInfo: true });
|
||||
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });
|
||||
|
||||
indexTransformationPercentage = parseInt(
|
||||
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string
|
||||
|
|
|
@ -7,8 +7,5 @@ export const queryConflicts = (
|
|||
query: string,
|
||||
options: FeedOptions
|
||||
): QueryIterator<ConflictDefinition & Resource> => {
|
||||
return client()
|
||||
.database(databaseId)
|
||||
.container(containerId)
|
||||
.conflicts.query(query, options);
|
||||
return client().database(databaseId).container(containerId).conflicts.query(query, options);
|
||||
};
|
||||
|
|
|
@ -10,10 +10,7 @@ export const queryDocuments = (
|
|||
options: FeedOptions
|
||||
): QueryIterator<ItemDefinition & Resource> => {
|
||||
options = getCommonQueryOptions(options);
|
||||
return client()
|
||||
.database(databaseId)
|
||||
.container(containerId)
|
||||
.items.query(query, options);
|
||||
return client().database(databaseId).container(containerId).items.query(query, options);
|
||||
};
|
||||
|
||||
export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {
|
||||
|
|
|
@ -10,9 +10,9 @@ describe("readCollection", () => {
|
|||
beforeAll(() => {
|
||||
updateUserContext({
|
||||
databaseAccount: {
|
||||
name: "test"
|
||||
name: "test",
|
||||
} as DatabaseAccount,
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB
|
||||
defaultExperience: DefaultAccountExperienceType.DocumentDB,
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -23,11 +23,11 @@ describe("readCollection", () => {
|
|||
return {
|
||||
container: () => {
|
||||
return {
|
||||
read: (): unknown => ({})
|
||||
read: (): unknown => ({}),
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
},
|
||||
});
|
||||
await readCollection("database", "collection");
|
||||
expect(client).toHaveBeenCalled();
|
||||
|
|
|
@ -7,10 +7,7 @@ export async function readCollection(databaseId: string, collectionId: string):
|
|||
let collection: DataModels.Collection;
|
||||
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
|
||||
try {
|
||||
const response = await client()
|
||||
.database(databaseId)
|
||||
.container(collectionId)
|
||||
.read();
|
||||
const response = await client().database(databaseId).container(collectionId).read();
|
||||
collection = response.resource as DataModels.Collection;
|
||||
} catch (error) {
|
||||
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);
|
||||
|
|
|
@@ -106,7 +106,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

@@ -115,7 +115,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -12,9 +12,9 @@ describe("readCollections", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});

@@ -32,12 +32,12 @@ describe("readCollections", () => {
containers: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
};
}
},
});
await readCollections("database");
expect(client).toHaveBeenCalled();
@@ -23,10 +23,7 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
return await readCollectionsWithARM(databaseId);
}

const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);

@@ -63,5 +60,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}

return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
}
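The single change in the hunk above, `collection => ...` becoming `(collection) => ...`, comes from Prettier 2.0 switching its default `arrowParens` option from "avoid" to "always". A minimal sketch with hypothetical data:

```ts
// Hypothetical data, for illustration only.
const rpResponse = { value: [{ properties: { resource: { id: "c1" } } }] };

// Prettier 1.x default (arrowParens: "avoid"):
//   rpResponse.value.map(collection => collection.properties.resource);
// Prettier 2.x default (arrowParens: "always"):
const resources = rpResponse.value.map((collection) => collection.properties.resource);

console.log(resources);
```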
@@ -78,7 +78,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

@@ -87,7 +87,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -12,9 +12,9 @@ describe("readDatabases", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});

@@ -30,10 +30,10 @@ describe("readDatabases", () => {
databases: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
});
await readDatabases();
expect(client).toHaveBeenCalled();
@@ -21,9 +21,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
) {
databases = await readDatabasesWithARM();
} else {
const sdkResponse = await client()
.databases.readAll()
.fetchAll();
const sdkResponse = await client().databases.readAll().fetchAll();
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {

@@ -58,5 +56,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}

return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
}
@@ -8,7 +8,7 @@ import { readOffers } from "./readOffers";
export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
if (!offerId) {
const offers = await readOffers();
const offer = offers.find(offer => offer.resource === resourceId);
const offer = offers.find((offer) => offer.resource === resourceId);

if (!offer) {
return undefined;

@@ -18,12 +18,10 @@ export const readOfferWithSDK = async (offerId: string, resourceId: string): Pro

const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
[HttpHeaders.populateCollectionThroughputInfo]: true,
},
};
const response = await client()
.offer(offerId)
.read(options);
const response = await client().offer(offerId).read(options);

return parseSDKOfferResponse(response);
};
@@ -7,9 +7,7 @@ export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
const clearMessage = logConsoleProgress(`Querying offers`);

try {
const response = await client()
.offers.readAll()
.fetchAll();
const response = await client().offers.readAll().fetchAll();
return response?.resources;
} catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too.
@@ -25,7 +25,7 @@ export async function readStoredProcedures(
databaseId,
collectionId
);
return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
}

const response = await client()

@@ -25,14 +25,10 @@ export async function readTriggers(
databaseId,
collectionId
);
return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
}

const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.readAll()
.fetchAll();
const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);
@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
databaseId,
collectionId
);
return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
}

const response = await client()

@@ -8,22 +8,22 @@ import {
MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource,
SqlContainerCreateUpdateParameters,
SqlContainerResource
SqlContainerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils";
@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
|
|||
const updateParams: MongoDBCollectionCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: newCollection,
|
||||
options: updateOptions
|
||||
}
|
||||
options: updateOptions,
|
||||
},
|
||||
};
|
||||
|
||||
const updateResponse = await createUpdateMongoDBCollection(
|
||||
|
|
|
@ -17,7 +17,7 @@ import {
|
|||
migrateSqlDatabaseToManualThroughput,
|
||||
migrateSqlContainerToAutoscale,
|
||||
migrateSqlContainerToManualThroughput,
|
||||
updateSqlContainerThroughput
|
||||
updateSqlContainerThroughput,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import {
|
||||
updateCassandraKeyspaceThroughput,
|
||||
|
@ -25,7 +25,7 @@ import {
|
|||
migrateCassandraKeyspaceToManualThroughput,
|
||||
migrateCassandraTableToAutoscale,
|
||||
migrateCassandraTableToManualThroughput,
|
||||
updateCassandraTableThroughput
|
||||
updateCassandraTableThroughput,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
|
||||
import {
|
||||
updateMongoDBDatabaseThroughput,
|
||||
|
@ -33,7 +33,7 @@ import {
|
|||
migrateMongoDBDatabaseToManualThroughput,
|
||||
migrateMongoDBCollectionToAutoscale,
|
||||
migrateMongoDBCollectionToManualThroughput,
|
||||
updateMongoDBCollectionThroughput
|
||||
updateMongoDBCollectionThroughput,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
|
||||
import {
|
||||
updateGremlinDatabaseThroughput,
|
||||
|
@ -41,13 +41,13 @@ import {
|
|||
migrateGremlinDatabaseToManualThroughput,
|
||||
migrateGremlinGraphToAutoscale,
|
||||
migrateGremlinGraphToManualThroughput,
|
||||
updateGremlinGraphThroughput
|
||||
updateGremlinGraphThroughput,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
|
||||
import { userContext } from "../../UserContext";
|
||||
import {
|
||||
migrateTableToAutoscale,
|
||||
migrateTableToManualThroughput,
|
||||
updateTableThroughput
|
||||
updateTableThroughput,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
|
||||
|
||||
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
|
||||
|
@ -110,7 +110,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
|
|||
return await readCollectionOffer({
|
||||
collectionId: params.collectionId,
|
||||
databaseId: params.databaseId,
|
||||
offerId: params.currentOffer.id
|
||||
offerId: params.currentOffer.id,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -140,7 +140,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
|
|||
|
||||
return await readDatabaseOffer({
|
||||
databaseId: params.databaseId,
|
||||
offerId: params.currentOffer.id
|
||||
offerId: params.currentOffer.id,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -358,13 +358,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
|
|||
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
|
||||
const body: ThroughputSettingsUpdateParameters = {
|
||||
properties: {
|
||||
resource: {}
|
||||
}
|
||||
resource: {},
|
||||
},
|
||||
};
|
||||
|
||||
if (params.autopilotThroughput) {
|
||||
body.properties.resource.autoscaleSettings = {
|
||||
maxThroughput: params.autopilotThroughput
|
||||
maxThroughput: params.autopilotThroughput,
|
||||
};
|
||||
} else {
|
||||
body.properties.resource.throughput = params.manualThroughput;
|
||||
|
@ -378,7 +378,7 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
|
|||
const newOffer: SDKOfferDefinition = {
|
||||
content: {
|
||||
offerThroughput: undefined,
|
||||
offerIsRUPerMinuteThroughputEnabled: false
|
||||
offerIsRUPerMinuteThroughputEnabled: false,
|
||||
},
|
||||
_etag: undefined,
|
||||
_ts: undefined,
|
||||
|
@ -388,12 +388,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
|
|||
offerResourceId: sdkOfferDefinition.offerResourceId,
|
||||
offerVersion: sdkOfferDefinition.offerVersion,
|
||||
offerType: sdkOfferDefinition.offerType,
|
||||
resource: sdkOfferDefinition.resource
|
||||
resource: sdkOfferDefinition.resource,
|
||||
};
|
||||
|
||||
if (params.autopilotThroughput) {
|
||||
newOffer.content.offerAutopilotSettings = {
|
||||
maxThroughput: params.autopilotThroughput
|
||||
maxThroughput: params.autopilotThroughput,
|
||||
};
|
||||
} else {
|
||||
newOffer.content.offerThroughput = params.manualThroughput;
|
||||
|
@ -402,12 +402,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
|
|||
const options: RequestOptions = {};
|
||||
if (params.migrateToAutoPilot) {
|
||||
options.initialHeaders = {
|
||||
[HttpHeaders.migrateOfferToAutopilot]: "true"
|
||||
[HttpHeaders.migrateOfferToAutopilot]: "true",
|
||||
};
|
||||
delete newOffer.content.offerAutopilotSettings;
|
||||
} else if (params.migrateToManual) {
|
||||
options.initialHeaders = {
|
||||
[HttpHeaders.migrateOfferToManualThroughput]: "true"
|
||||
[HttpHeaders.migrateOfferToManualThroughput]: "true",
|
||||
};
|
||||
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
|
||||
}
|
||||
|
|
|
@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
|
|||
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlStoredProcedureCreateUpdateParameters,
|
||||
SqlStoredProcedureResource
|
||||
SqlStoredProcedureResource,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import {
|
||||
createUpdateSqlStoredProcedure,
|
||||
getSqlStoredProcedure
|
||||
getSqlStoredProcedure,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
|
@ -39,8 +39,8 @@ export async function updateStoredProcedure(
|
|||
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: storedProcedure as SqlStoredProcedureResource,
|
||||
options: {}
|
||||
}
|
||||
options: {},
|
||||
},
|
||||
};
|
||||
const rpResponse = await createUpdateSqlStoredProcedure(
|
||||
userContext.subscriptionId,
|
||||
|
|
|
@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
|
|||
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
|
||||
import {
|
||||
SqlTriggerCreateUpdateParameters,
|
||||
SqlTriggerResource
|
||||
SqlTriggerResource,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { TriggerDefinition } from "@azure/cosmos";
|
||||
import { client } from "../CosmosClient";
|
||||
|
@ -36,8 +36,8 @@ export async function updateTrigger(
|
|||
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: trigger as SqlTriggerResource,
|
||||
options: {}
|
||||
}
|
||||
options: {},
|
||||
},
|
||||
};
|
||||
const rpResponse = await createUpdateSqlTrigger(
|
||||
userContext.subscriptionId,
|
||||
|
|
|
@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
|
|||
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
|
||||
import {
|
||||
SqlUserDefinedFunctionCreateUpdateParameters,
|
||||
SqlUserDefinedFunctionResource
|
||||
SqlUserDefinedFunctionResource,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/types";
|
||||
import { client } from "../CosmosClient";
|
||||
import {
|
||||
createUpdateSqlUserDefinedFunction,
|
||||
getSqlUserDefinedFunction
|
||||
getSqlUserDefinedFunction,
|
||||
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
|
||||
import { handleError } from "../ErrorHandlingUtils";
|
||||
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
||||
|
@ -39,8 +39,8 @@ export async function updateUserDefinedFunction(
|
|||
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
|
||||
properties: {
|
||||
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
|
||||
options: {}
|
||||
}
|
||||
options: {},
|
||||
},
|
||||
};
|
||||
const rpResponse = await createUpdateSqlUserDefinedFunction(
|
||||
userContext.subscriptionId,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
export enum Platform {
|
||||
Portal = "Portal",
|
||||
Hosted = "Hosted",
|
||||
Emulator = "Emulator"
|
||||
Emulator = "Emulator",
|
||||
}
|
||||
|
||||
interface ConfigContext {
|
||||
|
@ -37,7 +37,7 @@ let configContext: Readonly<ConfigContext> = {
|
|||
`^https:\\/\\/[\\.\\w]*portal\\.microsoftazure.de$`,
|
||||
`^https:\\/\\/[\\.\\w]*ext\\.azure\\.(com|cn|us)$`,
|
||||
`^https:\\/\\/[\\.\\w]*\\.ext\\.microsoftazure\\.de$`,
|
||||
`^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`
|
||||
`^https://cosmos-db-dataexplorer-germanycentral.azurewebsites.de$`,
|
||||
],
|
||||
// Webpack injects this at build time
|
||||
gitSha: process.env.GIT_SHA,
|
||||
|
@ -52,7 +52,7 @@ let configContext: Readonly<ConfigContext> = {
|
|||
ARCADIA_LIVY_ENDPOINT_DNS_ZONE: "dev.azuresynapse.net",
|
||||
GITHUB_CLIENT_ID: "6cb2f63cf6f7b5cbdeca", // Registered OAuth app: https://github.com/settings/applications/1189306
|
||||
JUNO_ENDPOINT: "https://tools.cosmos.azure.com",
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
|
||||
BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
|
||||
};
|
||||
|
||||
export function resetConfigContext(): void {
|
||||
|
@ -73,7 +73,7 @@ if (process.env.NODE_ENV === "development") {
|
|||
BACKEND_ENDPOINT: "https://localhost:" + port,
|
||||
MONGO_BACKEND_ENDPOINT: "https://localhost:" + port,
|
||||
PROXY_PATH: "/proxy",
|
||||
EMULATOR_ENDPOINT: "https://localhost:8081"
|
||||
EMULATOR_ENDPOINT: "https://localhost:8081",
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -86,7 +86,7 @@ export async function initializeConfiguration(): Promise<ConfigContext> {
|
|||
Object.assign(configContext, externalConfig);
|
||||
if (allowedParentFrameOrigins && allowedParentFrameOrigins.length > 0) {
|
||||
updateConfigContext({
|
||||
allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins]
|
||||
allowedParentFrameOrigins: [...configContext.allowedParentFrameOrigins, ...allowedParentFrameOrigins],
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
|
|
|
@ -7,7 +7,7 @@ export enum TabKind {
|
|||
TableEntities,
|
||||
Graph,
|
||||
SQLQuery,
|
||||
ScaleSettings
|
||||
ScaleSettings,
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -20,7 +20,7 @@ export enum PaneKind {
|
|||
DeleteDatabase,
|
||||
GlobalSettings,
|
||||
AdHocAccess,
|
||||
SwitchDirectory
|
||||
SwitchDirectory,
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -79,5 +79,5 @@ export enum ActionType {
|
|||
OpenCollectionTab,
|
||||
OpenPane,
|
||||
TransmitCachedData,
|
||||
OpenSampleNotebook
|
||||
OpenSampleNotebook,
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ export enum ApiKind {
|
|||
Table,
|
||||
Cassandra,
|
||||
Graph,
|
||||
MongoDBCompute
|
||||
MongoDBCompute,
|
||||
}
|
||||
|
||||
export interface GenerateTokenResponse {
|
||||
|
@ -334,7 +334,7 @@ export interface Notification {
|
|||
|
||||
export enum ConflictResolutionMode {
|
||||
Custom = "Custom",
|
||||
LastWriterWins = "LastWriterWins"
|
||||
LastWriterWins = "LastWriterWins",
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -472,7 +472,7 @@ export interface SparkClusterEndpoint {
|
|||
export enum SparkClusterEndpointKind {
|
||||
SparkUI = "SparkUI",
|
||||
HistoryServerUI = "HistoryServerUI",
|
||||
Livy = "Livy"
|
||||
Livy = "Livy",
|
||||
}
|
||||
|
||||
export interface RpParameters {
|
||||
|
|
|
@ -21,7 +21,7 @@ export enum LogEntryLevel {
|
|||
/**
|
||||
* Error level.
|
||||
*/
|
||||
Error = 2
|
||||
Error = 2,
|
||||
}
|
||||
/**
|
||||
* Schema of a log entry.
|
||||
|
|
|
@ -33,7 +33,7 @@ export enum MessageTypes {
|
|||
CreateWorkspace,
|
||||
CreateSparkPool,
|
||||
RefreshDatabaseAccount,
|
||||
InitTestExplorer
|
||||
InitTestExplorer,
|
||||
}
|
||||
|
||||
export { Versions, ActionContracts, Diagnostics };
|
||||
|
|
|
@ -3,5 +3,5 @@ export enum SubscriptionType {
|
|||
EA,
|
||||
Free,
|
||||
Internal,
|
||||
PAYG
|
||||
PAYG,
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ import {
|
|||
Resource,
|
||||
StoredProcedureDefinition,
|
||||
TriggerDefinition,
|
||||
UserDefinedFunctionDefinition
|
||||
UserDefinedFunctionDefinition,
|
||||
} from "@azure/cosmos";
|
||||
import Q from "q";
|
||||
import { CommandButtonComponentProps } from "../Explorer/Controls/CommandButton/CommandButtonComponent";
|
||||
|
@ -196,7 +196,7 @@ export interface PaneOptions {
|
|||
export enum NeighborType {
|
||||
SOURCES_ONLY,
|
||||
TARGETS_ONLY,
|
||||
BOTH
|
||||
BOTH,
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -326,14 +326,14 @@ export enum DocumentExplorerState {
|
|||
newDocumentInvalid,
|
||||
exisitingDocumentNoEdits,
|
||||
exisitingDocumentDirtyValid,
|
||||
exisitingDocumentDirtyInvalid
|
||||
exisitingDocumentDirtyInvalid,
|
||||
}
|
||||
|
||||
export enum IndexingPolicyEditorState {
|
||||
noCollectionSelected,
|
||||
noEdits,
|
||||
dirtyValid,
|
||||
dirtyInvalid
|
||||
dirtyInvalid,
|
||||
}
|
||||
|
||||
export enum ScriptEditorState {
|
||||
|
@ -341,7 +341,7 @@ export enum ScriptEditorState {
|
|||
newValid,
|
||||
exisitingNoEdits,
|
||||
exisitingDirtyValid,
|
||||
exisitingDirtyInvalid
|
||||
exisitingDirtyInvalid,
|
||||
}
|
||||
|
||||
export enum CollectionTabKind {
|
||||
|
@ -363,13 +363,13 @@ export enum CollectionTabKind {
|
|||
Gallery = 17,
|
||||
NotebookViewer = 18,
|
||||
Schema = 19,
|
||||
SettingsV2 = 20
|
||||
SettingsV2 = 20,
|
||||
}
|
||||
|
||||
export enum TerminalKind {
|
||||
Default = 0,
|
||||
Mongo = 1,
|
||||
Cassandra = 2
|
||||
Cassandra = 2,
|
||||
}
|
||||
|
||||
export interface DataExplorerInputsFrame {
|
||||
|
|
|
@ -6,19 +6,19 @@ describe("The Heatmap Control", () => {
|
|||
const dataPoints = {
|
||||
"1": {
|
||||
"2019-06-19T00:59:10Z": {
|
||||
"Normalized Throughput": 0.35
|
||||
"Normalized Throughput": 0.35,
|
||||
},
|
||||
"2019-06-19T00:48:10Z": {
|
||||
"Normalized Throughput": 0.25
|
||||
}
|
||||
}
|
||||
"Normalized Throughput": 0.25,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const chartCaptions = {
|
||||
chartTitle: "chart title",
|
||||
yAxisTitle: "YAxisTitle",
|
||||
tooltipText: "Tooltip text",
|
||||
timeWindow: 123456789
|
||||
timeWindow: 123456789,
|
||||
};
|
||||
|
||||
let heatmap: Heatmap;
|
||||
|
@ -75,12 +75,12 @@ describe("The Heatmap Control", () => {
|
|||
if (dayjs().utcOffset()) {
|
||||
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).not.toEqual([
|
||||
"2019-06-19T00:48:10Z",
|
||||
"2019-06-19T00:59:10Z"
|
||||
"2019-06-19T00:59:10Z",
|
||||
]);
|
||||
} else {
|
||||
expect(heatmap.generateMatrixFromMap(dataPoints).xAxisPoints).toEqual([
|
||||
"2019-06-19T00:48:10Z",
|
||||
"2019-06-19T00:59:10Z"
|
||||
"2019-06-19T00:59:10Z",
|
||||
]);
|
||||
}
|
||||
});
|
||||
|
@ -106,9 +106,9 @@ describe("iframe rendering when there is no data", () => {
|
|||
data: {
|
||||
chartData: {},
|
||||
chartSettings: {},
|
||||
theme: 4
|
||||
}
|
||||
}
|
||||
theme: 4,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
|
||||
|
@ -126,9 +126,9 @@ describe("iframe rendering when there is no data", () => {
|
|||
data: {
|
||||
chartData: {},
|
||||
chartSettings: {},
|
||||
theme: 2
|
||||
}
|
||||
}
|
||||
theme: 2,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const divElement: string = `<div id="${Heatmap.elementId}"></div>`;
|
||||
|
|
|
@ -9,7 +9,7 @@ import {
|
|||
HeatmapData,
|
||||
LayoutSettings,
|
||||
PartitionTimeStampToData,
|
||||
PortalTheme
|
||||
PortalTheme,
|
||||
} from "./HeatmapDatatypes";
|
||||
import { isInvalidParentFrameOrigin } from "../../Utils/MessageValidation";
|
||||
import { sendCachedDataMessage, sendMessage } from "../../Common/MessageHandler";
|
||||
|
@ -43,7 +43,7 @@ export class Heatmap {
|
|||
return {
|
||||
family: StyleConstants.DataExplorerFont,
|
||||
size,
|
||||
color
|
||||
color,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -73,7 +73,7 @@ export class Heatmap {
|
|||
return 0;
|
||||
}
|
||||
}
|
||||
})
|
||||
}),
|
||||
};
|
||||
// go thru all rows and create 2d matrix for heatmap...
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
|
@ -115,7 +115,7 @@ export class Heatmap {
|
|||
[0.7, "#E46612"],
|
||||
[0.8, "#E64914"],
|
||||
[0.9, "#B80016"],
|
||||
[1.0, "#B80016"]
|
||||
[1.0, "#B80016"],
|
||||
],
|
||||
name: "",
|
||||
hovertemplate: this._heatmapCaptions.tooltipText,
|
||||
|
@ -123,11 +123,11 @@ export class Heatmap {
|
|||
thickness: 15,
|
||||
outlinewidth: 0,
|
||||
tickcolor: StyleConstants.BaseDark,
|
||||
tickfont: this._getFontStyles(10, this._defaultFontColor)
|
||||
tickfont: this._getFontStyles(10, this._defaultFontColor),
|
||||
},
|
||||
y: this._chartData.yAxisPoints,
|
||||
x: this._chartData.xAxisPoints
|
||||
}
|
||||
x: this._chartData.xAxisPoints,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
|
@ -138,7 +138,7 @@ export class Heatmap {
|
|||
r: 10,
|
||||
b: 35,
|
||||
t: 30,
|
||||
pad: 0
|
||||
pad: 0,
|
||||
},
|
||||
paper_bgcolor: "transparent",
|
||||
plot_bgcolor: "transparent",
|
||||
|
@ -154,7 +154,7 @@ export class Heatmap {
|
|||
autotick: true,
|
||||
fixedrange: true,
|
||||
ticks: "",
|
||||
showticklabels: false
|
||||
showticklabels: false,
|
||||
},
|
||||
xaxis: {
|
||||
fixedrange: true,
|
||||
|
@ -167,13 +167,13 @@ export class Heatmap {
|
|||
autotick: true,
|
||||
tickformat: this._heatmapCaptions.timeWindow > 7 ? "%I:%M %p" : "%b %e",
|
||||
showticklabels: true,
|
||||
tickfont: this._getFontStyles(10)
|
||||
tickfont: this._getFontStyles(10),
|
||||
},
|
||||
title: {
|
||||
text: this._heatmapCaptions.chartTitle,
|
||||
x: 0.01,
|
||||
font: this._getFontStyles(13, this._defaultFontColor)
|
||||
}
|
||||
font: this._getFontStyles(13, this._defaultFontColor),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -181,7 +181,7 @@ export class Heatmap {
|
|||
return {
|
||||
/* heatmap can be fully responsive however the min-height needed in that case is greater than the iframe portal height, hence explicit width + height have been set in _getLayoutSettings
|
||||
responsive: true,*/
|
||||
displayModeBar: false
|
||||
displayModeBar: false,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ export enum PortalTheme {
|
|||
blue = 1,
|
||||
azure,
|
||||
light,
|
||||
dark
|
||||
dark,
|
||||
}
|
||||
|
||||
export interface HeatmapData {
|
||||
|
|
|
@ -4,5 +4,5 @@ export enum DefaultAccountExperienceType {
|
|||
MongoDB = "MongoDB",
|
||||
Table = "Table",
|
||||
Cassandra = "Cassandra",
|
||||
ApiForMongoDB = "Azure Cosmos DB for MongoDB API"
|
||||
ApiForMongoDB = "Azure Cosmos DB for MongoDB API",
|
||||
}
|
||||
|
|
|
@ -36,8 +36,8 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
{
|
||||
iconSrc: AddCollectionIcon,
|
||||
onClick: () => container.onNewCollectionClicked(),
|
||||
label: container.addCollectionText()
|
||||
}
|
||||
label: container.addCollectionText(),
|
||||
},
|
||||
];
|
||||
|
||||
if (userContext.defaultExperience !== DefaultAccountExperienceType.Table) {
|
||||
|
@ -45,7 +45,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
iconSrc: DeleteDatabaseIcon,
|
||||
onClick: () => container.deleteDatabaseConfirmationPane.open(),
|
||||
label: container.deleteDatabaseText(),
|
||||
styleClass: "deleteDatabaseMenuItem"
|
||||
styleClass: "deleteDatabaseMenuItem",
|
||||
});
|
||||
}
|
||||
return items;
|
||||
|
@ -60,7 +60,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
items.push({
|
||||
iconSrc: AddSqlQueryIcon,
|
||||
onClick: () => selectedCollection && selectedCollection.onNewQueryClick(selectedCollection, null),
|
||||
label: "New SQL Query"
|
||||
label: "New SQL Query",
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -68,7 +68,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
items.push({
|
||||
iconSrc: AddSqlQueryIcon,
|
||||
onClick: () => selectedCollection && selectedCollection.onNewMongoQueryClick(selectedCollection, null),
|
||||
label: "New Query"
|
||||
label: "New Query",
|
||||
});
|
||||
|
||||
items.push({
|
||||
|
@ -77,7 +77,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
|
||||
selectedCollection && selectedCollection.onNewMongoShellClick();
|
||||
},
|
||||
label: "New Shell"
|
||||
label: "New Shell",
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -88,7 +88,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
|
||||
selectedCollection && selectedCollection.onNewStoredProcedureClick(selectedCollection, null);
|
||||
},
|
||||
label: "New Stored Procedure"
|
||||
label: "New Stored Procedure",
|
||||
});
|
||||
|
||||
items.push({
|
||||
|
@ -97,7 +97,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
|
||||
selectedCollection && selectedCollection.onNewUserDefinedFunctionClick(selectedCollection, null);
|
||||
},
|
||||
label: "New UDF"
|
||||
label: "New UDF",
|
||||
});
|
||||
|
||||
items.push({
|
||||
|
@ -106,7 +106,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
const selectedCollection: ViewModels.Collection = container.findSelectedCollection();
|
||||
selectedCollection && selectedCollection.onNewTriggerClick(selectedCollection, null);
|
||||
},
|
||||
label: "New Trigger"
|
||||
label: "New Trigger",
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -117,7 +117,7 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
selectedCollection && selectedCollection.onDeleteCollectionContextMenuClick(selectedCollection, null);
|
||||
},
|
||||
label: container.deleteCollectionText(),
|
||||
styleClass: "deleteCollectionMenuItem"
|
||||
styleClass: "deleteCollectionMenuItem",
|
||||
});
|
||||
|
||||
return items;
|
||||
|
@ -135,8 +135,8 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
{
|
||||
iconSrc: DeleteSprocIcon,
|
||||
onClick: () => storedProcedure.delete(),
|
||||
label: "Delete Store Procedure"
|
||||
}
|
||||
label: "Delete Store Procedure",
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
|
@ -149,8 +149,8 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
{
|
||||
iconSrc: DeleteTriggerIcon,
|
||||
onClick: () => trigger.delete(),
|
||||
label: "Delete Trigger"
|
||||
}
|
||||
label: "Delete Trigger",
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
|
@ -166,8 +166,8 @@ export class ResourceTreeContextMenuButtonFactory {
|
|||
{
|
||||
iconSrc: DeleteUDFIcon,
|
||||
onClick: () => userDefinedFunction.delete(),
|
||||
label: "Delete User Defined Function"
|
||||
}
|
||||
label: "Delete User Defined Function",
|
||||
},
|
||||
];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ export class AccessibleElement extends React.Component<AccessibleElementProps> {
|
|||
...elementProps,
|
||||
onKeyPress: this.onKeyPress,
|
||||
onClick: this.props.onActivated,
|
||||
tabIndex
|
||||
tabIndex,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,7 +38,7 @@ export class AccordionItemComponent extends React.Component<AccordionItemCompone
|
|||
super(props);
|
||||
this.isExpanded = props.isExpanded;
|
||||
this.state = {
|
||||
isExpanded: true
|
||||
isExpanded: true,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -46,7 +46,7 @@ export class AccordionItemComponent extends React.Component<AccordionItemCompone
|
|||
if (this.props.isExpanded !== this.isExpanded) {
|
||||
this.isExpanded = this.props.isExpanded;
|
||||
this.setState({
|
||||
isExpanded: this.props.isExpanded
|
||||
isExpanded: this.props.isExpanded,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
|
|||
constructor(props: ArcadiaMenuPickerProps) {
|
||||
super(props);
|
||||
this.state = {
|
||||
selectedSparkPool: props.selectedSparkPool
|
||||
selectedSparkPool: props.selectedSparkPool,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -41,7 +41,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
|
|||
try {
|
||||
this.props.onSparkPoolSelect(e, item);
|
||||
this.setState({
|
||||
selectedSparkPool: item.text
|
||||
selectedSparkPool: item.text,
|
||||
});
|
||||
} catch (error) {
|
||||
Logger.logError(getErrorMessage(error), "ArcadiaMenuPicker/_onSparkPoolClicked");
|
||||
|
@ -65,28 +65,28 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
|
|||
|
||||
public render() {
|
||||
const { workspaces } = this.props;
|
||||
let workspaceMenuItems: IContextualMenuItem[] = workspaces.map(workspace => {
|
||||
let workspaceMenuItems: IContextualMenuItem[] = workspaces.map((workspace) => {
|
||||
let sparkPoolsMenuProps: IContextualMenuProps = {
|
||||
items: workspace.sparkPools.map(
|
||||
(sparkpool): IContextualMenuItem => ({
|
||||
key: sparkpool.id,
|
||||
text: sparkpool.name,
|
||||
onClick: this._onSparkPoolClicked
|
||||
onClick: this._onSparkPoolClicked,
|
||||
})
|
||||
)
|
||||
),
|
||||
};
|
||||
if (!sparkPoolsMenuProps.items.length) {
|
||||
sparkPoolsMenuProps.items.push({
|
||||
key: workspace.id,
|
||||
text: "Create new spark pool",
|
||||
onClick: this._onCreateNewSparkPoolClicked
|
||||
onClick: this._onCreateNewSparkPoolClicked,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
key: workspace.id,
|
||||
text: workspace.name,
|
||||
subMenuProps: this.props.disableSubmenu ? undefined : sparkPoolsMenuProps
|
||||
subMenuProps: this.props.disableSubmenu ? undefined : sparkPoolsMenuProps,
|
||||
};
|
||||
});
|
||||
|
||||
|
@ -94,7 +94,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
|
|||
workspaceMenuItems.push({
|
||||
key: "create_workspace",
|
||||
text: "Create new workspace",
|
||||
onClick: this._onCreateNewWorkspaceClicked
|
||||
onClick: this._onCreateNewWorkspaceClicked,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -103,29 +103,29 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
|
|||
backgroundColor: "transparent",
|
||||
margin: "auto 5px",
|
||||
padding: "0",
|
||||
border: "0"
|
||||
border: "0",
|
||||
},
|
||||
rootHovered: {
|
||||
backgroundColor: "transparent"
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
rootChecked: {
|
||||
backgroundColor: "transparent"
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
rootFocused: {
|
||||
backgroundColor: "transparent"
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
rootExpanded: {
|
||||
backgroundColor: "transparent"
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
flexContainer: {
|
||||
height: "30px",
|
||||
border: "1px solid #a6a6a6",
|
||||
padding: "0 8px"
|
||||
padding: "0 8px",
|
||||
},
|
||||
label: {
|
||||
fontWeight: "400",
|
||||
fontSize: "12px"
|
||||
}
|
||||
fontSize: "12px",
|
||||
},
|
||||
};
|
||||
|
||||
return (
|
||||
|
@ -134,7 +134,7 @@ export class ArcadiaMenuPicker extends React.Component<ArcadiaMenuPickerProps, A
|
|||
persistMenu={true}
|
||||
className="arcadia-menu-picker"
|
||||
menuProps={{
|
||||
items: workspaceMenuItems
|
||||
items: workspaceMenuItems,
|
||||
}}
|
||||
styles={dropdownStyle}
|
||||
/>
|
||||
|
|
|
@ -8,7 +8,7 @@ export class CollapsiblePanelComponent {
|
|||
constructor() {
|
||||
return {
|
||||
viewModel: CollapsiblePanelViewModel,
|
||||
template
|
||||
template,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ import { CollapsibleSectionComponent, CollapsibleSectionProps } from "./Collapsi
|
|||
describe("CollapsibleSectionComponent", () => {
|
||||
it("renders", () => {
|
||||
const props: CollapsibleSectionProps = {
|
||||
title: "Sample title"
|
||||
title: "Sample title",
|
||||
};
|
||||
|
||||
const wrapper = shallow(<CollapsibleSectionComponent {...props} />);
|
||||
|
|
|
@ -14,7 +14,7 @@ export class CollapsibleSectionComponent extends React.Component<CollapsibleSect
|
|||
constructor(props: CollapsibleSectionProps) {
|
||||
super(props);
|
||||
this.state = {
|
||||
isExpanded: true
|
||||
isExpanded: true,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -149,9 +149,7 @@ export class CommandButtonComponent extends React.Component<CommandButtonCompone
|
|||
private onLauncherKeyDown(event: React.KeyboardEvent<HTMLDivElement>): boolean {
|
||||
if (event.keyCode === KeyCodes.DownArrow) {
|
||||
$(this.dropdownElt).hide();
|
||||
$(this.dropdownElt)
|
||||
.show()
|
||||
.focus();
|
||||
$(this.dropdownElt).show().focus();
|
||||
event.stopPropagation();
|
||||
return false;
|
||||
}
|
||||
|
@ -187,7 +185,7 @@ export class CommandButtonComponent extends React.Component<CommandButtonCompone
|
|||
}
|
||||
this.props.onCommandClick(e);
|
||||
TelemetryProcessor.trace(Action.SelectItem, ActionModifiers.Mark, {
|
||||
commandButtonClicked: this.props.commandButtonLabel
|
||||
commandButtonClicked: this.props.commandButtonLabel,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -57,14 +57,14 @@ export class DialogComponent extends React.Component<DialogProps, {}> {
|
|||
subText: this.props.subText,
|
||||
styles: {
|
||||
title: { fontSize: DIALOG_TITLE_FONT_SIZE, fontWeight: DIALOG_TITLE_FONT_WEIGHT },
|
||||
subText: { fontSize: DIALOG_SUBTEXT_FONT_SIZE }
|
||||
subText: { fontSize: DIALOG_SUBTEXT_FONT_SIZE },
|
||||
},
|
||||
showCloseButton: this.props.showCloseButton || false,
|
||||
onDismiss: this.props.onDismiss
|
||||
onDismiss: this.props.onDismiss,
|
||||
},
|
||||
modalProps: { isBlocking: this.props.isModal },
|
||||
minWidth: DIALOG_MIN_WIDTH,
|
||||
maxWidth: DIALOG_MAX_WIDTH
|
||||
maxWidth: DIALOG_MAX_WIDTH,
|
||||
};
|
||||
const choiceGroupProps: IChoiceGroupProps = this.props.choiceGroupProps;
|
||||
const textFieldProps: ITextFieldProps = this.props.textFieldProps;
|
||||
|
@ -72,13 +72,13 @@ export class DialogComponent extends React.Component<DialogProps, {}> {
|
|||
const primaryButtonProps: IButtonProps = {
|
||||
text: this.props.primaryButtonText,
|
||||
disabled: this.props.primaryButtonDisabled || false,
|
||||
onClick: this.props.onPrimaryButtonClick
|
||||
onClick: this.props.onPrimaryButtonClick,
|
||||
};
|
||||
const secondaryButtonProps: IButtonProps =
|
||||
this.props.secondaryButtonText && this.props.onSecondaryButtonClick
|
||||
? {
|
||||
text: this.props.secondaryButtonText,
|
||||
onClick: this.props.onSecondaryButtonClick
|
||||
onClick: this.props.onSecondaryButtonClick,
|
||||
}
|
||||
: undefined;
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ export class DiffEditorComponent {
|
|||
constructor() {
|
||||
return {
|
||||
viewModel: DiffEditorViewModel,
|
||||
template
|
||||
template,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@ -103,7 +103,7 @@ export class DiffEditorViewModel {
|
|||
lineNumbers: this.params.lineNumbers || "off",
|
||||
fontSize: 12,
|
||||
ariaLabel: this.params.ariaLabel,
|
||||
theme: this.params.theme
|
||||
theme: this.params.theme,
|
||||
};
|
||||
|
||||
if (this.params.renderSideBySide !== undefined) {
|
||||
|
@ -120,7 +120,7 @@ export class DiffEditorViewModel {
|
|||
);
|
||||
diffEditor.setModel({
|
||||
original: originalModel,
|
||||
modified: modifiedModel
|
||||
modified: modifiedModel,
|
||||
});
|
||||
|
||||
createCallback(diffEditor);
|
||||
|
@ -147,7 +147,7 @@ export class DiffEditorViewModel {
|
|||
this.observer.observe(document.body, {
|
||||
attributes: true,
|
||||
subtree: true,
|
||||
childList: true
|
||||
childList: true,
|
||||
});
|
||||
this.editor.focus();
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ const createBlankProps = (): DefaultDirectoryDropdownProps => {
|
|||
return {
|
||||
defaultDirectoryId: "",
|
||||
directories: [],
|
||||
onDefaultDirectoryChange: jest.fn()
|
||||
onDefaultDirectoryChange: jest.fn(),
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -17,7 +17,7 @@ const createBlankDirectory = (): Tenant => {
|
|||
displayName: "",
|
||||
domains: [],
|
||||
id: "",
|
||||
tenantId: ""
|
||||
tenantId: "",
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -90,27 +90,15 @@ describe("test function", () => {
|
|||
|
||||
const wrapper = mount(<DefaultDirectoryDropdownComponent {...props} />);
|
||||
|
||||
wrapper
|
||||
.find("div.defaultDirectoryDropdown")
|
||||
.find("div.ms-Dropdown")
|
||||
.simulate("click");
|
||||
wrapper.find("div.defaultDirectoryDropdown").find("div.ms-Dropdown").simulate("click");
|
||||
expect(wrapper.exists("div.ms-Callout-main")).toBe(true);
|
||||
wrapper
|
||||
.find("button.ms-Dropdown-item")
|
||||
.at(1)
|
||||
.simulate("click");
|
||||
wrapper.find("button.ms-Dropdown-item").at(1).simulate("click");
|
||||
expect(props.onDefaultDirectoryChange).toBeCalled();
|
||||
expect(props.onDefaultDirectoryChange).toHaveBeenCalled();
|
||||
|
||||
wrapper
|
||||
.find("div.defaultDirectoryDropdown")
|
||||
.find("div.ms-Dropdown")
|
||||
.simulate("click");
|
||||
wrapper.find("div.defaultDirectoryDropdown").find("div.ms-Dropdown").simulate("click");
|
||||
expect(wrapper.exists("div.ms-Callout-main")).toBe(true);
|
||||
wrapper
|
||||
.find("button.ms-Dropdown-item")
|
||||
.at(0)
|
||||
.simulate("click");
|
||||
wrapper.find("button.ms-Dropdown-item").at(0).simulate("click");
|
||||
expect(props.onDefaultDirectoryChange).toBeCalled();
|
||||
expect(props.onDefaultDirectoryChange).toHaveBeenCalled();
|
||||
});
|
||||
|
|
|
@ -19,13 +19,13 @@ export class DefaultDirectoryDropdownComponent extends React.Component<DefaultDi
|
|||
public render(): JSX.Element {
|
||||
const lastVisitedOption: IDropdownOption = {
|
||||
key: DefaultDirectoryDropdownComponent.lastVisitedKey,
|
||||
text: "Sign in to your last visited directory"
|
||||
text: "Sign in to your last visited directory",
|
||||
};
|
||||
const directoryOptions: Array<IDropdownOption> = this.props.directories.map(
|
||||
(dirc): IDropdownOption => {
|
||||
return {
|
||||
key: dirc.tenantId,
|
||||
text: `${dirc.displayName}(${dirc.tenantId})`
|
||||
text: `${dirc.displayName}(${dirc.tenantId})`,
|
||||
};
|
||||
}
|
||||
);
|
||||
|
@ -35,7 +35,7 @@ export class DefaultDirectoryDropdownComponent extends React.Component<DefaultDi
|
|||
options: dropDownOptions,
|
||||
defaultSelectedKey: this.props.defaultDirectoryId ? this.props.defaultDirectoryId : lastVisitedOption.key,
|
||||
onChange: this._onDropdownChange,
|
||||
className: "defaultDirectoryDropdown"
|
||||
className: "defaultDirectoryDropdown",
|
||||
};
|
||||
|
||||
return <Dropdown {...dropDownProps} />;
|
||||
|
@ -56,12 +56,12 @@ export class DefaultDirectoryDropdownComponent extends React.Component<DefaultDi
|
|||
countryCode: undefined,
|
||||
displayName: undefined,
|
||||
domains: [],
|
||||
id: undefined
|
||||
id: undefined,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const selectedDirectory = _.find(this.props.directories, d => d.tenantId === option.key);
|
||||
const selectedDirectory = _.find(this.props.directories, (d) => d.tenantId === option.key);
|
||||
if (!selectedDirectory) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ const createBlankProps = (): DirectoryListProps => {
|
|||
return {
|
||||
selectedDirectoryId: undefined,
|
||||
directories: [],
|
||||
onNewDirectorySelected: jest.fn()
|
||||
onNewDirectorySelected: jest.fn(),
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -17,7 +17,7 @@ const createBlankDirectory = (): Tenant => {
|
|||
displayName: undefined,
|
||||
domains: [],
|
||||
id: undefined,
|
||||
tenantId: undefined
|
||||
tenantId: undefined,
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -28,7 +28,7 @@ export class DirectoryListComponent extends React.Component<DirectoryListProps,
|
|||
super(props);
|
||||
|
||||
this.state = {
|
||||
filterText: ""
|
||||
filterText: "",
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -38,12 +38,12 @@ export class DirectoryListComponent extends React.Component<DirectoryListProps,
|
|||
const filteredItems =
|
||||
originalItems && originalItems.length && filterText
|
||||
? originalItems.filter(
|
||||
directory =>
|
||||
(directory) =>
|
||||
directory.displayName &&
|
||||
directory.displayName.toLowerCase().indexOf(filterText && filterText.toLowerCase()) >= 0
|
||||
)
|
||||
: originalItems;
|
||||
const filteredItemsSelected = filteredItems.map(t => {
|
||||
const filteredItemsSelected = filteredItems.map((t) => {
|
||||
let tenant: ListTenant = t;
|
||||
tenant.selected = t.tenantId === selectedDirectoryId;
|
||||
return tenant;
|
||||
|
@ -53,7 +53,7 @@ export class DirectoryListComponent extends React.Component<DirectoryListProps,
|
|||
className: "directoryListFilterTextBox",
|
||||
placeholder: "Filter by directory name",
|
||||
onChange: this._onFilterChanged,
|
||||
ariaLabel: "Directory filter text box"
|
||||
ariaLabel: "Directory filter text box",
|
||||
};
|
||||
|
||||
// TODO: add magnify glass to search bar with onRenderSuffix
|
||||
|
@ -69,7 +69,7 @@ export class DirectoryListComponent extends React.Component<DirectoryListProps,
|
|||
|
||||
private _onFilterChanged = (event: React.FormEvent<HTMLInputElement | HTMLTextAreaElement>, text?: string): void => {
|
||||
this.setState({
|
||||
filterText: text
|
||||
filterText: text,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -84,19 +84,19 @@ export class DirectoryListComponent extends React.Component<DirectoryListProps,
|
|||
height: "auto",
|
||||
borderBottom: "1px solid #ccc",
|
||||
padding: "1px 0",
|
||||
width: "100%"
|
||||
width: "100%",
|
||||
},
|
||||
rootDisabled: {
|
||||
backgroundColor: "#f1f1f8"
|
||||
backgroundColor: "#f1f1f8",
|
||||
},
|
||||
rootHovered: {
|
||||
backgroundColor: "rgba(85,179,255,.1)"
|
||||
backgroundColor: "rgba(85,179,255,.1)",
|
||||
},
|
||||
flexContainer: {
|
||||
height: "auto",
|
||||
justifyContent: "flex-start"
|
||||
}
|
||||
}
|
||||
justifyContent: "flex-start",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return (
|
||||
|
@ -115,7 +115,7 @@ export class DirectoryListComponent extends React.Component<DirectoryListProps,
|
|||
}
|
||||
const buttonElement = e.currentTarget;
|
||||
const selectedDirectoryId = buttonElement.getElementsByClassName("directoryListItemId")[0].textContent;
|
||||
const selectedDirectory = _.find(this.props.directories, d => d.tenantId === selectedDirectoryId);
|
||||
const selectedDirectory = _.find(this.props.directories, (d) => d.tenantId === selectedDirectoryId);
|
||||
|
||||
this.props.onNewDirectorySelected(selectedDirectory);
|
||||
};
|
||||
|
|
|
@ -25,7 +25,7 @@ describe("Dynamic List Component", () => {
|
|||
placeholder: placeholder,
|
||||
listItems: items,
|
||||
buttonText: mockButton,
|
||||
ariaLabel: mockAriaLabel
|
||||
ariaLabel: mockAriaLabel,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -113,5 +113,5 @@ export class DynamicListViewModel extends WaitsForTemplateViewModel {
|
|||
*/
|
||||
export const DynamicListComponent = {
|
||||
viewModel: DynamicListViewModel,
|
||||
template
|
||||
template,
|
||||
};
|
||||
|
|
|
@ -10,7 +10,7 @@ export class EditorComponent {
|
|||
constructor() {
|
||||
return {
|
||||
viewModel: EditorViewModel,
|
||||
template
|
||||
template,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -70,7 +70,7 @@ export class EditorReact extends React.Component<EditorReactProps> {
|
|||
fontSize: 12,
|
||||
ariaLabel: this.props.ariaLabel,
|
||||
theme: this.props.theme,
|
||||
automaticLayout: true
|
||||
automaticLayout: true,
|
||||
};
|
||||
|
||||
this.rootNode.innerHTML = "";
|
||||
|
|
|
@ -10,7 +10,7 @@ export class ErrorDisplayComponent {
|
|||
constructor() {
|
||||
return {
|
||||
viewModel: ErrorDisplayViewModel,
|
||||
template
|
||||
template,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,23 +15,23 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
|
|||
const baseUrlOptions = [
|
||||
{ key: "https://localhost:1234/explorer.html", text: "localhost:1234" },
|
||||
{ key: "https://cosmos.azure.com/explorer.html", text: "cosmos.azure.com" },
|
||||
{ key: "https://portal.azure.com", text: "portal" }
|
||||
{ key: "https://portal.azure.com", text: "portal" },
|
||||
];
|
||||
|
||||
const platformOptions = [
|
||||
{ key: "Hosted", text: "Hosted" },
|
||||
{ key: "Portal", text: "Portal" },
|
||||
{ key: "Emulator", text: "Emulator" },
|
||||
{ key: "", text: "None" }
|
||||
{ key: "", text: "None" },
|
||||
];
|
||||
|
||||
// React hooks to keep state
|
||||
const [baseUrl, setBaseUrl] = React.useState<IDropdownOption>(
|
||||
baseUrlOptions.find(o => o.key === window.location.origin + window.location.pathname) || baseUrlOptions[0]
|
||||
baseUrlOptions.find((o) => o.key === window.location.origin + window.location.pathname) || baseUrlOptions[0]
|
||||
);
|
||||
const [platform, setPlatform] = React.useState<IDropdownOption>(
|
||||
urlParams.has("platform")
|
||||
? platformOptions.find(o => o.key === urlParams.get("platform")) || platformOptions[0]
|
||||
? platformOptions.find((o) => o.key === urlParams.get("platform")) || platformOptions[0]
|
||||
: platformOptions[0]
|
||||
);
|
||||
|
||||
|
@ -52,13 +52,13 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
|
|||
{
|
||||
key: "feature.enableLinkInjection",
|
||||
label: "Enable Injecting Notebook Viewer Link into the first cell",
|
||||
value: "true"
|
||||
value: "true",
|
||||
},
|
||||
{ key: "feature.canexceedmaximumvalue", label: "Can exceed max value", value: "true" },
|
||||
{
|
||||
key: "feature.enablefixedcollectionwithsharedthroughput",
|
||||
label: "Enable fixed collection with shared throughput",
|
||||
value: "true"
|
||||
value: "true",
|
||||
},
|
||||
{ key: "feature.ttl90days", label: "TTL 90 days", value: "true" },
|
||||
{ key: "feature.enablenotebooks", label: "Enable notebooks", value: "true" },
|
||||
|
@ -66,10 +66,10 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
|
|||
key: "feature.customportal",
|
||||
label: "Force Production portal (portal only)",
|
||||
value: "false",
|
||||
disabled: (): boolean => baseUrl.key !== "https://portal.azure.com"
|
||||
disabled: (): boolean => baseUrl.key !== "https://portal.azure.com",
|
||||
},
|
||||
{ key: "feature.enablespark", label: "Enable Synapse", value: "true" },
|
||||
{ key: "feature.enableautopilotv2", label: "Enable Auto-pilot V2", value: "true" }
|
||||
{ key: "feature.enableautopilotv2", label: "Enable Auto-pilot V2", value: "true" },
|
||||
];
|
||||
|
||||
const stringFeatures: {
|
||||
|
@ -88,23 +88,23 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
|
|||
key: "dataExplorerSource",
|
||||
label: "Data Explorer Source (portal only)",
|
||||
placeholder: "https://localhost:1234/explorer.html",
|
||||
disabled: (): boolean => baseUrl.key !== "https://portal.azure.com"
|
||||
disabled: (): boolean => baseUrl.key !== "https://portal.azure.com",
|
||||
},
|
||||
{ key: "feature.livyendpoint", label: "Livy endpoint", placeholder: "" }
|
||||
{ key: "feature.livyendpoint", label: "Livy endpoint", placeholder: "" },
|
||||
];
|
||||
|
||||
booleanFeatures.forEach(
|
||||
f => (f.reactState = React.useState<boolean>(urlParams.has(f.key) ? urlParams.get(f.key) === "true" : false))
|
||||
(f) => (f.reactState = React.useState<boolean>(urlParams.has(f.key) ? urlParams.get(f.key) === "true" : false))
|
||||
);
|
||||
stringFeatures.forEach(
|
||||
f => (f.reactState = React.useState<string>(urlParams.has(f.key) ? urlParams.get(f.key) : undefined))
|
||||
(f) => (f.reactState = React.useState<string>(urlParams.has(f.key) ? urlParams.get(f.key) : undefined))
|
||||
);
|
||||
|
||||
const buildUrl = (): string => {
|
||||
const fragments = (platform.key === "" ? [] : [`platform=${platform.key}`])
|
||||
.concat(booleanFeatures.map(f => (f.reactState[0] ? `${f.key}=${f.value}` : "")))
|
||||
.concat(stringFeatures.map(f => (f.reactState[0] ? `${f.key}=${encodeURIComponent(f.reactState[0])}` : "")))
|
||||
.filter(v => v && v.length > 0);
|
||||
.concat(booleanFeatures.map((f) => (f.reactState[0] ? `${f.key}=${f.value}` : "")))
|
||||
.concat(stringFeatures.map((f) => (f.reactState[0] ? `${f.key}=${encodeURIComponent(f.reactState[0])}` : "")))
|
||||
.filter((v) => v && v.length > 0);
|
||||
|
||||
const paramString = fragments.length < 1 ? "" : `?${fragments.join("&")}`;
|
||||
return `${baseUrl.key}${paramString}`;
|
@@ -119,38 +119,38 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
};

booleanFeatures.forEach(
f =>
(f) =>
(f.onChange = (ev?: React.FormEvent<HTMLElement | HTMLInputElement>, checked?: boolean): void => {
f.reactState[1](checked);
})
);

stringFeatures.forEach(
f =>
(f) =>
(f.onChange = (event: React.FormEvent<HTMLInputElement | HTMLTextAreaElement>, newValue?: string): void => {
f.reactState[1](newValue);
})
);

const onNotebookShortcut = (): void => {
booleanFeatures.find(f => f.key === "feature.enablenotebooks").reactState[1](true);
booleanFeatures.find((f) => f.key === "feature.enablenotebooks").reactState[1](true);
stringFeatures
.find(f => f.key === "feature.notebookserverurl")
.find((f) => f.key === "feature.notebookserverurl")
.reactState[1]("https://localhost:10001/12345/notebook/");
stringFeatures.find(f => f.key === "feature.notebookservertoken").reactState[1]("token");
stringFeatures.find(f => f.key === "feature.notebookbasepath").reactState[1]("./notebooks");
setPlatform(platformOptions.find(o => o.key === "Hosted"));
stringFeatures.find((f) => f.key === "feature.notebookservertoken").reactState[1]("token");
stringFeatures.find((f) => f.key === "feature.notebookbasepath").reactState[1]("./notebooks");
setPlatform(platformOptions.find((o) => o.key === "Hosted"));
};

const onPortalLocalDEShortcut = (): void => {
setBaseUrl(baseUrlOptions.find(o => o.key === "https://portal.azure.com"));
setPlatform(platformOptions.find(o => o.key === "Portal"));
stringFeatures.find(f => f.key === "dataExplorerSource").reactState[1]("https://localhost:1234/explorer.html");
setBaseUrl(baseUrlOptions.find((o) => o.key === "https://portal.azure.com"));
setPlatform(platformOptions.find((o) => o.key === "Portal"));
stringFeatures.find((f) => f.key === "dataExplorerSource").reactState[1]("https://localhost:1234/explorer.html");
};

const onReset = (): void => {
booleanFeatures.forEach(f => f.reactState[1](false));
stringFeatures.forEach(f => f.reactState[1](""));
booleanFeatures.forEach((f) => f.reactState[1](false));
stringFeatures.forEach((f) => f.reactState[1](""));
};

const stackTokens = { childrenGap: 10 };

@@ -169,7 +169,7 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
const anchorOptions = {
href: buildUrl(),
target: "_blank",
rel: "noopener"
rel: "noopener",
};

return (

@@ -201,7 +201,7 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
</Stack>
<Stack horizontal>
<Stack className="checkboxRow" horizontalAlign="space-between">
{leftBooleanFeatures.map(f => (
{leftBooleanFeatures.map((f) => (
<Checkbox
key={f.key}
label={f.label}

@@ -212,7 +212,7 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
))}
</Stack>
<Stack className="checkboxRow" horizontalAlign="space-between">
{rightBooleanFeatures.map(f => (
{rightBooleanFeatures.map((f) => (
<Checkbox
key={f.key}
label={f.label}

@@ -225,7 +225,7 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
</Stack>
<Stack horizontal tokens={stackTokens}>
<Stack horizontalAlign="space-between">
{leftStringFeatures.map(f => (
{leftStringFeatures.map((f) => (
<TextField
key={f.key}
value={f.reactState[0]}

@@ -238,7 +238,7 @@ export const FeaturePanelComponent: React.FunctionComponent = () => {
))}
</Stack>
<Stack horizontalAlign="space-between">
{rightStringFeatures.map(f => (
{rightStringFeatures.map((f) => (
<TextField
key={f.key}
value={f.reactState[0]}

@@ -20,7 +20,7 @@ export const FeaturePanelLauncher: React.FunctionComponent = (): JSX.Element =>
container: {
display: "flex",
flexFlow: "column nowrap",
alignItems: "stretch"
alignItems: "stretch",
},
header: [
// tslint:disable-next-line:deprecation

@@ -32,16 +32,16 @@ export const FeaturePanelLauncher: React.FunctionComponent = (): JSX.Element =>
display: "flex",
alignItems: "center",
fontWeight: FontWeights.semibold,
padding: "12px 12px 14px 24px"
}
padding: "12px 12px 14px 24px",
},
],
body: {
flex: "4 4 auto",
overflowY: "hidden",
marginBottom: 40,
height: "100%",
display: "flex"
}
display: "flex",
},
});

const iconButtonStyles = {

@@ -49,11 +49,11 @@ export const FeaturePanelLauncher: React.FunctionComponent = (): JSX.Element =>
color: theme.palette.neutralPrimary,
marginLeft: "auto",
marginTop: "4px",
marginRight: "2px"
marginRight: "2px",
},
rootHovered: {
color: theme.palette.neutralDark
}
color: theme.palette.neutralDark,
},
};
const cancelIcon: IIconProps = { iconName: "Cancel" };
const hideModal = (): void => showModal(false);

@@ -34,7 +34,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add

this.state = {
textFieldValue: "",
textFieldErrorMessage: undefined
textFieldErrorMessage: undefined,
};
}

@@ -44,13 +44,13 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
autoFocus: true,
value: this.state.textFieldValue,
errorMessage: this.state.textFieldErrorMessage,
onChange: this.onTextFieldChange
onChange: this.onTextFieldChange,
};

const buttonProps: IButtonProps = {
text: AddRepoComponent.ButtonText,
ariaLabel: AddRepoComponent.ButtonText,
onClick: this.onAddRepoButtonClick
onClick: this.onAddRepoButtonClick,
};

return (

@@ -68,7 +68,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
): void => {
this.setState({
textFieldValue: newValue || "",
textFieldErrorMessage: undefined
textFieldErrorMessage: undefined,
});
};

@@ -76,7 +76,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
const startKey: number = TelemetryProcessor.traceStart(Action.NotebooksGitHubManualRepoAdd, {
databaseAccountName: this.props.container.databaseAccount() && this.props.container.databaseAccount().name,
defaultExperience: this.props.container.defaultExperience && this.props.container.defaultExperience(),
dataExplorerArea: Constants.Areas.Notebook
dataExplorerArea: Constants.Areas.Notebook,
});
let enteredUrl = this.state.textFieldValue;
if (enteredUrl.indexOf("/tree/") === -1) {

@@ -87,7 +87,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
if (repoInfo) {
this.setState({
textFieldValue: "",
textFieldErrorMessage: undefined
textFieldErrorMessage: undefined,
});

const repo = await this.props.getRepo(repoInfo.owner, repoInfo.repo);

@@ -97,9 +97,9 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
repo,
branches: [
{
name: repoInfo.branch
}
]
name: repoInfo.branch,
},
],
};

TelemetryProcessor.traceSuccess(

@@ -107,7 +107,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
{
databaseAccountName: this.props.container.databaseAccount() && this.props.container.databaseAccount().name,
defaultExperience: this.props.container.defaultExperience && this.props.container.defaultExperience(),
dataExplorerArea: Constants.Areas.Notebook
dataExplorerArea: Constants.Areas.Notebook,
},
startKey
);

@@ -116,7 +116,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
}

this.setState({
textFieldErrorMessage: AddRepoComponent.TextFieldErrorMessage
textFieldErrorMessage: AddRepoComponent.TextFieldErrorMessage,
});
TelemetryProcessor.traceFailure(
Action.NotebooksGitHubManualRepoAdd,

@@ -124,7 +124,7 @@ export class AddRepoComponent extends React.Component<AddRepoComponentProps, Add
databaseAccountName: this.props.container.databaseAccount() && this.props.container.databaseAccount().name,
defaultExperience: this.props.container.defaultExperience && this.props.container.defaultExperience(),
dataExplorerArea: Constants.Areas.Notebook,
error: AddRepoComponent.TextFieldErrorMessage
error: AddRepoComponent.TextFieldErrorMessage,
},
startKey
);

@@ -3,7 +3,7 @@ import {
IButtonProps,
IChoiceGroupProps,
PrimaryButton,
IChoiceGroupOption
IChoiceGroupOption,
} from "office-ui-fabric-react";
import * as React from "react";
import { ChildrenMargin } from "./GitHubStyleConstants";

@@ -26,12 +26,12 @@ export class AuthorizeAccessComponent extends React.Component<
public static readonly Scopes = {
Public: {
key: "public_repo",
text: "Public repos only"
text: "Public repos only",
},
PublicAndPrivate: {
key: "repo",
text: "Public and private repos"
}
text: "Public and private repos",
},
};

private static readonly DescriptionPara1 =

@@ -42,7 +42,7 @@ export class AuthorizeAccessComponent extends React.Component<

private onChoiceGroupChange = (event: React.SyntheticEvent<HTMLElement>, option: IChoiceGroupOption): void =>
this.setState({
scope: option.key
scope: option.key,
});

private onButtonClick = (): void => this.props.authorizeAccess(this.state.scope);

@@ -51,7 +51,7 @@ export class AuthorizeAccessComponent extends React.Component<
super(props);

this.state = {
scope: this.props.scope
scope: this.props.scope,
};
}

@@ -61,22 +61,22 @@ export class AuthorizeAccessComponent extends React.Component<
{
key: AuthorizeAccessComponent.Scopes.Public.key,
text: AuthorizeAccessComponent.Scopes.Public.text,
ariaLabel: AuthorizeAccessComponent.Scopes.Public.text
ariaLabel: AuthorizeAccessComponent.Scopes.Public.text,
},
{
key: AuthorizeAccessComponent.Scopes.PublicAndPrivate.key,
text: AuthorizeAccessComponent.Scopes.PublicAndPrivate.text,
ariaLabel: AuthorizeAccessComponent.Scopes.PublicAndPrivate.text
}
ariaLabel: AuthorizeAccessComponent.Scopes.PublicAndPrivate.text,
},
],
selectedKey: this.state.scope,
onChange: this.onChoiceGroupChange
onChange: this.onChoiceGroupChange,
};

const buttonProps: IButtonProps = {
text: AuthorizeAccessComponent.AuthorizeButtonText,
ariaLabel: AuthorizeAccessComponent.AuthorizeButtonText,
onClick: this.onButtonClick
onClick: this.onButtonClick,
};

return (
Some files were not shown because too many files have changed in this diff.
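The hunks above are mechanical reformatting from the Prettier 2.0 upgrade: single-parameter arrow functions gain parentheses ((f) => ... instead of f => ...) and multi-line object literals, arrays, and import lists gain trailing commas. These match Prettier 2.0's new defaults for arrowParens ("avoid" -> "always") and trailingComma ("none" -> "es5"). As a rough sketch only — the repository's actual Prettier configuration is not part of this diff, and the file name below is hypothetical — spelling those defaults out explicitly would look like:

// prettier.config.js (hypothetical; Prettier 2.0 already applies these values by default)
module.exports = {
  arrowParens: "always", // f => x  becomes  (f) => x
  trailingComma: "es5", // trailing commas wherever valid in ES5: object literals, arrays, etc.
  endOfLine: "lf", // also a new 2.0 default, though not visible in this diff
};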