Prettier 2.0 (#393)

Steve Faulkner
2021-01-20 09:15:01 -06:00
committed by GitHub
parent c1937ca464
commit 4be53284b5
500 changed files with 41927 additions and 41838 deletions
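
Every hunk below is mechanical formatter output, not a hand edit. Prettier 2.0 changed three defaults that account for nearly all of the churn in these 500 files: arrowParens moved to "always", trailingComma moved to "es5", and endOfLine moved to "lf" (the last explains why several files below appear rewritten end to end). A minimal sketch of the upgrade through Prettier's own API; the source string is illustrative, and the options spell out the new defaults rather than quoting a config file from this repo:

import * as prettier from "prettier";

// In Prettier 2.x, format() is synchronous and returns the printed source.
const printed = prettier.format("const f = x => x", {
  parser: "typescript",
  arrowParens: "always", // new 2.0 default: parens around a sole arrow parameter
  trailingComma: "es5", // new 2.0 default: trailing commas where ES5 allows them
  endOfLine: "lf", // new 2.0 default: normalize line endings to \n
});
// printed === "const f = (x) => x;\n"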


@@ -40,7 +40,7 @@ export class ArrayHashMap<T> {
public forEach(key: string, iteratorFct: (value: T) => void) {
const values = this.store.get(key);
if (values) {
-values.forEach(value => iteratorFct(value));
+values.forEach((value) => iteratorFct(value));
}
}
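
The hunk above shows the arrowParens change in isolation: a sole arrow-function parameter now gets parentheses. A before/after sketch (the rows array is illustrative, not from this repo):

const rows = [{ id: 1 }, { id: 2 }];
// Prettier 1.x default (arrowParens: "avoid"):
const before = rows.map(r => r.id);
// Prettier 2.0 default (arrowParens: "always"):
const after = rows.map((r) => r.id);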


@@ -7,7 +7,7 @@ export class CodeOfConductEndpoints {
export class EndpointsRegex {
public static readonly cassandra = [
"AccountEndpoint=(.*).cassandra.cosmosdb.azure.com",
"HostName=(.*).cassandra.cosmos.azure.com"
"HostName=(.*).cassandra.cosmos.azure.com",
];
public static readonly mongo = "mongodb://.*:(.*)@(.*).documents.azure.com";
public static readonly mongoCompute = "mongodb://.*:(.*)@(.*).mongo.cosmos.azure.com";
@@ -147,7 +147,7 @@ export class MongoDBAccounts {
export enum MongoBackendEndpointType {
local,
-remote
+remote,
}
// TODO: 435619 Add default endpoints per cloud and use regional only when available
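
The comma churn in this file (and in most files below) is the trailingComma default moving from "none" to "es5": multiline arrays, object literals, and, as the hunks here show, TypeScript enums now end with a trailing comma. An illustrative sketch (the names are hypothetical):

// Printed by Prettier 2.0 with the new trailingComma: "es5" default:
const endpoints = [
  "https://a.example.com",
  "https://b.example.com", // comma now added after the last element
];
enum Mode {
  local,
  remote, // enum members get one too
}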
@@ -274,7 +274,7 @@ export class HttpStatusCodes {
HttpStatusCodes.InternalServerError, // TODO: Handle all 500s on Portal backend and remove from retries list
HttpStatusCodes.BadGateway,
HttpStatusCodes.ServiceUnavailable,
-HttpStatusCodes.GatewayTimeout
+HttpStatusCodes.GatewayTimeout,
];
}
@@ -330,10 +330,7 @@ export class HashRoutePrefixes {
public static docsWithIds(databaseId: string, collectionId: string, docId: string) {
const transformedDatabasePrefix: string = this.docs.replace("{db_id}", databaseId);
-return transformedDatabasePrefix
-.replace("{coll_id}", collectionId)
-.replace("{doc_id}", docId)
-.replace("/", ""); // strip the first slash since hasher adds it
+return transformedDatabasePrefix.replace("{coll_id}", collectionId).replace("{doc_id}", docId).replace("/", ""); // strip the first slash since hasher adds it
}
}
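
The docsWithIds hunk above reflects a different 2.0 change: the printer's revised member-chain heuristic keeps short chains of simple calls on one line instead of breaking after every call, even when the joined line runs somewhat past the 80-column printWidth (a soft target, not a hard limit). Conversely, expressions the printer does decide to break, like the pk: ternaries in MongoProxyClient further down, get rewrapped onto multiple lines. A sketch with a hypothetical template string:

const template = "{db_id}/{coll_id}/{doc_id}";
// 2.0 keeps this simple three-call chain flat rather than one call per line:
const route = template.replace("{coll_id}", "c").replace("{doc_id}", "d").replace("/", "");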
@@ -379,7 +376,7 @@ export class OfferVersions {
export enum ConflictOperationType {
Replace = "replace",
Create = "create",
Delete = "delete"
Delete = "delete",
}
export const EmulatorMasterKey =


@@ -10,17 +10,17 @@ describe("tokenProvider", () => {
resourceId: "",
resourceType: "dbs" as ResourceType,
headers: {},
-getAuthorizationTokenUsingMasterKey: () => ""
+getAuthorizationTokenUsingMasterKey: () => "",
};
beforeEach(() => {
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
-headers: new Map()
+headers: new Map(),
};
});
});
@@ -36,7 +36,7 @@ describe("tokenProvider", () => {
it("does not call the auth service if a master key is set", async () => {
updateUserContext({
masterKey: "foo"
masterKey: "foo",
});
await tokenProvider(options);
expect((window.fetch as any).mock.calls.length).toBe(0);
@@ -50,7 +50,7 @@ describe("getTokenFromAuthService", () => {
window.fetch = jest.fn().mockImplementation(() => {
return {
json: () => "{}",
-headers: new Map()
+headers: new Map(),
};
});
});
@@ -61,7 +61,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in production", () => {
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -72,7 +72,7 @@ describe("getTokenFromAuthService", () => {
it("builds the correct URL in dev", () => {
updateConfigContext({
-BACKEND_ENDPOINT: "https://localhost:1234"
+BACKEND_ENDPOINT: "https://localhost:1234",
});
getTokenFromAuthService("GET", "dbs", "foo");
expect(window.fetch).toHaveBeenCalledWith(
@@ -96,15 +96,15 @@ describe("endpoint", () => {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo"
}
}
cassandraEndpoint: "foo",
},
},
});
expect(endpoint()).toEqual("bar");
});
it("uses _endpoint if set", () => {
updateUserContext({
endpoint: "baz"
endpoint: "baz",
});
expect(endpoint()).toEqual("baz");
});
@@ -121,7 +121,7 @@ describe("requestPlugin", () => {
updateConfigContext({
platform: Platform.Hosted,
BACKEND_ENDPOINT: "https://localhost:1234",
PROXY_PATH: "/proxy"
PROXY_PATH: "/proxy",
});
const headers = {};
const endpoint = "https://docs.azure.com";


@@ -58,13 +58,13 @@ export async function getTokenFromAuthService(verb: string, resourceType: string
method: "POST",
headers: {
"content-type": "application/json",
"x-ms-encrypted-auth-token": userContext.accessToken
"x-ms-encrypted-auth-token": userContext.accessToken,
},
body: JSON.stringify({
verb,
resourceType,
-resourceId
-})
+resourceId,
+}),
});
//TODO I am not sure why we have to parse the JSON again here. fetch should do it for us when we call .json()
const result = JSON.parse(await response.json());
@@ -81,9 +81,9 @@ export function client(): Cosmos.CosmosClient {
key: userContext.masterKey,
tokenProvider,
connectionPolicy: {
-enableEndpointDiscovery: false
+enableEndpointDiscovery: false,
},
-userAgentSuffix: "Azure Portal"
+userAgentSuffix: "Azure Portal",
};
if (configContext.PROXY_PATH !== undefined) {


@@ -1,94 +1,94 @@
import * as ko from "knockout";
import * as ViewModels from "../Contracts/ViewModels";
export default class EditableUtility {
public static observable<T>(initialValue?: T): ViewModels.Editable<T> {
var observable: ViewModels.Editable<T> = <ViewModels.Editable<T>>ko.observable<T>(initialValue);
observable.edits = ko.observableArray<T>([initialValue]);
observable.validations = ko.observableArray<(value: T) => boolean>([]);
observable.setBaseline = (baseline: T) => {
observable(baseline);
observable.edits([baseline]);
};
observable.getEditableCurrentValue = ko.computed<T>(() => {
const edits = (observable.edits && observable.edits()) || [];
if (edits.length === 0) {
return undefined;
}
return edits[edits.length - 1];
});
observable.getEditableOriginalValue = ko.computed<T>(() => {
const edits = (observable.edits && observable.edits()) || [];
if (edits.length === 0) {
return undefined;
}
return edits[0];
});
observable.editableIsDirty = ko.computed<boolean>(() => {
const edits = (observable.edits && observable.edits()) || [];
if (edits.length <= 1) {
return false;
}
let current: any = observable.getEditableCurrentValue();
let original: any = observable.getEditableOriginalValue();
switch (typeof current) {
case "string":
case "undefined":
case "number":
case "boolean":
current = current && current.toString();
break;
default:
current = JSON.stringify(current);
break;
}
switch (typeof original) {
case "string":
case "undefined":
case "number":
case "boolean":
original = original && original.toString();
break;
default:
original = JSON.stringify(original);
break;
}
if (current !== original) {
return true;
}
return false;
});
-observable.subscribe(edit => {
+observable.subscribe((edit) => {
var edits = observable.edits && observable.edits();
if (!edits) {
return;
}
edits.push(edit);
observable.edits(edits);
});
observable.editableIsValid = ko.observable<boolean>(true);
-observable.subscribe(value => {
+observable.subscribe((value) => {
const validations: ((value: T) => boolean)[] = (observable.validations && observable.validations()) || [];
-const isValid = validations.every(validate => validate(value));
+const isValid = validations.every((validate) => validate(value));
observable.editableIsValid(isValid);
});
return observable;
}
}


@@ -1,6 +1,6 @@
export function normalizeArmEndpoint(uri: string): string {
if (uri && uri.slice(-1) !== "/") {
return `${uri}/`;
}
return uri;
}


@@ -37,7 +37,7 @@ const sendNotificationForError = (errorMessage: string, errorCode: number | stri
}
sendMessage({
type: MessageTypes.ForbiddenError,
-reason: errorMessage
+reason: errorMessage,
});
}
};


@@ -1,25 +1,25 @@
import * as HeadersUtility from "./HeadersUtility";
import { ExplorerSettings } from "../Shared/ExplorerSettings";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
describe("Headers Utility", () => {
describe("shouldEnableCrossPartitionKeyForResourceWithPartitionKey()", () => {
beforeEach(() => {
ExplorerSettings.createDefaultSettings();
});
it("should return true by default", () => {
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true);
});
it("should return false if the enable cross partition key feed option is false", () => {
LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "false");
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(false);
});
it("should return true if the enable cross partition key feed option is true", () => {
LocalStorageUtility.setEntryString(StorageKey.IsCrossPartitionQueryEnabled, "true");
expect(HeadersUtility.shouldEnableCrossPartitionKey()).toBe(true);
});
});
});


@@ -1,28 +1,28 @@
import * as Constants from "./Constants";
import { LocalStorageUtility, StorageKey } from "../Shared/StorageUtility";
// x-ms-resource-quota: databases = 100; collections = 5000; users = 500000; permissions = 2000000;
export function getQuota(responseHeaders: any): any {
return responseHeaders && responseHeaders[Constants.HttpHeaders.resourceQuota]
? parseStringIntoObject(responseHeaders[Constants.HttpHeaders.resourceQuota])
: null;
}
export function shouldEnableCrossPartitionKey(): boolean {
return LocalStorageUtility.getEntryString(StorageKey.IsCrossPartitionQueryEnabled) === "true";
}
function parseStringIntoObject(resourceString: string) {
var entityObject: any = {};
if (resourceString) {
var entitiesArray: string[] = resourceString.split(";");
for (var i: any = 0; i < entitiesArray.length; i++) {
var entity: string[] = entitiesArray[i].split("=");
entityObject[entity[0]] = entity[1];
}
}
return entityObject;
}


@@ -11,8 +11,8 @@ describe("nextPage", () => {
queryMetrics: {},
requestCharge: 1,
headers: {},
activityId: "foo"
})
activityId: "foo",
}),
};
expect(await nextPage(fakeIterator, 10)).toMatchSnapshot();


@@ -14,7 +14,7 @@ export interface MinimalQueryIterator {
// Pick<QueryIterator<any>, "fetchNext">;
export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex: number): Promise<QueryResults> {
-return documentsIterator.fetchNext().then(response => {
+return documentsIterator.fetchNext().then((response) => {
const documents = response.resources;
const headers = (response as any).headers || {}; // TODO this is a private key. Remove any
const itemCount = (documents && documents.length) || 0;
@@ -26,7 +26,7 @@ export function nextPage(documentsIterator: MinimalQueryIterator, firstItemIndex
lastItemIndex: Number(firstItemIndex) + Number(itemCount),
headers,
activityId: response.activityId,
-requestCharge: response.requestCharge
+requestCharge: response.requestCharge,
};
});
}


@@ -1,26 +1,26 @@
jest.mock("./MessageHandler");
import { LogEntryLevel } from "../Contracts/Diagnostics";
import * as Logger from "./Logger";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { sendMessage } from "./MessageHandler";
describe("Logger", () => {
beforeEach(() => {
jest.resetAllMocks();
});
it("should log info messages", () => {
Logger.logInfo("Test info", "DocDB");
expect(sendMessage).toBeCalled();
});
it("should log error messages", () => {
Logger.logError("Test error", "DocDB");
expect(sendMessage).toBeCalled();
});
it("should log warnings", () => {
Logger.logWarning("Test warning", "DocDB");
expect(sendMessage).toBeCalled();
});
});
jest.mock("./MessageHandler");
import { LogEntryLevel } from "../Contracts/Diagnostics";
import * as Logger from "./Logger";
import { MessageTypes } from "../Contracts/ExplorerContracts";
import { sendMessage } from "./MessageHandler";
describe("Logger", () => {
beforeEach(() => {
jest.resetAllMocks();
});
it("should log info messages", () => {
Logger.logInfo("Test info", "DocDB");
expect(sendMessage).toBeCalled();
});
it("should log error messages", () => {
Logger.logError("Test error", "DocDB");
expect(sendMessage).toBeCalled();
});
it("should log warnings", () => {
Logger.logWarning("Test warning", "DocDB");
expect(sendMessage).toBeCalled();
});
});


@@ -29,7 +29,7 @@ export function logError(errorMessage: string, area: string, code?: number | str
function _logEntry(entry: Diagnostics.LogEntry): void {
sendMessage({
type: MessageTypes.LogInfo,
-data: JSON.stringify(entry)
+data: JSON.stringify(entry),
});
const severityLevel = ((level: Diagnostics.LogEntryLevel): SeverityLevel => {
@@ -60,6 +60,6 @@ function _generateLogEntry(
level,
message,
area,
-code
+code,
};
}


@@ -1,28 +1,28 @@
import Q from "q";
import * as MessageHandler from "./MessageHandler";
describe("Message Handler", () => {
it("should handle cached message", async () => {
let mockPromise = {
id: "123",
startTime: new Date(),
-deferred: Q.defer<any>()
+deferred: Q.defer<any>(),
};
let mockMessage = { message: { id: "123", data: "{}" } };
MessageHandler.RequestMap[mockPromise.id] = mockPromise;
MessageHandler.handleCachedDataMessage(mockMessage);
expect(mockPromise.deferred.promise.isFulfilled()).toBe(true);
});
it("should delete fulfilled promises on running the garbage collector", async () => {
let message = {
id: "123",
startTime: new Date(),
-deferred: Q.defer<any>()
+deferred: Q.defer<any>(),
};
MessageHandler.handleCachedDataMessage(message);
MessageHandler.runGarbageCollector();
expect(MessageHandler.RequestMap["123"]).toBeUndefined();
});
});


@@ -35,7 +35,7 @@ export function sendCachedDataMessage<TResponseDataModel>(
let cachedDataPromise: CachedDataPromise<TResponseDataModel> = {
deferred: Q.defer<TResponseDataModel>(),
startTime: new Date(),
-id: _.uniqueId()
+id: _.uniqueId(),
};
RequestMap[cachedDataPromise.id] = cachedDataPromise;
sendMessage({ type: messageType, params: params, id: cachedDataPromise.id });
@@ -54,7 +54,7 @@ export function sendMessage(data: any): void {
portalChildWindow.parent.postMessage(
{
signature: "pcIframe",
-data: data
+data: data,
},
portalChildWindow.document.referrer
);


@@ -14,7 +14,7 @@ const fetchMock = () => {
ok: true,
text: () => "{}",
json: () => "{}",
-headers: new Map()
+headers: new Map(),
});
};
@@ -27,8 +27,8 @@ const collection = {
partitionKey: {
paths: ["/pk"],
kind: "Hash",
-version: 1
-}
+version: 1,
+},
} as Collection;
const documentId = ({
@@ -38,8 +38,8 @@ const documentId = ({
partitionKey: {
paths: ["/pk"],
kind: "Hash",
-version: 1
-}
+version: 1,
+},
} as unknown) as DocumentId;
const databaseAccount = {
@@ -52,8 +52,8 @@ const databaseAccount = {
documentEndpoint: "bar",
gremlinEndpoint: "foo",
tableEndpoint: "foo",
cassandraEndpoint: "foo"
}
cassandraEndpoint: "foo",
},
} as DatabaseAccount;
describe("MongoProxyClient", () => {
@@ -61,10 +61,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
-databaseAccount
+databaseAccount,
});
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -93,10 +93,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
-databaseAccount
+databaseAccount,
});
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -125,10 +125,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
-databaseAccount
+databaseAccount,
});
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -157,10 +157,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
-databaseAccount
+databaseAccount,
});
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -189,10 +189,10 @@ describe("MongoProxyClient", () => {
beforeEach(() => {
resetConfigContext();
updateUserContext({
-databaseAccount
+databaseAccount,
});
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
window.fetch = jest.fn().mockImplementation(fetchMock);
});
@@ -222,10 +222,10 @@ describe("MongoProxyClient", () => {
resetConfigContext();
delete window.authType;
updateUserContext({
-databaseAccount
+databaseAccount,
});
updateConfigContext({
-BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com"
+BACKEND_ENDPOINT: "https://main.documentdb.ext.azure.com",
});
});


@@ -16,7 +16,7 @@ import { sendMessage } from "./MessageHandler";
const defaultHeaders = {
[HttpHeaders.apiType]: ApiType.MongoDB.toString(),
[CosmosSDKConstants.HttpHeaders.MaxEntityCount]: "100",
-[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15"
+[CosmosSDKConstants.HttpHeaders.Version]: "2017-11-15",
};
function authHeaders() {
@@ -31,7 +31,7 @@ export function queryIterator(databaseId: string, collection: Collection, query:
let continuationToken: string;
return {
fetchNext: () => {
-return queryDocuments(databaseId, collection, false, query).then(response => {
+return queryDocuments(databaseId, collection, false, query).then((response) => {
continuationToken = response.continuationToken;
const headers: { [key: string]: string | number } = {};
response.headers.forEach((value, key) => {
@@ -42,10 +42,10 @@ export function queryIterator(databaseId: string, collection: Collection, query:
headers,
requestCharge: Number(headers[CosmosSDKConstants.HttpHeaders.RequestCharge]),
activityId: String(headers[CosmosSDKConstants.HttpHeaders.ActivityId]),
-hasMoreResults: !!continuationToken
+hasMoreResults: !!continuationToken,
};
});
-}
+},
};
}
@@ -74,7 +74,9 @@ export function queryDocuments(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
collection && collection.partitionKey && !collection.partitionKey.systemKey ? collection.partitionKeyProperty : ""
collection && collection.partitionKey && !collection.partitionKey.systemKey
? collection.partitionKeyProperty
: "",
};
const endpoint = getEndpoint() || "";
@@ -87,7 +89,7 @@ export function queryDocuments(
[CosmosSDKConstants.HttpHeaders.EnableScanInQuery]: "true",
[CosmosSDKConstants.HttpHeaders.EnableCrossPartitionQuery]: "true",
[CosmosSDKConstants.HttpHeaders.ParallelizeCrossPartitionQuery]: "true",
-[HttpHeaders.contentType]: "application/query+json"
+[HttpHeaders.contentType]: "application/query+json",
};
if (continuationToken) {
@@ -100,14 +102,14 @@ export function queryDocuments(
.fetch(`${endpoint}${path}?${queryString.stringify(params)}`, {
method: "POST",
body: JSON.stringify({ query }),
-headers
+headers,
})
-.then(async response => {
+.then(async (response) => {
if (response.ok) {
return {
continuationToken: response.headers.get(CosmosSDKConstants.HttpHeaders.Continuation),
documents: (await response.json()).Documents as DataModels.DocumentId[],
-headers: response.headers
+headers: response.headers,
};
}
errorHandling(response, "querying documents", params);
@@ -135,7 +137,9 @@ export function readDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
};
const endpoint = getEndpoint();
@@ -147,10 +151,10 @@ export function readDocument(
...authHeaders(),
[CosmosSDKConstants.HttpHeaders.PartitionKey]: encodeURIComponent(
JSON.stringify(documentId.partitionKeyHeader())
-)
-}
+),
+},
})
-.then(response => {
+.then((response) => {
if (response.ok) {
return response.json();
}
@@ -175,7 +179,7 @@ export function createDocument(
sid: userContext.subscriptionId,
rg: userContext.resourceGroup,
dba: databaseAccount.name,
-pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : ""
+pk: collection && collection.partitionKey && !collection.partitionKey.systemKey ? partitionKeyProperty : "",
};
const endpoint = getEndpoint();
@@ -186,10 +190,10 @@ export function createDocument(
body: JSON.stringify(documentContent),
headers: {
...defaultHeaders,
-...authHeaders()
-}
+...authHeaders(),
+},
})
-.then(response => {
+.then((response) => {
if (response.ok) {
return response.json();
}
@@ -218,7 +222,9 @@ export function updateDocument(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
};
const endpoint = getEndpoint();
@@ -230,10 +236,10 @@ export function updateDocument(
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
-[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
-}
+[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
+},
})
-.then(response => {
+.then((response) => {
if (response.ok) {
return response.json();
}
@@ -257,7 +263,9 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
rg: userContext.resourceGroup,
dba: databaseAccount.name,
pk:
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey ? documentId.partitionKeyProperty : ""
documentId && documentId.partitionKey && !documentId.partitionKey.systemKey
? documentId.partitionKeyProperty
: "",
};
const endpoint = getEndpoint();
@@ -268,10 +276,10 @@ export function deleteDocument(databaseId: string, collection: Collection, docum
...defaultHeaders,
...authHeaders(),
[HttpHeaders.contentType]: "application/json",
-[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader())
-}
+[CosmosSDKConstants.HttpHeaders.PartitionKey]: JSON.stringify(documentId.partitionKeyHeader()),
+},
})
-.then(response => {
+.then((response) => {
if (response.ok) {
return undefined;
}
@@ -299,7 +307,7 @@ export function createMongoCollectionWithProxy(
rg: userContext.resourceGroup,
dba: databaseAccount.name,
isAutoPilot: !!params.autoPilotMaxThroughput,
-autoPilotThroughput: params.autoPilotMaxThroughput?.toString()
+autoPilotThroughput: params.autoPilotMaxThroughput?.toString(),
};
const endpoint = getEndpoint();
@@ -314,11 +322,11 @@ export function createMongoCollectionWithProxy(
headers: {
...defaultHeaders,
...authHeaders(),
-[HttpHeaders.contentType]: "application/json"
-}
+[HttpHeaders.contentType]: "application/json",
+},
}
)
-.then(response => {
+.then((response) => {
if (response.ok) {
return response.json();
}


@@ -1,168 +1,168 @@
/* Copyright 2013 10gen Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default class MongoUtility {
-public static tojson = function(x: any, indent: string, nolint: boolean) {
+public static tojson = function (x: any, indent: string, nolint: boolean) {
if (x === null || x === undefined) {
return String(x);
}
indent = indent || "";
switch (typeof x) {
case "string":
var out = new Array(x.length + 1);
out[0] = '"';
for (var i = 0; i < x.length; i++) {
if (x[i] === '"') {
out[out.length] = '\\"';
} else if (x[i] === "\\") {
out[out.length] = "\\\\";
} else if (x[i] === "\b") {
out[out.length] = "\\b";
} else if (x[i] === "\f") {
out[out.length] = "\\f";
} else if (x[i] === "\n") {
out[out.length] = "\\n";
} else if (x[i] === "\r") {
out[out.length] = "\\r";
} else if (x[i] === "\t") {
out[out.length] = "\\t";
} else {
var code = x.charCodeAt(i);
if (code < 0x20) {
out[out.length] = (code < 0x10 ? "\\u000" : "\\u00") + code.toString(16);
} else {
out[out.length] = x[i];
}
}
}
return out.join("") + '"';
case "number":
/* falls through */
case "boolean":
return "" + x;
case "object":
var func = $.isArray(x) ? MongoUtility.tojsonArray : MongoUtility.tojsonObject;
var s = func(x, indent, nolint);
if (
(nolint === null || nolint === undefined || nolint === true) &&
s.length < 80 &&
(indent === null || indent.length === 0)
) {
s = s.replace(/[\t\r\n]+/gm, " ");
}
return s;
case "function":
return x.toString();
default:
throw new Error("tojson can't handle type " + typeof x);
}
};
-private static tojsonObject = function(x: any, indent: string, nolint: boolean) {
+private static tojsonObject = function (x: any, indent: string, nolint: boolean) {
var lineEnding = nolint ? " " : "\n";
var tabSpace = nolint ? "" : "\t";
indent = indent || "";
if (typeof x.tojson === "function" && x.tojson !== MongoUtility.tojson) {
return x.tojson(indent, nolint);
}
if (x.constructor && typeof x.constructor.tojson === "function" && x.constructor.tojson !== MongoUtility.tojson) {
return x.constructor.tojson(x, indent, nolint);
}
if (MongoUtility.hasDefinedProperty(x, "toString") && !$.isArray(x)) {
return x.toString();
}
if (x instanceof Error) {
return x.toString();
}
if (MongoUtility.isObjectId(x)) {
return 'ObjectId("' + x.$oid + '")';
}
// push one level of indent
indent += tabSpace;
var s = "{";
var pairs = [];
for (var k in x) {
if (x.hasOwnProperty(k)) {
var val = x[k];
var pair = '"' + k + '" : ' + MongoUtility.tojson(val, indent, nolint);
if (k === "_id") {
pairs.unshift(pair);
} else {
pairs.push(pair);
}
}
}
// Add proper line endings, indents, and commas to each line
-s += $.map(pairs, function(pair) {
+s += $.map(pairs, function (pair) {
return lineEnding + indent + pair;
}).join(",");
s += lineEnding;
// pop one level of indent
indent = indent.substring(1);
return s + indent + "}";
};
-private static tojsonArray = function(a: any, indent: string, nolint: boolean) {
+private static tojsonArray = function (a: any, indent: string, nolint: boolean) {
if (a.length === 0) {
return "[ ]";
}
var lineEnding = nolint ? " " : "\n";
if (!indent || nolint) {
indent = "";
}
var s = "[" + lineEnding;
indent += "\t";
for (var i = 0; i < a.length; i++) {
s += indent + MongoUtility.tojson(a[i], indent, nolint);
if (i < a.length - 1) {
s += "," + lineEnding;
}
}
if (a.length === 0) {
s += indent;
}
indent = indent.substring(1);
s += lineEnding + indent + "]";
return s;
};
-private static hasDefinedProperty = function(obj: any, prop: string): boolean {
+private static hasDefinedProperty = function (obj: any, prop: string): boolean {
if (Object.getPrototypeOf === undefined || Object.getPrototypeOf(obj) === null) {
return false;
} else if (obj.hasOwnProperty(prop)) {
return true;
} else {
return MongoUtility.hasDefinedProperty(Object.getPrototypeOf(obj), prop);
}
};
private static isObjectId(obj: any): boolean {
var keys = Object.keys(obj);
return keys.length === 1 && keys[0] === "$oid" && typeof obj.$oid === "string" && /^[0-9a-f]{24}$/.test(obj.$oid);
}
}
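
The only change Prettier 2.0 makes to this file is printing a space between the function keyword and the parameter list of anonymous function expressions, as the marked pairs above show. A minimal sketch (square is illustrative, not from this repo):

// Prettier 1.x printed: const square = function(n: number) { ... };
// Prettier 2.0 inserts a space after the keyword:
const square = function (n: number) {
  return n * n;
};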


@@ -9,14 +9,14 @@ describe("parseSDKOfferResponse", () => {
offerThroughput: 500,
collectionThroughputInfo: {
minimumRUForCollection: 400,
-numPhysicalPartitions: 1
-}
+numPhysicalPartitions: 1,
+},
},
id: "test"
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
-resource: mockOfferDefinition
+resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
@@ -25,7 +25,7 @@ describe("parseSDKOfferResponse", () => {
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
-offerReplacePending: false
+offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);
@@ -37,17 +37,17 @@ describe("parseSDKOfferResponse", () => {
offerThroughput: 400,
collectionThroughputInfo: {
minimumRUForCollection: 400,
-numPhysicalPartitions: 1
+numPhysicalPartitions: 1,
},
offerAutopilotSettings: {
-maxThroughput: 5000
-}
+maxThroughput: 5000,
+},
},
id: "test"
id: "test",
} as SDKOfferDefinition;
const mockResponse = {
-resource: mockOfferDefinition
+resource: mockOfferDefinition,
} as OfferResponse;
const expectedResult: Offer = {
@@ -56,7 +56,7 @@ describe("parseSDKOfferResponse", () => {
minimumThroughput: 400,
id: "test",
offerDefinition: mockOfferDefinition,
-offerReplacePending: false
+offerReplacePending: false,
};
expect(OfferUtility.parseSDKOfferResponse(mockResponse)).toEqual(expectedResult);


@@ -22,7 +22,7 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | und
manualThroughput: undefined,
minimumThroughput,
offerDefinition,
-offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
+offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
}
@@ -32,6 +32,6 @@ export const parseSDKOfferResponse = (offerResponse: OfferResponse): Offer | und
manualThroughput: offerContent.offerThroughput,
minimumThroughput,
offerDefinition,
-offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true"
+offerReplacePending: offerResponse.headers?.[HttpHeaders.offerReplacePending] === "true",
};
};


@@ -30,7 +30,7 @@ export const fetchPortalNotifications = async (): Promise<DataModels.Notificatio
const headers = { [authorizationHeader.header]: authorizationHeader.token };
const response = await window.fetch(url, {
-headers
+headers,
});
if (!response.ok) {


@@ -1,219 +1,219 @@
import { ItemDefinition, QueryIterator, Resource } from "@azure/cosmos";
import * as _ from "underscore";
import * as DataModels from "../Contracts/DataModels";
import * as ViewModels from "../Contracts/ViewModels";
import Explorer from "../Explorer/Explorer";
import DocumentsTab from "../Explorer/Tabs/DocumentsTab";
import DocumentId from "../Explorer/Tree/DocumentId";
import * as NotificationConsoleUtils from "../Utils/NotificationConsoleUtils";
import { QueryUtils } from "../Utils/QueryUtils";
import { BackendDefaults, HttpStatusCodes, SavedQueries } from "./Constants";
import { userContext } from "../UserContext";
import { queryDocumentsPage } from "./dataAccess/queryDocumentsPage";
import { createCollection } from "./dataAccess/createCollection";
import { handleError } from "./ErrorHandlingUtils";
import { createDocument } from "./dataAccess/createDocument";
import { deleteDocument } from "./dataAccess/deleteDocument";
import { queryDocuments } from "./dataAccess/queryDocuments";
export class QueriesClient {
private static readonly PartitionKey: DataModels.PartitionKey = {
paths: [`/${SavedQueries.PartitionKeyProperty}`],
kind: BackendDefaults.partitionKeyKind,
-version: BackendDefaults.partitionKeyVersion
+version: BackendDefaults.partitionKeyVersion,
};
private static readonly FetchQuery: string = "SELECT * FROM c";
private static readonly FetchMongoQuery: string = "{}";
public constructor(private container: Explorer) {}
public async setupQueriesCollection(): Promise<DataModels.Collection> {
const queriesCollection: ViewModels.Collection = this.findQueriesCollection();
if (queriesCollection) {
return Promise.resolve(queriesCollection.rawDataModel);
}
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Setting up account for saving queries");
return createCollection({
collectionId: SavedQueries.CollectionName,
createNewDatabase: true,
databaseId: SavedQueries.DatabaseName,
partitionKey: QueriesClient.PartitionKey,
offerThroughput: SavedQueries.OfferThroughput,
-databaseLevelThroughput: false
+databaseLevelThroughput: false,
})
.then(
(collection: DataModels.Collection) => {
NotificationConsoleUtils.logConsoleInfo("Successfully set up account for saving queries");
return Promise.resolve(collection);
},
(error: any) => {
handleError(error, "setupQueriesCollection", "Failed to set up account for saving queries");
return Promise.reject(error);
}
)
.finally(() => clearMessage());
}
public async saveQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
return Promise.reject(errorMessage);
}
try {
this.validateQuery(query);
} catch (error) {
const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleError(`Failed to save query ${query.queryName}: ${errorMessage}`);
return Promise.reject(errorMessage);
}
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Saving query ${query.queryName}`);
query.id = query.queryName;
return createDocument(queriesCollection, query)
.then(
(savedQuery: DataModels.Query) => {
NotificationConsoleUtils.logConsoleInfo(`Successfully saved query ${query.queryName}`);
return Promise.resolve();
},
(error: any) => {
if (error.code === HttpStatusCodes.Conflict.toString()) {
error = `Query ${query.queryName} already exists`;
}
handleError(error, "saveQuery", `Failed to save query ${query.queryName}`);
return Promise.reject(error);
}
)
.finally(() => clearMessage());
}
public async getQueries(): Promise<DataModels.Query[]> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
return Promise.reject(errorMessage);
}
const options: any = { enableCrossPartitionQuery: true };
const clearMessage = NotificationConsoleUtils.logConsoleProgress("Fetching saved queries");
const queryIterator: QueryIterator<ItemDefinition & Resource> = queryDocuments(
SavedQueries.DatabaseName,
SavedQueries.CollectionName,
this.fetchQueriesQuery(),
options
);
const fetchQueries = async (firstItemIndex: number): Promise<ViewModels.QueryResults> =>
await queryDocumentsPage(queriesCollection.id(), queryIterator, firstItemIndex);
return QueryUtils.queryAllPages(fetchQueries)
.then(
(results: ViewModels.QueryResults) => {
let queries: DataModels.Query[] = _.map(results.documents, (document: DataModels.Query) => {
if (!document) {
return undefined;
}
const { id, resourceId, query, queryName } = document;
const parsedQuery: DataModels.Query = {
resourceId: resourceId,
queryName: queryName,
query: query,
-id: id
+id: id,
};
try {
this.validateQuery(parsedQuery);
return parsedQuery;
} catch (error) {
return undefined;
}
});
queries = _.reject(queries, (parsedQuery: DataModels.Query) => !parsedQuery);
NotificationConsoleUtils.logConsoleInfo("Successfully fetched saved queries");
return Promise.resolve(queries);
},
(error: any) => {
handleError(error, "getSavedQueries", "Failed to fetch saved queries");
return Promise.reject(error);
}
)
.finally(() => clearMessage());
}
public async deleteQuery(query: DataModels.Query): Promise<void> {
const queriesCollection = this.findQueriesCollection();
if (!queriesCollection) {
const errorMessage: string = "Account not set up to perform saved query operations";
NotificationConsoleUtils.logConsoleError(`Failed to fetch saved queries: ${errorMessage}`);
return Promise.reject(errorMessage);
}
try {
this.validateQuery(query);
} catch (error) {
const errorMessage: string = "Invalid query specified";
NotificationConsoleUtils.logConsoleError(`Failed to delete query ${query.queryName}: ${errorMessage}`);
}
const clearMessage = NotificationConsoleUtils.logConsoleProgress(`Deleting query ${query.queryName}`);
query.id = query.queryName;
const documentId = new DocumentId(
{
partitionKey: QueriesClient.PartitionKey,
-partitionKeyProperty: "id"
+partitionKeyProperty: "id",
} as DocumentsTab,
query,
query.queryName
); // TODO: Remove DocumentId's dependency on DocumentsTab
const options: any = { partitionKey: query.resourceId };
return deleteDocument(queriesCollection, documentId)
.then(
() => {
NotificationConsoleUtils.logConsoleInfo(`Successfully deleted query ${query.queryName}`);
return Promise.resolve();
},
(error: any) => {
handleError(error, "deleteQuery", `Failed to delete query ${query.queryName}`);
return Promise.reject(error);
}
)
.finally(() => clearMessage());
}
public getResourceId(): string {
const databaseAccount = userContext.databaseAccount;
const databaseAccountName = (databaseAccount && databaseAccount.name) || "";
const subscriptionId = userContext.subscriptionId || "";
const resourceGroup = userContext.resourceGroup || "";
return `/subscriptions/${subscriptionId}/resourceGroups/${resourceGroup}/providers/Microsoft.DocumentDb/databaseAccounts/${databaseAccountName}`;
}
private findQueriesCollection(): ViewModels.Collection {
const queriesDatabase: ViewModels.Database = _.find(
this.container.databases(),
(database: ViewModels.Database) => database.id() === SavedQueries.DatabaseName
);
if (!queriesDatabase) {
return undefined;
}
return _.find(
queriesDatabase.collections(),
(collection: ViewModels.Collection) => collection.id() === SavedQueries.CollectionName
);
}
private validateQuery(query: DataModels.Query): void {
if (!query || query.queryName == null || query.query == null || query.resourceId == null) {
throw new Error("Invalid query specified");
}
}
private fetchQueriesQuery(): string {
if (this.container.isPreferredApiMongoDB()) {
return QueriesClient.FetchMongoQuery;
}
return QueriesClient.FetchQuery;
}
}


@@ -1,109 +1,107 @@
import * as ko from "knockout";
import { SplitterMetrics } from "./Constants";
export enum SplitterDirection {
Horizontal = "horizontal",
Vertical = "vertical"
Vertical = "vertical",
}
export interface SplitterBounds {
max: number;
min: number;
}
export interface SplitterOptions {
splitterId: string;
leftId: string;
bounds: SplitterBounds;
direction: SplitterDirection;
}
export class Splitter {
public splitterId: string;
public leftSideId: string;
public splitter!: HTMLElement;
public leftSide!: HTMLElement;
public lastX!: number;
public lastWidth!: number;
private isCollapsed: ko.Observable<boolean>;
private bounds: SplitterBounds;
private direction: SplitterDirection;
constructor(options: SplitterOptions) {
this.splitterId = options.splitterId;
this.leftSideId = options.leftId;
this.isCollapsed = ko.observable<boolean>(false);
this.bounds = options.bounds;
this.direction = options.direction;
this.initialize();
}
public initialize() {
if (document.getElementById(this.splitterId) !== null && document.getElementById(this.leftSideId) != null) {
this.splitter = <HTMLElement>document.getElementById(this.splitterId);
this.leftSide = <HTMLElement>document.getElementById(this.leftSideId);
}
const isVerticalSplitter: boolean = this.direction === SplitterDirection.Vertical;
const splitterOptions: JQueryUI.ResizableOptions = {
animate: true,
animateDuration: "fast",
start: this.onResizeStart,
stop: this.onResizeStop
stop: this.onResizeStop,
};
if (isVerticalSplitter) {
$(this.leftSide).css("width", this.bounds.min);
$(this.splitter).css("height", "100%");
splitterOptions.maxWidth = this.bounds.max;
splitterOptions.minWidth = this.bounds.min;
splitterOptions.handles = { e: "#" + this.splitterId };
} else {
$(this.leftSide).css("height", this.bounds.min);
$(this.splitter).css("width", "100%");
splitterOptions.maxHeight = this.bounds.max;
splitterOptions.minHeight = this.bounds.min;
splitterOptions.handles = { s: "#" + this.splitterId };
}
$(this.leftSide).resizable(splitterOptions);
}
private onResizeStart: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => {
if (this.direction === SplitterDirection.Vertical) {
$(".ui-resizable-helper").height("100%");
} else {
$(".ui-resizable-helper").width("100%");
}
$("iframe").css("pointer-events", "none");
};
private onResizeStop: JQueryUI.ResizableEvent = (e: Event, ui: JQueryUI.ResizableUIParams) => {
$("iframe").css("pointer-events", "auto");
};
public collapseLeft() {
this.lastX = $(this.splitter).position().left;
this.lastWidth = $(this.leftSide).width();
$(this.splitter).css("left", SplitterMetrics.CollapsedPositionLeft);
$(this.leftSide).css("width", "");
$(this.leftSide)
.resizable("option", "disabled", true)
.removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.leftSide).resizable("option", "disabled", true).removeClass("ui-resizable-disabled"); // remove class so splitter is visible
$(this.splitter).removeClass("ui-resizable-e");
this.isCollapsed(true);
}
public expandLeft() {
$(this.splitter).addClass("ui-resizable-e");
$(this.leftSide).css("width", this.lastWidth);
$(this.splitter).css("left", this.lastX);
$(this.splitter).css("left", ""); // this ensures the splitter's position is not fixed and enables movement during resizing
$(this.leftSide).resizable("enable");
this.isCollapsed(false);
}
}
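A minimal usage sketch for the Splitter class above (assuming jQuery UI is loaded and both elements exist in the DOM; the element ids are hypothetical):

const splitter = new Splitter({
  splitterId: "paneSplitter", // hypothetical id of the drag-handle element
  leftId: "leftPane", // hypothetical id of the resizable left pane
  bounds: { min: 240, max: 800 }, // width bounds in pixels for a vertical splitter
  direction: SplitterDirection.Vertical,
});
// Collapse the left pane, then restore it to its last width:
splitter.collapseLeft();
splitter.expandLeft();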

View File

@@ -32,8 +32,8 @@ export default class UrlUtility {
type: type,
objectBody: {
id: id,
self: resourcePath
}
self: resourcePath,
},
};
return result;

View File

@@ -14,15 +14,15 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: true,
offerThroughput: 400
offerThroughput: 400,
};
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -40,12 +40,12 @@ describe("createCollection", () => {
return {
database: {
containers: {
create: () => ({})
}
}
create: () => ({}),
},
},
};
}
}
},
},
});
await createCollection(createCollectionParams);
expect(client).toHaveBeenCalled();
@@ -59,7 +59,7 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400
offerThroughput: 400,
};
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -69,12 +69,12 @@ describe("createCollection", () => {
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
autoPilotMaxThroughput: 4000
autoPilotMaxThroughput: 4000,
};
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
autoscaleSettings: {
maxThroughput: 4000
}
maxThroughput: 4000,
},
});
});
});

View File

@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput
offerThroughput: params.offerThroughput,
};
await createDatabase(createDatabaseParams);
}
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields."
message: "Mongo Collection created with wildcard index on all fields.",
});
}
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = {
id: params.collectionId
id: params.collectionId,
};
const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput
}
maxThroughput: params.autoPilotMaxThroughput,
},
};
}
return {
throughput: params.offerThroughput
throughput: params.offerThroughput,
};
};
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl
analyticalStorageTtl: params.analyticalStorageTtl,
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId };

View File

@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions
CreateUpdateOptions,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraKeyspace,
getCassandraKeyspace
getCassandraKeyspace,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBDatabase,
getMongoDBDatabase
getMongoDBDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinDatabase,
getGremlinDatabase
getGremlinDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput
}
maxThroughput: params.autoPilotMaxThroughput,
},
};
}
return {
throughput: params.offerThroughput
throughput: params.offerThroughput,
};
}

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,

View File

@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
@@ -44,8 +44,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,
@@ -59,10 +59,7 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
}
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
return response.resource;
} catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,

View File

@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
return {
container: () => {
return {
delete: (): unknown => undefined
delete: (): unknown => undefined,
};
}
},
};
}
},
});
await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -16,10 +16,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId);
} else {
await client()
.database(databaseId)
.container(collectionId)
.delete();
await client().database(databaseId).container(collectionId).delete();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) {

View File

@@ -10,7 +10,7 @@ export const deleteConflict = async (collection: CollectionBase, conflictId: Con
try {
const options = {
partitionKey: getPartitionKeyHeaderForConflict(conflictId)
partitionKey: getPartitionKeyHeaderForConflict(conflictId),
};
await client()

View File

@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
(client as jest.Mock).mockReturnValue({
database: () => {
return {
delete: (): unknown => undefined
delete: (): unknown => undefined,
};
}
},
});
await deleteDatabase("database");
expect(client).toHaveBeenCalled();

View File

@@ -19,9 +19,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId);
} else {
await client()
.database(databaseId)
.delete();
await client().database(databaseId).delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {

View File

@@ -27,11 +27,7 @@ export async function deleteStoredProcedure(
storedProcedureId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
}
} catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);

View File

@@ -23,11 +23,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
triggerId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
}
} catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);

View File

@@ -23,11 +23,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
id
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
}
} catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);

View File

@@ -33,7 +33,7 @@ export const executeStoredProcedure = async (
);
return {
result: response.resource,
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string,
};
} catch (error) {
handleError(

View File

@@ -60,8 +60,8 @@ export const getCollectionUsageSizeInKB = async (databaseName: string, container
apiVersion: "2018-01-01",
queryParams: {
filter,
metricNames
}
metricNames,
},
});
if (metricsResponse?.value?.length !== 2) {

View File

@@ -11,10 +11,7 @@ export async function getIndexTransformationProgress(databaseId: string, collect
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string

View File

@@ -7,8 +7,5 @@ export const queryConflicts = (
query: string,
options: FeedOptions
): QueryIterator<ConflictDefinition & Resource> => {
return client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return client().database(databaseId).container(containerId).conflicts.query(query, options);
};

View File

@@ -10,10 +10,7 @@ export const queryDocuments = (
options: FeedOptions
): QueryIterator<ItemDefinition & Resource> => {
options = getCommonQueryOptions(options);
return client()
.database(databaseId)
.container(containerId)
.items.query(query, options);
return client().database(databaseId).container(containerId).items.query(query, options);
};
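A minimal consumption sketch for the QueryIterator returned above (inside an async function, with a configured client; the ids and query are hypothetical):

const iterator = queryDocuments("testDatabase", "testContainer", "SELECT * FROM c", { maxItemCount: 100 });
const page = await iterator.fetchNext(); // fetch a single page of results
console.log(`${page.resources.length} documents returned`);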
export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {

View File

@@ -10,9 +10,9 @@ describe("readCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -23,11 +23,11 @@ describe("readCollection", () => {
return {
container: () => {
return {
read: (): unknown => ({})
read: (): unknown => ({}),
};
}
},
};
}
},
});
await readCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -7,10 +7,7 @@ export async function readCollection(databaseId: string, collectionId: string):
let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read();
const response = await client().database(databaseId).container(collectionId).read();
collection = response.resource as DataModels.Collection;
} catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);

View File

@@ -106,7 +106,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -115,7 +115,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

View File

@@ -12,9 +12,9 @@ describe("readCollections", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -32,12 +32,12 @@ describe("readCollections", () => {
containers: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
};
}
},
});
await readCollections("database");
expect(client).toHaveBeenCalled();

View File

@@ -23,10 +23,7 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
return await readCollectionsWithARM(databaseId);
}
const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
@@ -63,5 +60,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
}

View File

@@ -78,7 +78,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -87,7 +87,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

View File

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -30,10 +30,10 @@ describe("readDatabases", () => {
databases: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
});
await readDatabases();
expect(client).toHaveBeenCalled();

View File

@@ -21,9 +21,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
) {
databases = await readDatabasesWithARM();
} else {
const sdkResponse = await client()
.databases.readAll()
.fetchAll();
const sdkResponse = await client().databases.readAll().fetchAll();
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {
@@ -58,5 +56,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
}

View File

@@ -8,7 +8,7 @@ import { readOffers } from "./readOffers";
export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
if (!offerId) {
const offers = await readOffers();
const offer = offers.find(offer => offer.resource === resourceId);
const offer = offers.find((offer) => offer.resource === resourceId);
if (!offer) {
return undefined;
@@ -18,12 +18,10 @@ export const readOfferWithSDK = async (offerId: string, resourceId: string): Pro
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
[HttpHeaders.populateCollectionThroughputInfo]: true,
},
};
const response = await client()
.offer(offerId)
.read(options);
const response = await client().offer(offerId).read(options);
return parseSDKOfferResponse(response);
};

View File

@@ -7,9 +7,7 @@ export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
const clearMessage = logConsoleProgress(`Querying offers`);
try {
const response = await client()
.offers.readAll()
.fetchAll();
const response = await client().offers.readAll().fetchAll();
return response?.resources;
} catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too.

View File

@@ -25,7 +25,7 @@ export async function readStoredProcedures(
databaseId,
collectionId
);
return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
}
const response = await client()

View File

@@ -25,14 +25,10 @@ export async function readTriggers(
databaseId,
collectionId
);
return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
}
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.readAll()
.fetchAll();
const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);

View File

@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
databaseId,
collectionId
);
return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
}
const response = await client()

View File

@@ -8,22 +8,22 @@ import {
MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource,
SqlContainerCreateUpdateParameters,
SqlContainerResource
SqlContainerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils";
@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
const updateParams: MongoDBCollectionCreateUpdateParameters = {
properties: {
resource: newCollection,
options: updateOptions
}
options: updateOptions,
},
};
const updateResponse = await createUpdateMongoDBCollection(

View File

@@ -17,7 +17,7 @@ import {
migrateSqlDatabaseToManualThroughput,
migrateSqlContainerToAutoscale,
migrateSqlContainerToManualThroughput,
updateSqlContainerThroughput
updateSqlContainerThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
updateCassandraKeyspaceThroughput,
@@ -25,7 +25,7 @@ import {
migrateCassandraKeyspaceToManualThroughput,
migrateCassandraTableToAutoscale,
migrateCassandraTableToManualThroughput,
updateCassandraTableThroughput
updateCassandraTableThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
updateMongoDBDatabaseThroughput,
@@ -33,7 +33,7 @@ import {
migrateMongoDBDatabaseToManualThroughput,
migrateMongoDBCollectionToAutoscale,
migrateMongoDBCollectionToManualThroughput,
updateMongoDBCollectionThroughput
updateMongoDBCollectionThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
updateGremlinDatabaseThroughput,
@@ -41,13 +41,13 @@ import {
migrateGremlinDatabaseToManualThroughput,
migrateGremlinGraphToAutoscale,
migrateGremlinGraphToManualThroughput,
updateGremlinGraphThroughput
updateGremlinGraphThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { userContext } from "../../UserContext";
import {
migrateTableToAutoscale,
migrateTableToManualThroughput,
updateTableThroughput
updateTableThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
@@ -110,7 +110,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
return await readCollectionOffer({
collectionId: params.collectionId,
databaseId: params.databaseId,
offerId: params.currentOffer.id
offerId: params.currentOffer.id,
});
};
@@ -140,7 +140,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
return await readDatabaseOffer({
databaseId: params.databaseId,
offerId: params.currentOffer.id
offerId: params.currentOffer.id,
});
};
@@ -358,13 +358,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
const body: ThroughputSettingsUpdateParameters = {
properties: {
resource: {}
}
resource: {},
},
};
if (params.autopilotThroughput) {
body.properties.resource.autoscaleSettings = {
maxThroughput: params.autopilotThroughput
maxThroughput: params.autopilotThroughput,
};
} else {
body.properties.resource.throughput = params.manualThroughput;
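For reference, the two payload shapes createUpdateOfferBody produces (a sketch with hypothetical values):

// params.autopilotThroughput === 4000 yields:
// { properties: { resource: { autoscaleSettings: { maxThroughput: 4000 } } } }
// params.manualThroughput === 400 (no autopilot) yields:
// { properties: { resource: { throughput: 400 } } }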
@@ -378,7 +378,7 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const newOffer: SDKOfferDefinition = {
content: {
offerThroughput: undefined,
offerIsRUPerMinuteThroughputEnabled: false
offerIsRUPerMinuteThroughputEnabled: false,
},
_etag: undefined,
_ts: undefined,
@@ -388,12 +388,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
offerResourceId: sdkOfferDefinition.offerResourceId,
offerVersion: sdkOfferDefinition.offerVersion,
offerType: sdkOfferDefinition.offerType,
resource: sdkOfferDefinition.resource
resource: sdkOfferDefinition.resource,
};
if (params.autopilotThroughput) {
newOffer.content.offerAutopilotSettings = {
maxThroughput: params.autopilotThroughput
maxThroughput: params.autopilotThroughput,
};
} else {
newOffer.content.offerThroughput = params.manualThroughput;
@@ -402,12 +402,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const options: RequestOptions = {};
if (params.migrateToAutoPilot) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToAutopilot]: "true"
[HttpHeaders.migrateOfferToAutopilot]: "true",
};
delete newOffer.content.offerAutopilotSettings;
} else if (params.migrateToManual) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToManualThroughput]: "true"
[HttpHeaders.migrateOfferToManualThroughput]: "true",
};
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
}

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,

View File

@@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { TriggerDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
@@ -36,8 +36,8 @@ export async function updateTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,