Prettier 2.0 (#393)

Steve Faulkner · 2021-01-20 09:15:01 -06:00 · committed by GitHub
parent c1937ca464
commit 4be53284b5
500 changed files with 41927 additions and 41838 deletions
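
Nearly all of the churn in this commit is mechanical output from the Prettier 2.0 upgrade rather than hand edits. Two changed defaults account for most of the diff: trailingComma moved from "none" to "es5" (hence the trailing commas added after the last entry of object literals and import lists), and arrowParens moved from "avoid" to "always" (hence collection => becoming (collection) =>). Below is a minimal before/after sketch of those two defaults, using illustrative names rather than code from this repository, and assuming the project does not override these options in its own Prettier config (the config file is not part of the hunks shown here).

// Formatted with Prettier 1.x defaults: trailingComma "none", arrowParens "avoid"
interface CreateParams {
  collectionId: string;
  offerThroughput: number;
}

function collectIdsOld(items: CreateParams[]): string[] {
  const extra: CreateParams = {
    collectionId: "testContainer",
    offerThroughput: 400
  };
  return [...items, extra].map(item => item.collectionId);
}

// The same function as Prettier 2.x prints it: trailingComma "es5", arrowParens "always"
function collectIdsNew(items: CreateParams[]): string[] {
  const extra: CreateParams = {
    collectionId: "testContainer",
    offerThroughput: 400,
  };
  return [...items, extra].map((item) => item.collectionId);
}

The collapsed one-line chains such as client().database(databaseId).container(collectionId).delete() in the hunks below are presumably the result of Prettier 2.0's revised member-chain printing combined with the project's wide print width, which lets short chains fit on a single line.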

View File

@@ -14,15 +14,15 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: true,
offerThroughput: 400
offerThroughput: 400,
};
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -40,12 +40,12 @@ describe("createCollection", () => {
return {
database: {
containers: {
create: () => ({})
}
}
create: () => ({}),
},
},
};
}
}
},
},
});
await createCollection(createCollectionParams);
expect(client).toHaveBeenCalled();
@@ -59,7 +59,7 @@ describe("createCollection", () => {
collectionId: "testContainer",
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400
offerThroughput: 400,
};
expect(constructRpOptions(manualThroughputParams)).toEqual({ throughput: 400 });
@@ -69,12 +69,12 @@ describe("createCollection", () => {
databaseId: "testDatabase",
databaseLevelThroughput: false,
offerThroughput: 400,
autoPilotMaxThroughput: 4000
autoPilotMaxThroughput: 4000,
};
expect(constructRpOptions(autoPilotThroughputParams)).toEqual({
autoscaleSettings: {
maxThroughput: 4000
}
maxThroughput: 4000,
},
});
});
});

View File

@@ -11,15 +11,15 @@ import { createMongoCollectionWithProxy } from "../MongoProxyClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -41,7 +41,7 @@ export const createCollection = async (params: DataModels.CreateCollectionParams
autoPilotMaxThroughput: params.autoPilotMaxThroughput,
databaseId: params.databaseId,
databaseLevelThroughput: params.databaseLevelThroughput,
offerThroughput: params.offerThroughput
offerThroughput: params.offerThroughput,
};
await createDatabase(createDatabaseParams);
}
@@ -100,7 +100,7 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.SqlContainerResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -118,8 +118,8 @@ const createSqlContainer = async (params: DataModels.CreateCollectionParams): Pr
const rpPayload: ARMTypes.SqlDatabaseCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateSqlContainer(
@@ -154,7 +154,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.MongoDBCollectionResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -170,8 +170,8 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.MongoDBCollectionCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateMongoDBCollection(
@@ -185,7 +185,7 @@ const createMongoCollection = async (params: DataModels.CreateCollectionParams):
if (params.createMongoWildcardIndex) {
TelemetryProcessor.trace(Action.CreateMongoCollectionWithWildcardIndex, ActionModifiers.Mark, {
message: "Mongo Collection created with wildcard index on all fields."
message: "Mongo Collection created with wildcard index on all fields.",
});
}
@@ -212,7 +212,7 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.CassandraTableResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.analyticalStorageTtl) {
resource.analyticalStorageTtl = params.analyticalStorageTtl;
@@ -221,8 +221,8 @@ const createCassandraTable = async (params: DataModels.CreateCollectionParams):
const rpPayload: ARMTypes.CassandraTableCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateCassandraTable(
@@ -256,7 +256,7 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.GremlinGraphResource = {
id: params.collectionId
id: params.collectionId,
};
if (params.indexingPolicy) {
@@ -272,8 +272,8 @@ const createGraph = async (params: DataModels.CreateCollectionParams): Promise<D
const rpPayload: ARMTypes.GremlinGraphCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateGremlinGraph(
@@ -306,14 +306,14 @@ const createTable = async (params: DataModels.CreateCollectionParams): Promise<D
const options: ARMTypes.CreateUpdateOptions = constructRpOptions(params);
const resource: ARMTypes.TableResource = {
id: params.collectionId
id: params.collectionId,
};
const rpPayload: ARMTypes.TableCreateUpdateParameters = {
properties: {
resource,
options
}
options,
},
};
const createResponse = await createUpdateTable(
@@ -334,13 +334,13 @@ export const constructRpOptions = (params: DataModels.CreateDatabaseParams): ARM
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput
}
maxThroughput: params.autoPilotMaxThroughput,
},
};
}
return {
throughput: params.offerThroughput
throughput: params.offerThroughput,
};
};
@@ -350,7 +350,7 @@ const createCollectionWithSDK = async (params: DataModels.CreateCollectionParams
partitionKey: params.partitionKey || undefined,
indexingPolicy: params.indexingPolicy || undefined,
uniqueKeyPolicy: params.uniqueKeyPolicy || undefined,
analyticalStorageTtl: params.analyticalStorageTtl
analyticalStorageTtl: params.analyticalStorageTtl,
} as ContainerRequest; // TODO: remove cast when https://github.com/Azure/azure-cosmos-js/issues/423 is fixed
const collectionOptions: RequestOptions = {};
const createDatabaseBody: DatabaseRequest = { id: params.databaseId };

View File

@@ -8,21 +8,21 @@ import {
GremlinDatabaseCreateUpdateParameters,
MongoDBDatabaseCreateUpdateParameters,
SqlDatabaseCreateUpdateParameters,
CreateUpdateOptions
CreateUpdateOptions,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlDatabase, getSqlDatabase } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraKeyspace,
getCassandraKeyspace
getCassandraKeyspace,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBDatabase,
getMongoDBDatabase
getMongoDBDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinDatabase,
getGremlinDatabase
getGremlinDatabase,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress, logConsoleInfo } from "../../Utils/NotificationConsoleUtils";
@@ -85,10 +85,10 @@ async function createSqlDatabase(params: DataModels.CreateDatabaseParams): Promi
const rpPayload: SqlDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateSqlDatabase(
userContext.subscriptionId,
@@ -121,10 +121,10 @@ async function createMongoDatabase(params: DataModels.CreateDatabaseParams): Pro
const rpPayload: MongoDBDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateMongoDBDatabase(
userContext.subscriptionId,
@@ -157,10 +157,10 @@ async function createCassandraKeyspace(params: DataModels.CreateDatabaseParams):
const rpPayload: CassandraKeyspaceCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateCassandraKeyspace(
userContext.subscriptionId,
@@ -193,10 +193,10 @@ async function createGremlineDatabase(params: DataModels.CreateDatabaseParams):
const rpPayload: GremlinDatabaseCreateUpdateParameters = {
properties: {
resource: {
id: params.databaseId
id: params.databaseId,
},
options
}
options,
},
};
const createResponse = await createUpdateGremlinDatabase(
userContext.subscriptionId,
@@ -231,12 +231,12 @@ function constructRpOptions(params: DataModels.CreateDatabaseParams): CreateUpda
if (params.autoPilotMaxThroughput) {
return {
autoscaleSettings: {
maxThroughput: params.autoPilotMaxThroughput
}
maxThroughput: params.autoPilotMaxThroughput,
},
};
}
return {
throughput: params.offerThroughput
throughput: params.offerThroughput,
};
}

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,

View File

@@ -3,7 +3,7 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, TriggerDefinition } from "@azure/cosmos";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import { createUpdateSqlTrigger, getSqlTrigger } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
@@ -44,8 +44,8 @@ export async function createTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,
@@ -59,10 +59,7 @@ export async function createTrigger(
return rpResponse && (rpResponse.properties?.resource as TriggerDefinition & Resource);
}
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.create(trigger);
const response = await client().database(databaseId).container(collectionId).scripts.triggers.create(trigger);
return response.resource;
} catch (error) {
handleError(error, "CreateTrigger", `Error while creating trigger ${trigger.id}`);

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -49,8 +49,8 @@ export async function createUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,

View File

@@ -13,9 +13,9 @@ describe("deleteCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -32,11 +32,11 @@ describe("deleteCollection", () => {
return {
container: () => {
return {
delete: (): unknown => undefined
delete: (): unknown => undefined,
};
}
},
};
}
},
});
await deleteCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -16,10 +16,7 @@ export async function deleteCollection(databaseId: string, collectionId: string)
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteCollectionWithARM(databaseId, collectionId);
} else {
await client()
.database(databaseId)
.container(collectionId)
.delete();
await client().database(databaseId).container(collectionId).delete();
}
logConsoleInfo(`Successfully deleted container ${collectionId}`);
} catch (error) {

View File

@@ -10,7 +10,7 @@ export const deleteConflict = async (collection: CollectionBase, conflictId: Con
try {
const options = {
partitionKey: getPartitionKeyHeaderForConflict(conflictId)
partitionKey: getPartitionKeyHeaderForConflict(conflictId),
};
await client()

View File

@@ -13,9 +13,9 @@ describe("deleteDatabase", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -30,9 +30,9 @@ describe("deleteDatabase", () => {
(client as jest.Mock).mockReturnValue({
database: () => {
return {
delete: (): unknown => undefined
delete: (): unknown => undefined,
};
}
},
});
await deleteDatabase("database");
expect(client).toHaveBeenCalled();

View File

@@ -19,9 +19,7 @@ export async function deleteDatabase(databaseId: string): Promise<void> {
if (window.authType === AuthType.AAD && !userContext.useSDKOperations) {
await deleteDatabaseWithARM(databaseId);
} else {
await client()
.database(databaseId)
.delete();
await client().database(databaseId).delete();
}
logConsoleInfo(`Successfully deleted database ${databaseId}`);
} catch (error) {

View File

@@ -27,11 +27,7 @@ export async function deleteStoredProcedure(
storedProcedureId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.storedProcedure(storedProcedureId)
.delete();
await client().database(databaseId).container(collectionId).scripts.storedProcedure(storedProcedureId).delete();
}
} catch (error) {
handleError(error, "DeleteStoredProcedure", `Error while deleting stored procedure ${storedProcedureId}`);

View File

@@ -23,11 +23,7 @@ export async function deleteTrigger(databaseId: string, collectionId: string, tr
triggerId
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.trigger(triggerId)
.delete();
await client().database(databaseId).container(collectionId).scripts.trigger(triggerId).delete();
}
} catch (error) {
handleError(error, "DeleteTrigger", `Error while deleting trigger ${triggerId}`);

View File

@@ -23,11 +23,7 @@ export async function deleteUserDefinedFunction(databaseId: string, collectionId
id
);
} else {
await client()
.database(databaseId)
.container(collectionId)
.scripts.userDefinedFunction(id)
.delete();
await client().database(databaseId).container(collectionId).scripts.userDefinedFunction(id).delete();
}
} catch (error) {
handleError(error, "DeleteUserDefinedFunction", `Error while deleting user defined function ${id}`);

View File

@@ -33,7 +33,7 @@ export const executeStoredProcedure = async (
);
return {
result: response.resource,
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string
scriptLogs: response.headers[HttpHeaders.scriptLogResults] as string,
};
} catch (error) {
handleError(

View File

@@ -60,8 +60,8 @@ export const getCollectionUsageSizeInKB = async (databaseName: string, container
apiVersion: "2018-01-01",
queryParams: {
filter,
metricNames
}
metricNames,
},
});
if (metricsResponse?.value?.length !== 2) {

View File

@@ -11,10 +11,7 @@ export async function getIndexTransformationProgress(databaseId: string, collect
let indexTransformationPercentage: number;
const clearMessage = logConsoleProgress(`Reading container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read({ populateQuotaInfo: true });
const response = await client().database(databaseId).container(collectionId).read({ populateQuotaInfo: true });
indexTransformationPercentage = parseInt(
response.headers[Constants.HttpHeaders.collectionIndexTransformationProgress] as string

View File

@@ -7,8 +7,5 @@ export const queryConflicts = (
query: string,
options: FeedOptions
): QueryIterator<ConflictDefinition & Resource> => {
return client()
.database(databaseId)
.container(containerId)
.conflicts.query(query, options);
return client().database(databaseId).container(containerId).conflicts.query(query, options);
};

View File

@@ -10,10 +10,7 @@ export const queryDocuments = (
options: FeedOptions
): QueryIterator<ItemDefinition & Resource> => {
options = getCommonQueryOptions(options);
return client()
.database(databaseId)
.container(containerId)
.items.query(query, options);
return client().database(databaseId).container(containerId).items.query(query, options);
};
export const getCommonQueryOptions = (options: FeedOptions): FeedOptions => {

View File

@@ -10,9 +10,9 @@ describe("readCollection", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -23,11 +23,11 @@ describe("readCollection", () => {
return {
container: () => {
return {
read: (): unknown => ({})
read: (): unknown => ({}),
};
}
},
};
}
},
});
await readCollection("database", "collection");
expect(client).toHaveBeenCalled();

View File

@@ -7,10 +7,7 @@ export async function readCollection(databaseId: string, collectionId: string):
let collection: DataModels.Collection;
const clearMessage = logConsoleProgress(`Querying container ${collectionId}`);
try {
const response = await client()
.database(databaseId)
.container(collectionId)
.read();
const response = await client().database(databaseId).container(collectionId).read();
collection = response.resource as DataModels.Collection;
} catch (error) {
handleError(error, "ReadCollection", `Error while querying container ${collectionId}`);

View File

@@ -106,7 +106,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -115,7 +115,7 @@ const readCollectionOfferWithARM = async (databaseId: string, collectionId: stri
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

View File

@@ -12,9 +12,9 @@ describe("readCollections", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -32,12 +32,12 @@ describe("readCollections", () => {
containers: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
};
}
},
});
await readCollections("database");
expect(client).toHaveBeenCalled();

View File

@@ -23,10 +23,7 @@ export async function readCollections(databaseId: string): Promise<DataModels.Co
return await readCollectionsWithARM(databaseId);
}
const sdkResponse = await client()
.database(databaseId)
.containers.readAll()
.fetchAll();
const sdkResponse = await client().database(databaseId).containers.readAll().fetchAll();
return sdkResponse.resources as DataModels.Collection[];
} catch (error) {
handleError(error, "ReadCollections", `Error while querying containers for database ${databaseId}`);
@@ -63,5 +60,5 @@ async function readCollectionsWithARM(databaseId: string): Promise<DataModels.Co
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map(collection => collection.properties?.resource as DataModels.Collection);
return rpResponse?.value?.map((collection) => collection.properties?.resource as DataModels.Collection);
}

View File

@@ -78,7 +78,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: autoscaleSettings.maxThroughput,
manualThroughput: undefined,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}
@@ -87,7 +87,7 @@ const readDatabaseOfferWithARM = async (databaseId: string): Promise<Offer> => {
autoscaleMaxThroughput: undefined,
manualThroughput: resource.throughput,
minimumThroughput,
offerReplacePending: resource.offerReplacePending === "true"
offerReplacePending: resource.offerReplacePending === "true",
};
}

View File

@@ -12,9 +12,9 @@ describe("readDatabases", () => {
beforeAll(() => {
updateUserContext({
databaseAccount: {
name: "test"
name: "test",
} as DatabaseAccount,
defaultExperience: DefaultAccountExperienceType.DocumentDB
defaultExperience: DefaultAccountExperienceType.DocumentDB,
});
});
@@ -30,10 +30,10 @@ describe("readDatabases", () => {
databases: {
readAll: () => {
return {
fetchAll: (): unknown => []
fetchAll: (): unknown => [],
};
}
}
},
},
});
await readDatabases();
expect(client).toHaveBeenCalled();

View File

@@ -21,9 +21,7 @@ export async function readDatabases(): Promise<DataModels.Database[]> {
) {
databases = await readDatabasesWithARM();
} else {
const sdkResponse = await client()
.databases.readAll()
.fetchAll();
const sdkResponse = await client().databases.readAll().fetchAll();
databases = sdkResponse.resources as DataModels.Database[];
}
} catch (error) {
@@ -58,5 +56,5 @@ async function readDatabasesWithARM(): Promise<DataModels.Database[]> {
throw new Error(`Unsupported default experience type: ${defaultExperience}`);
}
return rpResponse?.value?.map(database => database.properties?.resource as DataModels.Database);
return rpResponse?.value?.map((database) => database.properties?.resource as DataModels.Database);
}

View File

@@ -8,7 +8,7 @@ import { readOffers } from "./readOffers";
export const readOfferWithSDK = async (offerId: string, resourceId: string): Promise<Offer> => {
if (!offerId) {
const offers = await readOffers();
const offer = offers.find(offer => offer.resource === resourceId);
const offer = offers.find((offer) => offer.resource === resourceId);
if (!offer) {
return undefined;
@@ -18,12 +18,10 @@ export const readOfferWithSDK = async (offerId: string, resourceId: string): Pro
const options: RequestOptions = {
initialHeaders: {
[HttpHeaders.populateCollectionThroughputInfo]: true
}
[HttpHeaders.populateCollectionThroughputInfo]: true,
},
};
const response = await client()
.offer(offerId)
.read(options);
const response = await client().offer(offerId).read(options);
return parseSDKOfferResponse(response);
};

View File

@@ -7,9 +7,7 @@ export const readOffers = async (): Promise<SDKOfferDefinition[]> => {
const clearMessage = logConsoleProgress(`Querying offers`);
try {
const response = await client()
.offers.readAll()
.fetchAll();
const response = await client().offers.readAll().fetchAll();
return response?.resources;
} catch (error) {
// This should be removed when we can correctly identify if an account is serverless when connected using connection string too.

View File

@@ -25,7 +25,7 @@ export async function readStoredProcedures(
databaseId,
collectionId
);
return rpResponse?.value?.map(sproc => sproc.properties?.resource as StoredProcedureDefinition & Resource);
return rpResponse?.value?.map((sproc) => sproc.properties?.resource as StoredProcedureDefinition & Resource);
}
const response = await client()

View File

@@ -25,14 +25,10 @@ export async function readTriggers(
databaseId,
collectionId
);
return rpResponse?.value?.map(trigger => trigger.properties?.resource as TriggerDefinition & Resource);
return rpResponse?.value?.map((trigger) => trigger.properties?.resource as TriggerDefinition & Resource);
}
const response = await client()
.database(databaseId)
.container(collectionId)
.scripts.triggers.readAll()
.fetchAll();
const response = await client().database(databaseId).container(collectionId).scripts.triggers.readAll().fetchAll();
return response?.resources;
} catch (error) {
handleError(error, "ReadTriggers", `Failed to query triggers for container ${collectionId}`);

View File

@@ -25,7 +25,7 @@ export async function readUserDefinedFunctions(
databaseId,
collectionId
);
return rpResponse?.value?.map(udf => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
return rpResponse?.value?.map((udf) => udf.properties?.resource as UserDefinedFunctionDefinition & Resource);
}
const response = await client()

View File

@@ -8,22 +8,22 @@ import {
MongoDBCollectionCreateUpdateParameters,
MongoDBCollectionResource,
SqlContainerCreateUpdateParameters,
SqlContainerResource
SqlContainerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { RequestOptions } from "@azure/cosmos/dist-esm";
import { client } from "../CosmosClient";
import { createUpdateSqlContainer, getSqlContainer } from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
createUpdateCassandraTable,
getCassandraTable
getCassandraTable,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
createUpdateMongoDBCollection,
getMongoDBCollection
getMongoDBCollection,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
createUpdateGremlinGraph,
getGremlinGraph
getGremlinGraph,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { createUpdateTable, getTable } from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
import { handleError } from "../ErrorHandlingUtils";
@@ -130,8 +130,8 @@ export async function updateMongoDBCollectionThroughRP(
const updateParams: MongoDBCollectionCreateUpdateParameters = {
properties: {
resource: newCollection,
options: updateOptions
}
options: updateOptions,
},
};
const updateResponse = await createUpdateMongoDBCollection(

View File

@@ -17,7 +17,7 @@ import {
migrateSqlDatabaseToManualThroughput,
migrateSqlContainerToAutoscale,
migrateSqlContainerToManualThroughput,
updateSqlContainerThroughput
updateSqlContainerThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import {
updateCassandraKeyspaceThroughput,
@@ -25,7 +25,7 @@ import {
migrateCassandraKeyspaceToManualThroughput,
migrateCassandraTableToAutoscale,
migrateCassandraTableToManualThroughput,
updateCassandraTableThroughput
updateCassandraTableThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/cassandraResources";
import {
updateMongoDBDatabaseThroughput,
@@ -33,7 +33,7 @@ import {
migrateMongoDBDatabaseToManualThroughput,
migrateMongoDBCollectionToAutoscale,
migrateMongoDBCollectionToManualThroughput,
updateMongoDBCollectionThroughput
updateMongoDBCollectionThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/mongoDBResources";
import {
updateGremlinDatabaseThroughput,
@@ -41,13 +41,13 @@ import {
migrateGremlinDatabaseToManualThroughput,
migrateGremlinGraphToAutoscale,
migrateGremlinGraphToManualThroughput,
updateGremlinGraphThroughput
updateGremlinGraphThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/gremlinResources";
import { userContext } from "../../UserContext";
import {
migrateTableToAutoscale,
migrateTableToManualThroughput,
updateTableThroughput
updateTableThroughput,
} from "../../Utils/arm/generatedClients/2020-04-01/tableResources";
export const updateOffer = async (params: UpdateOfferParams): Promise<Offer> => {
@@ -110,7 +110,7 @@ const updateCollectionOfferWithARM = async (params: UpdateOfferParams): Promise<
return await readCollectionOffer({
collectionId: params.collectionId,
databaseId: params.databaseId,
offerId: params.currentOffer.id
offerId: params.currentOffer.id,
});
};
@@ -140,7 +140,7 @@ const updateDatabaseOfferWithARM = async (params: UpdateOfferParams): Promise<Of
return await readDatabaseOffer({
databaseId: params.databaseId,
offerId: params.currentOffer.id
offerId: params.currentOffer.id,
});
};
@@ -358,13 +358,13 @@ const updateGremlinDatabaseOffer = async (params: UpdateOfferParams): Promise<vo
const createUpdateOfferBody = (params: UpdateOfferParams): ThroughputSettingsUpdateParameters => {
const body: ThroughputSettingsUpdateParameters = {
properties: {
resource: {}
}
resource: {},
},
};
if (params.autopilotThroughput) {
body.properties.resource.autoscaleSettings = {
maxThroughput: params.autopilotThroughput
maxThroughput: params.autopilotThroughput,
};
} else {
body.properties.resource.throughput = params.manualThroughput;
@@ -378,7 +378,7 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const newOffer: SDKOfferDefinition = {
content: {
offerThroughput: undefined,
offerIsRUPerMinuteThroughputEnabled: false
offerIsRUPerMinuteThroughputEnabled: false,
},
_etag: undefined,
_ts: undefined,
@@ -388,12 +388,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
offerResourceId: sdkOfferDefinition.offerResourceId,
offerVersion: sdkOfferDefinition.offerVersion,
offerType: sdkOfferDefinition.offerType,
resource: sdkOfferDefinition.resource
resource: sdkOfferDefinition.resource,
};
if (params.autopilotThroughput) {
newOffer.content.offerAutopilotSettings = {
maxThroughput: params.autopilotThroughput
maxThroughput: params.autopilotThroughput,
};
} else {
newOffer.content.offerThroughput = params.manualThroughput;
@@ -402,12 +402,12 @@ const updateOfferWithSDK = async (params: UpdateOfferParams): Promise<Offer> =>
const options: RequestOptions = {};
if (params.migrateToAutoPilot) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToAutopilot]: "true"
[HttpHeaders.migrateOfferToAutopilot]: "true",
};
delete newOffer.content.offerAutopilotSettings;
} else if (params.migrateToManual) {
options.initialHeaders = {
[HttpHeaders.migrateOfferToManualThroughput]: "true"
[HttpHeaders.migrateOfferToManualThroughput]: "true",
};
newOffer.content.offerAutopilotSettings = { maxThroughput: 0 };
}

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, StoredProcedureDefinition } from "@azure/cosmos";
import {
SqlStoredProcedureCreateUpdateParameters,
SqlStoredProcedureResource
SqlStoredProcedureResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlStoredProcedure,
getSqlStoredProcedure
getSqlStoredProcedure,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateStoredProcedure(
const createSprocParams: SqlStoredProcedureCreateUpdateParameters = {
properties: {
resource: storedProcedure as SqlStoredProcedureResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlStoredProcedure(
userContext.subscriptionId,

View File

@@ -2,7 +2,7 @@ import { AuthType } from "../../AuthType";
import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType";
import {
SqlTriggerCreateUpdateParameters,
SqlTriggerResource
SqlTriggerResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { TriggerDefinition } from "@azure/cosmos";
import { client } from "../CosmosClient";
@@ -36,8 +36,8 @@ export async function updateTrigger(
const createTriggerParams: SqlTriggerCreateUpdateParameters = {
properties: {
resource: trigger as SqlTriggerResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlTrigger(
userContext.subscriptionId,

View File

@@ -3,12 +3,12 @@ import { DefaultAccountExperienceType } from "../../DefaultAccountExperienceType
import { Resource, UserDefinedFunctionDefinition } from "@azure/cosmos";
import {
SqlUserDefinedFunctionCreateUpdateParameters,
SqlUserDefinedFunctionResource
SqlUserDefinedFunctionResource,
} from "../../Utils/arm/generatedClients/2020-04-01/types";
import { client } from "../CosmosClient";
import {
createUpdateSqlUserDefinedFunction,
getSqlUserDefinedFunction
getSqlUserDefinedFunction,
} from "../../Utils/arm/generatedClients/2020-04-01/sqlResources";
import { handleError } from "../ErrorHandlingUtils";
import { logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
@@ -39,8 +39,8 @@ export async function updateUserDefinedFunction(
const createUDFParams: SqlUserDefinedFunctionCreateUpdateParameters = {
properties: {
resource: userDefinedFunction as SqlUserDefinedFunctionResource,
options: {}
}
options: {},
},
};
const rpResponse = await createUpdateSqlUserDefinedFunction(
userContext.subscriptionId,