mirror of
https://github.com/Azure/cosmos-explorer.git
synced 2025-12-25 11:51:07 +00:00
Changes:
- Implement retry on throttling for NoSQL
- Produce a specific error for throttling errors in mongoProxy bulk delete
- Fix the throttling doc URL; add specific URLs for Mongo and NoSQL
- Improve error message wording and display; add console errors
- Clean up selection of the various delete functions
- Code cleanup, comment improvements, unit test and formatting fixes
91 lines
3.1 KiB
TypeScript
import { BulkOperationType, OperationInput } from "@azure/cosmos";
|
|
import { CollectionBase } from "../../Contracts/ViewModels";
|
|
import DocumentId from "../../Explorer/Tree/DocumentId";
|
|
import { logConsoleInfo, logConsoleProgress } from "../../Utils/NotificationConsoleUtils";
|
|
import { client } from "../CosmosClient";
|
|
import { getEntityName } from "../DocumentUtility";
|
|
import { handleError } from "../ErrorHandlingUtils";
|
|
import { getPartitionKeyValue } from "./getPartitionKeyValue";
|
|
|
|
export const deleteDocument = async (collection: CollectionBase, documentId: DocumentId): Promise<void> => {
|
|
const entityName: string = getEntityName();
|
|
const clearMessage = logConsoleProgress(`Deleting ${entityName} ${documentId.id()}`);
|
|
|
|
try {
|
|
await client()
|
|
.database(collection.databaseId)
|
|
.container(collection.id())
|
|
.item(documentId.id(), getPartitionKeyValue(documentId))
|
|
.delete();
|
|
logConsoleInfo(`Successfully deleted ${entityName} ${documentId.id()}`);
|
|
} catch (error) {
|
|
handleError(error, "DeleteDocument", `Error while deleting ${entityName} ${documentId.id()}`);
|
|
throw error;
|
|
} finally {
|
|
clearMessage();
|
|
}
|
|
};
|
|
|
|
/**
 * Outcome of deleting one document within a bulk delete request.
 * Produced by {@link deleteDocuments}, which pairs each raw bulk operation
 * result with the DocumentId it was issued for.
 */
export interface IBulkDeleteResult {
  /** The document this result refers to (attached by deleteDocuments). */
  documentId: DocumentId;
  /** Request charge (RUs) consumed by this individual operation. */
  requestCharge: number;
  /** HTTP status code returned for this operation by the bulk API. */
  statusCode: number;
  /** Suggested back-off before retrying — presumably set on throttled (429) responses; confirm against the SDK. */
  retryAfterMilliseconds?: number;
}
|
|
|
|
/**
|
|
* Bulk delete documents
|
|
* @param collection
|
|
* @param documentId
|
|
* @returns array of results and status codes
|
|
*/
|
|
export const deleteDocuments = async (
|
|
collection: CollectionBase,
|
|
documentIds: DocumentId[],
|
|
): Promise<IBulkDeleteResult[]> => {
|
|
const clearMessage = logConsoleProgress(`Deleting ${documentIds.length} ${getEntityName(true)}`);
|
|
try {
|
|
const v2Container = await client().database(collection.databaseId).container(collection.id());
|
|
|
|
// Bulk can only delete 100 documents at a time
|
|
const BULK_DELETE_LIMIT = 100;
|
|
const promiseArray = [];
|
|
|
|
while (documentIds.length > 0) {
|
|
const documentIdsChunk = documentIds.splice(0, BULK_DELETE_LIMIT);
|
|
const operations: OperationInput[] = documentIdsChunk.map((documentId) => ({
|
|
id: documentId.id(),
|
|
// bulk delete: if not partition key is specified, do not pass empty array, but undefined
|
|
partitionKey:
|
|
documentId.partitionKeyValue &&
|
|
Array.isArray(documentId.partitionKeyValue) &&
|
|
documentId.partitionKeyValue.length === 0
|
|
? undefined
|
|
: documentId.partitionKeyValue,
|
|
operationType: BulkOperationType.Delete,
|
|
}));
|
|
|
|
const promise = v2Container.items.bulk(operations).then((bulkResults) => {
|
|
return bulkResults.map((bulkResult, index) => {
|
|
const documentId = documentIdsChunk[index];
|
|
return { ...bulkResult, documentId };
|
|
});
|
|
});
|
|
promiseArray.push(promise);
|
|
}
|
|
|
|
const allResult = await Promise.all(promiseArray);
|
|
const flatAllResult = Array.prototype.concat.apply([], allResult);
|
|
return flatAllResult;
|
|
} catch (error) {
|
|
handleError(
|
|
error,
|
|
"DeleteDocuments",
|
|
`Error while deleting ${documentIds.length} ${getEntityName(documentIds.length > 1)}`,
|
|
);
|
|
throw error;
|
|
} finally {
|
|
clearMessage();
|
|
}
|
|
};
|