add copy of cosmos node.js SDK as local dependency

This commit is contained in:
Theo van Kraay
2024-08-31 16:44:42 +01:00
committed by Chris Anderson
parent ff1e733679
commit ca396cdfbe
1017 changed files with 39434 additions and 19 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2020 Microsoft
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,571 @@
# Azure Cosmos DB client library for JavaScript/TypeScript
[![latest npm badge](https://img.shields.io/npm/v/%40azure%2Fcosmos/latest.svg)][npm]
[![Build Status](https://dev.azure.com/azure-sdk/public/_apis/build/status/js/js%20-%20cosmosdb%20-%20ci?branchName=main)](https://dev.azure.com/azure-sdk/public/_build/latest?definitionId=850&branchName=main)
Azure Cosmos DB is a globally distributed, multi-model database service that supports document, key-value, wide-column, and graph databases. This package is intended for JavaScript/TypeScript applications to interact with **SQL API** databases and the JSON documents they contain:
- Create Cosmos DB databases and modify their settings
- Create and modify containers to store collections of JSON documents
- Create, read, update, and delete the items (JSON documents) in your containers
- Query the documents in your database using SQL-like syntax
Key links:
- [Package (npm)][npm]
- [API reference documentation](https://docs.microsoft.com/javascript/api/@azure/cosmos/?view=azure-node-latest)
- [Product documentation][cosmos_docs]
## Getting started
### Prerequisites
#### Azure Subscription and Cosmos DB SQL API Account
You must have an [Azure Subscription][azure_sub], and a [Cosmos DB account][cosmos_account] (SQL API) to use this package.
If you need a Cosmos DB SQL API account, you can use the Azure [Cloud Shell][cloud_shell_bash] to create one with this Azure CLI command:
```Bash
az cosmosdb create --resource-group <resource-group-name> --name <cosmos-database-account-name>
```
Or you can create an account in the [Azure Portal](https://portal.azure.com/#create/microsoft.documentdb)
#### NodeJS
This package is distributed via [npm][npm] which comes preinstalled with [NodeJS](https://nodejs.org/en/). You should be using Node v10 or above.
#### CORS
You need to set up [Cross-Origin Resource Sharing (CORS)](https://docs.microsoft.com/azure/cosmos-db/how-to-configure-cross-origin-resource-sharing) rules for your Cosmos DB account if you need to develop for browsers. Follow the instructions in the linked document to create new CORS rules for your Cosmos DB.
### Install this package
```Bash
npm install @azure/cosmos
```
### Get Account Credentials
You will need your Cosmos DB **Account Endpoint** and **Key**. You can find these in the [Azure Portal](https://portal.azure.com/#blade/hubsextension/browseresource/resourcetype/microsoft.documentdb%2fdatabaseaccounts) or use the [Azure CLI][azure_cli] snippet below. The snippet is formatted for the Bash shell.
```Bash
az cosmosdb show --resource-group <your-resource-group> --name <your-account-name> --query documentEndpoint --output tsv
az cosmosdb keys list --resource-group <your-resource-group> --name <your-account-name> --query primaryMasterKey --output tsv
```
### Create an instance of `CosmosClient`
Interaction with Cosmos DB starts with an instance of the [CosmosClient](https://docs.microsoft.com/javascript/api/@azure/cosmos/cosmosclient?view=azure-node-latest) class
```js
const { CosmosClient } = require("@azure/cosmos");
const endpoint = "https://your-account.documents.azure.com";
const key = "<database account masterkey>";
const client = new CosmosClient({ endpoint, key });
async function main() {
// The rest of the README samples are designed to be pasted into this function body
}
main().catch((error) => {
console.error(error);
});
```
For simplicity we have included the `key` and `endpoint` directly in the code but you will likely want to load these from a file not in source control using a project such as [dotenv](https://www.npmjs.com/package/dotenv) or loading from environment variables
In production environments, secrets like keys should be stored in [Azure Key Vault](https://azure.microsoft.com/services/key-vault/)
## Key concepts
Once you've initialized a [CosmosClient](https://docs.microsoft.com/javascript/api/@azure/cosmos/cosmosclient?view=azure-node-latest), you can interact with the primary resource types in Cosmos DB:
- [Database](https://docs.microsoft.com/javascript/api/@azure/cosmos/database?view=azure-node-latest): A Cosmos DB account can contain multiple databases. When you create a database, you specify the API you'd like to use when interacting with its documents: SQL, MongoDB, Gremlin, Cassandra, or Azure Table. Use the [Database](https://docs.microsoft.com/javascript/api/@azure/cosmos/database?view=azure-node-latest) object to manage its containers.
- [Container](https://docs.microsoft.com/javascript/api/@azure/cosmos/container?view=azure-node-latest): A container is a collection of JSON documents. You create (insert), read, update, and delete items in a container by using methods on the [Container](https://docs.microsoft.com/javascript/api/@azure/cosmos/container?view=azure-node-latest) object.
- [Item](https://docs.microsoft.com/javascript/api/@azure/cosmos/item?view=azure-node-latest): An Item is a JSON document stored in a container. Each Item must include an `id` key with a value that uniquely identifies the item within the container. If you do not provide an `id`, the SDK will generate one automatically.
For more information about these resources, see [Working with Azure Cosmos databases, containers and items][cosmos_resources].
## Examples
The following sections provide several code snippets covering some of the most common Cosmos DB tasks, including:
- [Create a database](#create-a-database)
- [Create a container](#create-a-container)
- [Using Partition Keys](#using-partition-keys)
- [Insert items](#insert-items)
- [Query documents](#query-the-database)
- [Read an item](#read-an-item)
- [Delete an item](#delete-an-item)
- [CRUD on Container with hierarchical partition key](#container-hierarchical-partition-key)
### Create a database
After authenticating your [CosmosClient](https://docs.microsoft.com/javascript/api/@azure/cosmos/cosmosclient?view=azure-node-latest), you can work with any resource in the account. The code snippet below creates a NOSQL API database.
```js
const { database } = await client.databases.createIfNotExists({ id: "Test Database" });
console.log(database.id);
```
### Create a container
This example creates a container with default settings
```js
const { container } = await database.containers.createIfNotExists({ id: "Test Container" });
console.log(container.id);
```
### Using Partition Keys
This example shows various types of partition Keys supported.
```js
await container.item("id", "1").read(); // string type
await container.item("id", 2).read(); // number type
await container.item("id", true).read(); // boolean type
await container.item("id", {}).read(); // None type
await container.item("id", undefined).read(); // None type
await container.item("id", null).read(); // null type
```
If the Partition Key consists of a single value, it could be supplied either as a literal value, or an array.
```js
await container.item("id", "1").read();
await container.item("id", ["1"]).read();
```
If the Partition Key consists of more than one value, it should be supplied as an array.
```js
await container.item("id", ["a", "b"]).read();
await container.item("id", ["a", 2]).read();
await container.item("id", [{}, {}]).read();
await container.item("id", ["a", {}]).read();
await container.item("id", [2, null]).read();
```
### Insert items
To insert items into a container, pass an object containing your data to [Items.upsert](https://docs.microsoft.com/javascript/api/@azure/cosmos/items?view=azure-node-latest#upsert-t--requestoptions-). The Azure Cosmos DB service requires each item has an `id` key. If you do not provide one, the SDK will generate an `id` automatically.
This example inserts several items into the container
```js
const cities = [
{ id: "1", name: "Olympia", state: "WA", isCapitol: true },
{ id: "2", name: "Redmond", state: "WA", isCapitol: false },
{ id: "3", name: "Chicago", state: "IL", isCapitol: false }
];
for (const city of cities) {
await container.items.create(city);
}
```
### Read an item
To read a single item from a container, use [Item.read](https://docs.microsoft.com/javascript/api/@azure/cosmos/item?view=azure-node-latest#read-requestoptions-). This is a less expensive operation than using SQL to query by `id`.
```js
await container.item("1", "1").read();
```
### CRUD on Container with hierarchical partition key
Create a Container with hierarchical partition key
```js
const containerDefinition = {
id: "Test Database",
partitionKey: {
paths: ["/name", "/address/zip"],
version: PartitionKeyDefinitionVersion.V2,
kind: PartitionKeyKind.MultiHash,
},
}
const { container } = await database.containers.createIfNotExists(containerDefinition);
console.log(container.id);
```
Insert an item with hierarchical partition key defined as - `["/name", "/address/zip"]`
```js
const item = {
id: "1",
name: 'foo',
address: {
zip: 100
},
active: true
}
await container.items.create(item);
```
To read a single item from a container with hierarchical partition key defined as - `["/name", "/address/zip"],`
```js
await container.item("1", ["foo", 100]).read();
```
Query an item with hierarchical partition key with hierarchical partition key defined as - `["/name", "/address/zip"],`
```js
const { resources } = await container.items
.query("SELECT * from c WHERE c.active = true", {
partitionKey: ["foo", 100],
})
.fetchAll();
for (const item of resources) {
console.log(`${item.name}, ${item.address.zip} `);
}
```
### Delete an item
To delete items from a container, use [Item.delete](https://docs.microsoft.com/javascript/api/@azure/cosmos/item?view=azure-node-latest#delete-requestoptions-).
```js
// Delete the first item returned by the query above
await container.item("1").delete();
```
### Query the database
A Cosmos DB SQL API database supports querying the items in a container with [Items.query](https://docs.microsoft.com/javascript/api/@azure/cosmos/items?view=azure-node-latest#query-string---sqlqueryspec--feedoptions-) using SQL-like syntax:
```js
const { resources } = await container.items
.query("SELECT * from c WHERE c.isCapitol = true")
.fetchAll();
for (const city of resources) {
console.log(`${city.name}, ${city.state} is a capitol `);
}
```
Perform parameterized queries by passing an object containing the parameters and their values to [Items.query](https://docs.microsoft.com/javascript/api/@azure/cosmos/items?view=azure-node-latest#query-string---sqlqueryspec--feedoptions-):
```js
const { resources } = await container.items
.query({
query: "SELECT * from c WHERE c.isCapitol = @isCapitol",
parameters: [{ name: "@isCapitol", value: true }]
})
.fetchAll();
for (const city of resources) {
console.log(`${city.name}, ${city.state} is a capitol `);
}
```
For more information on querying Cosmos DB databases using the SQL API, see [Query Azure Cosmos DB data with SQL queries][cosmos_sql_queries].
### Change Feed Pull Model
Change feed can be fetched for a partition key, a feed range or an entire container.
To process the change feed, create an instance of `ChangeFeedPullModelIterator`. When you initially create `ChangeFeedPullModelIterator`, you must specify a required `changeFeedStartFrom` value inside the `ChangeFeedIteratorOptions` which consists of both the starting position for reading changes and the resource (a partition key or a FeedRange) for which changes are to be fetched. You can optionally use `maxItemCount` in `ChangeFeedIteratorOptions` to set the maximum number of items received per page.
Note: If no `changeFeedStartFrom` value is specified, then changefeed will be fetched for an entire container from Now().
There are four starting positions for change feed:
- `Beginning`
```js
// Signals the iterator to read changefeed from the beginning of time.
const options = {
changeFeedStartFrom: ChangeFeedStartFrom.Beginning(),
};
const iterator = container.getChangeFeedIterator(options);
```
- `Time`
```js
// Signals the iterator to read changefeed from a particular point of time.
const time = new Date("2023/09/11"); // some sample date
const options = {
changeFeedStartFrom: ChangeFeedStartFrom.Time(time),
};
```
- `Now`
```js
// Signals the iterator to read changefeed from this moment onward.
const options = {
changeFeedStartFrom: ChangeFeedStartFrom.Now(),
};
```
- `Continuation`
```js
// Signals the iterator to read changefeed from a saved point.
const continuationToken = "some continuation token received from previous request";
const options = {
changeFeedStartFrom: ChangeFeedStartFrom.Continuation(continuationToken),
};
```
Here's an example of fetching change feed for a partition key
```js
const partitionKey = "some-partition-Key-value";
const options = {
changeFeedStartFrom: ChangeFeedStartFrom.Beginning(partitionKey),
};
const iterator = container.items.getChangeFeedIterator(options);
while (iterator.hasMoreResults) {
const response = await iterator.readNext();
// process this response
}
```
Because the change feed is effectively an infinite list of items that encompasses all future writes and updates, the value of `hasMoreResults` is always `true`. When you try to read the change feed and there are no new changes available, you receive a response with `NotModified` status.
More detailed usage guidelines and examples of change feed can be found [here](https://learn.microsoft.com/azure/cosmos-db/nosql/change-feed-pull-model?tabs=javascript).
## Error Handling
The SDK generates various types of errors that can occur during an operation.
1. `ErrorResponse` is thrown if the response of an operation returns an error code of >=400.
2. `TimeoutError` is thrown if Abort is called internally due to timeout.
3. `AbortError` is thrown if any user passed signal caused the abort.
4. `RestError` is thrown in case of failure of underlying system call due to network issues.
5. Errors generated by any devDependencies. For Eg. `@azure/identity` package could throw `CredentialUnavailableError`.
Following is an example for handling errors of type `ErrorResponse`, `TimeoutError`, `AbortError`, and `RestError`.
```js
try {
// some code
} catch (err) {
if (err instanceof ErrorResponse) {
// some specific error handling.
} else if (err instanceof RestError) {
// some specific error handling.
}
// handle other type of errors in similar way.
else {
// for any other error.
}
}
```
It's important to properly handle these errors to ensure that your application can gracefully recover from any failures and continue functioning as expected. More details about some of these errors and their possible solutions can be found [here](https://learn.microsoft.com/azure/cosmos-db/nosql/conceptual-resilient-sdk-applications#should-my-application-retry-on-errors).
## Troubleshooting
### General
When you interact with Cosmos DB errors returned by the service correspond to the same HTTP status codes returned for REST API requests:
[HTTP Status Codes for Azure Cosmos DB][cosmos_http_status_codes]
#### Conflicts
For example, if you try to create an item using an `id` that's already in use in your Cosmos DB database, a `409` error is returned, indicating the conflict. In the following snippet, the error is handled gracefully by catching the exception and displaying additional information about the error.
```js
try {
await container.items.create({ id: "existing-item-id" });
} catch (error) {
if (error.code === 409) {
console.log("There was a conflict with an existing item");
}
}
```
### Transpiling
The Azure SDKs are designed to support ES5 JavaScript syntax and [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule). If you need support for earlier JavaScript runtimes such as Internet Explorer or Node 6, you will need to transpile the SDK code as part of your build process.
### Handle transient errors with retries
While working with Cosmos DB, you might encounter transient failures caused by [rate limits][cosmos_request_units] enforced by the service, or other transient problems like network outages. For information about handling these types of failures, see [Retry pattern][azure_pattern_retry] in the Cloud Design Patterns guide, and the related [Circuit Breaker pattern][azure_pattern_circuit_breaker].
### Logging
Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`. While using `AZURE_LOG_LEVEL` make sure to set it before logging library is initialized.
Ideally pass it through command line, if using libraries like `dotenv` make sure such libraries are initialized before logging library.
```javascript
const { setLogLevel } = require("@azure/logger");
setLogLevel("info");
```
For more detailed instructions on how to enable logs, you can look at the [@azure/logger package docs](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger).
### Diagnostics
The Cosmos Diagnostics feature provides enhanced insights into all your client operations. A CosmosDiagnostics object is added to the response of all client operations, such as:
- Point look up operation response - `item.read()`, `container.create()`, `database.delete()`
- Query operation response - `queryIterator.fetchAll()`,
- Bulk and Batch operations - `item.batch()`.
- Error/Exception response objects.
There are 3 Cosmos Diagnostic levels: info, debug, and debug-unsafe. Only info is meant for production systems; debug and debug-unsafe are meant to be used during development and debugging, since they consume significantly higher resources. The Cosmos Diagnostic level can be set in 2 ways
- Programmatically
```js
const client = new CosmosClient({ endpoint, key, diagnosticLevel: CosmosDbDiagnosticLevel.debug });
```
- Using environment variables. (Diagnostic level set by Environment variable has higher priority over setting it through client options.)
```bash
export AZURE_COSMOSDB_DIAGNOSTICS_LEVEL="debug"
```
Cosmos Diagnostic has three members
- ClientSideRequestStatistics Type: Contains aggregates diagnostic details, including metadata lookups, retries, endpoints contacted, and request and response statistics like payload size and duration. (is always collected, can be used in production systems.)
- DiagnosticNode: Is a tree-like structure that captures detailed diagnostic information. Similar to `har` recording present in browsers. This feature is disabled by default and is intended for debugging non-production environments only. (collected at diagnostic level debug and debug-unsafe)
- ClientConfig: Captures essential information related to client's configuration settings during client initialization. (collected at diagnostic level debug and debug-unsafe)
Please make sure to never set the diagnostic level to `debug-unsafe` in a production environment, since at this level `CosmosDiagnostics` captures request and response payloads, and if you choose to log it (it is logged by default by @azure/logger at `verbose` level), these payloads might get captured in your log sinks.
#### Consuming Diagnostics
- Since `diagnostics` is added to all Response objects, you could programmatically access `CosmosDiagnostic` as follows.
```js
// For point look up operations
const { container, diagnostics: containerCreateDiagnostic } =
await database.containers.createIfNotExists({
id: containerId,
partitionKey: {
paths: ["/key1"],
},
});
// For Batch operations
const operations: OperationInput[] = [
{
operationType: BulkOperationType.Create,
resourceBody: { id: 'A', key: "A", school: "high" },
},
];
const response = await container.items.batch(operations, "A");
// For query operations
const queryIterator = container.items.query("select * from c");
const { resources, diagnostics } = await queryIterator.fetchAll();
// While error handling
try {
// Some operation that might fail
} catch (err) {
const diagnostics = err.diagnostics
}
```
- You could also log `diagnostics` using `@azure/logger`, diagnostic is always logged using `@azure/logger` at `verbose` level. So if you set Diagnostic level to `debug` or `debug-unsafe` and `@azure/logger` level to `verbose`, `diagnostics` will be logged.
## Next steps
### More sample code
[Several samples][cosmos_samples] are available to you in the SDK's GitHub repository. These samples provide example code for additional scenarios commonly encountered while working with Cosmos DB:
- Database Operations
- Container Operations
- Item Operations
- Configuring Indexing
- Reading a container Change Feed
- Stored Procedures
- Changing Database/Container throughput settings
- Multi Region Write Operations
### Limitations
Currently the features below are **not supported**. For alternatives options, check the **Workarounds** section below.
### Data Plane Limitations:
* Queries with COUNT from a DISTINCT subquery
* Direct TCP Mode access
* Aggregate cross-partition queries, like sorting, counting, and distinct, don't support continuation tokens. Streamable queries, like SELECT \* FROM <table> WHERE <condition>, support continuation tokens. See the "Workaround" section for executing non-streamable queries without a continuation token.
* Change Feed: Processor
* Change Feed: Read multiple partitions key values
* Change Feed pull model all versions and delete mode [#27058](https://github.com/Azure/azure-sdk-for-js/issues/27058)
* Change Feed pull model support for partial hierarchical partition keys [#27059](https://github.com/Azure/azure-sdk-for-js/issues/27059)
* Cross-partition ORDER BY for mixed types
### Control Plane Limitations:
* Get CollectionSizeUsage, DatabaseUsage, and DocumentUsage metrics
* Create Geospatial Index
* Update Autoscale throughput
## Workarounds
### Continuation token for cross partitions queries
You can achieve cross partition queries with continuation token support by using
[Side car pattern](https://github.com/Azure-Samples/Cosmosdb-query-sidecar).
This pattern can also enable applications to be composed of heterogeneous components and technologies.
### Executing non-streamable cross-partition query
To execute non-streamable queries without the use of continuation tokens, you can create a query iterator with the required query specification and options. The following sample code demonstrates how to use a query iterator to fetch all results without the need for a continuation token:
```javascript
const querySpec = {
query: "SELECT * FROM c WHERE c.status = @status",
parameters: [{ name: "@status", value: "active" }],
};
const queryOptions = {
maxItemCount: 10, // maximum number of items to return per page
enableCrossPartitionQuery: true,
};
const queryIterator = await container.items.query(querySpec, queryOptions);
while (queryIterator.hasMoreResults()) {
  const { resources: result } = await queryIterator.fetchNext();
//Do something with result
}
```
This approach can also be used for streamable queries.
### Control Plane operations
Typically, you can use [Azure Portal](https://portal.azure.com/), [Azure Cosmos DB Resource Provider REST API](https://docs.microsoft.com/rest/api/cosmos-db-resource-provider), [Azure CLI](https://docs.microsoft.com/cli/azure/azure-cli-reference-for-cosmos-db) or [PowerShell](https://docs.microsoft.com/azure/cosmos-db/manage-with-powershell) for the control plane unsupported limitations.
### Additional documentation
For more extensive documentation on the Cosmos DB service, see the [Azure Cosmos DB documentation][cosmos_docs] on docs.microsoft.com.
## Useful links
- [Welcome to Azure Cosmos DB](https://docs.microsoft.com/azure/cosmos-db/community)
- [Quick start](https://docs.microsoft.com/azure/cosmos-db/sql-api-nodejs-get-started)
- [Tutorial](https://docs.microsoft.com/azure/cosmos-db/sql-api-nodejs-application)
- [Samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/cosmosdb/cosmos/samples)
- [Introduction to Resource Model of Azure Cosmos DB Service](https://docs.microsoft.com/azure/cosmos-db/sql-api-resources)
- [Introduction to SQL API of Azure Cosmos DB Service](https://docs.microsoft.com/azure/cosmos-db/sql-api-sql-query)
- [Partitioning](https://docs.microsoft.com/azure/cosmos-db/sql-api-partition-data)
- [API Documentation](https://docs.microsoft.com/javascript/api/%40azure/cosmos/?view=azure-node-latest)
## Contributing
If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code.
![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcosmosdb%2Fcosmos%2FREADME.png)
<!-- LINKS -->
[azure_cli]: https://docs.microsoft.com/cli/azure
[azure_pattern_circuit_breaker]: https://docs.microsoft.com/azure/architecture/patterns/circuit-breaker
[azure_pattern_retry]: https://docs.microsoft.com/azure/architecture/patterns/retry
[azure_portal]: https://portal.azure.com
[azure_sub]: https://azure.microsoft.com/free/
[cloud_shell]: https://docs.microsoft.com/azure/cloud-shell/overview
[cloud_shell_bash]: https://shell.azure.com/bash
[cosmos_account_create]: https://docs.microsoft.com/azure/cosmos-db/how-to-manage-database-account
[cosmos_account]: https://docs.microsoft.com/azure/cosmos-db/account-overview
[cosmos_container]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items#azure-cosmos-containers
[cosmos_database]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items#azure-cosmos-databases
[cosmos_docs]: https://docs.microsoft.com/azure/cosmos-db/
[cosmos_http_status_codes]: https://docs.microsoft.com/rest/api/cosmos-db/http-status-codes-for-cosmosdb
[cosmos_item]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items#azure-cosmos-items
[cosmos_request_units]: https://docs.microsoft.com/azure/cosmos-db/request-units
[cosmos_resources]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items
[cosmos_samples]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/cosmosdb/cosmos/samples
[cosmos_sql_queries]: https://docs.microsoft.com/azure/cosmos-db/how-to-sql-query
[cosmos_ttl]: https://docs.microsoft.com/azure/cosmos-db/time-to-live
[npm]: https://www.npmjs.com/package/@azure/cosmos

View File

@@ -0,0 +1,38 @@
/// <reference lib="esnext.asynciterable" />
import { ChangeFeedResponse } from "./ChangeFeedResponse";
import { Resource } from "./client";
/**
* Provides iterator for change feed.
*
* Use `Items.changeFeed()` to get an instance of the iterator.
*/
export declare class ChangeFeedIterator<T> {
  // Low-level client context through which feed queries are issued.
  private clientContext;
  // Resource id of the container whose change feed is being read.
  private resourceId;
  // Link (path) of the container; used when building feed requests.
  private resourceLink;
  // Partition key the feed is scoped to; reading the feed requires one to be specified.
  private partitionKey;
  // Options supplied at creation (continuation, startTime, startFromBeginning,
  // maxItemCount, sessionToken — see the implementation's constructor).
  private changeFeedOptions;
  // Sentinel If-None-Match header value used to start reading changes from "now".
  private static readonly IfNoneMatchAllHeaderValue;
  // Continuation state: the etag of the last response, sent as If-None-Match on the next request.
  private nextIfNoneMatch;
  // RFC 1123 timestamp sent as If-Modified-Since when a start time was given.
  private ifModifiedSince;
  // Status code of the most recent fetch; a NotModified status means no further results for now.
  private lastStatusCode;
  // True when a partition key was provided at construction time.
  private isPartitionSpecified;
  /**
   * Gets a value indicating whether there are potentially additional results that can be retrieved.
   *
   * Initially returns true. This value is set based on whether the last execution returned a continuation token.
   *
   * @returns Boolean value representing if whether there are potentially additional results that can be retrieved.
   */
  get hasMoreResults(): boolean;
  /**
   * Gets an async iterator which will yield pages of results from Azure Cosmos DB.
   */
  getAsyncIterator(): AsyncIterable<ChangeFeedResponse<Array<T & Resource>>>;
  /**
   * Read feed and retrieves the next page of results in Azure Cosmos DB.
   */
  fetchNext(): Promise<ChangeFeedResponse<Array<T & Resource>>>;
  // Internal: performs the actual feed request and wraps the raw result.
  private getFeedResponse;
}
//# sourceMappingURL=ChangeFeedIterator.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedIterator.d.ts","sourceRoot":"","sources":["../../src/ChangeFeedIterator.ts"],"names":[],"mappings":";AAIA,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AASpC;;;;GAIG;AACH,qBAAa,kBAAkB,CAAC,CAAC;IAW7B,OAAO,CAAC,aAAa;IACrB,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,iBAAiB;IAd3B,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,yBAAyB,CAAO;IACxD,OAAO,CAAC,eAAe,CAAS;IAChC,OAAO,CAAC,eAAe,CAAS;IAChC,OAAO,CAAC,cAAc,CAAS;IAC/B,OAAO,CAAC,oBAAoB,CAAU;IAmCtC;;;;;;OAMG;IACH,IAAI,cAAc,IAAI,OAAO,CAE5B;IAED;;OAEG;IACW,gBAAgB,IAAI,aAAa,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;IASxF;;OAEG;IACU,SAAS,IAAI,OAAO,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;YAS5D,eAAe;CAgD9B"}

View File

@@ -0,0 +1,107 @@
import { __asyncGenerator, __await } from "tslib";
import { ChangeFeedResponse } from "./ChangeFeedResponse";
import { Constants, ResourceType, StatusCodes } from "./common";
import { getEmptyCosmosDiagnostics, withDiagnostics } from "./utils/diagnostics";
/**
* Provides iterator for change feed.
*
* Use `Items.changeFeed()` to get an instance of the iterator.
*/
export class ChangeFeedIterator {
  /**
   * @internal
   */
  constructor(clientContext, resourceId, resourceLink, partitionKey, changeFeedOptions) {
    this.clientContext = clientContext;
    this.resourceId = resourceId;
    this.resourceLink = resourceLink;
    this.partitionKey = partitionKey;
    this.changeFeedOptions = changeFeedOptions;
    // partition key XOR partition key range id
    const partitionKeyValid = partitionKey !== undefined;
    this.isPartitionSpecified = partitionKeyValid;
    // Decide the starting position of the feed. Supplying a continuation token
    // or an explicit start time each suppress the default starting behavior below.
    let canUseStartFromBeginning = true;
    if (changeFeedOptions.continuation) {
      // Resume from a previously returned continuation (etag) token.
      this.nextIfNoneMatch = changeFeedOptions.continuation;
      canUseStartFromBeginning = false;
    }
    if (changeFeedOptions.startTime) {
      // .toUTCString() is platform specific, but most platforms use RFC 1123.
      // In ECMAScript 2018, this was standardized to RFC 1123.
      // See for more info: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString
      this.ifModifiedSince = changeFeedOptions.startTime.toUTCString();
      canUseStartFromBeginning = false;
    }
    if (canUseStartFromBeginning && !changeFeedOptions.startFromBeginning) {
      // No explicit starting point and not "from beginning": send the
      // If-None-Match sentinel so only changes from now on are returned.
      this.nextIfNoneMatch = ChangeFeedIterator.IfNoneMatchAllHeaderValue;
    }
  }
  /**
   * Gets a value indicating whether there are potentially additional results that can be retrieved.
   *
   * Initially returns true. This value is set based on whether the last execution returned a continuation token.
   *
   * @returns Boolean value representing if whether there are potentially additional results that can be retrieved.
   */
  get hasMoreResults() {
    // NotModified from the last fetch means the feed is (currently) drained.
    return this.lastStatusCode !== StatusCodes.NotModified;
  }
  /**
   * Gets an async iterator which will yield pages of results from Azure Cosmos DB.
   */
  getAsyncIterator() {
    // Downleveled async generator (tslib helper). Empty pages are skipped;
    // iteration stops once a fetch reports NotModified (see hasMoreResults).
    return __asyncGenerator(this, arguments, function* getAsyncIterator_1() {
      do {
        const result = yield __await(this.fetchNext());
        if (result.count > 0) {
          yield yield __await(result);
        }
      } while (this.hasMoreResults);
    });
  }
  /**
   * Read feed and retrieves the next page of results in Azure Cosmos DB.
   */
  async fetchNext() {
    return withDiagnostics(async (diagnosticNode) => {
      const response = await this.getFeedResponse(diagnosticNode);
      // Record the status code (drives hasMoreResults) and save the response
      // etag as the continuation for the next request.
      this.lastStatusCode = response.statusCode;
      this.nextIfNoneMatch = response.headers[Constants.HttpHeaders.ETag];
      return response;
    }, this.clientContext);
  }
  // Builds the feed options from the iterator state and issues one queryFeed
  // request, wrapping the raw result in a ChangeFeedResponse.
  async getFeedResponse(diagnosticNode) {
    if (!this.isPartitionSpecified) {
      throw new Error("Container is partitioned, but no partition key or partition key range id was specified.");
    }
    // useIncrementalFeed requests the incremental change feed.
    const feedOptions = { initialHeaders: {}, useIncrementalFeed: true };
    if (typeof this.changeFeedOptions.maxItemCount === "number") {
      feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount;
    }
    if (this.changeFeedOptions.sessionToken) {
      feedOptions.sessionToken = this.changeFeedOptions.sessionToken;
    }
    if (this.nextIfNoneMatch) {
      // Continuation (or the "*" sentinel) travels as an If-None-Match access condition.
      feedOptions.accessCondition = {
        type: Constants.HttpHeaders.IfNoneMatch,
        condition: this.nextIfNoneMatch,
      };
    }
    if (this.ifModifiedSince) {
      feedOptions.initialHeaders[Constants.HttpHeaders.IfModifiedSince] = this.ifModifiedSince;
    }
    const response = await this.clientContext.queryFeed({
      path: this.resourceLink,
      resourceType: ResourceType.item,
      resourceId: this.resourceId,
      resultFn: (result) => (result ? result.Documents : []),
      query: undefined,
      options: feedOptions,
      partitionKey: this.partitionKey,
      diagnosticNode: diagnosticNode,
    }); // TODO: some funky issues with query feed. Probably need to change it up.
    return new ChangeFeedResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics());
  }
}
// If-None-Match value "*": signals "return only changes from now on" on the first request.
ChangeFeedIterator.IfNoneMatchAllHeaderValue = "*";
//# sourceMappingURL=ChangeFeedIterator.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,35 @@
/**
 * Specifies options for the change feed
 *
 * Some of these options control where and when to start reading from the change feed. The order of precedence is:
 * - continuation
 * - startTime
 * - startFromBeginning
 *
 * If none of those options are set, it will start reading changes from the first `ChangeFeedIterator.fetchNext()` call.
 */
export interface ChangeFeedOptions {
    /**
     * Max amount of items to return per page
     */
    maxItemCount?: number;
    /**
     * The continuation token to start from.
     *
     * This is equivalent to the etag and continuation value from the `ChangeFeedResponse`
     */
    continuation?: string;
    /**
     * The session token to use. If not specified, will use the most recent captured session token to start with.
     */
    sessionToken?: string;
    /**
     * Signals whether to start from the beginning or not.
     */
    startFromBeginning?: boolean;
    /**
     * Specifies the start time to start reading changes from.
     */
    startTime?: Date;
}
//# sourceMappingURL=ChangeFeedOptions.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedOptions.d.ts","sourceRoot":"","sources":["../../src/ChangeFeedOptions.ts"],"names":[],"mappings":"AAEA;;;;;;;;;GASG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,CAAC,EAAE,IAAI,CAAC;CAClB"}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=ChangeFeedOptions.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedOptions.js","sourceRoot":"","sources":["../../src/ChangeFeedOptions.ts"],"names":[],"mappings":"","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * Specifies options for the change feed\n *\n * Some of these options control where and when to start reading from the change feed. The order of precedence is:\n * - continuation\n * - startTime\n * - startFromBeginning\n *\n * If none of those options are set, it will start reading changes from the first `ChangeFeedIterator.fetchNext()` call.\n */\nexport interface ChangeFeedOptions {\n /**\n * Max amount of items to return per page\n */\n maxItemCount?: number;\n /**\n * The continuation token to start from.\n *\n * This is equivalent to the etag and continuation value from the `ChangeFeedResponse`\n */\n continuation?: string;\n /**\n * The session token to use. If not specified, will use the most recent captured session token to start with.\n */\n sessionToken?: string;\n /**\n * Signals whether to start from the beginning or not.\n */\n startFromBeginning?: boolean;\n /**\n * Specified the start time to start reading changes from.\n */\n startTime?: Date;\n}\n"]}

View File

@@ -0,0 +1,53 @@
import { CosmosDiagnostics } from "./CosmosDiagnostics";
import { CosmosHeaders } from "./queryExecutionContext";
/**
 * A single response page from the Azure Cosmos DB Change Feed
 */
export declare class ChangeFeedResponse<T> {
    /**
     * Gets the items returned in the response from Azure Cosmos DB
     */
    readonly result: T;
    /**
     * Gets the number of items returned in the response from Azure Cosmos DB
     */
    readonly count: number;
    /**
     * Gets the status code of the response from Azure Cosmos DB
     */
    readonly statusCode: number;
    readonly diagnostics: CosmosDiagnostics;
    /**
     * Gets the request charge for this request from the Azure Cosmos DB service.
     *
     * NOTE(review): the implementation returns null when the request-charge header
     * is absent, despite the declared `number` type — confirm intended.
     */
    get requestCharge(): number;
    /**
     * Gets the activity ID for the request from the Azure Cosmos DB service.
     */
    get activityId(): string;
    /**
     * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.
     *
     * This is equivalent to the `etag` property.
     */
    get continuation(): string;
    /**
     * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.
     */
    get sessionToken(): string;
    /**
     * Gets the entity tag associated with last transaction in the Azure Cosmos DB service,
     * which can be used as an If-None-Match access condition for ReadFeed REST request or
     * `continuation` property of `ChangeFeedOptions` parameter for
     * `Items.changeFeed()`
     * to get feed changes since the transaction specified by this entity tag.
     *
     * This is equivalent to the `continuation` property.
     */
    get etag(): string;
    /**
     * Response headers of the response from Azure Cosmos DB
     */
    headers: CosmosHeaders;
}
//# sourceMappingURL=ChangeFeedResponse.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedResponse.d.ts","sourceRoot":"","sources":["../../src/ChangeFeedResponse.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAExD;;GAEG;AACH,qBAAa,kBAAkB,CAAC,CAAC;IAK7B;;OAEG;aACa,MAAM,EAAE,CAAC;IACzB;;OAEG;aACa,KAAK,EAAE,MAAM;IAC7B;;OAEG;aACa,UAAU,EAAE,MAAM;aAElB,WAAW,EAAE,iBAAiB;IAKhD;;OAEG;IACH,IAAW,aAAa,IAAI,MAAM,CAGjC;IAED;;OAEG;IACH,IAAW,UAAU,IAAI,MAAM,CAE9B;IAED;;;;OAIG;IACH,IAAW,YAAY,IAAI,MAAM,CAEhC;IAED;;OAEG;IACH,IAAW,YAAY,IAAI,MAAM,CAEhC;IAED;;;;;;;;OAQG;IACH,IAAW,IAAI,IAAI,MAAM,CAExB;IAED;;OAEG;IACI,OAAO,EAAE,aAAa,CAAC;CAC/B"}

View File

@@ -0,0 +1,68 @@
import { Constants } from "./common";
/**
 * A single response page from the Azure Cosmos DB Change Feed
 */
export class ChangeFeedResponse {
    /**
     * @internal
     *
     * @param result - Items (or raw payload) returned for this page.
     * @param count - Number of items returned.
     * @param statusCode - HTTP status code of the response.
     * @param headers - Response headers; frozen so a page is immutable once built.
     * @param diagnostics - Diagnostics captured for this request.
     */
    constructor(
    /**
     * Gets the items returned in the response from Azure Cosmos DB
     */
    result,
    /**
     * Gets the number of items returned in the response from Azure Cosmos DB
     */
    count,
    /**
     * Gets the status code of the response from Azure Cosmos DB
     */
    statusCode, headers, diagnostics) {
        this.result = result;
        this.count = count;
        this.statusCode = statusCode;
        this.diagnostics = diagnostics;
        this.headers = Object.freeze(headers);
    }
    /**
     * Gets the request charge for this request from the Azure Cosmos DB service.
     *
     * Returns null when the response carries no request-charge header.
     */
    get requestCharge() {
        const rus = this.headers[Constants.HttpHeaders.RequestCharge];
        // Request charges are fractional request units (e.g. "2.38"); the previous
        // parseInt(rus, 10) silently truncated the value, so parse it as a float.
        return rus ? parseFloat(rus) : null;
    }
    /**
     * Gets the activity ID for the request from the Azure Cosmos DB service.
     */
    get activityId() {
        return this.headers[Constants.HttpHeaders.ActivityId];
    }
    /**
     * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.
     *
     * This is equivalent to the `etag` property.
     */
    get continuation() {
        return this.etag;
    }
    /**
     * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.
     */
    get sessionToken() {
        return this.headers[Constants.HttpHeaders.SessionToken];
    }
    /**
     * Gets the entity tag associated with last transaction in the Azure Cosmos DB service,
     * which can be used as an If-None-Match access condition for ReadFeed REST request or
     * `continuation` property of `ChangeFeedOptions` parameter for
     * `Items.changeFeed()`
     * to get feed changes since the transaction specified by this entity tag.
     *
     * This is equivalent to the `continuation` property.
     */
    get etag() {
        return this.headers[Constants.HttpHeaders.ETag];
    }
}
//# sourceMappingURL=ChangeFeedResponse.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedResponse.js","sourceRoot":"","sources":["../../src/ChangeFeedResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AAGrC;;GAEG;AACH,MAAM,OAAO,kBAAkB;IAC7B;;OAEG;IACH;IACE;;OAEG;IACa,MAAS;IACzB;;OAEG;IACa,KAAa;IAC7B;;OAEG;IACa,UAAkB,EAClC,OAAsB,EACN,WAA8B;QAV9B,WAAM,GAAN,MAAM,CAAG;QAIT,UAAK,GAAL,KAAK,CAAQ;QAIb,eAAU,GAAV,UAAU,CAAQ;QAElB,gBAAW,GAAX,WAAW,CAAmB;QAE9C,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,IAAW,aAAa;QACtB,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC;QAC9D,OAAO,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IACxD,CAAC;IAED;;;;OAIG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;IAC1D,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC;CAMF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { CosmosDiagnostics } from \"./CosmosDiagnostics\";\nimport { Constants } from \"./common\";\nimport { CosmosHeaders } from \"./queryExecutionContext\";\n\n/**\n * A single response page from the Azure Cosmos DB Change Feed\n */\nexport class ChangeFeedResponse<T> {\n /**\n * @internal\n */\n constructor(\n /**\n * Gets the items returned in the response from Azure Cosmos DB\n */\n public readonly result: T,\n /**\n * Gets the number of items returned in the response from Azure Cosmos DB\n */\n public readonly count: number,\n /**\n * Gets the status code of the response from Azure Cosmos DB\n */\n public readonly statusCode: number,\n headers: CosmosHeaders,\n public readonly diagnostics: CosmosDiagnostics,\n ) {\n this.headers = Object.freeze(headers);\n }\n\n /**\n * Gets the request charge for this request 
from the Azure Cosmos DB service.\n */\n public get requestCharge(): number {\n const rus = this.headers[Constants.HttpHeaders.RequestCharge];\n return rus ? parseInt(rus, 10) : null;\n }\n\n /**\n * Gets the activity ID for the request from the Azure Cosmos DB service.\n */\n public get activityId(): string {\n return this.headers[Constants.HttpHeaders.ActivityId];\n }\n\n /**\n * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.\n *\n * This is equivalent to the `etag` property.\n */\n public get continuation(): string {\n return this.etag;\n }\n\n /**\n * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.\n */\n public get sessionToken(): string {\n return this.headers[Constants.HttpHeaders.SessionToken];\n }\n\n /**\n * Gets the entity tag associated with last transaction in the Azure Cosmos DB service,\n * which can be used as If-Non-Match Access condition for ReadFeed REST request or\n * `continuation` property of `ChangeFeedOptions` parameter for\n * `Items.changeFeed()`\n * to get feed changes since the transaction specified by this entity tag.\n *\n * This is equivalent to the `continuation` property.\n */\n public get etag(): string {\n return this.headers[Constants.HttpHeaders.ETag];\n }\n\n /**\n * Response headers of the response from Azure Cosmos DB\n */\n public headers: CosmosHeaders;\n}\n"]}

View File

@@ -0,0 +1,155 @@
import { PartitionKeyRange } from "./client/Container/PartitionKeyRange";
import { Resource } from "./client/Resource";
import { HTTPMethod, ResourceType } from "./common/constants";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { DatabaseAccount, PartitionKey } from "./documents";
import { GlobalEndpointManager } from "./globalEndpointManager";
import { SqlQuerySpec } from "./queryExecutionContext";
import { QueryIterator } from "./queryIterator";
import { FeedOptions, RequestOptions, Response } from "./request";
import { PartitionedQueryExecutionInfo } from "./request/ErrorResponse";
import { BulkOptions } from "./utils/batch";
import { ClientConfigDiagnostic, CosmosDiagnostics } from "./CosmosDiagnostics";
import { DiagnosticNodeInternal } from "./diagnostics/DiagnosticNodeInternal";
import { CosmosDbDiagnosticLevel } from "./diagnostics/CosmosDbDiagnosticLevel";
/**
 * @hidden
 */
export declare class ClientContext {
    private cosmosClientOptions;
    private globalEndpointManager;
    private clientConfig;
    diagnosticLevel: CosmosDbDiagnosticLevel;
    private readonly sessionContainer;
    private connectionPolicy;
    private pipeline;
    private diagnosticWriter;
    private diagnosticFormatter;
    partitionKeyDefinitionCache: {
        [containerUrl: string]: any;
    };
    constructor(cosmosClientOptions: CosmosClientOptions, globalEndpointManager: GlobalEndpointManager, clientConfig: ClientConfigDiagnostic, diagnosticLevel: CosmosDbDiagnosticLevel);
    /** @hidden */
    read<T>({ path, resourceType, resourceId, options, partitionKey, diagnosticNode, }: {
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        options?: RequestOptions;
        partitionKey?: PartitionKey;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<T & Resource>>;
    /** Reads a feed page or executes a query against a feed. */
    queryFeed<T>({ path, resourceType, resourceId, resultFn, query, options, diagnosticNode, partitionKeyRangeId, partitionKey, startEpk, endEpk, }: {
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        resultFn: (result: {
            [key: string]: any;
        }) => any[];
        query: SqlQuerySpec | string;
        options: FeedOptions;
        diagnosticNode: DiagnosticNodeInternal;
        partitionKeyRangeId?: string;
        partitionKey?: PartitionKey;
        startEpk?: string | undefined;
        endEpk?: string | undefined;
    }): Promise<Response<T & Resource>>;
    /** Fetches the partitioned query execution plan for a query. */
    getQueryPlan(path: string, resourceType: ResourceType, resourceId: string, query: SqlQuerySpec | string, options: FeedOptions, diagnosticNode: DiagnosticNodeInternal): Promise<Response<PartitionedQueryExecutionInfo>>;
    /** Returns an iterator over a collection's partition key ranges. */
    queryPartitionKeyRanges(collectionLink: string, query?: string | SqlQuerySpec, options?: FeedOptions): QueryIterator<PartitionKeyRange>;
    /** Deletes a resource (DELETE by default; `method` may override). */
    delete<T>({ path, resourceType, resourceId, options, partitionKey, method, diagnosticNode, }: {
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        options?: RequestOptions;
        partitionKey?: PartitionKey;
        method?: HTTPMethod;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<T & Resource>>;
    /** Partially updates a resource (PATCH). */
    patch<T>({ body, path, resourceType, resourceId, options, partitionKey, diagnosticNode, }: {
        body: any;
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        options?: RequestOptions;
        partitionKey?: PartitionKey;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<T & Resource>>;
    /** Creates a resource (POST). */
    create<T, U = T>({ body, path, resourceType, resourceId, diagnosticNode, options, partitionKey, }: {
        body: T;
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        diagnosticNode: DiagnosticNodeInternal;
        options?: RequestOptions;
        partitionKey?: PartitionKey;
    }): Promise<Response<T & U & Resource>>;
    private processQueryFeedResponse;
    private applySessionToken;
    /** Replaces a resource (PUT). */
    replace<T>({ body, path, resourceType, resourceId, options, partitionKey, diagnosticNode, }: {
        body: any;
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        options?: RequestOptions;
        partitionKey?: PartitionKey;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<T & Resource>>;
    /** Creates or replaces a resource (POST with the upsert header set). */
    upsert<T, U = T>({ body, path, resourceType, resourceId, options, partitionKey, diagnosticNode, }: {
        body: T;
        path: string;
        resourceType: ResourceType;
        resourceId: string;
        options?: RequestOptions;
        partitionKey?: PartitionKey;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<T>>;
    /** Executes a stored procedure with the given parameters. */
    execute<T>({ sprocLink, params, options, partitionKey, diagnosticNode, }: {
        sprocLink: string;
        params?: any[];
        options?: RequestOptions;
        partitionKey?: PartitionKey;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<T>>;
    /**
     * Gets the Database account information.
     * @param options - `urlConnection` in the options is the endpoint url whose database account needs to be retrieved.
     * If not present, current client's url will be used.
     */
    getDatabaseAccount(diagnosticNode: DiagnosticNodeInternal, options?: RequestOptions): Promise<Response<DatabaseAccount>>;
    getWriteEndpoint(diagnosticNode: DiagnosticNodeInternal): Promise<string>;
    getReadEndpoint(diagnosticNode: DiagnosticNodeInternal): Promise<string>;
    getWriteEndpoints(): Promise<readonly string[]>;
    getReadEndpoints(): Promise<readonly string[]>;
    /** Submits a transactional batch for a single partition key. */
    batch<T>({ body, path, partitionKey, resourceId, options, diagnosticNode, }: {
        body: T;
        path: string;
        partitionKey: PartitionKey;
        resourceId: string;
        options?: RequestOptions;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<any>>;
    /** Submits a bulk operation payload for a single partition key range. */
    bulk<T>({ body, path, partitionKeyRangeId, resourceId, bulkOptions, options, diagnosticNode, }: {
        body: T;
        path: string;
        partitionKeyRangeId: string;
        resourceId: string;
        bulkOptions?: BulkOptions;
        options?: RequestOptions;
        diagnosticNode: DiagnosticNodeInternal;
    }): Promise<Response<any>>;
    private captureSessionToken;
    clearSessionToken(path: string): void;
    recordDiagnostics(diagnostic: CosmosDiagnostics): void;
    initializeDiagnosticSettings(diagnosticLevel: CosmosDbDiagnosticLevel): void;
    private getSessionParams;
    private isMasterResource;
    private buildHeaders;
    /**
     * Returns collection of properties which are derived from the context for Request Creation.
     * These properties have client wide scope, as opposed to request specific scope.
     * @returns
     */
    private getContextDerivedPropsForRequestCreation;
    getClientConfig(): ClientConfigDiagnostic;
}
//# sourceMappingURL=ClientContext.d.ts.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,524 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { v4 } from "uuid";
// Alias so call sites read as `uuid()` rather than `v4()`.
const uuid = v4;
import { bearerTokenAuthenticationPolicy, createEmptyPipeline, } from "@azure/core-rest-pipeline";
import { Constants, HTTPMethod, OperationType, ResourceType } from "./common/constants";
import { getIdFromLink, getPathFromLink, parseLink } from "./common/helper";
import { StatusCodes, SubStatusCodes } from "./common/statusCodes";
import { ConsistencyLevel, DatabaseAccount, convertToInternalPartitionKey, } from "./documents";
import { PluginOn, executePlugins } from "./plugins/Plugin";
import { QueryIterator } from "./queryIterator";
import { getHeaders } from "./request/request";
import { RequestHandler } from "./request/RequestHandler";
import { SessionContainer } from "./session/sessionContainer";
import { sanitizeEndpoint } from "./utils/checkURL";
import { supportedQueryFeaturesBuilder } from "./utils/supportedQueryFeaturesBuilder";
import { createClientLogger } from "@azure/logger";
import { LogDiagnosticWriter, NoOpDiagnosticWriter, } from "./diagnostics/DiagnosticWriter";
import { DefaultDiagnosticFormatter } from "./diagnostics/DiagnosticFormatter";
import { CosmosDbDiagnosticLevel } from "./diagnostics/CosmosDbDiagnosticLevel";
// Module-scoped logger for this client context.
const logger = createClientLogger("ClientContext");
// Content type required by the service for query POST bodies.
const QueryJsonContentType = "application/query+json";
// Shorthand for the SDK's HTTP header-name constants.
const HttpHeaders = Constants.HttpHeaders;
/**
* @hidden
* @hidden
*/
export class ClientContext {
    /**
     * @param cosmosClientOptions - Client options (endpoint, credentials, connection policy).
     * @param globalEndpointManager - Resolves the read/write endpoint for each operation.
     * @param clientConfig - Client-level diagnostic configuration.
     * @param diagnosticLevel - Diagnostic verbosity for this client.
     */
    constructor(cosmosClientOptions, globalEndpointManager, clientConfig, diagnosticLevel) {
        this.cosmosClientOptions = cosmosClientOptions;
        this.globalEndpointManager = globalEndpointManager;
        this.clientConfig = clientConfig;
        this.diagnosticLevel = diagnosticLevel;
        this.connectionPolicy = cosmosClientOptions.connectionPolicy;
        this.sessionContainer = new SessionContainer();
        this.partitionKeyDefinitionCache = {};
        this.pipeline = null;
        // When AAD credentials are supplied, build a minimal pipeline whose only job is
        // to attach a Cosmos-formatted AAD bearer token to every outgoing request.
        if (cosmosClientOptions.aadCredentials) {
            this.pipeline = createEmptyPipeline();
            const hrefEndpoint = sanitizeEndpoint(cosmosClientOptions.endpoint);
            const scope = `${hrefEndpoint}/.default`;
            this.pipeline.addPolicy(bearerTokenAuthenticationPolicy({
                credential: cosmosClientOptions.aadCredentials,
                scopes: scope,
                challengeCallbacks: {
                    async authorizeRequest({ request, getAccessToken }) {
                        const tokenResponse = await getAccessToken([scope], {});
                        // Cosmos expects AAD tokens wrapped as "type=aad&ver=1.0&sig=<token>".
                        const AUTH_PREFIX = `type=aad&ver=1.0&sig=`;
                        const authorizationToken = `${AUTH_PREFIX}${tokenResponse.token}`;
                        request.headers.set("Authorization", authorizationToken);
                    },
                },
            }));
        }
        this.initializeDiagnosticSettings(diagnosticLevel);
    }
    /**
     * @hidden
     * Reads a single resource (GET). Captures the returned session token on success.
     */
    async read({ path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.get, path, operationType: OperationType.Read, resourceId,
                options,
                resourceType,
                partitionKey });
            diagnosticNode.addData({
                operationType: OperationType.Read,
                resourceType,
            });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // read will use ReadEndpoint since it uses GET operation
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
            const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
            this.captureSessionToken(undefined, path, OperationType.Read, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): the failure path records the session token with
            // OperationType.Upsert rather than OperationType.Read — looks like a
            // copy-paste; confirm this is intentional before changing.
            this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
            throw err;
        }
    }
    /**
     * Reads a feed page, or executes a query against a feed when `query` is given.
     *
     * With a query the request is switched to POST with the query JSON content type;
     * without one, a plain GET read-feed is issued. Both resolve via the read endpoint.
     */
    async queryFeed({ path, resourceType, resourceId, resultFn, query, options, diagnosticNode, partitionKeyRangeId, partitionKey, startEpk, endEpk, }) {
        // Query operations will use ReadEndpoint even though it uses
        // GET(for queryFeed) and POST(for regular query operations)
        const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.get, path, operationType: OperationType.Query, partitionKeyRangeId,
            resourceId,
            resourceType,
            options, body: query, partitionKey });
        diagnosticNode.addData({
            operationType: OperationType.Query,
            resourceType,
        });
        // Correlation id for the start/finish log lines below.
        const requestId = uuid();
        if (query !== undefined) {
            request.method = HTTPMethod.post;
        }
        request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
        request.headers = await this.buildHeaders(request);
        // An EPK range pair scopes the read-feed to an effective partition key range.
        if (startEpk !== undefined && endEpk !== undefined) {
            request.headers[HttpHeaders.StartEpk] = startEpk;
            request.headers[HttpHeaders.EndEpk] = endEpk;
            request.headers[HttpHeaders.ReadFeedKeyType] = "EffectivePartitionKeyRange";
        }
        if (query !== undefined) {
            request.headers[HttpHeaders.IsQuery] = "true";
            request.headers[HttpHeaders.ContentType] = QueryJsonContentType;
            if (typeof query === "string") {
                request.body = { query }; // Converts query text to query object.
            }
        }
        this.applySessionToken(request);
        logger.info("query " +
            requestId +
            " started" +
            (request.partitionKeyRangeId ? " pkrid: " + request.partitionKeyRangeId : ""));
        logger.verbose(request);
        const start = Date.now();
        const response = await RequestHandler.request(request, diagnosticNode);
        logger.info("query " + requestId + " finished - " + (Date.now() - start) + "ms");
        this.captureSessionToken(undefined, path, OperationType.Query, response.headers);
        return this.processQueryFeedResponse(response, !!query, resultFn);
    }
    /**
     * Fetches the partitioned query execution plan for a query (POST with the
     * query-plan headers set); the service answers without executing the query.
     */
    async getQueryPlan(path, resourceType, resourceId, query, options = {}, diagnosticNode) {
        const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.post, path, operationType: OperationType.Read, resourceId,
            resourceType,
            options, body: query });
        diagnosticNode.addData({
            operationType: OperationType.Read,
            resourceType,
        });
        request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
        request.headers = await this.buildHeaders(request);
        request.headers[HttpHeaders.IsQueryPlan] = "True";
        request.headers[HttpHeaders.QueryVersion] = "1.4";
        request.headers[HttpHeaders.ContentType] = QueryJsonContentType;
        request.headers[HttpHeaders.SupportedQueryFeatures] = supportedQueryFeaturesBuilder(options.disableNonStreamingOrderByQuery);
        if (typeof query === "string") {
            request.body = { query }; // Converts query text to query object.
        }
        this.applySessionToken(request);
        const response = await RequestHandler.request(request, diagnosticNode);
        this.captureSessionToken(undefined, path, OperationType.Query, response.headers);
        return response;
    }
queryPartitionKeyRanges(collectionLink, query, options) {
const path = getPathFromLink(collectionLink, ResourceType.pkranges);
const id = getIdFromLink(collectionLink);
const cb = async (diagNode, innerOptions) => {
const response = await this.queryFeed({
path,
resourceType: ResourceType.pkranges,
resourceId: id,
resultFn: (result) => result.PartitionKeyRanges,
query,
options: innerOptions,
diagnosticNode: diagNode,
});
return response;
};
return new QueryIterator(this, query, options, cb);
}
    /**
     * Deletes a resource (DELETE by default; `method` may override).
     * Deleting a collection clears its cached session tokens instead of capturing one.
     */
    async delete({ path, resourceType, resourceId, options = {}, partitionKey, method = HTTPMethod.delete, diagnosticNode, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: method, operationType: OperationType.Delete, path,
                resourceType,
                options,
                resourceId,
                partitionKey });
            diagnosticNode.addData({
                operationType: OperationType.Delete,
                resourceType,
            });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // deleteResource will use WriteEndpoint since it uses DELETE operation
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
            const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
            if (parseLink(path).type !== "colls") {
                this.captureSessionToken(undefined, path, OperationType.Delete, response.headers);
            }
            else {
                this.clearSessionToken(path);
            }
            return response;
        }
        catch (err) {
            // NOTE(review): failure path records the token with OperationType.Upsert,
            // not OperationType.Delete — looks copy-pasted; confirm intended.
            this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
            throw err;
        }
    }
    /**
     * Partially updates a resource (PATCH). Captures the session token on success.
     */
    async patch({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.patch, operationType: OperationType.Patch, path,
                resourceType,
                body,
                resourceId,
                options,
                partitionKey });
            diagnosticNode.addData({
                operationType: OperationType.Patch,
                resourceType,
            });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // patch will use WriteEndpoint
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
            const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
            this.captureSessionToken(undefined, path, OperationType.Patch, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): failure path records the token with OperationType.Upsert,
            // not OperationType.Patch — looks copy-pasted; confirm intended.
            this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
            throw err;
        }
    }
    /**
     * Creates a resource (POST). Captures the session token on success.
     */
    async create({ body, path, resourceType, resourceId, diagnosticNode, options = {}, partitionKey, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.post, operationType: OperationType.Create, path,
                resourceType,
                resourceId,
                body,
                options,
                partitionKey });
            diagnosticNode.addData({
                operationType: OperationType.Create,
                resourceType,
            });
            request.headers = await this.buildHeaders(request);
            // create will use WriteEndpoint since it uses POST operation
            this.applySessionToken(request);
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
            const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
            this.captureSessionToken(undefined, path, OperationType.Create, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): failure path records the token with OperationType.Upsert,
            // not OperationType.Create — looks copy-pasted; confirm intended.
            this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
            throw err;
        }
    }
processQueryFeedResponse(res, isQuery, resultFn) {
if (isQuery) {
return {
result: resultFn(res.result),
headers: res.headers,
code: res.code,
};
}
else {
const newResult = resultFn(res.result).map((body) => body);
return {
result: newResult,
headers: res.headers,
code: res.code,
};
}
}
    /**
     * Attaches the cached session token for the target resource to the request,
     * but only when session consistency is in effect and the caller has not
     * already supplied a token.
     */
    applySessionToken(requestContext) {
        const request = this.getSessionParams(requestContext.path);
        // A caller-supplied session token always wins.
        if (requestContext.headers && requestContext.headers[HttpHeaders.SessionToken]) {
            return;
        }
        // NOTE(review): headers is read unguarded below, unlike the check above —
        // callers appear to set headers via buildHeaders first; confirm.
        const sessionConsistency = requestContext.headers[HttpHeaders.ConsistencyLevel];
        if (!sessionConsistency) {
            return;
        }
        if (sessionConsistency !== ConsistencyLevel.Session) {
            return;
        }
        if (request.resourceAddress) {
            const sessionToken = this.sessionContainer.get(request);
            if (sessionToken) {
                requestContext.headers[HttpHeaders.SessionToken] = sessionToken;
            }
        }
    }
    /**
     * Replaces a resource (PUT). Captures the session token on success.
     */
    async replace({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.put, operationType: OperationType.Replace, path,
                resourceType,
                body,
                resourceId,
                options,
                partitionKey });
            diagnosticNode.addData({
                operationType: OperationType.Replace,
                resourceType,
            });
            request.headers = await this.buildHeaders(request);
            this.applySessionToken(request);
            // replace will use WriteEndpoint since it uses PUT operation
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
            const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
            this.captureSessionToken(undefined, path, OperationType.Replace, response.headers);
            return response;
        }
        catch (err) {
            // NOTE(review): failure path records the token with OperationType.Upsert,
            // not OperationType.Replace — looks copy-pasted; confirm intended.
            this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
            throw err;
        }
    }
    /**
     * Creates or replaces a resource (POST with the IsUpsert header set).
     */
    async upsert({ body, path, resourceType, resourceId, options = {}, partitionKey, diagnosticNode, }) {
        try {
            const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.post, operationType: OperationType.Upsert, path,
                resourceType,
                body,
                resourceId,
                options,
                partitionKey });
            diagnosticNode.addData({
                operationType: OperationType.Upsert,
                resourceType,
            });
            request.headers = await this.buildHeaders(request);
            // The IsUpsert header distinguishes this POST from a plain create.
            request.headers[HttpHeaders.IsUpsert] = true;
            this.applySessionToken(request);
            // upsert will use WriteEndpoint since it uses POST operation
            request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
            const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
            this.captureSessionToken(undefined, path, OperationType.Upsert, response.headers);
            return response;
        }
        catch (err) {
            this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
            throw err;
        }
    }
    /**
     * Executes a stored procedure (POST to the sproc link).
     * Does not capture a session token from the response.
     */
    async execute({ sprocLink, params, options = {}, partitionKey, diagnosticNode, }) {
        // Accept a single parameter or an array of parameters.
        // Didn't add type annotation for this because we should legacy this behavior
        if (params !== null && params !== undefined && !Array.isArray(params)) {
            params = [params];
        }
        const path = getPathFromLink(sprocLink);
        const id = getIdFromLink(sprocLink);
        const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.post, operationType: OperationType.Execute, path, resourceType: ResourceType.sproc, options, resourceId: id, body: params, partitionKey });
        diagnosticNode.addData({
            operationType: OperationType.Execute,
            resourceType: ResourceType.sproc,
        });
        request.headers = await this.buildHeaders(request);
        // executeStoredProcedure will use WriteEndpoint since it uses POST operation
        request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
        const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
        return response;
    }
/**
* Gets the Database account information.
* @param options - `urlConnection` in the options is the endpoint url whose database account needs to be retrieved.
* If not present, current client's url will be used.
*/
async getDatabaseAccount(diagnosticNode, options = {}) {
const endpoint = options.urlConnection || this.cosmosClientOptions.endpoint;
const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { endpoint, method: HTTPMethod.get, operationType: OperationType.Read, path: "", resourceType: ResourceType.none, options });
diagnosticNode.addData({
operationType: OperationType.Read,
resourceType: ResourceType.none,
});
request.headers = await this.buildHeaders(request);
// await options.beforeOperation({ endpoint, request, headers: requestHeaders });
const { result, headers, code, substatus, diagnostics } = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
const databaseAccount = new DatabaseAccount(result, headers);
return {
result: databaseAccount,
headers,
diagnostics,
code: code,
substatus: substatus,
};
}
    /** Resolves the write endpoint currently in use, delegating to the global endpoint manager. */
    getWriteEndpoint(diagnosticNode) {
        return this.globalEndpointManager.getWriteEndpoint(diagnosticNode);
    }
    /** Resolves the read endpoint currently in use, delegating to the global endpoint manager. */
    getReadEndpoint(diagnosticNode) {
        return this.globalEndpointManager.getReadEndpoint(diagnosticNode);
    }
    /** Returns all known write endpoints from the global endpoint manager. */
    getWriteEndpoints() {
        return this.globalEndpointManager.getWriteEndpoints();
    }
    /** Returns all known read endpoints from the global endpoint manager. */
    getReadEndpoints() {
        return this.globalEndpointManager.getReadEndpoints();
    }
async batch({ body, path, partitionKey, resourceId, options = {}, diagnosticNode, }) {
try {
const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.post, operationType: OperationType.Batch, path,
body, resourceType: ResourceType.item, resourceId,
options,
partitionKey });
diagnosticNode.addData({
operationType: OperationType.Batch,
resourceType: ResourceType.item,
});
request.headers = await this.buildHeaders(request);
request.headers[HttpHeaders.IsBatchRequest] = true;
request.headers[HttpHeaders.IsBatchAtomic] = true;
this.applySessionToken(request);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
this.captureSessionToken(undefined, path, OperationType.Batch, response.headers);
response.diagnostics = diagnosticNode.toDiagnostic(this.getClientConfig());
return response;
}
catch (err) {
this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
throw err;
}
}
async bulk({ body, path, partitionKeyRangeId, resourceId, bulkOptions = {}, options = {}, diagnosticNode, }) {
try {
const request = Object.assign(Object.assign({}, this.getContextDerivedPropsForRequestCreation()), { method: HTTPMethod.post, operationType: OperationType.Batch, path,
body, resourceType: ResourceType.item, resourceId,
options });
diagnosticNode.addData({
operationType: OperationType.Batch,
resourceType: ResourceType.item,
});
request.headers = await this.buildHeaders(request);
request.headers[HttpHeaders.IsBatchRequest] = true;
request.headers[HttpHeaders.PartitionKeyRangeID] = partitionKeyRangeId;
request.headers[HttpHeaders.IsBatchAtomic] = false;
request.headers[HttpHeaders.BatchContinueOnError] = bulkOptions.continueOnError || false;
this.applySessionToken(request);
request.endpoint = await this.globalEndpointManager.resolveServiceEndpoint(diagnosticNode, request.resourceType, request.operationType);
const response = await executePlugins(diagnosticNode, request, RequestHandler.request, PluginOn.operation);
this.captureSessionToken(undefined, path, OperationType.Batch, response.headers);
return response;
}
catch (err) {
this.captureSessionToken(err, path, OperationType.Upsert, err.headers);
throw err;
}
}
captureSessionToken(err, path, operationType, resHeaders) {
const request = this.getSessionParams(path);
request.operationType = operationType;
if (!err ||
(!this.isMasterResource(request.resourceType) &&
(err.code === StatusCodes.PreconditionFailed ||
err.code === StatusCodes.Conflict ||
(err.code === StatusCodes.NotFound &&
err.substatus !== SubStatusCodes.ReadSessionNotAvailable)))) {
this.sessionContainer.set(request, resHeaders);
}
}
    /** Removes any stored session token for the resource identified by `path`. */
    clearSessionToken(path) {
        const request = this.getSessionParams(path);
        this.sessionContainer.remove(request);
    }
    /** Formats a diagnostic object and hands it to the configured diagnostic writer. */
    recordDiagnostics(diagnostic) {
        const formatted = this.diagnosticFormatter.format(diagnostic);
        this.diagnosticWriter.write(formatted);
    }
initializeDiagnosticSettings(diagnosticLevel) {
this.diagnosticFormatter = new DefaultDiagnosticFormatter();
switch (diagnosticLevel) {
case CosmosDbDiagnosticLevel.info:
this.diagnosticWriter = new NoOpDiagnosticWriter();
break;
default:
this.diagnosticWriter = new LogDiagnosticWriter();
}
}
// TODO: move
getSessionParams(resourceLink) {
const resourceId = null;
let resourceAddress = null;
const parserOutput = parseLink(resourceLink);
resourceAddress = parserOutput.objectBody.self;
const resourceType = parserOutput.type;
return {
resourceId,
resourceAddress,
resourceType,
isNameBased: true,
};
}
isMasterResource(resourceType) {
if (resourceType === Constants.Path.OffersPathSegment ||
resourceType === Constants.Path.DatabasesPathSegment ||
resourceType === Constants.Path.UsersPathSegment ||
resourceType === Constants.Path.PermissionsPathSegment ||
resourceType === Constants.Path.TopologyPathSegment ||
resourceType === Constants.Path.DatabaseAccountPathSegment ||
resourceType === Constants.Path.PartitionKeyRangesPathSegment ||
resourceType === Constants.Path.CollectionsPathSegment) {
return true;
}
return false;
}
buildHeaders(requestContext) {
return getHeaders({
clientOptions: this.cosmosClientOptions,
defaultHeaders: Object.assign(Object.assign({}, this.cosmosClientOptions.defaultHeaders), requestContext.options.initialHeaders),
verb: requestContext.method,
path: requestContext.path,
resourceId: requestContext.resourceId,
resourceType: requestContext.resourceType,
options: requestContext.options,
partitionKeyRangeId: requestContext.partitionKeyRangeId,
useMultipleWriteLocations: this.connectionPolicy.useMultipleWriteLocations,
partitionKey: requestContext.partitionKey !== undefined
? convertToInternalPartitionKey(requestContext.partitionKey)
: undefined, // TODO: Move this check from here to PartitionKey
});
}
    /**
     * Returns collection of properties which are derived from the context for Request Creation.
     * These properties have client wide scope (endpoint manager, connection policy,
     * plugins, pipeline), as opposed to request specific scope.
     * @returns the client-scoped base properties merged into every request object.
     */
    getContextDerivedPropsForRequestCreation() {
        return {
            globalEndpointManager: this.globalEndpointManager,
            requestAgent: this.cosmosClientOptions.agent,
            connectionPolicy: this.connectionPolicy,
            client: this,
            plugins: this.cosmosClientOptions.plugins,
            pipeline: this.pipeline,
        };
    }
    /** Accessor for the stored client configuration diagnostic object. */
    getClientConfig() {
        return this.clientConfig;
    }
}
//# sourceMappingURL=ClientContext.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,116 @@
import { Database, Databases } from "./client/Database";
import { Offer, Offers } from "./client/Offer";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { DiagnosticNodeInternal } from "./diagnostics/DiagnosticNodeInternal";
import { DatabaseAccount } from "./documents";
import { RequestOptions, ResourceResponse } from "./request";
/**
 * Provides a client-side logical representation of the Azure Cosmos DB database account.
 * This client is used to configure and execute requests in the Azure Cosmos DB database service.
 * @example Instantiate a client and create a new database
 * ```typescript
 * const client = new CosmosClient({endpoint: "<URL HERE>", auth: {masterKey: "<KEY HERE>"}});
 * await client.databases.create({id: "<database name here>"});
 * ```
 * @example Instantiate a client with custom Connection Policy
 * ```typescript
 * const connectionPolicy = new ConnectionPolicy();
 * connectionPolicy.RequestTimeout = 10000;
 * const client = new CosmosClient({
 *    endpoint: "<URL HERE>",
 *    auth: {masterKey: "<KEY HERE>"},
 *    connectionPolicy
 * });
 * ```
 */
export declare class CosmosClient {
    /**
     * Used for creating new databases, or querying/reading all databases.
     *
     * Use `.database(id)` to read, replace, or delete a specific, existing database by id.
     *
     * @example Create a new database
     * ```typescript
     * const {resource: databaseDefinition, database} = await client.databases.create({id: "<name here>"});
     * ```
     */
    readonly databases: Databases;
    /**
     * Used for querying & reading all offers.
     *
     * Use `.offer(id)` to read, or replace existing offers.
     */
    readonly offers: Offers;
    private clientContext;
    private endpointRefresher;
    /**
     * Creates a new {@link CosmosClient} object from a connection string. Your database connection string can be found in the Azure Portal
     */
    constructor(connectionString: string);
    /**
     * Creates a new {@link CosmosClient} object. See {@link CosmosClientOptions} for more details on what options you can use.
     * @param options - bag of options; require at least endpoint and auth to be configured
     */
    constructor(options: CosmosClientOptions);
    private initializeClientConfigDiagnostic;
    /**
     * Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.)
     */
    getDatabaseAccount(options?: RequestOptions): Promise<ResourceResponse<DatabaseAccount>>;
    /**
     * @hidden
     */
    getDatabaseAccountInternal(diagnosticNode: DiagnosticNodeInternal, options?: RequestOptions): Promise<ResourceResponse<DatabaseAccount>>;
    /**
     * Gets the currently used write endpoint url. Useful for troubleshooting purposes.
     *
     * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    getWriteEndpoint(): Promise<string>;
    /**
     * Gets the currently used read endpoint. Useful for troubleshooting purposes.
     *
     * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    getReadEndpoint(): Promise<string>;
    /**
     * Gets the known write endpoints. Useful for troubleshooting purposes.
     *
     * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    getWriteEndpoints(): Promise<readonly string[]>;
    /**
     * Gets the known read endpoints. Useful for troubleshooting purposes.
     *
     * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    getReadEndpoints(): Promise<readonly string[]>;
    /**
     * Used for reading, updating, or deleting an existing database by id or accessing containers belonging to that database.
     *
     * This does not make a network call. Use `.read` to get info about the database after getting the {@link Database} object.
     *
     * @param id - The id of the database.
     * @example Create a new container off of an existing database
     * ```typescript
     * const container = client.database("<database id>").containers.create("<container id>");
     * ```
     *
     * @example Delete an existing database
     * ```typescript
     * await client.database("<id here>").delete();
     * ```
     */
    database(id: string): Database;
    /**
     * Used for reading, or updating an existing offer by id.
     * @param id - The id of the offer.
     */
    offer(id: string): Offer;
    /**
     * Clears background endpoint refresher. Use client.dispose() when destroying the CosmosClient within another process.
     */
    dispose(): void;
    private backgroundRefreshEndpointList;
}
//# sourceMappingURL=CosmosClient.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CosmosClient.d.ts","sourceRoot":"","sources":["../../src/CosmosClient.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AACxD,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAK/C,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAG5D,OAAO,EAAE,sBAAsB,EAAsB,MAAM,sCAAsC,CAAC;AAClG,OAAO,EAAE,eAAe,EAA2B,MAAM,aAAa,CAAC;AAEvE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAC;AAI7D;;;;;;;;;;;;;;;;;;GAkBG;AACH,qBAAa,YAAY;IACvB;;;;;;;;;OASG;IACH,SAAgB,SAAS,EAAE,SAAS,CAAC;IACrC;;;;OAIG;IACH,SAAgB,MAAM,EAAE,MAAM,CAAC;IAC/B,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,iBAAiB,CAAiB;IAC1C;;OAEG;gBACS,gBAAgB,EAAE,MAAM;IACpC;;;OAGG;gBACS,OAAO,EAAE,mBAAmB;IA+DxC,OAAO,CAAC,gCAAgC;IAmBxC;;OAEG;IACU,kBAAkB,CAC7B,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;IAM7C;;OAEG;IACU,0BAA0B,CACrC,cAAc,EAAE,sBAAsB,EACtC,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;IAW7C;;;;OAIG;IACU,gBAAgB,IAAI,OAAO,CAAC,MAAM,CAAC;IAMhD;;;;OAIG;IACU,eAAe,IAAI,OAAO,CAAC,MAAM,CAAC;IAM/C;;;;OAIG;IACI,iBAAiB,IAAI,OAAO,CAAC,SAAS,MAAM,EAAE,CAAC;IAItD;;;;OAIG;IACI,gBAAgB,IAAI,OAAO,CAAC,SAAS,MAAM,EAAE,CAAC;IAIrD;;;;;;;;;;;;;;;OAeG;IACI,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG,QAAQ;IAIrC;;;OAGG;IACI,KAAK,CAAC,EAAE,EAAE,MAAM,GAAG,KAAK;IAI/B;;OAEG;IACI,OAAO,IAAI,IAAI;YAIR,6BAA6B;CAqB5C"}

View File

@@ -0,0 +1,181 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { Database, Databases } from "./client/Database";
import { Offer, Offers } from "./client/Offer";
import { ClientContext } from "./ClientContext";
import { parseConnectionString } from "./common";
import { Constants } from "./common/constants";
import { getUserAgent } from "./common/platform";
import { determineDiagnosticLevel, getDiagnosticLevelFromEnvironment } from "./diagnostics";
import { DiagnosticNodeType } from "./diagnostics/DiagnosticNodeInternal";
import { defaultConnectionPolicy } from "./documents";
import { GlobalEndpointManager } from "./globalEndpointManager";
import { ResourceResponse } from "./request";
import { checkURL } from "./utils/checkURL";
import { getEmptyCosmosDiagnostics, withDiagnostics } from "./utils/diagnostics";
/**
* Provides a client-side logical representation of the Azure Cosmos DB database account.
* This client is used to configure and execute requests in the Azure Cosmos DB database service.
* @example Instantiate a client and create a new database
* ```typescript
* const client = new CosmosClient({endpoint: "<URL HERE>", auth: {masterKey: "<KEY HERE>"}});
 * await client.databases.create({id: "<database name here>"});
* ```
* @example Instantiate a client with custom Connection Policy
* ```typescript
* const connectionPolicy = new ConnectionPolicy();
* connectionPolicy.RequestTimeout = 10000;
* const client = new CosmosClient({
* endpoint: "<URL HERE>",
* auth: {masterKey: "<KEY HERE>"},
* connectionPolicy
* });
* ```
*/
export class CosmosClient {
    /**
     * Builds the client from either a connection string or an options bag:
     * validates the endpoint, snapshots the client config for diagnostics,
     * applies default connection policy/headers, wires up the global endpoint
     * manager and (optionally) the background endpoint refresher.
     */
    constructor(optionsOrConnectionString) {
        var _a, _b;
        if (typeof optionsOrConnectionString === "string") {
            optionsOrConnectionString = parseConnectionString(optionsOrConnectionString);
        }
        const endpoint = checkURL(optionsOrConnectionString.endpoint);
        if (!endpoint) {
            throw new Error("Invalid endpoint specified");
        }
        const clientConfig = this.initializeClientConfigDiagnostic(optionsOrConnectionString);
        optionsOrConnectionString.connectionPolicy = Object.assign({}, defaultConnectionPolicy, optionsOrConnectionString.connectionPolicy);
        optionsOrConnectionString.defaultHeaders = optionsOrConnectionString.defaultHeaders || {};
        optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.CacheControl] = "no-cache";
        optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.Version] =
            Constants.CurrentVersion;
        if (optionsOrConnectionString.consistencyLevel !== undefined) {
            optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.ConsistencyLevel] =
                optionsOrConnectionString.consistencyLevel;
        }
        optionsOrConnectionString.defaultHeaders[Constants.HttpHeaders.UserAgent] = getUserAgent(optionsOrConnectionString.userAgentSuffix);
        const globalEndpointManager = new GlobalEndpointManager(optionsOrConnectionString, async (diagnosticNode, opts) => this.getDatabaseAccountInternal(diagnosticNode, opts));
        this.clientContext = new ClientContext(optionsOrConnectionString, globalEndpointManager, clientConfig, determineDiagnosticLevel(optionsOrConnectionString.diagnosticLevel, getDiagnosticLevelFromEnvironment()));
        if (((_a = optionsOrConnectionString.connectionPolicy) === null || _a === void 0 ? void 0 : _a.enableEndpointDiscovery) &&
            ((_b = optionsOrConnectionString.connectionPolicy) === null || _b === void 0 ? void 0 : _b.enableBackgroundEndpointRefreshing)) {
            this.backgroundRefreshEndpointList(globalEndpointManager, optionsOrConnectionString.connectionPolicy.endpointRefreshRateInMs ||
                defaultConnectionPolicy.endpointRefreshRateInMs);
        }
        this.databases = new Databases(this, this.clientContext);
        this.offers = new Offers(this, this.clientContext);
    }
    /** Snapshot of how the client was configured, used for diagnostics output. */
    initializeClientConfigDiagnostic(optionsOrConnectionString) {
        return {
            endpoint: optionsOrConnectionString.endpoint,
            resourceTokensConfigured: optionsOrConnectionString.resourceTokens !== undefined,
            tokenProviderConfigured: optionsOrConnectionString.tokenProvider !== undefined,
            aadCredentialsConfigured: optionsOrConnectionString.aadCredentials !== undefined,
            connectionPolicyConfigured: optionsOrConnectionString.connectionPolicy !== undefined,
            consistencyLevel: optionsOrConnectionString.consistencyLevel,
            defaultHeaders: optionsOrConnectionString.defaultHeaders,
            agentConfigured: optionsOrConnectionString.agent !== undefined,
            userAgentSuffix: optionsOrConnectionString.userAgentSuffix,
            diagnosticLevel: optionsOrConnectionString.diagnosticLevel,
            pluginsConfigured: optionsOrConnectionString.plugins !== undefined,
            sDKVersion: Constants.SDKVersion,
        };
    }
    /**
     * Get information about the current {@link DatabaseAccount} (including which regions are supported, etc.)
     */
    async getDatabaseAccount(options) {
        return withDiagnostics(async (diagnosticNode) => {
            return this.getDatabaseAccountInternal(diagnosticNode, options);
        }, this.clientContext);
    }
    /**
     * @hidden
     */
    async getDatabaseAccountInternal(diagnosticNode, options) {
        const response = await this.clientContext.getDatabaseAccount(diagnosticNode, options);
        return new ResourceResponse(response.result, response.headers, response.code, getEmptyCosmosDiagnostics(), response.substatus);
    }
    /**
     * Gets the currently used write endpoint url. Useful for troubleshooting purposes.
     *
     * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    async getWriteEndpoint() {
        return withDiagnostics(async (diagnosticNode) => {
            return this.clientContext.getWriteEndpoint(diagnosticNode);
        }, this.clientContext);
    }
    /**
     * Gets the currently used read endpoint. Useful for troubleshooting purposes.
     *
     * The url may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    async getReadEndpoint() {
        return withDiagnostics(async (diagnosticNode) => {
            return this.clientContext.getReadEndpoint(diagnosticNode);
        }, this.clientContext);
    }
    /**
     * Gets the known write endpoints. Useful for troubleshooting purposes.
     *
     * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    getWriteEndpoints() {
        return this.clientContext.getWriteEndpoints();
    }
    /**
     * Gets the known read endpoints. Useful for troubleshooting purposes.
     *
     * The urls may contain a region suffix (e.g. "-eastus") if we're using location specific endpoints.
     */
    getReadEndpoints() {
        return this.clientContext.getReadEndpoints();
    }
    /**
     * Used for reading, updating, or deleting an existing database by id or accessing containers belonging to that database.
     *
     * This does not make a network call. Use `.read` to get info about the database after getting the {@link Database} object.
     *
     * @param id - The id of the database.
     * @example Create a new container off of an existing database
     * ```typescript
     * const container = client.database("<database id>").containers.create("<container id>");
     * ```
     *
     * @example Delete an existing database
     * ```typescript
     * await client.database("<id here>").delete();
     * ```
     */
    database(id) {
        return new Database(this, id, this.clientContext);
    }
    /**
     * Used for reading, or updating an existing offer by id.
     * @param id - The id of the offer.
     */
    offer(id) {
        return new Offer(this, id, this.clientContext);
    }
    /**
     * Clears background endpoint refresher. Use client.dispose() when destroying the CosmosClient within another process.
     */
    dispose() {
        // BUG FIX: the refresher is created with setInterval, so clear it with
        // clearInterval (clearTimeout happened to work but was misleading).
        clearInterval(this.endpointRefresher);
    }
    async backgroundRefreshEndpointList(globalEndpointManager, refreshRate) {
        this.endpointRefresher = setInterval(() => {
            try {
                // BUG FIX: withDiagnostics returns a promise; the previous code returned
                // it from inside try/catch, which only catches synchronous throws and let
                // async refresh failures surface as unhandled promise rejections. Handle
                // rejection on the promise itself instead.
                void withDiagnostics(async (diagnosticNode) => {
                    return globalEndpointManager.refreshEndpointList(diagnosticNode);
                }, this.clientContext, DiagnosticNodeType.BACKGROUND_REFRESH_THREAD).catch((e) => {
                    console.warn("Failed to refresh endpoints", e);
                });
            }
            catch (e) {
                console.warn("Failed to refresh endpoints", e);
            }
        }, refreshRate);
        // Don't keep the Node.js process alive just for the refresh timer.
        if (this.endpointRefresher.unref && typeof this.endpointRefresher.unref === "function") {
            this.endpointRefresher.unref();
        }
    }
}
//# sourceMappingURL=CosmosClient.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,54 @@
import { TokenCredential } from "@azure/core-auth";
import { TokenProvider } from "./auth";
import { PermissionDefinition } from "./client";
import { ConnectionPolicy, ConsistencyLevel } from "./documents";
import { CosmosHeaders } from "./queryExecutionContext/CosmosHeaders";
import { CosmosDbDiagnosticLevel } from "./diagnostics/CosmosDbDiagnosticLevel";
/**
 * Mirror of the Node.js http(s) Agent interface, exposed so the SDK does not
 * take a dependency on (or leak) Node type definitions.
 */
export interface Agent {
    maxFreeSockets: number;
    maxSockets: number;
    sockets: any;
    requests: any;
    destroy(): void;
}
export interface CosmosClientOptions {
    /** The service endpoint to use to create the client. */
    endpoint: string;
    /** The account master or readonly key */
    key?: string;
    /** An object that contains resources tokens.
     * Keys for the object are resource Ids and values are the resource tokens.
     */
    resourceTokens?: {
        [resourcePath: string]: string;
    };
    /** A user supplied function for resolving header authorization tokens.
     * Allows users to generate their own auth tokens, potentially using a separate service
     */
    tokenProvider?: TokenProvider;
    /** AAD token from `@azure/identity`
     * Obtain a credential object by creating an `@azure/identity` credential object
     * We will then use your credential object and a scope URL (your cosmos db endpoint)
     * to authenticate requests to Cosmos
     */
    aadCredentials?: TokenCredential;
    /** An array of {@link Permission} objects. */
    permissionFeed?: PermissionDefinition[];
    /** An instance of {@link ConnectionPolicy} class.
     * This parameter is optional and the default connectionPolicy will be used if omitted.
     */
    connectionPolicy?: ConnectionPolicy;
    /** An optional parameter that represents the consistency level.
     * It can take any value from {@link ConsistencyLevel}.
     */
    consistencyLevel?: keyof typeof ConsistencyLevel;
    /** Default headers merged into every request sent by the client. */
    defaultHeaders?: CosmosHeaders;
    /** An optional custom http(s) Agent to be used in NodeJS environments
     * Use an agent such as https://github.com/TooTallNate/node-proxy-agent if you need to connect to Cosmos via a proxy
     */
    agent?: Agent;
    /** A custom string to append to the default SDK user agent. */
    userAgentSuffix?: string;
    /** Controls how much diagnostic detail the client captures; see {@link CosmosDbDiagnosticLevel}. */
    diagnosticLevel?: CosmosDbDiagnosticLevel;
}
//# sourceMappingURL=CosmosClientOptions.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CosmosClientOptions.d.ts","sourceRoot":"","sources":["../../src/CosmosClientOptions.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACnD,OAAO,EAAE,aAAa,EAAE,MAAM,QAAQ,CAAC;AACvC,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAChD,OAAO,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC;AAEjE,OAAO,EAAE,aAAa,EAAE,MAAM,uCAAuC,CAAC;AACtE,OAAO,EAAE,uBAAuB,EAAE,MAAM,uCAAuC,CAAC;AAGhF,MAAM,WAAW,KAAK;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;IACd,OAAO,IAAI,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,mBAAmB;IAClC,wDAAwD;IACxD,QAAQ,EAAE,MAAM,CAAC;IACjB,yCAAyC;IACzC,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IACpD;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;;;OAIG;IACH,cAAc,CAAC,EAAE,eAAe,CAAC;IACjC,8CAA8C;IAC9C,cAAc,CAAC,EAAE,oBAAoB,EAAE,CAAC;IACxC;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IACpC;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,OAAO,gBAAgB,CAAC;IACjD,cAAc,CAAC,EAAE,aAAa,CAAC;IAC/B;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC;IACd,+DAA+D;IAC/D,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,eAAe,CAAC,EAAE,uBAAuB,CAAC;CAG3C"}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=CosmosClientOptions.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CosmosClientOptions.js","sourceRoot":"","sources":["../../src/CosmosClientOptions.ts"],"names":[],"mappings":"","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenProvider } from \"./auth\";\nimport { PermissionDefinition } from \"./client\";\nimport { ConnectionPolicy, ConsistencyLevel } from \"./documents\";\nimport { PluginConfig } from \"./plugins/Plugin\";\nimport { CosmosHeaders } from \"./queryExecutionContext/CosmosHeaders\";\nimport { CosmosDbDiagnosticLevel } from \"./diagnostics/CosmosDbDiagnosticLevel\";\n\n// We expose our own Agent interface to avoid taking a dependency on and leaking node types. This interface should mirror the node Agent interface\nexport interface Agent {\n maxFreeSockets: number;\n maxSockets: number;\n sockets: any;\n requests: any;\n destroy(): void;\n}\n\nexport interface CosmosClientOptions {\n /** The service endpoint to use to create the client. */\n endpoint: string;\n /** The account master or readonly key */\n key?: string;\n /** An object that contains resources tokens.\n * Keys for the object are resource Ids and values are the resource tokens.\n */\n resourceTokens?: { [resourcePath: string]: string };\n /** A user supplied function for resolving header authorization tokens.\n * Allows users to generating their own auth tokens, potentially using a separate service\n */\n tokenProvider?: TokenProvider;\n /** AAD token from `@azure/identity`\n * Obtain a credential object by creating an `@azure/identity` credential object\n * We will then use your credential object and a scope URL (your cosmos db endpoint)\n * to authenticate requests to Cosmos\n */\n aadCredentials?: TokenCredential;\n /** An array of {@link Permission} objects. 
*/\n permissionFeed?: PermissionDefinition[];\n /** An instance of {@link ConnectionPolicy} class.\n * This parameter is optional and the default connectionPolicy will be used if omitted.\n */\n connectionPolicy?: ConnectionPolicy;\n /** An optional parameter that represents the consistency level.\n * It can take any value from {@link ConsistencyLevel}.\n */\n consistencyLevel?: keyof typeof ConsistencyLevel;\n defaultHeaders?: CosmosHeaders;\n /** An optional custom http(s) Agent to be used in NodeJS enironments\n * Use an agent such as https://github.com/TooTallNate/node-proxy-agent if you need to connect to Cosmos via a proxy\n */\n agent?: Agent;\n /** A custom string to append to the default SDK user agent. */\n userAgentSuffix?: string;\n diagnosticLevel?: CosmosDbDiagnosticLevel;\n /** @internal */\n plugins?: PluginConfig[];\n}\n"]}

View File

@@ -0,0 +1,202 @@
import { OperationType, ResourceType } from "./common";
import { CosmosDbDiagnosticLevel } from "./diagnostics/CosmosDbDiagnosticLevel";
import { DiagnosticNodeInternal } from "./diagnostics/DiagnosticNodeInternal";
import { ConsistencyLevel } from "./documents";
/**
 * * This is a Cosmos Diagnostic type that holds collected diagnostic information during a client operations. ie. Item.read(), Container.create().
 * It has three members -
 * 1. `clientSideRequestStatistics` member contains aggregate diagnostic information, including -
 * - metadata lookups. Here all the server requests, apart from the final intended resource are considered as metadata calls.
 *    i.e. for item.read(id), if the client makes server call to discover endpoints it would be considered as metadata call.
 * - retries
 * - endpoints contacted.
 * - request, response payload stats.
 * - gatewayStatistics - Information corresponding to main operation. For example during Item.read(), the client might perform many operations
 *    i.e. metadata lookup etc, but gatewayStatistics represents the diagnostics information for actual read operation.
 *
 * 2. diagnosticNode - Is a tree like structure which captures detailed diagnostic information. By default it is disabled, and is intended to be
 * used only for debugging on non production environments. The kind of details captured in diagnosticNode is controlled by `CosmosDbDiagnosticLevel`.
 * - CosmosDbDiagnosticLevel.info - Is default value. In this level only clientSideRequestStatistics are captured. It is meant for production environments.
 * - CosmosDbDiagnosticLevel.debug - Captures diagnosticNode and clientConfig. No request and response payloads are captured. Is not meant to be used
 * in production environment.
 * - CosmosDbDiagnosticLevel.debug-unsafe - In addition to data captured in CosmosDbDiagnosticLevel.debug, also captures request and response payloads.
 * Is not meant to be used in production environment.
 * 3. clientConfig - Captures information related to how client was configured during initialization.
 */
export declare class CosmosDiagnostics {
    readonly clientSideRequestStatistics: ClientSideRequestStatistics;
    readonly diagnosticNode: DiagnosticNode;
    readonly clientConfig?: ClientConfigDiagnostic;
}
/**
 * This type holds information related to initialization of `CosmosClient`
 */
export type ClientConfigDiagnostic = {
    /**
     * End point configured during client initialization.
     */
    endpoint: string;
    /**
     * True if `resourceTokens` was supplied during client initialization.
     */
    resourceTokensConfigured: boolean;
    /**
     * True if `tokenProvider` was supplied during client initialization.
     */
    tokenProviderConfigured: boolean;
    /**
     * True if `aadCredentials` was supplied during client initialization.
     */
    aadCredentialsConfigured: boolean;
    /**
     * True if `connectionPolicy` was supplied during client initialization.
     */
    connectionPolicyConfigured: boolean;
    /**
     * `consistencyLevel` supplied during client initialization.
     */
    consistencyLevel?: keyof typeof ConsistencyLevel;
    /**
     * `defaultHeaders` supplied during client initialization.
     */
    defaultHeaders?: {
        [key: string]: any;
    };
    /**
     * True if `agent` was supplied during client initialization.
     */
    agentConfigured: boolean;
    /**
     * `userAgentSuffix` supplied during client initialization.
     */
    userAgentSuffix: string;
    /**
     * `diagnosticLevel` supplied during client initialization.
     */
    diagnosticLevel?: CosmosDbDiagnosticLevel;
    /**
     * True if `plugins` were supplied during client initialization.
     */
    pluginsConfigured: boolean;
    /**
     * SDK version
     */
    sDKVersion: string;
};
/**
 * This type contains diagnostic information regarding all metadata requests to the server during a CosmosDB client operation.
 */
export type MetadataLookUpDiagnostics = {
    metadataLookups: MetadataLookUpDiagnostic[];
};
/**
 * This type captures diagnostic information regarding retry attempts during a CosmosDB client operation.
 */
export type RetryDiagnostics = {
    failedAttempts: FailedRequestAttemptDiagnostic[];
};
/** Diagnostic statistics for a single request made for the main (non-metadata) operation. */
export type GatewayStatistics = {
    /**
     * This is the activityId for request, made to server for fetching the requested resource. (As opposed to other potential meta data requests)
     */
    activityId?: string;
    /** UTC start time of the request, in milliseconds since epoch. */
    startTimeUTCInMs: number;
    /** Duration of the request in milliseconds. */
    durationInMs: number;
    operationType?: OperationType;
    resourceType?: ResourceType;
    /** HTTP status code returned by the server, when available. */
    statusCode?: number;
    subStatusCode?: number;
    /** Request charge reported for the request, when available. */
    requestCharge?: number;
    requestPayloadLengthInBytes: number;
    responsePayloadLengthInBytes: number;
};
/**
 * This type contains diagnostic information regarding a single metadata request to the server.
 */
export interface MetadataLookUpDiagnostic {
    activityId: string;
    /** UTC start time of the metadata request, in milliseconds since epoch. */
    startTimeUTCInMs: number;
    /** Duration of the metadata request in milliseconds. */
    durationInMs: number;
    operationType?: OperationType;
    resourceType?: ResourceType;
    /** Kind of metadata lookup performed (see {@link MetadataLookUpType}). */
    metaDataType: MetadataLookUpType;
    requestPayloadLengthInBytes: number;
    responsePayloadLengthInBytes: number;
}
/**
 * This type captures diagnostic information regarding a failed request to a server api.
 */
export interface FailedRequestAttemptDiagnostic {
    /** 1-based ordinal of this attempt among the retries of the operation. */
    attemptNumber: number;
    activityId: string;
    /** UTC start time of the attempt, in milliseconds since epoch. */
    startTimeUTCInMs: number;
    /** Duration of the attempt in milliseconds. */
    durationInMs: number;
    operationType?: OperationType;
    resourceType?: ResourceType;
    /** HTTP status code of the failed attempt. */
    statusCode: number;
    substatusCode?: number;
    requestPayloadLengthInBytes: number;
    responsePayloadLengthInBytes: number;
}
/**
 * Enum of the possible types of metadata lookup requests.
 */
export declare enum MetadataLookUpType {
    PartitionKeyRangeLookUp = "PARTITION_KEY_RANGE_LOOK_UP",
    DatabaseAccountLookUp = "DATABASE_ACCOUNT_LOOK_UP",
    QueryPlanLookUp = "QUERY_PLAN_LOOK_UP",
    DatabaseLookUp = "DATABASE_LOOK_UP",
    ContainerLookUp = "CONTAINER_LOOK_UP"
}
/**
 * This is a collection type for all client side diagnostic information.
 */
export type ClientSideRequestStatistics = {
    /**
     * This is the UTC timestamp for start of client operation.
     */
    requestStartTimeUTCInMs: number;
    /**
     * This is the duration in milliseconds taken by the client operation.
     */
    requestDurationInMs: number;
    /**
     * This is the list of Location Endpoints contacted during the client operation.
     */
    locationEndpointsContacted: string[];
    /**
     * This field captures diagnostic information for retries that happened during the client operation.
     */
    retryDiagnostics: RetryDiagnostics;
    /**
     * This field captures diagnostic information for metadata lookups that happened during the client operation.
     */
    metadataDiagnostics: MetadataLookUpDiagnostics;
    /**
     * These are the statistics for the main operation (as opposed to metadata requests).
     */
    gatewayStatistics: GatewayStatistics[];
    /**
     * This is the cumulative request payload length in bytes; this includes metadata calls along with the main operation.
     */
    totalRequestPayloadLengthInBytes: number;
    /**
     * This is the cumulative response payload length in bytes; this includes metadata calls along with the main operation.
     */
    totalResponsePayloadLengthInBytes: number;
};
export declare function getRootNode(node: DiagnosticNodeInternal): DiagnosticNodeInternal;
/**
 * Represents a tree like structure, for capturing diagnostic information.
 */
export interface DiagnosticNode {
    /** Unique identifier of this node. */
    id: string;
    nodeType: string;
    /** Child nodes capturing nested sub-operations. */
    children: DiagnosticNode[];
    /** Free-form key/value data attached to this node. */
    data: {
        [key: string]: any;
    };
    startTimeUTCInMs: number;
    durationInMs: number;
}
//# sourceMappingURL=CosmosDiagnostics.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CosmosDiagnostics.d.ts","sourceRoot":"","sources":["../../src/CosmosDiagnostics.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AACvD,OAAO,EAAE,uBAAuB,EAAE,MAAM,uCAAuC,CAAC;AAChF,OAAO,EAAE,sBAAsB,EAAE,MAAM,sCAAsC,CAAC;AAC9E,OAAO,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC;AAE/C;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,qBAAa,iBAAiB;IAC5B,SAAgB,2BAA2B,EAAE,2BAA2B,CAAC;IACzE,SAAgB,cAAc,EAAE,cAAc,CAAC;IAC/C,SAAgB,YAAY,CAAC,EAAE,sBAAsB,CAAC;CAavD;AAED;;GAEG;AACH,MAAM,MAAM,sBAAsB,GAAG;IACnC;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,wBAAwB,EAAE,OAAO,CAAC;IAClC;;OAEG;IACH,uBAAuB,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,wBAAwB,EAAE,OAAO,CAAC;IAClC;;OAEG;IACH,0BAA0B,EAAE,OAAO,CAAC;IACpC;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,OAAO,gBAAgB,CAAC;IACjD;;OAEG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACxC;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,eAAe,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,eAAe,CAAC,EAAE,uBAAuB,CAAC;IAC1C;;OAEG;IACH,iBAAiB,EAAE,OAAO,CAAC;IAC3B;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,yBAAyB,GAAG;IACtC,eAAe,EAAE,wBAAwB,EAAE,CAAC;CAC7C,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,gBAAgB,GAAG;IAC7B,cAAc,EAAE,8BAA8B,EAAE,CAAC;CAClD,CAAC;AAEF,MAAM,MAAM,iBAAiB,GAAG;IAC9B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,2BAA2B,EAAE,MAAM,CAAC;IACpC,4BAA4B,EAAE,MAAM,CAAC;CACtC,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,UAAU,EAAE,MAAM,CAAC;IACnB,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,YAAY,EAAE,kBAAkB,CAAC;IACjC,2BAA2B,EAAE,MAAM,CAAC;IACpC,4BAA4B,EAAE,MAAM,CAAC;CACtC;AAED;;GAEG;AACH,MAAM,WAAW,8BAA8B;IAC7C,aAAa,EAAE,MAAM,CAAC;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YA
AY,CAAC,EAAE,YAAY,CAAC;IAC5B,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,2BAA2B,EAAE,MAAM,CAAC;IACpC,4BAA4B,EAAE,MAAM,CAAC;CACtC;AAED;;GAEG;AACH,oBAAY,kBAAkB;IAC5B,uBAAuB,gCAAgC;IACvD,qBAAqB,6BAA6B;IAClD,eAAe,uBAAuB;IACtC,cAAc,qBAAqB;IACnC,eAAe,sBAAsB;CACtC;AAED;;GAEG;AACH,MAAM,MAAM,2BAA2B,GAAG;IACxC;;OAEG;IACH,uBAAuB,EAAE,MAAM,CAAC;IAChC;;OAEG;IACH,mBAAmB,EAAE,MAAM,CAAC;IAC5B;;OAEG;IACH,0BAA0B,EAAE,MAAM,EAAE,CAAC;IACrC;;OAEG;IACH,gBAAgB,EAAE,gBAAgB,CAAC;IACnC;;OAEG;IACH,mBAAmB,EAAE,yBAAyB,CAAC;IAC/C;;OAEG;IACH,iBAAiB,EAAE,iBAAiB,EAAE,CAAC;IACvC;;OAEG;IACH,gCAAgC,EAAE,MAAM,CAAC;IACzC;;OAEG;IACH,iCAAiC,EAAE,MAAM,CAAC;CAC3C,CAAC;AAEF,wBAAgB,WAAW,CAAC,IAAI,EAAE,sBAAsB,GAAG,sBAAsB,CAGhF;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,EAAE,EAAE,MAAM,CAAC;IACX,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,cAAc,EAAE,CAAC;IAC3B,IAAI,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC7B,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;CACtB"}

View File

@@ -0,0 +1,51 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/**
 * This is a Cosmos Diagnostic type that holds collected diagnostic information during client operations, i.e. Item.read(), Container.create().
* It has three members -
* 1. `clientSideRequestStatistics` member contains aggregate diagnostic information, including -
* - metadata lookups. Here all the server requests, apart from the final intended resource are considered as metadata calls.
* i.e. for item.read(id), if the client makes server call to discover endpoints it would be considered as metadata call.
* - retries
* - endpoints contacted.
* - request, response payload stats.
* - gatewayStatistics - Information corresponding to main operation. For example during Item.read(), the client might perform many operations
* i.e. metadata lookup etc, but gatewayStatistics represents the diagnostics information for actual read operation.
*
* 2. diagnosticNode - Is a tree like structure which captures detailed diagnostic information. By default it is disabled, and is intended to be
* used only for debugging on non production environments. The kind of details captured in diagnosticNode is controlled by `CosmosDbDiagnosticLevel`.
 * - CosmosDbDiagnosticLevel.info - Is the default value. In this level only clientSideRequestStatistics are captured. It is meant for production environments.
* - CosmosDbDiagnosticLevel.debug - Captures diagnosticNode and clientConfig. No request and response payloads are captured. Is not meant to be used
* in production environment.
* - CosmosDbDiagnosticLevel.debug-unsafe - In addition to data captured in CosmosDbDiagnosticLevel.debug, also captures request and response payloads.
* Is not meant to be used in production environment.
* 3. clientConfig - Captures information related to how client was configured during initialization.
*/
export class CosmosDiagnostics {
/**
* @internal
*/
constructor(clientSideRequestStatistics, diagnosticNode, clientConfig) {
this.clientSideRequestStatistics = clientSideRequestStatistics;
this.diagnosticNode = diagnosticNode;
this.clientConfig = clientConfig;
}
}
/**
 * Enum of the possible types of metadata lookup requests.
 */
export var MetadataLookUpType;
(function (MetadataLookUpType) {
    const lookups = {
        PartitionKeyRangeLookUp: "PARTITION_KEY_RANGE_LOOK_UP",
        DatabaseAccountLookUp: "DATABASE_ACCOUNT_LOOK_UP",
        QueryPlanLookUp: "QUERY_PLAN_LOOK_UP",
        DatabaseLookUp: "DATABASE_LOOK_UP",
        ContainerLookUp: "CONTAINER_LOOK_UP",
    };
    for (const [name, value] of Object.entries(lookups)) {
        MetadataLookUpType[name] = value;
    }
})(MetadataLookUpType || (MetadataLookUpType = {}));
export function getRootNode(node) {
if (node.parent)
return getRootNode(node.parent);
else
return node;
}
//# sourceMappingURL=CosmosDiagnostics.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,28 @@
import { HTTPMethod, ResourceType } from "./common";
import { CosmosClientOptions } from "./CosmosClientOptions";
import { CosmosHeaders } from "./queryExecutionContext";
/** @hidden */
export interface RequestInfo {
    /** HTTP verb of the request being authorized. */
    verb: HTTPMethod;
    /** Path of the resource the request targets. */
    path: string;
    resourceId: string;
    resourceType: ResourceType;
    /** Headers that will be sent with the request. */
    headers: CosmosHeaders;
}
/** User-supplied callback that produces an authorization token for a request. */
export type TokenProvider = (requestInfo: RequestInfo) => Promise<string>;
/**
 * Sets the Authorization header based on the configured credentials (key, resource tokens, or token provider).
 * @hidden
 */
export declare function setAuthorizationHeader(clientOptions: CosmosClientOptions, verb: HTTPMethod, path: string, resourceId: string, resourceType: ResourceType, headers: CosmosHeaders): Promise<void>;
/**
 * The default function for setting header token using the masterKey
 * @hidden
 */
export declare function setAuthorizationTokenHeaderUsingMasterKey(verb: HTTPMethod, resourceId: string, resourceType: ResourceType, headers: CosmosHeaders, masterKey: string): Promise<void>;
/**
 * Resolves the token to use for a request from a map of resource tokens.
 * @hidden
 */
export declare function getAuthorizationTokenUsingResourceTokens(resourceTokens: {
    [resourceId: string]: string;
}, path: string, resourceId: string): string;
//# sourceMappingURL=auth.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"auth.d.ts","sourceRoot":"","sources":["../../src/auth.ts"],"names":[],"mappings":"AAGA,OAAO,EAGL,UAAU,EACV,YAAY,EAEb,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAExD,cAAc;AACd,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,UAAU,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,YAAY,CAAC;IAC3B,OAAO,EAAE,aAAa,CAAC;CACxB;AAED,MAAM,MAAM,aAAa,GAAG,CAAC,WAAW,EAAE,WAAW,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;AAE1E;;GAEG;AACH,wBAAsB,sBAAsB,CAC1C,aAAa,EAAE,mBAAmB,EAClC,IAAI,EAAE,UAAU,EAChB,IAAI,EAAE,MAAM,EACZ,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,YAAY,EAC1B,OAAO,EAAE,aAAa,GACrB,OAAO,CAAC,IAAI,CAAC,CA+Bf;AAED;;;GAGG;AACH,wBAAsB,yCAAyC,CAC7D,IAAI,EAAE,UAAU,EAChB,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,YAAY,EAC1B,OAAO,EAAE,aAAa,EACtB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CASf;AAED;;GAEG;AAEH,wBAAgB,wCAAwC,CACtD,cAAc,EAAE;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,EAChD,IAAI,EAAE,MAAM,EACZ,UAAU,EAAE,MAAM,GACjB,MAAM,CA+CR"}

View File

@@ -0,0 +1,87 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { generateHeaders } from "./utils/headers";
import { Constants, getResourceIdFromPath, ResourceType, trimSlashFromLeftAndRight, } from "./common";
/**
 * Sets the Authorization header on `headers` using, in order of precedence:
 * master key, resource tokens, or a user-supplied token provider.
 * @hidden
 */
export async function setAuthorizationHeader(clientOptions, verb, path, resourceId, resourceType, headers) {
    // A permission feed replaces any previously configured resource tokens.
    if (clientOptions.permissionFeed) {
        const tokens = {};
        for (const permission of clientOptions.permissionFeed) {
            const rid = getResourceIdFromPath(permission.resource);
            if (!rid) {
                throw new Error(`authorization error: ${rid} \
is an invalid resourceId in permissionFeed`);
            }
            tokens[rid] = permission._token; // TODO: any
        }
        clientOptions.resourceTokens = tokens;
    }
    if (clientOptions.key) {
        await setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, clientOptions.key);
        return;
    }
    if (clientOptions.resourceTokens) {
        const token = getAuthorizationTokenUsingResourceTokens(clientOptions.resourceTokens, path, resourceId);
        headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(token);
        return;
    }
    if (clientOptions.tokenProvider) {
        const token = await clientOptions.tokenProvider({ verb, path, resourceId, resourceType, headers });
        headers[Constants.HttpHeaders.Authorization] = encodeURIComponent(token);
    }
}
/**
 * The default function for setting header token using the masterKey
 * @hidden
 */
export async function setAuthorizationTokenHeaderUsingMasterKey(verb, resourceId, resourceType, headers, masterKey) {
    // TODO This should live in cosmos-sign
    // Lower-case the resource id for offer resources, mirroring the service's expectation.
    const effectiveResourceId = resourceType === ResourceType.offer && resourceId
        ? resourceId.toLowerCase()
        : resourceId;
    const signedHeaders = await generateHeaders(masterKey, verb, resourceType, effectiveResourceId);
    Object.assign(headers, signedHeaders);
}
/**
 * Resolves the token to use for a request from a map of resource tokens,
 * matching by resource id, item container path, or path segment ids.
 * @hidden
 */
// TODO: Resource tokens
export function getAuthorizationTokenUsingResourceTokens(resourceTokens, path, resourceId) {
    const hasTokens = resourceTokens && Object.keys(resourceTokens).length > 0;
    if (hasTokens) {
        // For database account access(through getDatabaseAccount API), path and resourceId are "",
        // so in this case we return the first token to be used for creating the auth header as the
        // service will accept any token in this case
        if (!path && !resourceId) {
            const [firstKey] = Object.keys(resourceTokens);
            return resourceTokens[firstKey];
        }
        // An exact token for this resource id wins.
        if (resourceId && resourceTokens[resourceId]) {
            return resourceTokens[resourceId];
        }
        // minimum valid path /dbs
        if (!path || path.length < 4) {
            // TODO: This should throw an error
            return null;
        }
        const trimmedPath = trimSlashFromLeftAndRight(path);
        const pathSegments = trimmedPath ? trimmedPath.split("/") : [];
        // A six-segment path addresses an item: /dbs/{db}/colls/{coll}/docs/{doc}
        if (pathSegments.length === 6) {
            // Look for a container token matching the item path
            const containerPath = pathSegments.slice(0, 4).map(decodeURIComponent).join("/");
            if (resourceTokens[containerPath]) {
                return resourceTokens[containerPath];
            }
        }
        // TODO remove in v4: This is legacy behavior that lets someone use a resource token pointing ONLY at an ID
        // It was used when _rid was exposed by the SDK, but now that we are using user provided ids it is not needed
        // However removing it now would be a breaking change
        // if it's an incomplete path like /dbs/db1/colls/, start from the parent resource
        let index = pathSegments.length % 2 === 0 ? pathSegments.length - 1 : pathSegments.length - 2;
        while (index > 0) {
            const segmentId = decodeURI(pathSegments[index]);
            if (resourceTokens[segmentId]) {
                return resourceTokens[segmentId];
            }
            index -= 2;
        }
    }
    // TODO: This should throw an error
    return null;
}
//# sourceMappingURL=auth.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
/**
 * Enum to specify the resource for which change feed is being fetched.
 */
export declare enum ChangeFeedResourceType {
    /** Change feed for a feed (epk) range of the container. */
    FeedRange = 0,
    /** Change feed for a single partition key. */
    PartitionKey = 1
}
//# sourceMappingURL=ChangeFeedEnums.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedEnums.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedEnums.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,sBAAsB;IAChC,SAAS,IAAA;IACT,YAAY,IAAA;CACb"}

View File

@@ -0,0 +1,11 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/**
 * Enum to specify the resource for which change feed is being fetched.
 */
export var ChangeFeedResourceType;
(function (ChangeFeedResourceType) {
    // Numeric enum: map each name to its ordinal and back (reverse mapping).
    ["FeedRange", "PartitionKey"].forEach((name, ordinal) => {
        ChangeFeedResourceType[(ChangeFeedResourceType[name] = ordinal)] = name;
    });
})(ChangeFeedResourceType || (ChangeFeedResourceType = {}));
//# sourceMappingURL=ChangeFeedEnums.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedEnums.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedEnums.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,sBAGX;AAHD,WAAY,sBAAsB;IAChC,6EAAS,CAAA;IACT,mFAAY,CAAA;AACd,CAAC,EAHW,sBAAsB,KAAtB,sBAAsB,QAGjC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Enum to specify the resource for which change feed is being fetched.\n */\nexport enum ChangeFeedResourceType {\n FeedRange,\n PartitionKey,\n}\n"]}

View File

@@ -0,0 +1,72 @@
import { ChangeFeedIteratorResponse } from "./ChangeFeedIteratorResponse";
import { Resource } from "../../client";
import { ChangeFeedPullModelIterator } from "./ChangeFeedPullModelIterator";
/**
* @hidden
* Provides iterator for change feed for entire container or an epk range.
*
* Use `Items.getChangeFeedIterator()` to get an instance of the iterator.
*/
export declare class ChangeFeedForEpkRange<T> implements ChangeFeedPullModelIterator<T> {
    private clientContext;
    private container;
    private partitionKeyRangeCache;
    private resourceId;
    private resourceLink;
    private url;
    private changeFeedOptions;
    private epkRange;
    /** Parsed composite continuation token supplied at creation, if any. */
    private continuationToken?;
    /** Queue of feed ranges to iterate over, breadth-first. */
    private queue;
    private startTime;
    /** True once the feed-range queue has been filled. */
    private isInstantiated;
    /** Cached resource id (_rid) of the container, used to validate continuation tokens. */
    private rId;
    /** Reads the container definition and caches its _rid. */
    private setIteratorRid;
    /** Checks that the continuation token's rid matches the container's rid. */
    private continuationTokenRidMatchContainerRid;
    /** Fills the queue from the continuation token or from overlapping feed ranges. */
    private fillChangeFeedQueue;
    /**
     * Fill the queue with the feed ranges overlapping with the given epk range.
     */
    private fetchOverLappingFeedRanges;
    /**
     * Fill the queue with feed ranges from continuation token
     */
    private fetchContinuationTokenFeedRanges;
    /**
     * Change feed is an infinite feed. hasMoreResults is always true.
     */
    get hasMoreResults(): boolean;
    /**
     * Gets an async iterator which will yield change feed results.
     */
    getAsyncIterator(): AsyncIterable<ChangeFeedIteratorResponse<Array<T & Resource>>>;
    /**
     * Gets an async iterator which will yield pages of results from Azure Cosmos DB.
     *
     * Keeps iterating over the feedranges and checks if any feed range has new result. Keeps note of the last feed range which returned non 304 result.
     *
     * When same feed range is reached and no new changes are found, a 304 (not Modified) is returned to the end user. Then starts process all over again.
     */
    readNext(): Promise<ChangeFeedIteratorResponse<Array<T & Resource>>>;
    /** Serializes the rid and queue snapshot into a composite continuation token. */
    private generateContinuationToken;
    /**
     * Read feed and retrieves the next page of results in Azure Cosmos DB.
     */
    private fetchNext;
    /** Returns true once iteration has wrapped back to the first not-modified feed range. */
    private checkedAllFeedRanges;
    /**
     * Checks whether the current EpkRange is split into multiple ranges or not.
     *
     * If yes, it force refreshes the partitionKeyRange cache and enqueue children epk ranges.
     */
    private shouldRetryOnFailure;
    /** Enqueues the children feed ranges resulting from a split/merge. */
    private handleSplit;
    /**
     * Fetch the partitionKeyRangeId for the given feed range.
     *
     * This partitionKeyRangeId is passed to queryFeed to fetch the results.
     */
    private getPartitionRangeId;
    /** Issues the change-feed query for one feed range and wraps the result/error in a response. */
    private getFeedResponse;
}
//# sourceMappingURL=ChangeFeedForEpkRange.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedForEpkRange.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedForEpkRange.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,0BAA0B,EAAE,MAAM,8BAA8B,CAAC;AAI1E,OAAO,EAAa,QAAQ,EAAE,MAAM,cAAc,CAAC;AAInD,OAAO,EAAE,2BAA2B,EAAE,MAAM,+BAA+B,CAAC;AAK5E;;;;;GAKG;AACH,qBAAa,qBAAqB,CAAC,CAAC,CAAE,YAAW,2BAA2B,CAAC,CAAC,CAAC;IAU3E,OAAO,CAAC,aAAa;IACrB,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,sBAAsB;IAC9B,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,iBAAiB;IACzB,OAAO,CAAC,QAAQ;IAhBlB,OAAO,CAAC,iBAAiB,CAAC,CAA6B;IACvD,OAAO,CAAC,KAAK,CAAkC;IAC/C,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,cAAc,CAAU;IAChC,OAAO,CAAC,GAAG,CAAS;YAwBN,cAAc;IAK5B,OAAO,CAAC,qCAAqC;YAO/B,mBAAmB;IAWjC;;OAEG;YACW,0BAA0B;IAyBxC;;OAEG;YACW,gCAAgC;IAwC9C;;OAEG;IACH,IAAI,cAAc,IAAI,OAAO,CAE5B;IAED;;OAEG;IACW,gBAAgB,IAAI,aAAa,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;IAOhG;;;;;;OAMG;IACU,QAAQ,IAAI,OAAO,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;IAqCjF,OAAO,CAAC,yBAAyB,CAE/B;IAEF;;OAEG;YACW,SAAS;IAgCvB,OAAO,CAAC,oBAAoB;IAW5B;;;;OAIG;YACW,oBAAoB;YA2CpB,WAAW;IA4CzB;;;;OAIG;YACW,mBAAmB;YAuBnB,eAAe;CA4D9B"}

View File

@@ -0,0 +1,313 @@
import { __asyncGenerator, __await } from "tslib";
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { ChangeFeedRange } from "./ChangeFeedRange";
import { ChangeFeedIteratorResponse } from "./ChangeFeedIteratorResponse";
import { QueryRange } from "../../routing";
import { FeedRangeQueue } from "./FeedRangeQueue";
import { Constants, SubStatusCodes, StatusCodes, ResourceType } from "../../common";
import { ErrorResponse } from "../../request";
import { CompositeContinuationToken } from "./CompositeContinuationToken";
import { extractOverlappingRanges } from "./changeFeedUtils";
import { getEmptyCosmosDiagnostics, withDiagnostics } from "../../utils/diagnostics";
/**
* @hidden
* Provides iterator for change feed for entire container or an epk range.
*
* Use `Items.getChangeFeedIterator()` to get an instance of the iterator.
*/
export class ChangeFeedForEpkRange {
    /**
     * @internal
     */
    constructor(clientContext, container, partitionKeyRangeCache, resourceId, resourceLink, url, changeFeedOptions, epkRange) {
        this.clientContext = clientContext;
        this.container = container;
        this.partitionKeyRangeCache = partitionKeyRangeCache;
        this.resourceId = resourceId;
        this.resourceLink = resourceLink;
        this.url = url;
        this.changeFeedOptions = changeFeedOptions;
        this.epkRange = epkRange;
        // Serializes the container rid together with a snapshot of the feed-range
        // queue as the composite continuation token returned to the caller.
        this.generateContinuationToken = () => {
            return JSON.stringify(new CompositeContinuationToken(this.rId, this.queue.returnSnapshot()));
        };
        this.queue = new FeedRangeQueue();
        this.continuationToken = changeFeedOptions.continuationToken
            ? JSON.parse(changeFeedOptions.continuationToken)
            : undefined;
        this.startTime = changeFeedOptions.startTime
            ? changeFeedOptions.startTime.toUTCString()
            : undefined;
        this.isInstantiated = false;
    }
    /**
     * Reads the container definition and caches its resource id (_rid) for
     * continuation-token validation and token generation.
     */
    async setIteratorRid(diagnosticNode) {
        const { resource } = await this.container.readInternal(diagnosticNode);
        this.rId = resource._rid;
    }
    /**
     * Returns true when the rid stored in the continuation token matches the rid
     * of the container this iterator was created for.
     */
    continuationTokenRidMatchContainerRid() {
        if (this.continuationToken.rid !== this.rId) {
            return false;
        }
        return true;
    }
    /**
     * Lazily fills the internal feed-range queue: from the continuation token when
     * resuming, otherwise from the ranges overlapping the configured epk range.
     */
    async fillChangeFeedQueue(diagnosticNode) {
        if (this.continuationToken) {
            // fill the queue with feed ranges in continuation token.
            await this.fetchContinuationTokenFeedRanges(diagnosticNode);
        }
        else {
            // fill the queue with feed ranges overlapping the given epk range.
            await this.fetchOverLappingFeedRanges(diagnosticNode);
        }
        this.isInstantiated = true;
    }
    /**
     * Fill the queue with the feed ranges overlapping with the given epk range.
     */
    async fetchOverLappingFeedRanges(diagnosticNode) {
        try {
            const overLappingRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, this.epkRange, diagnosticNode);
            for (const overLappingRange of overLappingRanges) {
                const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(this.epkRange, overLappingRange);
                const feedRange = new ChangeFeedRange(overLappingRange.minInclusive, overLappingRange.maxExclusive, "", epkMinHeader, epkMaxHeader);
                this.queue.enqueue(feedRange);
            }
        }
        catch (err) {
            throw new ErrorResponse(err.message);
        }
    }
    /**
     * Fill the queue with feed ranges from continuation token
     */
    async fetchContinuationTokenFeedRanges(diagnosticNode) {
        const contToken = this.continuationToken;
        if (!this.continuationTokenRidMatchContainerRid()) {
            throw new ErrorResponse("The continuation token is not for the current container definition");
        }
        else {
            for (const cToken of contToken.Continuation) {
                const queryRange = new QueryRange(cToken.minInclusive, cToken.maxExclusive, true, false);
                try {
                    const overLappingRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode);
                    for (const overLappingRange of overLappingRanges) {
                        // check if the epk range present in continuation token entirely covers the overlapping range.
                        // If yes, minInclusive and maxExclusive of the overlapping range will be set.
                        // If no, i.e. there is only partial overlap, epkMinHeader and epkMaxHeader are set as min and max of overlap.
                        // This will be used when we make a call to fetch change feed.
                        const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(queryRange, overLappingRange);
                        const feedRange = new ChangeFeedRange(overLappingRange.minInclusive, overLappingRange.maxExclusive, cToken.continuationToken, epkMinHeader, epkMaxHeader);
                        this.queue.enqueue(feedRange);
                    }
                }
                catch (err) {
                    throw new ErrorResponse(err.message);
                }
            }
        }
    }
    /**
     * Change feed is an infinite feed. hasMoreResults is always true.
     */
    get hasMoreResults() {
        return true;
    }
    /**
     * Gets an async iterator which will yield change feed results.
     */
    getAsyncIterator() {
        return __asyncGenerator(this, arguments, function* getAsyncIterator_1() {
            do {
                const result = yield __await(this.readNext());
                yield yield __await(result);
            } while (this.hasMoreResults);
        });
    }
    /**
     * Gets an async iterator which will yield pages of results from Azure Cosmos DB.
     *
     * Keeps iterating over the feedranges and checks if any feed range has new result. Keeps note of the last feed range which returned non 304 result.
     *
     * When same feed range is reached and no new changes are found, a 304 (not Modified) is returned to the end user. Then starts process all over again.
     */
    async readNext() {
        return withDiagnostics(async (diagnosticNode) => {
            // validate if the internal queue is filled up with feed ranges.
            if (!this.isInstantiated) {
                await this.setIteratorRid(diagnosticNode);
                await this.fillChangeFeedQueue(diagnosticNode);
            }
            // stores the last feedRange for which statusCode is not 304 i.e. there were new changes in that feed range.
            let firstNotModifiedFeedRange = undefined;
            let result;
            do {
                const [processedFeedRange, response] = await this.fetchNext(diagnosticNode);
                result = response;
                if (result !== undefined) {
                    {
                        if (firstNotModifiedFeedRange === undefined) {
                            firstNotModifiedFeedRange = processedFeedRange;
                        }
                        // move current feed range to end of queue to fetch result of next feed range.
                        // This is done to fetch changes in breadth first manner and avoid starvation.
                        this.queue.moveFirstElementToTheEnd();
                        // check if there are new results for the given feed range.
                        if (result.statusCode === StatusCodes.Ok) {
                            result.headers[Constants.HttpHeaders.ContinuationToken] =
                                this.generateContinuationToken();
                            return result;
                        }
                    }
                }
            } while (!this.checkedAllFeedRanges(firstNotModifiedFeedRange));
            // set the continuation token after processing.
            result.headers[Constants.HttpHeaders.ContinuationToken] = this.generateContinuationToken();
            return result;
        }, this.clientContext);
    }
    /**
     * Read feed and retrieves the next page of results in Azure Cosmos DB.
     */
    async fetchNext(diagnosticNode) {
        const feedRange = this.queue.peek();
        if (feedRange) {
            // fetch results for feed range at the beginning of the queue.
            const result = await this.getFeedResponse(feedRange, diagnosticNode);
            // check if results need to be fetched again depending on status code returned.
            // Eg. in case of paritionSplit, results need to be fetched for the child partitions.
            const shouldRetry = await this.shouldRetryOnFailure(feedRange, result, diagnosticNode);
            if (shouldRetry) {
                this.queue.dequeue();
                return this.fetchNext(diagnosticNode);
            }
            else {
                // update the continuation value for the current feed range.
                const continuationValueForFeedRange = result.headers[Constants.HttpHeaders.ETag];
                const newFeedRange = this.queue.peek();
                newFeedRange.continuationToken = continuationValueForFeedRange;
                return [[newFeedRange.minInclusive, newFeedRange.maxExclusive], result];
            }
        }
        else {
            return [[undefined, undefined], undefined];
        }
    }
    /**
     * Returns true once iteration has wrapped back around to the first feed range
     * that returned no new changes, i.e. every range in the queue has been checked.
     */
    checkedAllFeedRanges(firstNotModifiedFeedRange) {
        if (firstNotModifiedFeedRange === undefined) {
            return false;
        }
        const feedRangeQueueFirstElement = this.queue.peek();
        return (firstNotModifiedFeedRange[0] === (feedRangeQueueFirstElement === null || feedRangeQueueFirstElement === void 0 ? void 0 : feedRangeQueueFirstElement.minInclusive) &&
            firstNotModifiedFeedRange[1] === (feedRangeQueueFirstElement === null || feedRangeQueueFirstElement === void 0 ? void 0 : feedRangeQueueFirstElement.maxExclusive));
    }
    /**
     * Checks whether the current EpkRange is split into multiple ranges or not.
     *
     * If yes, it force refreshes the partitionKeyRange cache and enqueue children epk ranges.
     */
    async shouldRetryOnFailure(feedRange, response, diagnosticNode) {
        if (response.statusCode === StatusCodes.Ok || response.statusCode === StatusCodes.NotModified) {
            return false;
        }
        const partitionSplit = response.statusCode === StatusCodes.Gone &&
            (response.subStatusCode === SubStatusCodes.PartitionKeyRangeGone ||
                response.subStatusCode === SubStatusCodes.CompletingSplit);
        if (partitionSplit) {
            const queryRange = new QueryRange(feedRange.minInclusive, feedRange.maxExclusive, true, false);
            const resolvedRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode, true);
            if (resolvedRanges.length < 1) {
                throw new ErrorResponse("Partition split/merge detected but no overlapping ranges found.");
            }
            // This covers both cases of merge and split.
            // resolvedRanges.length > 1 in case of split.
            // resolvedRanges.length === 1 in case of merge. EpkRange headers will be added in this case.
            if (resolvedRanges.length >= 1) {
                await this.handleSplit(false, resolvedRanges, queryRange, feedRange.continuationToken);
            }
            return true;
        }
        return false;
    }
    /*
     * Enqueues all the children feed ranges for the given feed range.
     */
    async handleSplit(shiftLeft, resolvedRanges, oldFeedRange, continuationToken) {
        let flag = 0;
        if (shiftLeft) {
            // This section is only applicable when handleSplit is called by getPartitionRangeId().
            // used only when existing partition key range cache is used to check for any overlapping ranges.
            // Modifies the first element with the first overlapping range.
            const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(oldFeedRange, resolvedRanges[0]);
            const newFeedRange = new ChangeFeedRange(resolvedRanges[0].minInclusive, resolvedRanges[0].maxExclusive, continuationToken, epkMinHeader, epkMaxHeader);
            this.queue.modifyFirstElement(newFeedRange);
            flag = 1;
        }
        // Enqueue the overlapping ranges.
        for (let i = flag; i < resolvedRanges.length; i++) {
            const [epkMinHeader, epkMaxHeader] = await extractOverlappingRanges(oldFeedRange, resolvedRanges[i]);
            const newFeedRange = new ChangeFeedRange(resolvedRanges[i].minInclusive, resolvedRanges[i].maxExclusive, continuationToken, epkMinHeader, epkMaxHeader);
            this.queue.enqueue(newFeedRange);
        }
    }
    /**
     * Fetch the partitionKeyRangeId for the given feed range.
     *
     * This partitionKeyRangeId is passed to queryFeed to fetch the results.
     */
    async getPartitionRangeId(feedRange, diagnosticNode) {
        const min = feedRange.epkMinHeader ? feedRange.epkMinHeader : feedRange.minInclusive;
        const max = feedRange.epkMaxHeader ? feedRange.epkMaxHeader : feedRange.maxExclusive;
        const queryRange = new QueryRange(min, max, true, false);
        const resolvedRanges = await this.partitionKeyRangeCache.getOverlappingRanges(this.url, queryRange, diagnosticNode, false);
        if (resolvedRanges.length < 1) {
            throw new ErrorResponse("No overlapping ranges found.");
        }
        const firstResolvedRange = resolvedRanges[0];
        if (resolvedRanges.length > 1) {
            await this.handleSplit(true, resolvedRanges, queryRange, feedRange.continuationToken);
        }
        return firstResolvedRange.id;
    }
    /**
     * Issues the change-feed query for a single feed range and wraps the outcome
     * (including errors such as partition gone/split) in a ChangeFeedIteratorResponse.
     */
    async getFeedResponse(feedRange, diagnosticNode) {
        const feedOptions = { initialHeaders: {}, useIncrementalFeed: true };
        if (typeof this.changeFeedOptions.maxItemCount === "number") {
            feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount;
        }
        if (this.changeFeedOptions.sessionToken) {
            feedOptions.sessionToken = this.changeFeedOptions.sessionToken;
        }
        if (feedRange.continuationToken) {
            feedOptions.accessCondition = {
                type: Constants.HttpHeaders.IfNoneMatch,
                condition: feedRange.continuationToken,
            };
        }
        if (this.startTime) {
            feedOptions.initialHeaders[Constants.HttpHeaders.IfModifiedSince] = this.startTime;
        }
        const rangeId = await this.getPartitionRangeId(feedRange, diagnosticNode);
        try {
            // startEpk and endEpk are only valid in case we want to fetch result for a part of partition and not the entire partition.
            const response = await this.clientContext.queryFeed({
                path: this.resourceLink,
                resourceType: ResourceType.item,
                resourceId: this.resourceId,
                resultFn: (result) => (result ? result.Documents : []),
                query: undefined,
                options: feedOptions,
                diagnosticNode,
                partitionKey: undefined,
                partitionKeyRangeId: rangeId,
                startEpk: feedRange.epkMinHeader,
                endEpk: feedRange.epkMaxHeader,
            });
            return new ChangeFeedIteratorResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics());
        }
        catch (err) {
            // If any errors are encountered, eg. partition split or gone, handle it based on error code and not break the flow.
            return new ChangeFeedIteratorResponse([], 0, err.code, err.headers, getEmptyCosmosDiagnostics(), err.substatus);
        }
    }
}
//# sourceMappingURL=ChangeFeedForEpkRange.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,42 @@
import { ChangeFeedIteratorResponse } from "./ChangeFeedIteratorResponse";
import { Resource } from "../../client";
import { ChangeFeedPullModelIterator } from "./ChangeFeedPullModelIterator";
/**
* @hidden
* Provides iterator for change feed for one partition key.
*
* Use `Items.getChangeFeedIterator()` to get an instance of the iterator.
*/
export declare class ChangeFeedForPartitionKey<T> implements ChangeFeedPullModelIterator<T> {
    /** Client context used to issue change-feed queryFeed requests. */
    private clientContext;
    /** Container whose change feed is read; used to resolve its `_rid`. */
    private container;
    /** Resource id of the container, passed through to queryFeed. */
    private resourceId;
    /** Resource link (path) of the container, passed through to queryFeed. */
    private resourceLink;
    /** Partition key whose changes this iterator yields. */
    private partitionKey;
    /** Iterator-level options (maxItemCount, sessionToken, startTime, continuation). */
    private changeFeedOptions;
    /** Parsed continuation token; created on first use when none was supplied. */
    private continuationToken;
    /** If-Modified-Since value derived from `changeFeedOptions.startTime`. */
    private startTime;
    /** `_rid` of the container, fetched lazily to validate continuation tokens. */
    private rId;
    /** True once the iterator has been initialized (rid fetched, token checked). */
    private isInstantiated;
    /** One-time setup: fetches the container rid and validates/creates the token. */
    private instantiateIterator;
    /** Checks the continuation token was issued for this container (by rid). */
    private continuationTokenRidMatchContainerRid;
    /** Reads the container to capture its `_rid`. */
    private setIteratorRid;
    /**
     * Change feed is an infinite feed. hasMoreResults is always true.
     */
    get hasMoreResults(): boolean;
    /**
     * Gets an async iterator which will yield change feed results.
     */
    getAsyncIterator(): AsyncIterable<ChangeFeedIteratorResponse<Array<T & Resource>>>;
    /**
     * Returns the result of change feed from Azure Cosmos DB.
     */
    readNext(): Promise<ChangeFeedIteratorResponse<Array<T & Resource>>>;
    /**
     * Read feed and retrieves the next set of results in Azure Cosmos DB.
     */
    private fetchNext;
    /** Issues one queryFeed request and wraps the page in a response object. */
    private getFeedResponse;
}
//# sourceMappingURL=ChangeFeedForPartitionKey.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedForPartitionKey.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedForPartitionKey.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,0BAA0B,EAAE,MAAM,8BAA8B,CAAC;AAC1E,OAAO,EAAa,QAAQ,EAAE,MAAM,cAAc,CAAC;AAKnD,OAAO,EAAE,2BAA2B,EAAE,MAAM,+BAA+B,CAAC;AAI5E;;;;;GAKG;AACH,qBAAa,yBAAyB,CAAC,CAAC,CAAE,YAAW,2BAA2B,CAAC,CAAC,CAAC;IAS/E,OAAO,CAAC,aAAa;IACrB,OAAO,CAAC,SAAS;IACjB,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,YAAY;IACpB,OAAO,CAAC,iBAAiB;IAb3B,OAAO,CAAC,iBAAiB,CAAmC;IAC5D,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,GAAG,CAAS;IACpB,OAAO,CAAC,cAAc,CAAU;YAsBlB,mBAAmB;IAiBjC,OAAO,CAAC,qCAAqC;YAO/B,cAAc;IAK5B;;OAEG;IACH,IAAI,cAAc,IAAI,OAAO,CAE5B;IAED;;OAEG;IACW,gBAAgB,IAAI,aAAa,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;IAOhG;;OAEG;IACU,QAAQ,IAAI,OAAO,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;IAUjF;;OAEG;YACW,SAAS;YAWT,eAAe;CA4C9B"}

View File

@@ -0,0 +1,123 @@
import { __asyncGenerator, __await } from "tslib";
import { ChangeFeedIteratorResponse } from "./ChangeFeedIteratorResponse";
import { Constants, ResourceType } from "../../common";
import { ErrorResponse } from "../../request";
import { ContinuationTokenForPartitionKey } from "./ContinuationTokenForPartitionKey";
import { getEmptyCosmosDiagnostics, withDiagnostics } from "../../utils/diagnostics";
/**
* @hidden
* Provides iterator for change feed for one partition key.
*
* Use `Items.getChangeFeedIterator()` to get an instance of the iterator.
*/
export class ChangeFeedForPartitionKey {
    /**
     * @internal
     */
    constructor(clientContext, container, resourceId, resourceLink, partitionKey, changeFeedOptions) {
        this.clientContext = clientContext;
        this.container = container;
        this.resourceId = resourceId;
        this.resourceLink = resourceLink;
        this.partitionKey = partitionKey;
        this.changeFeedOptions = changeFeedOptions;
        // A caller-supplied continuation token arrives JSON-encoded; parse it so
        // its rid can be validated against the container on first use.
        this.continuationToken = changeFeedOptions.continuationToken
            ? JSON.parse(changeFeedOptions.continuationToken)
            : undefined;
        this.isInstantiated = false;
        if (changeFeedOptions.startTime) {
            // Sent later as the If-Modified-Since header.
            this.startTime = changeFeedOptions.startTime.toUTCString();
        }
    }
    // One-time setup before the first page is fetched: capture the container
    // rid, then validate the supplied continuation token or create a new one.
    async instantiateIterator(diagnosticNode) {
        await this.setIteratorRid(diagnosticNode);
        if (this.continuationToken) {
            if (!this.continuationTokenRidMatchContainerRid()) {
                throw new ErrorResponse("The continuation is not for the current container definition.");
            }
        }
        else {
            this.continuationToken = new ContinuationTokenForPartitionKey(this.rId, this.partitionKey, "");
        }
        this.isInstantiated = true;
    }
    // A continuation token is only valid for the container it was issued for;
    // compare its rid against the current container's rid.
    continuationTokenRidMatchContainerRid() {
        if (this.continuationToken.rid !== this.rId) {
            return false;
        }
        return true;
    }
    // Reads the container to capture its _rid.
    async setIteratorRid(diagnosticNode) {
        const { resource } = await this.container.readInternal(diagnosticNode);
        this.rId = resource._rid;
    }
    /**
     * Change feed is an infinite feed. hasMoreResults is always true.
     */
    get hasMoreResults() {
        return true;
    }
    /**
     * Gets an async iterator which will yield change feed results.
     */
    getAsyncIterator() {
        // Downlevel-compiled async generator (tslib helpers); yields one page of
        // results per readNext() call, forever.
        return __asyncGenerator(this, arguments, function* getAsyncIterator_1() {
            do {
                const result = yield __await(this.readNext());
                yield yield __await(result);
            } while (this.hasMoreResults);
        });
    }
    /**
     * Returns the result of change feed from Azure Cosmos DB.
     */
    async readNext() {
        return withDiagnostics(async (diagnosticNode) => {
            if (!this.isInstantiated) {
                await this.instantiateIterator(diagnosticNode);
            }
            const result = await this.fetchNext(diagnosticNode);
            return result;
        }, this.clientContext);
    }
    /**
     * Read feed and retrieves the next set of results in Azure Cosmos DB.
     */
    async fetchNext(diagnosticNode) {
        const response = await this.getFeedResponse(diagnosticNode);
        // The ETag of this page becomes the next continuation point; expose the
        // full (JSON-encoded) token to callers via the continuation header.
        this.continuationToken.Continuation = response.headers[Constants.HttpHeaders.ETag];
        response.headers[Constants.HttpHeaders.ContinuationToken] = JSON.stringify(this.continuationToken);
        return response;
    }
    // Issues a single change-feed queryFeed request scoped to the partition key
    // and wraps the resulting page in a ChangeFeedIteratorResponse.
    async getFeedResponse(diagnosticNode) {
        const feedOptions = { initialHeaders: {}, useIncrementalFeed: true };
        if (typeof this.changeFeedOptions.maxItemCount === "number") {
            feedOptions.maxItemCount = this.changeFeedOptions.maxItemCount;
        }
        if (this.changeFeedOptions.sessionToken) {
            feedOptions.sessionToken = this.changeFeedOptions.sessionToken;
        }
        const continuation = this.continuationToken.Continuation;
        if (continuation) {
            // If-None-Match with the stored ETag asks only for changes after it.
            feedOptions.accessCondition = {
                type: Constants.HttpHeaders.IfNoneMatch,
                condition: continuation,
            };
        }
        if (this.startTime) {
            feedOptions.initialHeaders[Constants.HttpHeaders.IfModifiedSince] = this.startTime;
        }
        const response = await this.clientContext.queryFeed({
            path: this.resourceLink,
            resourceType: ResourceType.item,
            resourceId: this.resourceId,
            resultFn: (result) => (result ? result.Documents : []),
            diagnosticNode,
            query: undefined,
            options: feedOptions,
            partitionKey: this.partitionKey,
        });
        return new ChangeFeedIteratorResponse(response.result, response.result ? response.result.length : 0, response.code, response.headers, getEmptyCosmosDiagnostics());
    }
}
//# sourceMappingURL=ChangeFeedForPartitionKey.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,21 @@
import { ChangeFeedStartFrom } from "./ChangeFeedStartFrom";
/**
* Specifies options for the change feed
*
* If none of those options are set, it will start reading changes from now for the entire container.
*/
export interface ChangeFeedIteratorOptions {
    /**
     * Max amount of items to return per page.
     */
    maxItemCount?: number;
    /**
     * The session token to use. If not specified, will use the most recent captured session token to start with.
     */
    sessionToken?: string;
    /**
     * Signals where to start from in the change feed. Defaults to reading changes from now for the entire container.
     */
    changeFeedStartFrom?: ChangeFeedStartFrom;
}
//# sourceMappingURL=ChangeFeedIteratorOptions.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedIteratorOptions.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedIteratorOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAE5D;;;;GAIG;AACH,MAAM,WAAW,yBAAyB;IACxC;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,mBAAmB,CAAC,EAAE,mBAAmB,CAAC;CAC3C"}

View File

@@ -0,0 +1,4 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
export {};
//# sourceMappingURL=ChangeFeedIteratorOptions.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedIteratorOptions.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedIteratorOptions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ChangeFeedStartFrom } from \"./ChangeFeedStartFrom\";\n\n/**\n * Specifies options for the change feed\n *\n * If none of those options are set, it will start reading changes from now for the entire container.\n */\nexport interface ChangeFeedIteratorOptions {\n /**\n * Max amount of items to return per page\n */\n maxItemCount?: number;\n /**\n * The session token to use. If not specified, will use the most recent captured session token to start with.\n */\n sessionToken?: string;\n /**\n * Signals where to start from in the change feed.\n */\n changeFeedStartFrom?: ChangeFeedStartFrom;\n}\n"]}

View File

@@ -0,0 +1,48 @@
import { CosmosDiagnostics } from "../../CosmosDiagnostics";
import { CosmosHeaders } from "../../queryExecutionContext";
/**
* A single response page from the Azure Cosmos DB Change Feed
*/
export declare class ChangeFeedIteratorResponse<T> {
    /**
     * Gets the items returned in the response from Azure Cosmos DB.
     */
    readonly result: T;
    /**
     * Gets the number of items returned in the response from Azure Cosmos DB.
     */
    readonly count: number;
    /**
     * Gets the HTTP status code of the response from Azure Cosmos DB.
     */
    readonly statusCode: number;
    /**
     * Cosmos Diagnostic Object.
     */
    readonly diagnostics: CosmosDiagnostics;
    /**
     * Gets the subStatusCodes of the response from Azure Cosmos DB. Useful in partition split or partition gone.
     */
    readonly subStatusCode?: number;
    /**
     * Gets the request charge (in request units) for this request from the Azure Cosmos DB service.
     */
    get requestCharge(): number;
    /**
     * Gets the activity ID for the request from the Azure Cosmos DB service.
     */
    get activityId(): string;
    /**
     * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.
     */
    get continuationToken(): string;
    /**
     * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.
     */
    get sessionToken(): string;
    /**
     * Response headers of the response from Azure Cosmos DB.
     */
    headers: CosmosHeaders;
}
//# sourceMappingURL=ChangeFeedIteratorResponse.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedIteratorResponse.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedIteratorResponse.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAE5D,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAE5D;;GAEG;AACH,qBAAa,0BAA0B,CAAC,CAAC;IAKrC;;OAEG;aACa,MAAM,EAAE,CAAC;IACzB;;OAEG;aACa,KAAK,EAAE,MAAM;IAC7B;;OAEG;aACa,UAAU,EAAE,MAAM;IAKlC;;OAEG;aACa,WAAW,EAAE,iBAAiB;IAC9C;;OAEG;aACa,aAAa,CAAC,EAAE,MAAM;IAKxC;;OAEG;IACH,IAAW,aAAa,IAAI,MAAM,CAGjC;IAED;;OAEG;IACH,IAAW,UAAU,IAAI,MAAM,CAE9B;IAED;;OAEG;IACH,IAAW,iBAAiB,IAAI,MAAM,CAErC;IACD;;OAEG;IACH,IAAW,YAAY,IAAI,MAAM,CAEhC;IACD;;OAEG;IACI,OAAO,EAAE,aAAa,CAAC;CAC/B"}

View File

@@ -0,0 +1,67 @@
import { Constants } from "../../common";
/**
* A single response page from the Azure Cosmos DB Change Feed
*/
export class ChangeFeedIteratorResponse {
    /**
     * @internal
     *
     * @param result - The items returned in the response from Azure Cosmos DB.
     * @param count - The number of items returned in the response.
     * @param statusCode - The HTTP status code of the response.
     * @param headers - Headers related to Cosmos DB and change feed.
     * @param diagnostics - Cosmos Diagnostic Object.
     * @param subStatusCode - Sub-status code of the response. Useful in partition split or partition gone.
     */
    constructor(result, count, statusCode, headers, diagnostics, subStatusCode) {
        this.result = result;
        this.count = count;
        this.statusCode = statusCode;
        this.diagnostics = diagnostics;
        this.subStatusCode = subStatusCode;
        this.headers = headers;
    }
    /**
     * Gets the request charge for this request from the Azure Cosmos DB service.
     */
    get requestCharge() {
        const rus = this.headers[Constants.HttpHeaders.RequestCharge];
        // The x-ms-request-charge header carries a fractional RU value (e.g.
        // "2.38"); parseInt would silently truncate it, so parse it as a float.
        return rus ? parseFloat(rus) : null;
    }
    /**
     * Gets the activity ID for the request from the Azure Cosmos DB service.
     */
    get activityId() {
        return this.headers[Constants.HttpHeaders.ActivityId];
    }
    /**
     * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.
     */
    get continuationToken() {
        return this.headers[Constants.HttpHeaders.ContinuationToken];
    }
    /**
     * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.
     */
    get sessionToken() {
        return this.headers[Constants.HttpHeaders.SessionToken];
    }
}
//# sourceMappingURL=ChangeFeedIteratorResponse.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedIteratorResponse.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedIteratorResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC;;GAEG;AACH,MAAM,OAAO,0BAA0B;IACrC;;OAEG;IACH;IACE;;OAEG;IACa,MAAS;IACzB;;OAEG;IACa,KAAa;IAC7B;;OAEG;IACa,UAAkB;IAClC;;OAEG;IACH,OAAsB;IACtB;;OAEG;IACa,WAA8B;IAC9C;;OAEG;IACa,aAAsB;QApBtB,WAAM,GAAN,MAAM,CAAG;QAIT,UAAK,GAAL,KAAK,CAAQ;QAIb,eAAU,GAAV,UAAU,CAAQ;QAQlB,gBAAW,GAAX,WAAW,CAAmB;QAI9B,kBAAa,GAAb,aAAa,CAAS;QAEtC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED;;OAEG;IACH,IAAW,aAAa;QACtB,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC;QAC9D,OAAO,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACxC,CAAC;IAED;;OAEG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,iBAAiB,CAAC,CAAC;IAC/D,CAAC;IACD;;OAEG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;IAC1D,CAAC;CAKF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { CosmosDiagnostics } from \"../../CosmosDiagnostics\";\nimport { Constants } from \"../../common\";\nimport { CosmosHeaders } from \"../../queryExecutionContext\";\n\n/**\n * A single response page from the Azure Cosmos DB Change Feed\n */\nexport class ChangeFeedIteratorResponse<T> {\n /**\n * @internal\n */\n constructor(\n /**\n * Gets the items returned in the response from Azure Cosmos DB\n */\n public readonly result: T,\n /**\n * Gets the number of items returned in the response from Azure Cosmos DB\n */\n public readonly count: number,\n /**\n * Gets the status code of the response from Azure Cosmos DB\n */\n public readonly statusCode: number,\n /**\n * Headers related to cosmos DB and change feed.\n */\n headers: CosmosHeaders,\n /**\n * Cosmos Diagnostic Object.\n */\n public 
readonly diagnostics: CosmosDiagnostics,\n /**\n * Gets the subStatusCodes of the response from Azure Cosmos DB. Useful in partition split or partition gone.\n */\n public readonly subStatusCode?: number,\n ) {\n this.headers = headers;\n }\n\n /**\n * Gets the request charge for this request from the Azure Cosmos DB service.\n */\n public get requestCharge(): number {\n const rus = this.headers[Constants.HttpHeaders.RequestCharge];\n return rus ? parseInt(rus, 10) : null;\n }\n\n /**\n * Gets the activity ID for the request from the Azure Cosmos DB service.\n */\n public get activityId(): string {\n return this.headers[Constants.HttpHeaders.ActivityId];\n }\n\n /**\n * Gets the continuation token to be used for continuing enumeration of the Azure Cosmos DB service.\n */\n public get continuationToken(): string {\n return this.headers[Constants.HttpHeaders.ContinuationToken];\n }\n /**\n * Gets the session token for use in session consistency reads from the Azure Cosmos DB service.\n */\n public get sessionToken(): string {\n return this.headers[Constants.HttpHeaders.SessionToken];\n }\n /**\n * Response headers of the response from Azure Cosmos DB\n */\n public headers: CosmosHeaders;\n}\n"]}

View File

@@ -0,0 +1,27 @@
import { Resource } from "../Resource";
import { ChangeFeedIteratorResponse } from "./ChangeFeedIteratorResponse";
/**
* Use `Items.getChangeFeedIterator()` to return an iterator that can iterate over all the changes for a partition key, feed range or an entire container.
*/
export interface ChangeFeedPullModelIterator<T> {
    /**
     * Always returns true, since change feed is an infinite stream.
     */
    readonly hasMoreResults: boolean;
    /**
     * Returns the next set of results for the change feed.
     */
    readNext(): Promise<ChangeFeedIteratorResponse<Array<T & Resource>>>;
    /**
     * Gets an async iterator which will yield change feed results.
     * @example Get change feed for an entire container from now
     * ```typescript
     * const options = { changeFeedStartFrom: ChangeFeedStartFrom.Now() };
     * for await(const res of container.items.getChangeFeedIterator(options).getAsyncIterator()) {
     *   //process res
     * }
     * ```
     */
    getAsyncIterator(): AsyncIterable<ChangeFeedIteratorResponse<Array<T & Resource>>>;
}
//# sourceMappingURL=ChangeFeedPullModelIterator.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedPullModelIterator.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedPullModelIterator.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,0BAA0B,EAAE,MAAM,8BAA8B,CAAC;AAC1E;;GAEG;AACH,MAAM,WAAW,2BAA2B,CAAC,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,cAAc,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;IACrE;;;;;;;;;OASG;IACH,gBAAgB,IAAI,aAAa,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;CACpF"}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=ChangeFeedPullModelIterator.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedPullModelIterator.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedPullModelIterator.ts"],"names":[],"mappings":"","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { Resource } from \"../Resource\";\nimport { ChangeFeedIteratorResponse } from \"./ChangeFeedIteratorResponse\";\n/**\n * Use `Items.getChangeFeedIterator()` to return an iterator that can iterate over all the changes for a partition key, feed range or an entire container.\n */\nexport interface ChangeFeedPullModelIterator<T> {\n /**\n * Always returns true, changefeed is an infinite stream.\n */\n readonly hasMoreResults: boolean;\n /**\n * Returns next set of results for the change feed.\n */\n readNext(): Promise<ChangeFeedIteratorResponse<Array<T & Resource>>>;\n /**\n * Gets an async iterator which will yield change feed results.\n * @example Get changefeed for an entire container from now\n * ```typescript\n * const options = { changeFeedStartFrom: ChangeFeedStartFrom.Now() };\n * for await(const res of container.items.getChangeFeedIterator(options).getAsyncIterator()) {\n * //process res\n * }\n * ```\n */\n getAsyncIterator(): AsyncIterable<ChangeFeedIteratorResponse<Array<T & Resource>>>;\n}\n"]}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=ChangeFeedRange.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedRange.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedRange.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,16 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/**
* @internal
* FeedRange for which change feed is being requested.
*/
export class ChangeFeedRange {
constructor(minInclusive, maxExclusive, continuationToken, epkMinHeader, epkMaxHeader) {
this.minInclusive = minInclusive;
this.maxExclusive = maxExclusive;
this.continuationToken = continuationToken;
this.epkMinHeader = epkMinHeader;
this.epkMaxHeader = epkMaxHeader;
}
}
//# sourceMappingURL=ChangeFeedRange.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedRange.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedRange.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC;;;GAGG;AACH,MAAM,OAAO,eAAe;IAsB1B,YACE,YAAoB,EACpB,YAAoB,EACpB,iBAA0B,EAC1B,YAAqB,EACrB,YAAqB;QAErB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;QAC3C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * @internal\n * FeedRange for which change feed is being requested.\n */\nexport class ChangeFeedRange {\n /**\n * Min value for the range.\n */\n public minInclusive: string;\n /**\n * Max value for the range.\n */\n public maxExclusive: string;\n /**\n * Continuation token from where to start reading changes.\n */\n public continuationToken?: string;\n /**\n * Min epk value to begin reading changes from in case changefeed of entire partition is not requested.\n */\n public epkMinHeader?: string;\n /**\n * Max epk value to begin reading changes from in case changefeed of entire partition is not requested.\n */\n public epkMaxHeader?: string;\n\n constructor(\n minInclusive: string,\n maxExclusive: string,\n continuationToken?: string,\n epkMinHeader?: string,\n epkMaxHeader?: string,\n ) {\n this.minInclusive = minInclusive;\n this.maxExclusive = maxExclusive;\n this.continuationToken = continuationToken;\n this.epkMinHeader = epkMinHeader;\n this.epkMaxHeader = epkMaxHeader;\n }\n}\n"]}

View File

@@ -0,0 +1,33 @@
import { PartitionKey } from "../../documents";
import { FeedRange } from "./FeedRange";
import { ChangeFeedStartFromNow } from "./ChangeFeedStartFromNow";
import { ChangeFeedStartFromBeginning } from "./ChangeFeedStartFromBeginning";
import { ChangeFeedStartFromTime } from "./ChangeFeedStartFromTime";
import { ChangeFeedStartFromContinuation } from "./ChangeFeedStartFromContinuation";
/**
* Base class for where to start a ChangeFeedIterator.
*/
export declare abstract class ChangeFeedStartFrom {
    /**
     * Returns an object that tells the ChangeFeedIterator to start from the beginning of time.
     * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.
     */
    static Beginning(cfResource?: PartitionKey | FeedRange): ChangeFeedStartFromBeginning;
    /**
     * Returns an object that tells the ChangeFeedIterator to start reading changes from this moment onward.
     * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.
     **/
    static Now(cfResource?: PartitionKey | FeedRange): ChangeFeedStartFromNow;
    /**
     * Returns an object that tells the ChangeFeedIterator to start reading changes from some point in time onward.
     * @param startTime - Date object specifying the time to start reading changes from.
     * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.
     */
    static Time(startTime: Date, cfResource?: PartitionKey | FeedRange): ChangeFeedStartFromTime;
    /**
     * Returns an object that tells the ChangeFeedIterator to start reading changes from a save point.
     * @param continuationToken - The continuation to resume from.
     */
    static Continuation(continuationToken: string): ChangeFeedStartFromContinuation;
}
//# sourceMappingURL=ChangeFeedStartFrom.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFrom.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFrom.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,4BAA4B,EAAE,MAAM,gCAAgC,CAAC;AAC9E,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,+BAA+B,EAAE,MAAM,mCAAmC,CAAC;AAIpF;;GAEG;AAEH,8BAAsB,mBAAmB;IACvC;;;OAGG;WACW,SAAS,CAAC,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS,GAAG,4BAA4B;IAG5F;;;QAGI;WACU,GAAG,CAAC,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS,GAAG,sBAAsB;IAGhF;;;;OAIG;WACW,IAAI,CAChB,SAAS,EAAE,IAAI,EACf,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS,GACpC,uBAAuB;IAU1B;;;OAGG;WACW,YAAY,CAAC,iBAAiB,EAAE,MAAM,GAAG,+BAA+B;CASvF"}

View File

@@ -0,0 +1,56 @@
import { ChangeFeedStartFromNow } from "./ChangeFeedStartFromNow";
import { ChangeFeedStartFromBeginning } from "./ChangeFeedStartFromBeginning";
import { ChangeFeedStartFromTime } from "./ChangeFeedStartFromTime";
import { ChangeFeedStartFromContinuation } from "./ChangeFeedStartFromContinuation";
import { ErrorResponse } from "../../request/ErrorResponse";
import { isNullOrEmpty } from "./changeFeedUtils";
/**
* Base class for where to start a ChangeFeedIterator.
*/
/* eslint-disable @typescript-eslint/no-extraneous-class */
export class ChangeFeedStartFrom {
    /**
     * Returns an object that tells the ChangeFeedIterator to start from the beginning of time.
     * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.
     */
    static Beginning(cfResource) {
        return new ChangeFeedStartFromBeginning(cfResource);
    }
    /**
     * Returns an object that tells the ChangeFeedIterator to start reading changes from this moment onward.
     * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.
     **/
    static Now(cfResource) {
        return new ChangeFeedStartFromNow(cfResource);
    }
    /**
     * Returns an object that tells the ChangeFeedIterator to start reading changes from some point in time onward.
     * @param startTime - Date object specifying the time to start reading changes from.
     * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.
     */
    static Time(startTime, cfResource) {
        // Guard clauses: the start time must be present and must be a real Date.
        if (!startTime) {
            throw new ErrorResponse("startTime must be present");
        }
        if (!(startTime instanceof Date)) {
            throw new ErrorResponse("startTime must be a Date object.");
        }
        return new ChangeFeedStartFromTime(startTime, cfResource);
    }
    /**
     * Returns an object that tells the ChangeFeedIterator to start reading changes from a save point.
     * @param continuationToken - The continuation to resume from.
     */
    static Continuation(continuationToken) {
        if (!continuationToken) {
            throw new ErrorResponse("Argument continuation must be passed.");
        }
        if (isNullOrEmpty(continuationToken)) {
            throw new ErrorResponse("Argument continuationToken must be a non-empty string.");
        }
        return new ChangeFeedStartFromContinuation(continuationToken);
    }
}
//# sourceMappingURL=ChangeFeedStartFrom.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFrom.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFrom.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,4BAA4B,EAAE,MAAM,gCAAgC,CAAC;AAC9E,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,+BAA+B,EAAE,MAAM,mCAAmC,CAAC;AACpF,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD;;GAEG;AACH,2DAA2D;AAC3D,MAAM,OAAgB,mBAAmB;IACvC;;;OAGG;IACI,MAAM,CAAC,SAAS,CAAC,UAAqC;QAC3D,OAAO,IAAI,4BAA4B,CAAC,UAAU,CAAC,CAAC;IACtD,CAAC;IACD;;;QAGI;IACG,MAAM,CAAC,GAAG,CAAC,UAAqC;QACrD,OAAO,IAAI,sBAAsB,CAAC,UAAU,CAAC,CAAC;IAChD,CAAC;IACD;;;;OAIG;IACI,MAAM,CAAC,IAAI,CAChB,SAAe,EACf,UAAqC;QAErC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,aAAa,CAAC,2BAA2B,CAAC,CAAC;SACtD;QACD,IAAI,SAAS,YAAY,IAAI,KAAK,IAAI,EAAE;YACtC,OAAO,IAAI,uBAAuB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC;SAC3D;aAAM;YACL,MAAM,IAAI,aAAa,CAAC,kCAAkC,CAAC,CAAC;SAC7D;IACH,CAAC;IACD;;;OAGG;IACI,MAAM,CAAC,YAAY,CAAC,iBAAyB;QAClD,IAAI,CAAC,iBAAiB,EAAE;YACtB,MAAM,IAAI,aAAa,CAAC,uCAAuC,CAAC,CAAC;SAClE;QACD,IAAI,aAAa,CAAC,iBAAiB,CAAC,EAAE;YACpC,MAAM,IAAI,aAAa,CAAC,wDAAwD,CAAC,CAAC;SACnF;QACD,OAAO,IAAI,+BAA+B,CAAC,iBAAiB,CAAC,CAAC;IAChE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { PartitionKey } from \"../../documents\";\nimport { FeedRange } from \"./FeedRange\";\nimport { ChangeFeedStartFromNow } from \"./ChangeFeedStartFromNow\";\nimport { ChangeFeedStartFromBeginning } from \"./ChangeFeedStartFromBeginning\";\nimport { ChangeFeedStartFromTime } from \"./ChangeFeedStartFromTime\";\nimport { ChangeFeedStartFromContinuation } from \"./ChangeFeedStartFromContinuation\";\nimport { ErrorResponse } from \"../../request/ErrorResponse\";\nimport { isNullOrEmpty } from \"./changeFeedUtils\";\n\n/**\n * Base class for where to start a ChangeFeedIterator.\n */\n/* eslint-disable @typescript-eslint/no-extraneous-class */\nexport abstract class 
ChangeFeedStartFrom {\n /**\n * Returns an object that tells the ChangeFeedIterator to start from the beginning of time.\n * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.\n */\n public static Beginning(cfResource?: PartitionKey | FeedRange): ChangeFeedStartFromBeginning {\n return new ChangeFeedStartFromBeginning(cfResource);\n }\n /**\n * Returns an object that tells the ChangeFeedIterator to start reading changes from this moment onward.\n * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. Leave blank for fetching changes for entire container.\n **/\n public static Now(cfResource?: PartitionKey | FeedRange): ChangeFeedStartFromNow {\n return new ChangeFeedStartFromNow(cfResource);\n }\n /**\n * Returns an object that tells the ChangeFeedIterator to start reading changes from some point in time onward.\n * @param startTime - Date object specfiying the time to start reading changes from.\n * @param cfResource - PartitionKey or FeedRange for which changes are to be fetched. 
Leave blank for fetching changes for entire container.\n */\n public static Time(\n startTime: Date,\n cfResource?: PartitionKey | FeedRange,\n ): ChangeFeedStartFromTime {\n if (!startTime) {\n throw new ErrorResponse(\"startTime must be present\");\n }\n if (startTime instanceof Date === true) {\n return new ChangeFeedStartFromTime(startTime, cfResource);\n } else {\n throw new ErrorResponse(\"startTime must be a Date object.\");\n }\n }\n /**\n * Returns an object that tells the ChangeFeedIterator to start reading changes from a save point.\n * @param continuation - The continuation to resume from.\n */\n public static Continuation(continuationToken: string): ChangeFeedStartFromContinuation {\n if (!continuationToken) {\n throw new ErrorResponse(\"Argument continuation must be passed.\");\n }\n if (isNullOrEmpty(continuationToken)) {\n throw new ErrorResponse(\"Argument continuationToken must be a non-empty string.\");\n }\n return new ChangeFeedStartFromContinuation(continuationToken);\n }\n}\n"]}

View File

@@ -0,0 +1,12 @@
import { PartitionKey } from "../../documents";
import { FeedRange } from "./FeedRange";
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from beginning of time.
*/
export declare class ChangeFeedStartFromBeginning {
    /** Optional PartitionKey or FeedRange the change feed is scoped to. */
    private cfResource?;
    constructor(cfResource?: PartitionKey | FeedRange);
    /** Returns the resource this start position is scoped to, if any. */
    getCfResource(): PartitionKey | FeedRange | undefined;
}
//# sourceMappingURL=ChangeFeedStartFromBeginning.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromBeginning.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromBeginning.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;;GAGG;AACH,qBAAa,4BAA4B;IACvC,OAAO,CAAC,UAAU,CAAC,CAA2B;gBAElC,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS;IAI1C,aAAa,IAAI,YAAY,GAAG,SAAS,GAAG,SAAS;CAG7D"}

View File

@@ -0,0 +1,13 @@
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from beginning of time.
*/
export class ChangeFeedStartFromBeginning {
    // Remember the optional scoping resource (PartitionKey or FeedRange);
    // undefined means the change feed targets the whole container.
    constructor(resource) {
        this.cfResource = resource;
    }
    // Hand the scoping resource (or undefined) back to the iterator builder.
    getCfResource() {
        return this.cfResource;
    }
}
//# sourceMappingURL=ChangeFeedStartFromBeginning.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromBeginning.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromBeginning.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,MAAM,OAAO,4BAA4B;IAGvC,YAAY,UAAqC;QAC/C,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAEM,aAAa;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { PartitionKey } from \"../../documents\";\nimport { FeedRange } from \"./FeedRange\";\n\n/**\n * @hidden\n * Class which specifies the ChangeFeedIterator to start reading changes from beginning of time.\n */\nexport class ChangeFeedStartFromBeginning {\n private cfResource?: PartitionKey | FeedRange;\n\n constructor(cfResource?: PartitionKey | FeedRange) {\n this.cfResource = cfResource;\n }\n\n public getCfResource(): PartitionKey | FeedRange | undefined {\n return this.cfResource;\n }\n}\n"]}

View File

@@ -0,0 +1,12 @@
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from a saved point.
*/
export declare class ChangeFeedStartFromContinuation {
    /** Raw continuation string produced by a previous change feed iterator. */
    private continuationToken;
    /** @param continuation - The continuation token to resume from. */
    constructor(continuation: string);
    /** Returns the raw continuation token string. */
    getCfResource(): string;
    /** Returns the continuation token parsed as JSON; JSON.parse throws on invalid input. */
    getCfResourceJson(): any;
    /** Classifies the token (ChangeFeedResourceType.PartitionKey or .FeedRange); throws on unrecognized token shapes. */
    getResourceType(): any;
}
//# sourceMappingURL=ChangeFeedStartFromContinuation.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromContinuation.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromContinuation.ts"],"names":[],"mappings":"AAIA;;;GAGG;AACH,qBAAa,+BAA+B;IAC1C,OAAO,CAAC,iBAAiB,CAAS;gBAEtB,YAAY,EAAE,MAAM;IAIzB,aAAa,IAAI,MAAM;IAGvB,iBAAiB,IAAI,GAAG;IAIxB,eAAe,IAAI,GAAG;CAkB9B"}

View File

@@ -0,0 +1,36 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { ErrorResponse } from "../../request";
import { ChangeFeedResourceType } from "./ChangeFeedEnums";
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from a saved point.
*/
export class ChangeFeedStartFromContinuation {
    // Keep the raw continuation string produced by a previous iterator.
    constructor(continuation) {
        this.continuationToken = continuation;
    }
    // The raw continuation token string.
    getCfResource() {
        return this.continuationToken;
    }
    // The token parsed as JSON; JSON.parse throws if the token is not valid JSON.
    getCfResourceJson() {
        return JSON.parse(this.continuationToken);
    }
    // Classify the token: a partition-key token carries a `partitionKey` field
    // plus a string `Continuation`; an EPK-range token carries a non-empty
    // `Continuation` array. Anything else is rejected.
    getResourceType() {
        const token = this.getCfResourceJson();
        const has = (key) => Object.prototype.hasOwnProperty.call(token, key);
        if (has("partitionKey") && has("Continuation") && typeof token.Continuation === "string") {
            return ChangeFeedResourceType.PartitionKey;
        }
        if (has("Continuation") && Array.isArray(token.Continuation) && token.Continuation.length > 0) {
            return ChangeFeedResourceType.FeedRange;
        }
        throw new ErrorResponse("Invalid continuation token.");
    }
}
//# sourceMappingURL=ChangeFeedStartFromContinuation.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromContinuation.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromContinuation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAC;AAC9C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mBAAmB,CAAC;AAC3D;;;GAGG;AACH,MAAM,OAAO,+BAA+B;IAG1C,YAAY,YAAoB;QAC9B,IAAI,CAAC,iBAAiB,GAAG,YAAY,CAAC;IACxC,CAAC;IAEM,aAAa;QAClB,OAAO,IAAI,CAAC,iBAAiB,CAAC;IAChC,CAAC;IACM,iBAAiB;QACtB,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IAC5C,CAAC;IAEM,eAAe;QACpB,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACxC,IACE,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,MAAM,EAAE,cAAc,CAAC;YAC5D,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,MAAM,EAAE,cAAc,CAAC;YAC5D,OAAO,MAAM,CAAC,YAAY,KAAK,QAAQ,EACvC;YACA,OAAO,sBAAsB,CAAC,YAAY,CAAC;SAC5C;aAAM,IACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,MAAM,EAAE,cAAc,CAAC;YAC5D,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,YAAY,CAAC;YAClC,MAAM,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,EAC9B;YACA,OAAO,sBAAsB,CAAC,SAAS,CAAC;SACzC;aAAM;YACL,MAAM,IAAI,aAAa,CAAC,6BAA6B,CAAC,CAAC;SACxD;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { ErrorResponse } from \"../../request\";\nimport { ChangeFeedResourceType } from \"./ChangeFeedEnums\";\n/**\n * @hidden\n * Class which specifies the ChangeFeedIterator to start reading changes from a saved point.\n */\nexport class ChangeFeedStartFromContinuation {\n private continuationToken: string;\n\n constructor(continuation: string) {\n this.continuationToken = continuation;\n }\n\n public getCfResource(): string {\n return this.continuationToken;\n }\n public getCfResourceJson(): any {\n return JSON.parse(this.continuationToken);\n }\n\n public getResourceType(): any {\n const cToken = this.getCfResourceJson();\n if (\n Object.prototype.hasOwnProperty.call(cToken, \"partitionKey\") &&\n Object.prototype.hasOwnProperty.call(cToken, \"Continuation\") &&\n typeof cToken.Continuation === \"string\"\n ) {\n 
return ChangeFeedResourceType.PartitionKey;\n } else if (\n Object.prototype.hasOwnProperty.call(cToken, \"Continuation\") &&\n Array.isArray(cToken.Continuation) &&\n cToken.Continuation.length > 0\n ) {\n return ChangeFeedResourceType.FeedRange;\n } else {\n throw new ErrorResponse(\"Invalid continuation token.\");\n }\n }\n}\n"]}

View File

@@ -0,0 +1,12 @@
import { PartitionKey } from "../../documents";
import { FeedRange } from "./FeedRange";
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from this moment in time.
*/
export declare class ChangeFeedStartFromNow {
    /** PartitionKey or FeedRange the change feed is scoped to; undefined means the entire container. */
    cfResource?: PartitionKey | FeedRange;
    /** @param cfResource - PartitionKey or FeedRange for which changes are fetched. Leave blank for the entire container. */
    constructor(cfResource?: PartitionKey | FeedRange);
    /** Returns the scoping resource, or undefined when the whole container is targeted. */
    getCfResource(): PartitionKey | FeedRange | undefined;
}
//# sourceMappingURL=ChangeFeedStartFromNow.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromNow.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromNow.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC;;;GAGG;AACH,qBAAa,sBAAsB;IAC1B,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;gBAEjC,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS;IAI1C,aAAa,IAAI,YAAY,GAAG,SAAS,GAAG,SAAS;CAG7D"}

View File

@@ -0,0 +1,15 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from this moment in time.
*/
export class ChangeFeedStartFromNow {
    // Remember the optional scoping resource (PartitionKey or FeedRange);
    // undefined means the change feed targets the whole container.
    constructor(resource) {
        this.cfResource = resource;
    }
    // Hand the scoping resource (or undefined) back to the iterator builder.
    getCfResource() {
        return this.cfResource;
    }
}
//# sourceMappingURL=ChangeFeedStartFromNow.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromNow.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromNow.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;GAGG;AACH,MAAM,OAAO,sBAAsB;IAGjC,YAAY,UAAqC;QAC/C,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAEM,aAAa;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PartitionKey } from \"../../documents\";\nimport { FeedRange } from \"./FeedRange\";\n/**\n * @hidden\n * Class which specifies the ChangeFeedIterator to start reading changes from this moment in time.\n */\nexport class ChangeFeedStartFromNow {\n public cfResource?: PartitionKey | FeedRange;\n\n constructor(cfResource?: PartitionKey | FeedRange) {\n this.cfResource = cfResource;\n }\n\n public getCfResource(): PartitionKey | FeedRange | undefined {\n return this.cfResource;\n }\n}\n"]}

View File

@@ -0,0 +1,14 @@
import { PartitionKey } from "../../documents";
import { FeedRange } from "./FeedRange";
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from a particular point of time.
*/
export declare class ChangeFeedStartFromTime {
    /** PartitionKey or FeedRange the change feed is scoped to; undefined means the entire container. */
    private cfResource?;
    /** Point in time changes should be read from. */
    private startTime;
    /**
     * @param startTime - Date specifying the time to start reading changes from.
     * @param cfResource - PartitionKey or FeedRange for which changes are fetched. Leave blank for the entire container.
     */
    constructor(startTime: Date, cfResource?: PartitionKey | FeedRange);
    /** Returns the scoping resource, or undefined when the whole container is targeted. */
    getCfResource(): PartitionKey | FeedRange | undefined;
    /** Returns the Date changes are read from. */
    getStartTime(): Date;
}
//# sourceMappingURL=ChangeFeedStartFromTime.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromTime.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromTime.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC;;;GAGG;AACH,qBAAa,uBAAuB;IAClC,OAAO,CAAC,UAAU,CAAC,CAA2B;IAE9C,OAAO,CAAC,SAAS,CAAO;gBAEZ,SAAS,EAAE,IAAI,EAAE,UAAU,CAAC,EAAE,YAAY,GAAG,SAAS;IAK3D,aAAa,IAAI,YAAY,GAAG,SAAS,GAAG,SAAS;IAIrD,YAAY,IAAI,IAAI;CAG5B"}

View File

@@ -0,0 +1,17 @@
/**
* @hidden
* Class which specifies the ChangeFeedIterator to start reading changes from a particular point of time.
*/
export class ChangeFeedStartFromTime {
    // startTime: Date to read changes from; resource optionally scopes the
    // feed to a PartitionKey/FeedRange (undefined = whole container).
    constructor(startTime, resource) {
        this.startTime = startTime;
        this.cfResource = resource;
    }
    // Scoping resource, or undefined for the entire container.
    getCfResource() {
        return this.cfResource;
    }
    // The Date to start reading changes from.
    getStartTime() {
        return this.startTime;
    }
}
//# sourceMappingURL=ChangeFeedStartFromTime.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ChangeFeedStartFromTime.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ChangeFeedStartFromTime.ts"],"names":[],"mappings":"AAIA;;;GAGG;AACH,MAAM,OAAO,uBAAuB;IAKlC,YAAY,SAAe,EAAE,UAAqC;QAChE,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAEM,aAAa;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAEM,YAAY;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { PartitionKey } from \"../../documents\";\nimport { FeedRange } from \"./FeedRange\";\n/**\n * @hidden\n * Class which specifies the ChangeFeedIterator to start reading changes from a particular point of time.\n */\nexport class ChangeFeedStartFromTime {\n private cfResource?: PartitionKey | FeedRange;\n\n private startTime: Date;\n\n constructor(startTime: Date, cfResource?: PartitionKey | FeedRange) {\n this.startTime = startTime;\n this.cfResource = cfResource;\n }\n\n public getCfResource(): PartitionKey | FeedRange | undefined {\n return this.cfResource;\n }\n\n public getStartTime(): Date {\n return this.startTime;\n }\n}\n"]}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=CompositeContinuationToken.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CompositeContinuationToken.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/CompositeContinuationToken.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,11 @@
/**
* Continuation token for change feed of entire container, or a specific Epk Range.
* @internal
*/
export class CompositeContinuationToken {
    // rid: rid of the container the token was issued for.
    // Continuation: the EPK ranges that make up the token.
    constructor(rid, Continuation) {
        Object.assign(this, { rid, Continuation });
    }
}
//# sourceMappingURL=CompositeContinuationToken.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CompositeContinuationToken.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/CompositeContinuationToken.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,OAAO,0BAA0B;IAUrC,YAAY,GAAW,EAAE,YAA+B;QACtD,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { ChangeFeedRange } from \"./ChangeFeedRange\";\n/**\n * Continuation token for change feed of entire container, or a specific Epk Range.\n * @internal\n */\nexport class CompositeContinuationToken {\n /**\n * rid of the container for which the continuation token is issued.\n */\n public readonly rid: string;\n /**\n * List of Epk Ranges part of the continuation token\n */\n public readonly Continuation: ChangeFeedRange[];\n\n constructor(rid: string, Continuation: ChangeFeedRange[]) {\n this.rid = rid;\n this.Continuation = Continuation;\n }\n}\n"]}

View File

@@ -0,0 +1,2 @@
export {};
//# sourceMappingURL=ContinuationTokenForPartitionKey.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ContinuationTokenForPartitionKey.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ContinuationTokenForPartitionKey.ts"],"names":[],"mappings":""}

View File

@@ -0,0 +1,12 @@
/**
* Continuation token for change feed of entire container, or a specific Epk Range.
* @internal
*/
export class ContinuationTokenForPartitionKey {
    // rid: rid of the container the token was issued for.
    // partitionKey: the specific key the token covers.
    // continuation: continuation value for that key (stored as `Continuation`).
    constructor(rid, partitionKey, continuation) {
        Object.assign(this, { rid, partitionKey, Continuation: continuation });
    }
}
//# sourceMappingURL=ContinuationTokenForPartitionKey.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ContinuationTokenForPartitionKey.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/ContinuationTokenForPartitionKey.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,OAAO,gCAAgC;IAc3C,YAAY,GAAW,EAAE,YAA0B,EAAE,YAAoB;QACvE,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { PartitionKey } from \"../../documents\";\n/**\n * Continuation token for change feed of entire container, or a specific Epk Range.\n * @internal\n */\nexport class ContinuationTokenForPartitionKey {\n /**\n * rid of the container for which the continuation token is issued.\n */\n public readonly rid: string;\n /**\n * A specific Partition key in the container for which the continuation token is issued.\n */\n public readonly partitionKey: PartitionKey;\n /**\n * Continuation value for the specific Partition key in the container.\n */\n public Continuation: string;\n\n constructor(rid: string, partitionKey: PartitionKey, continuation: string) {\n this.rid = rid;\n this.partitionKey = partitionKey;\n this.Continuation = continuation;\n }\n}\n"]}

View File

@@ -0,0 +1,21 @@
/**
* Specifies a feed range for the changefeed.
*/
export declare abstract class FeedRange {
    /**
     * Inclusive minimum value of the feed range.
     */
    readonly minInclusive: string;
    /**
     * Exclusive maximum value of the feed range.
     */
    readonly maxExclusive: string;
}
/**
 * @hidden
 * Concrete feed range used internally; FeedRange itself cannot be constructed directly.
 */
export declare class FeedRangeInternal extends FeedRange {
    /**
     * @param minInclusive - Inclusive minimum of the range.
     * @param maxExclusive - Exclusive maximum of the range.
     */
    constructor(minInclusive: string, maxExclusive: string);
}
//# sourceMappingURL=FeedRange.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"FeedRange.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/FeedRange.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,8BAAsB,SAAS;IAC7B;;OAEG;IACH,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;IAC9B;;OAEG;IACH,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;CAa/B;AAED;;;GAGG;AACH,qBAAa,iBAAkB,SAAQ,SAAS;gBAElC,YAAY,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM;CAGvD"}

View File

@@ -0,0 +1,30 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { ErrorResponse } from "../../request";
/**
* Specifies a feed range for the changefeed.
*/
export class FeedRange {
    /**
     * @internal
     * Stores the [minInclusive, maxExclusive) boundaries of the range.
     */
    constructor(min, max) {
        // JS has no `abstract` keyword; checking new.target is the only runtime
        // way to explicitly reject direct construction of the base class while
        // still letting subclasses call through.
        if (new.target === FeedRange) {
            throw new ErrorResponse("Cannot instantiate abstract class FeedRange");
        }
        this.minInclusive = min;
        this.maxExclusive = max;
    }
}
/**
 * @hidden
 * Concrete feed range used internally by the SDK.
 */
export class FeedRangeInternal extends FeedRange {
    /* eslint-disable @typescript-eslint/no-useless-constructor */
    constructor(min, max) {
        super(min, max);
    }
}
//# sourceMappingURL=FeedRange.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"FeedRange.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/FeedRange.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAC;AAE9C;;GAEG;AACH,MAAM,OAAgB,SAAS;IAS7B;;OAEG;IACH,YAAsB,YAAoB,EAAE,YAAoB;QAC9D,2EAA2E;QAC3E,IAAI,GAAG,CAAC,MAAM,KAAK,SAAS,EAAE;YAC5B,MAAM,IAAI,aAAa,CAAC,6CAA6C,CAAC,CAAC;SACxE;QAED,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,iBAAkB,SAAQ,SAAS;IAC9C,8DAA8D;IAC9D,YAAY,YAAoB,EAAE,YAAoB;QACpD,KAAK,CAAC,YAAY,EAAE,YAAY,CAAC,CAAC;IACpC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ErrorResponse } from \"../../request\";\n\n/**\n * Specifies a feed range for the changefeed.\n */\nexport abstract class FeedRange {\n /**\n * Min value for the feed range.\n */\n readonly minInclusive: string;\n /**\n * Max value for the feed range.\n */\n readonly maxExclusive: string;\n /**\n * @internal\n */\n protected constructor(minInclusive: string, maxExclusive: string) {\n // only way to explictly block users from creating FeedRange directly in JS\n if (new.target === FeedRange) {\n throw new ErrorResponse(\"Cannot instantiate abstract class FeedRange\");\n }\n\n this.minInclusive = minInclusive;\n this.maxExclusive = maxExclusive;\n }\n}\n\n/**\n * @hidden\n * Specifies a feed range for the changefeed.\n */\nexport class FeedRangeInternal extends FeedRange {\n /* eslint-disable @typescript-eslint/no-useless-constructor */\n constructor(minInclusive: string, maxExclusive: string) {\n super(minInclusive, maxExclusive);\n }\n}\n"]}

View File

@@ -0,0 +1,20 @@
import { ChangeFeedRange } from "./ChangeFeedRange";
/**
* @hidden
* A queue for iterating over specified Epk ranges and fetch change feed for the given epk ranges.
*/
export declare class FeedRangeQueue<T> {
    /** Backing array of feed ranges; front of the queue is index 0. */
    private elements;
    constructor();
    /** Replaces the range at the front of the queue; no-op when the queue is empty. */
    modifyFirstElement(newItem: ChangeFeedRange): void;
    /** Appends a range at the back of the queue. */
    enqueue(item: ChangeFeedRange): void;
    /** Removes and returns the front range (undefined when the queue is empty). */
    dequeue(): ChangeFeedRange;
    /** Returns the front range without removing it (undefined when the queue is empty). */
    peek(): T | ChangeFeedRange;
    /** True when the queue holds no ranges. */
    isEmpty(): boolean;
    /** Rotates the queue by moving the front range to the back; no-op when empty. */
    moveFirstElementToTheEnd(): void;
    /**
     * Returns a snapshot of the queue as an array to be used as Continuation token.
     */
    returnSnapshot(): ChangeFeedRange[];
}
//# sourceMappingURL=FeedRangeQueue.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"FeedRangeQueue.d.ts","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/FeedRangeQueue.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD;;;GAGG;AACH,qBAAa,cAAc,CAAC,CAAC;IAC3B,OAAO,CAAC,QAAQ,CAAoB;;IAM7B,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;IAMlD,OAAO,CAAC,IAAI,EAAE,eAAe,GAAG,IAAI;IAIpC,OAAO,IAAI,eAAe;IAI1B,IAAI,IAAI,CAAC,GAAG,eAAe;IAI3B,OAAO,IAAI,OAAO;IAIlB,wBAAwB,IAAI,IAAI;IAKvC;;OAEG;IACI,cAAc,IAAI,eAAe,EAAE;CAc3C"}

View File

@@ -0,0 +1,48 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { ChangeFeedRange } from "./ChangeFeedRange";
/**
* @hidden
* A queue for iterating over specified Epk ranges and fetch change feed for the given epk ranges.
*/
export class FeedRangeQueue {
    constructor() {
        // Backing array of ChangeFeedRange entries; front of the queue is index 0.
        this.elements = [];
    }
    // Replace the range at the front of the queue in place (used to persist an
    // updated continuation for the range currently being drained). No-op when empty.
    modifyFirstElement(newItem) {
        if (!this.isEmpty()) {
            this.elements[0] = newItem;
        }
    }
    // Append a range at the back of the queue.
    enqueue(item) {
        this.elements.push(item);
    }
    // Remove and return the front range (undefined when the queue is empty).
    dequeue() {
        return this.elements.shift();
    }
    // Return the front range without removing it (undefined when the queue is empty).
    peek() {
        return !this.isEmpty() ? this.elements[0] : undefined;
    }
    // True when the queue holds no ranges.
    isEmpty() {
        return this.elements.length === 0;
    }
    // Rotate the queue: move the front range to the back so iteration
    // round-robins across all ranges. No-op when empty.
    moveFirstElementToTheEnd() {
        if (!this.isEmpty()) {
            this.elements.push(this.dequeue());
        }
    }
    /**
     * Returns a snapshot of the queue as an array to be used as Continuation token.
     * Epk headers, when present on an element, take precedence over the range's
     * own minInclusive/maxExclusive bounds.
     */
    returnSnapshot() {
        // for..of instead of Array.map: the original mapped purely for side
        // effects, allocating an array whose result was discarded.
        const allFeedRanges = [];
        for (const element of this.elements) {
            const minInclusive = element.epkMinHeader ? element.epkMinHeader : element.minInclusive;
            const maxExclusive = element.epkMaxHeader ? element.epkMaxHeader : element.maxExclusive;
            allFeedRanges.push(new ChangeFeedRange(minInclusive, maxExclusive, element.continuationToken));
        }
        return allFeedRanges;
    }
}
//# sourceMappingURL=FeedRangeQueue.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"FeedRangeQueue.js","sourceRoot":"","sources":["../../../../src/client/ChangeFeed/FeedRangeQueue.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD;;;GAGG;AACH,MAAM,OAAO,cAAc;IAGzB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;IACrB,CAAC;IAEM,kBAAkB,CAAC,OAAwB;QAChD,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE;YACnB,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC;SAC5B;IACH,CAAC;IAEM,OAAO,CAAC,IAAqB;QAClC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC3B,CAAC;IAEM,OAAO;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;IAC/B,CAAC;IAEM,IAAI;QACT,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACxD,CAAC;IAEM,OAAO;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC;IACpC,CAAC;IAEM,wBAAwB;QAC7B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE;YACnB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC;SACpC;IACH,CAAC;IACD;;OAEG;IACI,cAAc;QACnB,MAAM,aAAa,GAAsB,EAAE,CAAC;QAC5C,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,EAAE;YAC5B,MAAM,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC;YACxF,MAAM,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC;YACxF,MAAM,gBAAgB,GAAG,IAAI,eAAe,CAC1C,YAAY,EACZ,YAAY,EACZ,OAAO,CAAC,iBAAiB,CAC1B,CAAC;YACF,aAAa,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QACvC,CAAC,CAAC,CAAC;QACH,OAAO,aAAa,CAAC;IACvB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { ChangeFeedRange } from \"./ChangeFeedRange\";\n/**\n * @hidden\n * A queue for iterating over specified Epk ranges and fetch change feed for the given epk ranges.\n */\nexport class FeedRangeQueue<T> {\n private elements: ChangeFeedRange[];\n\n constructor() {\n this.elements = [];\n }\n\n public modifyFirstElement(newItem: ChangeFeedRange): void {\n if (!this.isEmpty()) {\n this.elements[0] = newItem;\n }\n }\n\n public enqueue(item: ChangeFeedRange): void {\n 
this.elements.push(item);\n }\n\n public dequeue(): ChangeFeedRange {\n return this.elements.shift();\n }\n\n public peek(): T | ChangeFeedRange {\n return !this.isEmpty() ? this.elements[0] : undefined;\n }\n\n public isEmpty(): boolean {\n return this.elements.length === 0;\n }\n\n public moveFirstElementToTheEnd(): void {\n if (!this.isEmpty()) {\n this.elements.push(this.dequeue());\n }\n }\n /**\n * Returns a snapshot of the queue as an array to be used as Continuation token.\n */\n public returnSnapshot(): ChangeFeedRange[] {\n const allFeedRanges: ChangeFeedRange[] = [];\n this.elements.map((element) => {\n const minInclusive = element.epkMinHeader ? element.epkMinHeader : element.minInclusive;\n const maxExclusive = element.epkMaxHeader ? element.epkMaxHeader : element.maxExclusive;\n const feedRangeElement = new ChangeFeedRange(\n minInclusive,\n maxExclusive,\n element.continuationToken,\n );\n allFeedRanges.push(feedRangeElement);\n });\n return allFeedRanges;\n }\n}\n"]}

View File

@@ -0,0 +1,11 @@
/**
* @hidden
* Internal Change Feed Iterator Options used only by ChangeFeedForEpkRange and ChangeFeedForPartitionKey.
*/
export interface InternalChangeFeedIteratorOptions {
    /** Max number of items returned per page — assumption from name; confirm against usage. */
    maxItemCount?: number;
    /** Session token for the request — NOTE(review): confirm semantics against callers. */
    sessionToken?: string;
    /** Serialized continuation token to resume iteration from. */
    continuationToken?: string;
    /** Point in time to start reading changes from. */
    startTime?: Date;
}
//# sourceMappingURL=InternalChangeFeedOptions.d.ts.map

Some files were not shown because too many files have changed in this diff Show More