mirror of
https://github.com/pikami/cosmium.git
synced 2025-06-08 08:30:24 +01:00
Compare commits
No commits in common. "master" and "v0.1" have entirely different histories.
14
.github/ISSUE_TEMPLATE.md
vendored
14
.github/ISSUE_TEMPLATE.md
vendored
@ -1,14 +0,0 @@
|
|||||||
#### Summary
|
|
||||||
Bug report in one concise sentence
|
|
||||||
|
|
||||||
#### Steps to reproduce
|
|
||||||
How can we reproduce the issue (what version are you using?)
|
|
||||||
|
|
||||||
#### Expected behavior
|
|
||||||
Describe your issue in detail
|
|
||||||
|
|
||||||
#### Observed behavior (that appears unintentional)
|
|
||||||
What did you see happen? Please include relevant error messages.
|
|
||||||
|
|
||||||
#### Possible fixes
|
|
||||||
If you can, link to the line of code that might be responsible for the problem
|
|
18
.github/PULL_REQUEST_TEMPLATE.md
vendored
18
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -1,18 +0,0 @@
|
|||||||
<!-- Thank you for contributing a pull request! Here are a few tips to help you:
|
|
||||||
|
|
||||||
1. If applicable, please check if unit tests are added for new features
|
|
||||||
2. Read the contribution guide lines https://github.com/pikami/cosmium/docs/CONTRIBUTING.md
|
|
||||||
|
|
||||||
-->
|
|
||||||
|
|
||||||
#### Summary
|
|
||||||
<!--
|
|
||||||
A description of what this pull request does, as well as QA test steps (if applicable).
|
|
||||||
-->
|
|
||||||
|
|
||||||
#### Ticket Link
|
|
||||||
<!--
|
|
||||||
If applicable, please include a link to the GitHub issue:
|
|
||||||
|
|
||||||
Fixes https://github.com/pikami/cosmium/issues/XXX
|
|
||||||
-->
|
|
31
.github/workflows/compile-shared-libraries.yml
vendored
31
.github/workflows/compile-shared-libraries.yml
vendored
@ -1,31 +0,0 @@
|
|||||||
name: Cross-Compile Shared Libraries
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Cross-Compile with xgo
|
|
||||||
uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
|
|
||||||
with:
|
|
||||||
xgo_version: latest
|
|
||||||
go_version: 1.24.0
|
|
||||||
dest: dist
|
|
||||||
pkg: sharedlibrary
|
|
||||||
prefix: cosmium
|
|
||||||
targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
|
|
||||||
v: true
|
|
||||||
buildmode: c-shared
|
|
||||||
buildvcs: true
|
|
||||||
|
|
||||||
- name: Upload artifact
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: shared-libraries
|
|
||||||
path: dist/*
|
|
53
.github/workflows/release.yml
vendored
53
.github/workflows/release.yml
vendored
@ -1,53 +0,0 @@
|
|||||||
name: goreleaser
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
- '*'
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
packages: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
goreleaser:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Set up Go
|
|
||||||
uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
go-version: 1.24.0
|
|
||||||
|
|
||||||
- name: Cross-Compile with xgo
|
|
||||||
uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
|
|
||||||
with:
|
|
||||||
xgo_version: latest
|
|
||||||
go_version: 1.24.0
|
|
||||||
dest: sharedlibrary_dist
|
|
||||||
pkg: sharedlibrary
|
|
||||||
prefix: cosmium
|
|
||||||
targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
|
|
||||||
v: true
|
|
||||||
buildmode: c-shared
|
|
||||||
buildvcs: true
|
|
||||||
|
|
||||||
- name: Docker Login
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.actor }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Run GoReleaser
|
|
||||||
uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5
|
|
||||||
with:
|
|
||||||
distribution: goreleaser
|
|
||||||
version: ${{ env.GITHUB_REF_NAME }}
|
|
||||||
args: release --clean
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
|
|
5
.gitignore
vendored
5
.gitignore
vendored
@ -1,7 +1,2 @@
|
|||||||
dist/
|
dist/
|
||||||
sharedlibrary_dist/
|
|
||||||
ignored/
|
ignored/
|
||||||
explorer_www/
|
|
||||||
main
|
|
||||||
save.json
|
|
||||||
.vscode/
|
|
||||||
|
146
.goreleaser.yaml
146
.goreleaser.yaml
@ -1,146 +0,0 @@
|
|||||||
builds:
|
|
||||||
- binary: cosmium
|
|
||||||
main: ./cmd/server
|
|
||||||
goos:
|
|
||||||
- darwin
|
|
||||||
- linux
|
|
||||||
- windows
|
|
||||||
goarch:
|
|
||||||
- amd64
|
|
||||||
- arm64
|
|
||||||
env:
|
|
||||||
- CGO_ENABLED=0
|
|
||||||
|
|
||||||
release:
|
|
||||||
prerelease: auto
|
|
||||||
|
|
||||||
universal_binaries:
|
|
||||||
- replace: true
|
|
||||||
|
|
||||||
brews:
|
|
||||||
- name: cosmium
|
|
||||||
homepage: 'https://github.com/pikami/cosmium'
|
|
||||||
repository:
|
|
||||||
owner: pikami
|
|
||||||
name: homebrew-brew
|
|
||||||
commit_author:
|
|
||||||
name: pikami
|
|
||||||
email: git@pikami.org
|
|
||||||
skip_upload: auto
|
|
||||||
|
|
||||||
archives:
|
|
||||||
- id: bundle
|
|
||||||
format: tar.gz
|
|
||||||
format_overrides:
|
|
||||||
- goos: windows
|
|
||||||
format: zip
|
|
||||||
- id: shared-libraries
|
|
||||||
meta: true
|
|
||||||
format: "tar.gz"
|
|
||||||
wrap_in_directory: true
|
|
||||||
name_template: "{{ .ProjectName }}_{{ .Version }}_shared-libraries"
|
|
||||||
files:
|
|
||||||
- LICENSE
|
|
||||||
- README.md
|
|
||||||
- sharedlibrary_dist/**
|
|
||||||
|
|
||||||
dockers:
|
|
||||||
- id: docker-linux-amd64
|
|
||||||
goos: linux
|
|
||||||
goarch: amd64
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
use: buildx
|
|
||||||
build_flag_templates:
|
|
||||||
- "--platform=linux/amd64"
|
|
||||||
- "--pull"
|
|
||||||
- "--label=org.opencontainers.image.title={{.ProjectName}}"
|
|
||||||
- "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
|
|
||||||
- "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.created={{.Date}}"
|
|
||||||
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
|
|
||||||
- "--label=org.opencontainers.image.version={{.Version}}"
|
|
||||||
- id: docker-linux-arm64
|
|
||||||
goos: linux
|
|
||||||
goarch: arm64
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
use: buildx
|
|
||||||
build_flag_templates:
|
|
||||||
- "--platform=linux/arm64"
|
|
||||||
- "--pull"
|
|
||||||
- "--label=org.opencontainers.image.title={{.ProjectName}}"
|
|
||||||
- "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
|
|
||||||
- "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.created={{.Date}}"
|
|
||||||
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
|
|
||||||
- "--label=org.opencontainers.image.version={{.Version}}"
|
|
||||||
- id: docker-explorer-linux-amd64
|
|
||||||
goos: linux
|
|
||||||
goarch: amd64
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
|
|
||||||
dockerfile: Explorer.Dockerfile
|
|
||||||
use: buildx
|
|
||||||
build_flag_templates:
|
|
||||||
- "--platform=linux/amd64"
|
|
||||||
- "--pull"
|
|
||||||
- "--label=org.opencontainers.image.title={{.ProjectName}}"
|
|
||||||
- "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
|
|
||||||
- "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.created={{.Date}}"
|
|
||||||
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
|
|
||||||
- "--label=org.opencontainers.image.version={{.Version}}"
|
|
||||||
- id: docker-explorer-linux-arm64
|
|
||||||
goos: linux
|
|
||||||
goarch: arm64
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
|
|
||||||
dockerfile: Explorer.Dockerfile
|
|
||||||
use: buildx
|
|
||||||
build_flag_templates:
|
|
||||||
- "--platform=linux/arm64"
|
|
||||||
- "--pull"
|
|
||||||
- "--label=org.opencontainers.image.title={{.ProjectName}}"
|
|
||||||
- "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
|
|
||||||
- "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
|
|
||||||
- "--label=org.opencontainers.image.created={{.Date}}"
|
|
||||||
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
|
|
||||||
- "--label=org.opencontainers.image.version={{.Version}}"
|
|
||||||
|
|
||||||
docker_manifests:
|
|
||||||
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:latest'
|
|
||||||
skip_push: auto
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
|
|
||||||
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}'
|
|
||||||
skip_push: auto
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
|
|
||||||
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:explorer'
|
|
||||||
skip_push: auto
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
|
|
||||||
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer'
|
|
||||||
skip_push: auto
|
|
||||||
image_templates:
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
|
|
||||||
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
|
|
||||||
|
|
||||||
checksum:
|
|
||||||
name_template: 'checksums.txt'
|
|
@ -1,6 +0,0 @@
|
|||||||
FROM alpine:latest
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
COPY cosmium /app/cosmium
|
|
||||||
|
|
||||||
ENTRYPOINT ["/app/cosmium"]
|
|
@ -1,9 +0,0 @@
|
|||||||
FROM ghcr.io/cosmiumdev/cosmos-explorer-base:latest AS explorer-base
|
|
||||||
FROM alpine:latest
|
|
||||||
|
|
||||||
COPY --from=explorer-base /cosmos-explorer /cosmos-explorer
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
COPY cosmium /app/cosmium
|
|
||||||
|
|
||||||
ENTRYPOINT ["/app/cosmium", "-ExplorerDir", "/cosmos-explorer"]
|
|
51
Makefile
51
Makefile
@ -4,69 +4,28 @@ GOTEST=$(GOCMD) test
|
|||||||
GOCLEAN=$(GOCMD) clean
|
GOCLEAN=$(GOCMD) clean
|
||||||
|
|
||||||
BINARY_NAME=cosmium
|
BINARY_NAME=cosmium
|
||||||
SERVER_LOCATION=./cmd/server
|
|
||||||
|
|
||||||
SHARED_LIB_LOCATION=./sharedlibrary
|
|
||||||
SHARED_LIB_OPT=-buildmode=c-shared
|
|
||||||
XGO_TARGETS=linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
|
|
||||||
GOVERSION=1.24.0
|
|
||||||
|
|
||||||
DIST_DIR=dist
|
DIST_DIR=dist
|
||||||
|
|
||||||
SHARED_LIB_TEST_CC=gcc
|
|
||||||
SHARED_LIB_TEST_CFLAGS=-Wall -ldl
|
|
||||||
SHARED_LIB_TEST_TARGET=$(DIST_DIR)/sharedlibrary_test
|
|
||||||
SHARED_LIB_TEST_DIR=./sharedlibrary/tests
|
|
||||||
SHARED_LIB_TEST_SOURCES=$(wildcard $(SHARED_LIB_TEST_DIR)/*.c)
|
|
||||||
|
|
||||||
all: test build-all
|
all: test build-all
|
||||||
|
|
||||||
build-all: build-darwin-arm64 build-darwin-amd64 build-linux-amd64 build-linux-arm64 build-windows-amd64 build-windows-arm64
|
build-all: build-darwin-arm64 build-darwin-amd64 build-linux-amd64 build-windows-amd64
|
||||||
|
|
||||||
build-darwin-arm64:
|
build-darwin-arm64:
|
||||||
@echo "Building macOS ARM binary..."
|
@echo "Building macOS ARM binary..."
|
||||||
@GOOS=darwin GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64 $(SERVER_LOCATION)
|
@GOOS=darwin GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64 .
|
||||||
|
|
||||||
build-darwin-amd64:
|
build-darwin-amd64:
|
||||||
@echo "Building macOS x64 binary..."
|
@echo "Building macOS x64 binary..."
|
||||||
@GOOS=darwin GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-amd64 $(SERVER_LOCATION)
|
@GOOS=darwin GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-amd64 .
|
||||||
|
|
||||||
build-linux-amd64:
|
build-linux-amd64:
|
||||||
@echo "Building Linux x64 binary..."
|
@echo "Building Linux x64 binary..."
|
||||||
@GOOS=linux GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64 $(SERVER_LOCATION)
|
@GOOS=linux GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64 .
|
||||||
|
|
||||||
build-linux-arm64:
|
|
||||||
@echo "Building Linux ARM binary..."
|
|
||||||
@GOOS=linux GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-linux-arm64 $(SERVER_LOCATION)
|
|
||||||
|
|
||||||
build-windows-amd64:
|
build-windows-amd64:
|
||||||
@echo "Building Windows x64 binary..."
|
@echo "Building Windows x64 binary..."
|
||||||
@GOOS=windows GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-amd64.exe $(SERVER_LOCATION)
|
@GOOS=windows GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-amd64.exe .
|
||||||
|
|
||||||
build-windows-arm64:
|
|
||||||
@echo "Building Windows ARM binary..."
|
|
||||||
@GOOS=windows GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-arm64.exe $(SERVER_LOCATION)
|
|
||||||
|
|
||||||
build-sharedlib-linux-amd64:
|
|
||||||
@echo "Building shared library for Linux x64..."
|
|
||||||
@GOOS=linux GOARCH=amd64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so $(SHARED_LIB_LOCATION)
|
|
||||||
|
|
||||||
build-sharedlib-darwin-arm64:
|
|
||||||
@echo "Building shared library for macOS ARM..."
|
|
||||||
@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64.so $(SHARED_LIB_LOCATION)
|
|
||||||
|
|
||||||
build-sharedlib-tests: build-sharedlib-linux-amd64
|
|
||||||
@echo "Building shared library tests..."
|
|
||||||
@$(SHARED_LIB_TEST_CC) $(SHARED_LIB_TEST_CFLAGS) -o $(SHARED_LIB_TEST_TARGET) $(SHARED_LIB_TEST_SOURCES)
|
|
||||||
|
|
||||||
run-sharedlib-tests: build-sharedlib-tests
|
|
||||||
@echo "Running shared library tests..."
|
|
||||||
@$(SHARED_LIB_TEST_TARGET) $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so
|
|
||||||
|
|
||||||
xgo-compile-sharedlib:
|
|
||||||
@echo "Building shared libraries using xgo..."
|
|
||||||
@mkdir -p $(DIST_DIR)
|
|
||||||
@xgo -targets=$(XGO_TARGETS) -go $(GOVERSION) -buildmode=c-shared -dest=$(DIST_DIR) -out=$(BINARY_NAME) -pkg=$(SHARED_LIB_LOCATION) .
|
|
||||||
|
|
||||||
generate-parser-nosql:
|
generate-parser-nosql:
|
||||||
pigeon -o ./parsers/nosql/nosql.go ./parsers/nosql/nosql.peg
|
pigeon -o ./parsers/nosql/nosql.go ./parsers/nosql/nosql.peg
|
||||||
|
95
README.md
95
README.md
@ -1,23 +1,11 @@
|
|||||||
# Cosmium
|
# Cosmium
|
||||||
|
|
||||||
Cosmium is a lightweight Cosmos DB emulator designed to facilitate local development and testing. While it aims to provide developers with a solution for running a local database during development, it's important to note that it's not 100% compatible with Cosmos DB. However, it serves as a convenient tool for E2E or integration tests during the CI/CD pipeline. Read more about compatibility [here](./docs/COMPATIBILITY.md).
|
Cosmium is a lightweight Cosmos DB emulator designed to facilitate local development and testing. While it aims to provide developers with a solution for running a local database during development, it's important to note that it's not 100% compatible with Cosmos DB. However, it serves as a convenient tool for E2E or integration tests during the CI/CD pipeline.
|
||||||
|
|
||||||
One of Cosmium's notable features is its ability to save and load state to a single JSON file. This feature makes it easy to load different test cases or share state with other developers, enhancing collaboration and efficiency in development workflows.
|
One of Cosmium's notable features is its ability to save and load state to a single JSON file. This feature makes it easy to load different test cases or share state with other developers, enhancing collaboration and efficiency in development workflows.
|
||||||
|
|
||||||
# Getting Started
|
# Getting Started
|
||||||
|
### Downloading Cosmium
|
||||||
### Installation via Homebrew
|
|
||||||
|
|
||||||
You can install Cosmium using Homebrew by adding the `pikami/brew` tap and then installing the package.
|
|
||||||
|
|
||||||
```sh
|
|
||||||
brew tap pikami/brew
|
|
||||||
brew install cosmium
|
|
||||||
```
|
|
||||||
|
|
||||||
This will download and install Cosmium on your system, making it easy to manage and update using Homebrew.
|
|
||||||
|
|
||||||
### Downloading Cosmium Binaries
|
|
||||||
|
|
||||||
You can download the latest version of Cosmium from the [GitHub Releases page](https://github.com/pikami/cosmium/releases). Choose the appropriate release for your operating system and architecture.
|
You can download the latest version of Cosmium from the [GitHub Releases page](https://github.com/pikami/cosmium/releases). Choose the appropriate release for your operating system and architecture.
|
||||||
|
|
||||||
@ -25,93 +13,48 @@ You can download the latest version of Cosmium from the [GitHub Releases page](h
|
|||||||
|
|
||||||
Cosmium is available for the following platforms:
|
Cosmium is available for the following platforms:
|
||||||
|
|
||||||
- **Linux**: cosmium-linux-amd64
|
* **Linux**: cosmium-linux-amd64
|
||||||
- **Linux on ARM**: cosmium-linux-arm64
|
* **macOS**: cosmium-darwin-amd64
|
||||||
- **macOS**: cosmium-darwin-amd64
|
* **macOS on Apple Silicon**: cosmium-darwin-arm64
|
||||||
- **macOS on Apple Silicon**: cosmium-darwin-arm64
|
* **Windows**: cosmium-windows-amd64.exe
|
||||||
- **Windows**: cosmium-windows-amd64.exe
|
|
||||||
- **Windows on ARM**: cosmium-windows-arm64.exe
|
|
||||||
|
|
||||||
### Running Cosmium
|
### Running Cosmium
|
||||||
|
|
||||||
Once downloaded, you can launch Cosmium using the following command:
|
Once downloaded, you can launch Cosmium using the following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
cosmium -Persist "./save.json"
|
./cosmium-linux-amd64 \
|
||||||
|
-Cert "cert.crt" \
|
||||||
|
-CertKey "cert.key" \
|
||||||
|
-Persist "./save.json" \
|
||||||
|
-InitialData "./save.json"
|
||||||
```
|
```
|
||||||
|
|
||||||
Connection String Example:
|
Connection String Example:
|
||||||
|
|
||||||
```
|
```
|
||||||
AccountEndpoint=https://localhost:8081/;AccountKey=C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==;
|
AccountEndpoint=https://localhost:8081/;AccountKey=C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==;
|
||||||
```
|
```
|
||||||
|
|
||||||
### Running Cosmos DB Explorer
|
### Running Cosmos DB Explorer
|
||||||
|
|
||||||
If you want to run Cosmos DB Explorer alongside Cosmium, you'll need to build it yourself and point the `-ExplorerDir` argument to the dist directory. Please refer to the [Cosmos DB Explorer repository](https://github.com/Azure/cosmos-explorer) for instructions on building the application.
|
If you want to run Cosmos DB Explorer alongside Cosmium, you'll need to build it yourself and point the `-ExplorerDir` argument to the dist directory. Please refer to the [Cosmos DB Explorer repository](https://github.com/Azure/cosmos-explorer) for instructions on building the application.
|
||||||
|
|
||||||
There's also a prebuilt docker image that includes the explorer: `ghcr.io/pikami/cosmium:explorer`
|
|
||||||
|
|
||||||
Once running, the explorer can be reached by navigating following URL: `https://127.0.0.1:8081/_explorer/` (might be different depending on your configuration).
|
Once running, the explorer can be reached by navigating following URL: `https://127.0.0.1:8081/_explorer/` (might be different depending on your configuration).
|
||||||
|
|
||||||
### Running with docker (optional)
|
|
||||||
|
|
||||||
There are two docker tags available:
|
|
||||||
|
|
||||||
- ghcr.io/pikami/cosmium:latest - Cosmium core service
|
|
||||||
- ghcr.io/pikami/cosmium:explorer - Cosmium with database explorer available on `https://127.0.0.1:8081/_explorer/`
|
|
||||||
|
|
||||||
If you wan to run the application using docker, configure it using environment variables see example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
docker run --rm \
|
|
||||||
-e COSMIUM_PERSIST=/save.json \
|
|
||||||
-v ./save.json:/save.json \
|
|
||||||
-p 8081:8081 \
|
|
||||||
ghcr.io/pikami/cosmium # or `ghcr.io/pikami/cosmium:explorer`
|
|
||||||
```
|
|
||||||
|
|
||||||
### SSL Certificate
|
### SSL Certificate
|
||||||
|
|
||||||
By default, Cosmium uses a pre-generated SSL certificate. You can provide your own certificates by specifying paths to the SSL certificate and key (PEM format) using the `-Cert` and `-CertKey` arguments, respectively.
|
By default, Cosmium runs on HTTP. However, if you provide an SSL certificate, it will use HTTPS. Most applications will require HTTPS, so you can specify paths to the SSL certificate and key (PEM format) using the `-Cert` and `-CertKey` arguments, respectively.
|
||||||
|
|
||||||
To disable SSL and run Cosmium on HTTP instead, you can use the `-DisableTls` flag. However most applications will require HTTPS.
|
|
||||||
|
|
||||||
### Other Available Arguments
|
### Other Available Arguments
|
||||||
|
|
||||||
- **-AccountKey**: Account key for authentication (default "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==")
|
* **-AccountKey**: Account key for authentication (default "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==")
|
||||||
- **-DisableAuth**: Disable authentication
|
* **-DisableAuth**: Disable authentication
|
||||||
- **-Host**: Hostname (default "localhost")
|
* **-Host**: Hostname (default "localhost")
|
||||||
- **-InitialData**: Path to JSON containing initial state
|
* **-InitialData**: Path to JSON containing initial state
|
||||||
- **-Persist**: Saves data to the given path on application exit (When `-InitialData` argument is not supplied, it will try to load data from path supplied in `-Persist`)
|
* **-Persist**: Saves data to the given path on application exit
|
||||||
- **-Port**: Listen port (default 8081)
|
* **-Port**: Listen port (default 8081)
|
||||||
- **-LogLevel**: Sets the logging level (one of: debug, info, error, silent) (default info)
|
|
||||||
- **-DataStore**: Allows selecting [storage backend](#data-storage-backends) (default "json")
|
|
||||||
|
|
||||||
These arguments allow you to configure various aspects of Cosmium's behavior according to your requirements.
|
These arguments allow you to configure various aspects of Cosmium's behavior according to your requirements.
|
||||||
|
|
||||||
All mentioned arguments can also be set using environment variables:
|
|
||||||
|
|
||||||
- **COSMIUM_ACCOUNTKEY** for `-AccountKey`
|
|
||||||
- **COSMIUM_DISABLEAUTH** for `-DisableAuth`
|
|
||||||
- **COSMIUM_HOST** for `-Host`
|
|
||||||
- **COSMIUM_INITIALDATA** for `-InitialData`
|
|
||||||
- **COSMIUM_PERSIST** for `-Persist`
|
|
||||||
- **COSMIUM_PORT** for `-Port`
|
|
||||||
- **COSMIUM_LOGLEVEL** for `-LogLevel`
|
|
||||||
|
|
||||||
### Data Storage Backends
|
|
||||||
|
|
||||||
Cosmium supports multiple storage backends for saving, loading, and managing data at runtime.
|
|
||||||
|
|
||||||
| Backend | Storage Location | Write Behavior | Memory Usage | Supports Initial JSON Load |
|
|
||||||
|----------|--------------------------|--------------------------|----------------------|----------------------------|
|
|
||||||
| `json` (default) | JSON file on disk 📄 | On application exit ⏳ | 🛑 More than Badger | ✅ Yes |
|
|
||||||
| `badger` | BadgerDB database on disk ⚡ | Immediately on write 🚀 | ✅ Less than JSON | ❌ No |
|
|
||||||
|
|
||||||
|
|
||||||
The `badger` backend is generally recommended as it uses less memory and writes data to disk immediately. However, if you need to load initial data from a JSON file, use the `json` backend.
|
|
||||||
|
|
||||||
# License
|
# License
|
||||||
|
|
||||||
This project is [MIT licensed](./LICENSE).
|
This project is [MIT licensed](./LICENSE).
|
||||||
|
@ -1,24 +0,0 @@
|
|||||||
package apimodels
|
|
||||||
|
|
||||||
const (
|
|
||||||
BatchOperationTypeCreate = "Create"
|
|
||||||
BatchOperationTypeDelete = "Delete"
|
|
||||||
BatchOperationTypeReplace = "Replace"
|
|
||||||
BatchOperationTypeUpsert = "Upsert"
|
|
||||||
BatchOperationTypeRead = "Read"
|
|
||||||
BatchOperationTypePatch = "Patch"
|
|
||||||
)
|
|
||||||
|
|
||||||
type BatchOperation struct {
|
|
||||||
OperationType string `json:"operationType"`
|
|
||||||
Id string `json:"id"`
|
|
||||||
ResourceBody map[string]interface{} `json:"resourceBody"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type BatchOperationResult struct {
|
|
||||||
StatusCode int `json:"statusCode"`
|
|
||||||
RequestCharge float64 `json:"requestCharge"`
|
|
||||||
ResourceBody map[string]interface{} `json:"resourceBody"`
|
|
||||||
Etag string `json:"etag"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
}
|
|
@ -1,39 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/gin-gonic/gin"
|
|
||||||
"github.com/pikami/cosmium/api/config"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
)
|
|
||||||
|
|
||||||
type ApiServer struct {
|
|
||||||
stopServer chan interface{}
|
|
||||||
onServerShutdown chan interface{}
|
|
||||||
isActive bool
|
|
||||||
router *gin.Engine
|
|
||||||
config *config.ServerConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewApiServer(dataStore datastore.DataStore, config *config.ServerConfig) *ApiServer {
|
|
||||||
stopChan := make(chan interface{})
|
|
||||||
onServerShutdownChan := make(chan interface{})
|
|
||||||
|
|
||||||
apiServer := &ApiServer{
|
|
||||||
stopServer: stopChan,
|
|
||||||
onServerShutdown: onServerShutdownChan,
|
|
||||||
config: config,
|
|
||||||
}
|
|
||||||
|
|
||||||
apiServer.CreateRouter(dataStore)
|
|
||||||
|
|
||||||
return apiServer
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *ApiServer) GetRouter() *gin.Engine {
|
|
||||||
return s.router
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *ApiServer) Stop() {
|
|
||||||
s.stopServer <- true
|
|
||||||
<-s.onServerShutdown
|
|
||||||
}
|
|
@ -3,24 +3,15 @@ package config
|
|||||||
import (
|
import (
|
||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
DefaultAccountKey = "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
|
DefaultAccountKey = "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
|
||||||
EnvPrefix = "COSMIUM_"
|
|
||||||
ExplorerBaseUrlLocation = "/_explorer"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
var Config = ServerConfig{}
|
||||||
DataStoreJson = "json"
|
|
||||||
DataStoreBadger = "badger"
|
|
||||||
)
|
|
||||||
|
|
||||||
func ParseFlags() ServerConfig {
|
func ParseFlags() {
|
||||||
host := flag.String("Host", "localhost", "Hostname")
|
host := flag.String("Host", "localhost", "Hostname")
|
||||||
port := flag.Int("Port", 8081, "Listen port")
|
port := flag.Int("Port", 8081, "Listen port")
|
||||||
explorerPath := flag.String("ExplorerDir", "", "Path to cosmos-explorer files")
|
explorerPath := flag.String("ExplorerDir", "", "Path to cosmos-explorer files")
|
||||||
@ -29,100 +20,21 @@ func ParseFlags() ServerConfig {
|
|||||||
initialDataPath := flag.String("InitialData", "", "Path to JSON containing initial state")
|
initialDataPath := flag.String("InitialData", "", "Path to JSON containing initial state")
|
||||||
accountKey := flag.String("AccountKey", DefaultAccountKey, "Account key for authentication")
|
accountKey := flag.String("AccountKey", DefaultAccountKey, "Account key for authentication")
|
||||||
disableAuthentication := flag.Bool("DisableAuth", false, "Disable authentication")
|
disableAuthentication := flag.Bool("DisableAuth", false, "Disable authentication")
|
||||||
disableTls := flag.Bool("DisableTls", false, "Disable TLS, serve over HTTP")
|
|
||||||
persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
|
persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
|
||||||
logLevel := NewEnumValue("info", []string{"debug", "info", "error", "silent"})
|
|
||||||
flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))
|
|
||||||
dataStore := NewEnumValue("json", []string{DataStoreJson, DataStoreBadger})
|
|
||||||
flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %s", dataStore.AllowedValuesList()))
|
|
||||||
|
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
setFlagsFromEnvironment()
|
|
||||||
|
|
||||||
config := ServerConfig{}
|
Config.Host = *host
|
||||||
config.Host = *host
|
Config.Port = *port
|
||||||
config.Port = *port
|
Config.ExplorerPath = *explorerPath
|
||||||
config.ExplorerPath = *explorerPath
|
Config.TLS_CertificatePath = *tlsCertificatePath
|
||||||
config.TLS_CertificatePath = *tlsCertificatePath
|
Config.TLS_CertificateKey = *tlsCertificateKey
|
||||||
config.TLS_CertificateKey = *tlsCertificateKey
|
Config.InitialDataFilePath = *initialDataPath
|
||||||
config.InitialDataFilePath = *initialDataPath
|
Config.PersistDataFilePath = *persistDataPath
|
||||||
config.PersistDataFilePath = *persistDataPath
|
Config.DisableAuth = *disableAuthentication
|
||||||
config.DisableAuth = *disableAuthentication
|
|
||||||
config.DisableTls = *disableTls
|
|
||||||
config.AccountKey = *accountKey
|
|
||||||
config.LogLevel = logLevel.value
|
|
||||||
config.DataStore = dataStore.value
|
|
||||||
|
|
||||||
config.PopulateCalculatedFields()
|
Config.DatabaseAccount = Config.Host
|
||||||
|
Config.DatabaseDomain = Config.Host
|
||||||
return config
|
Config.DatabaseEndpoint = fmt.Sprintf("https://%s:%d/", Config.Host, Config.Port)
|
||||||
}
|
Config.AccountKey = *accountKey
|
||||||
|
|
||||||
func (c *ServerConfig) PopulateCalculatedFields() {
|
|
||||||
c.DatabaseAccount = c.Host
|
|
||||||
c.DatabaseDomain = c.Host
|
|
||||||
c.DatabaseEndpoint = fmt.Sprintf("https://%s:%d/", c.Host, c.Port)
|
|
||||||
c.ExplorerBaseUrlLocation = ExplorerBaseUrlLocation
|
|
||||||
|
|
||||||
switch c.LogLevel {
|
|
||||||
case "debug":
|
|
||||||
logger.SetLogLevel(logger.LogLevelDebug)
|
|
||||||
case "info":
|
|
||||||
logger.SetLogLevel(logger.LogLevelInfo)
|
|
||||||
case "error":
|
|
||||||
logger.SetLogLevel(logger.LogLevelError)
|
|
||||||
case "silent":
|
|
||||||
logger.SetLogLevel(logger.LogLevelSilent)
|
|
||||||
default:
|
|
||||||
logger.SetLogLevel(logger.LogLevelInfo)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileInfo, err := os.Stat(c.PersistDataFilePath)
|
|
||||||
if c.PersistDataFilePath != "" && !os.IsNotExist(err) {
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Failed to get file info for persist path:", err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.DataStore == DataStoreJson && fileInfo.IsDir() {
|
|
||||||
logger.ErrorLn("--Persist cannot be a directory when using json data store")
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.DataStore == DataStoreBadger && !fileInfo.IsDir() {
|
|
||||||
logger.ErrorLn("--Persist must be a directory when using Badger data store")
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.DataStore == DataStoreBadger && c.InitialDataFilePath != "" {
|
|
||||||
logger.ErrorLn("InitialData option is currently not supported with Badger data store")
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *ServerConfig) ApplyDefaultsToEmptyFields() {
|
|
||||||
if c.Host == "" {
|
|
||||||
c.Host = "localhost"
|
|
||||||
}
|
|
||||||
if c.Port == 0 {
|
|
||||||
c.Port = 8081
|
|
||||||
}
|
|
||||||
if c.AccountKey == "" {
|
|
||||||
c.AccountKey = DefaultAccountKey
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func setFlagsFromEnvironment() (err error) {
|
|
||||||
flag.VisitAll(func(f *flag.Flag) {
|
|
||||||
name := EnvPrefix + strings.ToUpper(strings.Replace(f.Name, "-", "_", -1))
|
|
||||||
if value, ok := os.LookupEnv(name); ok {
|
|
||||||
err2 := flag.Set(f.Name, value)
|
|
||||||
if err2 != nil {
|
|
||||||
err = fmt.Errorf("failed setting flag from environment: %w", err2)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
@ -1,36 +0,0 @@
|
|||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
type EnumValue struct {
|
|
||||||
allowedValues []string
|
|
||||||
value string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EnumValue) String() string {
|
|
||||||
return e.value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EnumValue) Set(v string) error {
|
|
||||||
for _, allowed := range e.allowedValues {
|
|
||||||
if v == allowed {
|
|
||||||
e.value = v
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return fmt.Errorf("invalid value %q, must be one of: %s", v, strings.Join(e.allowedValues, ", "))
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewEnumValue(defaultValue string, allowedValues []string) *EnumValue {
|
|
||||||
return &EnumValue{
|
|
||||||
allowedValues: allowedValues,
|
|
||||||
value: defaultValue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *EnumValue) AllowedValuesList() string {
|
|
||||||
return fmt.Sprintf("(one of: %s)", strings.Join(e.allowedValues, ", "))
|
|
||||||
}
|
|
@ -1,22 +1,17 @@
|
|||||||
package config
|
package config
|
||||||
|
|
||||||
type ServerConfig struct {
|
type ServerConfig struct {
|
||||||
DatabaseAccount string `json:"databaseAccount"`
|
DatabaseAccount string
|
||||||
DatabaseDomain string `json:"databaseDomain"`
|
DatabaseDomain string
|
||||||
DatabaseEndpoint string `json:"databaseEndpoint"`
|
DatabaseEndpoint string
|
||||||
AccountKey string `json:"accountKey"`
|
AccountKey string
|
||||||
|
|
||||||
ExplorerPath string `json:"explorerPath"`
|
ExplorerPath string
|
||||||
Port int `json:"port"`
|
Port int
|
||||||
Host string `json:"host"`
|
Host string
|
||||||
TLS_CertificatePath string `json:"tlsCertificatePath"`
|
TLS_CertificatePath string
|
||||||
TLS_CertificateKey string `json:"tlsCertificateKey"`
|
TLS_CertificateKey string
|
||||||
InitialDataFilePath string `json:"initialDataFilePath"`
|
InitialDataFilePath string
|
||||||
PersistDataFilePath string `json:"persistDataFilePath"`
|
PersistDataFilePath string
|
||||||
DisableAuth bool `json:"disableAuth"`
|
DisableAuth bool
|
||||||
DisableTls bool `json:"disableTls"`
|
|
||||||
LogLevel string `json:"logLevel"`
|
|
||||||
ExplorerBaseUrlLocation string `json:"explorerBaseUrlLocation"`
|
|
||||||
|
|
||||||
DataStore string `json:"dataStore"`
|
|
||||||
}
|
}
|
||||||
|
@ -1,72 +1,64 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetAllCollections(c *gin.Context) {
|
func GetAllCollections(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
|
|
||||||
collections, status := h.dataStore.GetAllCollections(databaseId)
|
collections, status := repositories.GetAllCollections(databaseId)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
database, _ := h.dataStore.GetDatabase(databaseId)
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "DocumentCollections": collections, "_count": len(collections)})
|
||||||
|
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections)))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
|
||||||
"_rid": database.ResourceID,
|
|
||||||
"DocumentCollections": collections,
|
|
||||||
"_count": len(collections),
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) GetCollection(c *gin.Context) {
|
func GetCollection(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
id := c.Param("collId")
|
id := c.Param("collId")
|
||||||
|
|
||||||
collection, status := h.dataStore.GetCollection(databaseId, id)
|
collection, status := repositories.GetCollection(databaseId, id)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusOK, collection)
|
c.IndentedJSON(http.StatusOK, collection)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) DeleteCollection(c *gin.Context) {
|
func DeleteCollection(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
id := c.Param("collId")
|
id := c.Param("collId")
|
||||||
|
|
||||||
status := h.dataStore.DeleteCollection(databaseId, id)
|
status := repositories.DeleteCollection(databaseId, id)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Status(http.StatusNoContent)
|
c.Status(http.StatusNoContent)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) CreateCollection(c *gin.Context) {
|
func CreateCollection(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
var newCollection datastore.Collection
|
var newCollection repositorymodels.Collection
|
||||||
|
|
||||||
if err := c.BindJSON(&newCollection); err != nil {
|
if err := c.BindJSON(&newCollection); err != nil {
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
||||||
@ -74,20 +66,20 @@ func (h *Handlers) CreateCollection(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if newCollection.ID == "" {
|
if newCollection.ID == "" {
|
||||||
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
|
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
createdCollection, status := h.dataStore.CreateCollection(databaseId, newCollection)
|
createdCollection, status := repositories.CreateCollection(databaseId, newCollection)
|
||||||
if status == datastore.Conflict {
|
if status == repositorymodels.Conflict {
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusCreated, createdCollection)
|
c.IndentedJSON(http.StatusCreated, createdCollection)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
@ -4,14 +4,9 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) CosmiumExport(c *gin.Context) {
|
func CosmiumExport(c *gin.Context) {
|
||||||
dataStoreState, err := h.dataStore.DumpToJson()
|
c.IndentedJSON(http.StatusOK, repositories.GetState())
|
||||||
if err != nil {
|
|
||||||
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Data(http.StatusOK, "application/json", []byte(dataStoreState))
|
|
||||||
}
|
}
|
||||||
|
@ -1,65 +1,59 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetAllDatabases(c *gin.Context) {
|
func GetAllDatabases(c *gin.Context) {
|
||||||
databases, status := h.dataStore.GetAllDatabases()
|
databases, status := repositories.GetAllDatabases()
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases)))
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Databases": databases, "_count": len(databases)})
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
|
||||||
"_rid": "",
|
|
||||||
"Databases": databases,
|
|
||||||
"_count": len(databases),
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) GetDatabase(c *gin.Context) {
|
func GetDatabase(c *gin.Context) {
|
||||||
id := c.Param("databaseId")
|
id := c.Param("databaseId")
|
||||||
|
|
||||||
database, status := h.dataStore.GetDatabase(id)
|
database, status := repositories.GetDatabase(id)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusOK, database)
|
c.IndentedJSON(http.StatusOK, database)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) DeleteDatabase(c *gin.Context) {
|
func DeleteDatabase(c *gin.Context) {
|
||||||
id := c.Param("databaseId")
|
id := c.Param("databaseId")
|
||||||
|
|
||||||
status := h.dataStore.DeleteDatabase(id)
|
status := repositories.DeleteDatabase(id)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Status(http.StatusNoContent)
|
c.Status(http.StatusNoContent)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) CreateDatabase(c *gin.Context) {
|
func CreateDatabase(c *gin.Context) {
|
||||||
var newDatabase datastore.Database
|
var newDatabase repositorymodels.Database
|
||||||
|
|
||||||
if err := c.BindJSON(&newDatabase); err != nil {
|
if err := c.BindJSON(&newDatabase); err != nil {
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
||||||
@ -67,20 +61,20 @@ func (h *Handlers) CreateDatabase(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if newDatabase.ID == "" {
|
if newDatabase.ID == "" {
|
||||||
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
|
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
createdDatabase, status := h.dataStore.CreateDatabase(newDatabase)
|
createdDatabase, status := repositories.CreateDatabase(newDatabase)
|
||||||
if status == datastore.Conflict {
|
if status == repositorymodels.Conflict {
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusCreated, createdDatabase)
|
c.IndentedJSON(http.StatusCreated, createdDatabase)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
@ -1,83 +1,67 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
|
||||||
|
|
||||||
jsonpatch "github.com/cosmiumdev/json-patch/v5"
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
apimodels "github.com/pikami/cosmium/api/api_models"
|
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/constants"
|
||||||
"github.com/pikami/cosmium/internal/converters"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
"github.com/pikami/cosmium/parsers/nosql"
|
|
||||||
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetAllDocuments(c *gin.Context) {
|
func GetAllDocuments(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
documents, status := h.dataStore.GetAllDocuments(databaseId, collectionId)
|
documents, status := repositories.GetAllDocuments(databaseId, collectionId)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Documents": documents, "_count": len(documents)})
|
||||||
|
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(documents)))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
|
||||||
"_rid": collection.ID,
|
|
||||||
"Documents": documents,
|
|
||||||
"_count": len(documents),
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) GetDocument(c *gin.Context) {
|
func GetDocument(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
documentId := c.Param("docId")
|
documentId := c.Param("docId")
|
||||||
|
|
||||||
document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
|
document, status := repositories.GetDocument(databaseId, collectionId, documentId)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusOK, document)
|
c.IndentedJSON(http.StatusOK, document)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) DeleteDocument(c *gin.Context) {
|
func DeleteDocument(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
documentId := c.Param("docId")
|
documentId := c.Param("docId")
|
||||||
|
|
||||||
status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
|
status := repositories.DeleteDocument(databaseId, collectionId, documentId)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Status(http.StatusNoContent)
|
c.Status(http.StatusNoContent)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Maybe move "replace" logic to data store
|
// TODO: Maybe move "replace" logic to repository
|
||||||
func (h *Handlers) ReplaceDocument(c *gin.Context) {
|
func ReplaceDocument(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
documentId := c.Param("docId")
|
documentId := c.Param("docId")
|
||||||
@ -88,112 +72,29 @@ func (h *Handlers) ReplaceDocument(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
|
status := repositories.DeleteDocument(databaseId, collectionId, documentId)
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
|
createdDocument, status := repositories.CreateDocument(databaseId, collectionId, requestBody)
|
||||||
if status == datastore.Conflict {
|
if status == repositorymodels.Conflict {
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusCreated, createdDocument)
|
c.IndentedJSON(http.StatusCreated, createdDocument)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) PatchDocument(c *gin.Context) {
|
func DocumentsPost(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
documentId := c.Param("docId")
|
|
||||||
|
|
||||||
document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var requestBody map[string]interface{}
|
|
||||||
if err := c.BindJSON(&requestBody); err != nil {
|
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
operations := requestBody["operations"]
|
|
||||||
operationsBytes, err := json.Marshal(operations)
|
|
||||||
if err != nil {
|
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": "Could not decode operations"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
patch, err := jsonpatch.DecodePatch(operationsBytes)
|
|
||||||
if err != nil {
|
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
currentDocumentBytes, err := json.Marshal(document)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Failed to marshal existing document:", err)
|
|
||||||
c.JSON(http.StatusInternalServerError, gin.H{"message": "Failed to marshal existing document"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
modifiedDocumentBytes, err := patch.Apply(currentDocumentBytes)
|
|
||||||
if err != nil {
|
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var modifiedDocument map[string]interface{}
|
|
||||||
err = json.Unmarshal(modifiedDocumentBytes, &modifiedDocument)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Failed to unmarshal modified document:", err)
|
|
||||||
c.JSON(http.StatusInternalServerError, gin.H{"message": "Failed to unmarshal modified document"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if modifiedDocument["id"] != document["id"] {
|
|
||||||
c.JSON(http.StatusUnprocessableEntity, gin.H{"message": "The ID field cannot be modified"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
status = h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, modifiedDocument)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusCreated, createdDocument)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) DocumentsPost(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
|
|
||||||
// Handle batch requests
|
|
||||||
isBatchRequest, _ := strconv.ParseBool(c.GetHeader("x-ms-cosmos-is-batch-request"))
|
|
||||||
if isBatchRequest {
|
|
||||||
h.handleBatchRequest(c)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var requestBody map[string]interface{}
|
var requestBody map[string]interface{}
|
||||||
if err := c.BindJSON(&requestBody); err != nil {
|
if err := c.BindJSON(&requestBody); err != nil {
|
||||||
@ -203,32 +104,44 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
|
|||||||
|
|
||||||
query := requestBody["query"]
|
query := requestBody["query"]
|
||||||
if query != nil {
|
if query != nil {
|
||||||
h.handleDocumentQuery(c, requestBody)
|
if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
|
||||||
|
c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var queryParameters map[string]interface{}
|
||||||
|
if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
|
||||||
|
queryParameters = parametersToMap(paramsArray)
|
||||||
|
}
|
||||||
|
|
||||||
|
docs, status := repositories.ExecuteQueryDocuments(databaseId, collectionId, query.(string), queryParameters)
|
||||||
|
if status != repositorymodels.StatusOk {
|
||||||
|
// TODO: Currently we return everything if the query fails
|
||||||
|
GetAllDocuments(c)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Documents": docs, "_count": len(docs)})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if requestBody["id"] == "" {
|
if requestBody["id"] == "" {
|
||||||
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
|
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
isUpsert, _ := strconv.ParseBool(c.GetHeader("x-ms-documentdb-is-upsert"))
|
createdDocument, status := repositories.CreateDocument(databaseId, collectionId, requestBody)
|
||||||
if isUpsert {
|
if status == repositorymodels.Conflict {
|
||||||
h.dataStore.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
|
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
|
||||||
}
|
|
||||||
|
|
||||||
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.IndentedJSON(http.StatusCreated, createdDocument)
|
c.IndentedJSON(http.StatusCreated, createdDocument)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
|
||||||
func parametersToMap(pairs []interface{}) map[string]interface{} {
|
func parametersToMap(pairs []interface{}) map[string]interface{} {
|
||||||
@ -242,155 +155,3 @@ func parametersToMap(pairs []interface{}) map[string]interface{} {
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *Handlers) handleDocumentQuery(c *gin.Context, requestBody map[string]interface{}) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
|
|
||||||
if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
|
|
||||||
c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var queryParameters map[string]interface{}
|
|
||||||
if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
|
|
||||||
queryParameters = parametersToMap(paramsArray)
|
|
||||||
}
|
|
||||||
|
|
||||||
queryText := requestBody["query"].(string)
|
|
||||||
docs, status := h.executeQueryDocuments(databaseId, collectionId, queryText, queryParameters)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
// TODO: Currently we return everything if the query fails
|
|
||||||
h.GetAllDocuments(c)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
|
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
|
||||||
"_rid": collection.ResourceID,
|
|
||||||
"Documents": docs,
|
|
||||||
"_count": len(docs),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) handleBatchRequest(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
|
|
||||||
batchOperations := make([]apimodels.BatchOperation, 0)
|
|
||||||
if err := c.BindJSON(&batchOperations); err != nil {
|
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
batchOperationResults := make([]apimodels.BatchOperationResult, len(batchOperations))
|
|
||||||
for idx, operation := range batchOperations {
|
|
||||||
switch operation.OperationType {
|
|
||||||
case apimodels.BatchOperationTypeCreate:
|
|
||||||
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
|
|
||||||
responseCode := dataStoreStatusToResponseCode(status)
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
responseCode = http.StatusCreated
|
|
||||||
}
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: responseCode,
|
|
||||||
ResourceBody: createdDocument,
|
|
||||||
}
|
|
||||||
case apimodels.BatchOperationTypeDelete:
|
|
||||||
status := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
|
|
||||||
responseCode := dataStoreStatusToResponseCode(status)
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
responseCode = http.StatusNoContent
|
|
||||||
}
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: responseCode,
|
|
||||||
}
|
|
||||||
case apimodels.BatchOperationTypeReplace:
|
|
||||||
deleteStatus := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
|
|
||||||
if deleteStatus == datastore.StatusNotFound {
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: http.StatusNotFound,
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
|
|
||||||
responseCode := dataStoreStatusToResponseCode(createStatus)
|
|
||||||
if createStatus == datastore.StatusOk {
|
|
||||||
responseCode = http.StatusCreated
|
|
||||||
}
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: responseCode,
|
|
||||||
ResourceBody: createdDocument,
|
|
||||||
}
|
|
||||||
case apimodels.BatchOperationTypeUpsert:
|
|
||||||
documentId := operation.ResourceBody["id"].(string)
|
|
||||||
h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
|
|
||||||
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
|
|
||||||
responseCode := dataStoreStatusToResponseCode(createStatus)
|
|
||||||
if createStatus == datastore.StatusOk {
|
|
||||||
responseCode = http.StatusCreated
|
|
||||||
}
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: responseCode,
|
|
||||||
ResourceBody: createdDocument,
|
|
||||||
}
|
|
||||||
case apimodels.BatchOperationTypeRead:
|
|
||||||
document, status := h.dataStore.GetDocument(databaseId, collectionId, operation.Id)
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: dataStoreStatusToResponseCode(status),
|
|
||||||
ResourceBody: document,
|
|
||||||
}
|
|
||||||
case apimodels.BatchOperationTypePatch:
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: http.StatusNotImplemented,
|
|
||||||
Message: "Patch operation is not implemented",
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
|
||||||
StatusCode: http.StatusBadRequest,
|
|
||||||
Message: "Unknown operation type",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
c.JSON(http.StatusOK, batchOperationResults)
|
|
||||||
}
|
|
||||||
|
|
||||||
func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
|
|
||||||
switch status {
|
|
||||||
case datastore.StatusOk:
|
|
||||||
return http.StatusOK
|
|
||||||
case datastore.StatusNotFound:
|
|
||||||
return http.StatusNotFound
|
|
||||||
case datastore.Conflict:
|
|
||||||
return http.StatusConflict
|
|
||||||
case datastore.BadRequest:
|
|
||||||
return http.StatusBadRequest
|
|
||||||
default:
|
|
||||||
return http.StatusInternalServerError
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) executeQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, datastore.DataStoreStatus) {
|
|
||||||
parsedQuery, err := nosql.Parse("", []byte(query))
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Failed to parse query: %s\nerr: %v", query, err)
|
|
||||||
return nil, datastore.BadRequest
|
|
||||||
}
|
|
||||||
|
|
||||||
allDocumentsIterator, status := h.dataStore.GetDocumentIterator(databaseId, collectionId)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
defer allDocumentsIterator.Close()
|
|
||||||
|
|
||||||
rowsIterator := converters.NewDocumentToRowTypeIterator(allDocumentsIterator)
|
|
||||||
|
|
||||||
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
|
|
||||||
typedQuery.Parameters = queryParameters
|
|
||||||
return memoryexecutor.ExecuteQuery(typedQuery, rowsIterator), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, datastore.BadRequest
|
|
||||||
}
|
|
||||||
|
@ -4,14 +4,15 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/pikami/cosmium/api/config"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) RegisterExplorerHandlers(router *gin.Engine) {
|
func RegisterExplorerHandlers(router *gin.Engine) {
|
||||||
explorer := router.Group(h.config.ExplorerBaseUrlLocation)
|
explorer := router.Group("/_explorer")
|
||||||
{
|
{
|
||||||
explorer.Use(func(ctx *gin.Context) {
|
explorer.Use(func(ctx *gin.Context) {
|
||||||
if ctx.Param("filepath") == "/config.json" {
|
if ctx.Param("filepath") == "/config.json" {
|
||||||
endpoint := fmt.Sprintf("https://%s:%d", h.config.Host, h.config.Port)
|
endpoint := fmt.Sprintf("https://%s:%d", config.Config.Host, config.Config.Port)
|
||||||
ctx.JSON(200, gin.H{
|
ctx.JSON(200, gin.H{
|
||||||
"BACKEND_ENDPOINT": endpoint,
|
"BACKEND_ENDPOINT": endpoint,
|
||||||
"MONGO_BACKEND_ENDPOINT": endpoint,
|
"MONGO_BACKEND_ENDPOINT": endpoint,
|
||||||
@ -24,8 +25,8 @@ func (h *Handlers) RegisterExplorerHandlers(router *gin.Engine) {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
if h.config.ExplorerPath != "" {
|
if config.Config.ExplorerPath != "" {
|
||||||
explorer.Static("/", h.config.ExplorerPath)
|
explorer.Static("/", config.Config.ExplorerPath)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,18 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/pikami/cosmium/api/config"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Handlers struct {
|
|
||||||
dataStore datastore.DataStore
|
|
||||||
config *config.ServerConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewHandlers(dataStore datastore.DataStore, config *config.ServerConfig) *Handlers {
|
|
||||||
return &Handlers{
|
|
||||||
dataStore: dataStore,
|
|
||||||
config: config,
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,37 +1,59 @@
|
|||||||
package middleware
|
package middleware
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"net/url"
|
"net/url"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/api/config"
|
||||||
"github.com/pikami/cosmium/internal/authentication"
|
"github.com/pikami/cosmium/internal/authentication"
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func Authentication(config *config.ServerConfig) gin.HandlerFunc {
|
func Authentication() gin.HandlerFunc {
|
||||||
return func(c *gin.Context) {
|
return func(c *gin.Context) {
|
||||||
requestUrl := c.Request.URL.String()
|
requestUrl := c.Request.URL.String()
|
||||||
if config.DisableAuth ||
|
if config.Config.DisableAuth ||
|
||||||
strings.HasPrefix(requestUrl, config.ExplorerBaseUrlLocation) ||
|
strings.HasPrefix(requestUrl, "/_explorer") ||
|
||||||
strings.HasPrefix(requestUrl, "/cosmium") {
|
strings.HasPrefix(requestUrl, "/cosmium") {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
resourceType := urlToResourceType(requestUrl)
|
var resourceType string
|
||||||
resourceId := requestToResourceId(c)
|
parts := strings.Split(requestUrl, "/")
|
||||||
|
switch len(parts) {
|
||||||
|
case 2, 3:
|
||||||
|
resourceType = parts[1]
|
||||||
|
case 4, 5:
|
||||||
|
resourceType = parts[3]
|
||||||
|
case 6, 7:
|
||||||
|
resourceType = parts[5]
|
||||||
|
}
|
||||||
|
|
||||||
|
databaseId, _ := c.Params.Get("databaseId")
|
||||||
|
collId, _ := c.Params.Get("collId")
|
||||||
|
docId, _ := c.Params.Get("docId")
|
||||||
|
var resourceId string
|
||||||
|
if databaseId != "" {
|
||||||
|
resourceId += "dbs/" + databaseId
|
||||||
|
}
|
||||||
|
if collId != "" {
|
||||||
|
resourceId += "/colls/" + collId
|
||||||
|
}
|
||||||
|
if docId != "" {
|
||||||
|
resourceId += "/docs/" + docId
|
||||||
|
}
|
||||||
|
|
||||||
authHeader := c.Request.Header.Get("authorization")
|
authHeader := c.Request.Header.Get("authorization")
|
||||||
date := c.Request.Header.Get("x-ms-date")
|
date := c.Request.Header.Get("x-ms-date")
|
||||||
expectedSignature := authentication.GenerateSignature(
|
expectedSignature := authentication.GenerateSignature(
|
||||||
c.Request.Method, resourceType, resourceId, date, config.AccountKey)
|
c.Request.Method, resourceType, resourceId, date, config.Config.AccountKey)
|
||||||
|
|
||||||
decoded, _ := url.QueryUnescape(authHeader)
|
decoded, _ := url.QueryUnescape(authHeader)
|
||||||
params, _ := url.ParseQuery(decoded)
|
params, _ := url.ParseQuery(decoded)
|
||||||
clientSignature := strings.Replace(params.Get("sig"), " ", "+", -1)
|
clientSignature := strings.Replace(params.Get("sig"), " ", "+", -1)
|
||||||
if clientSignature != expectedSignature {
|
if clientSignature != expectedSignature {
|
||||||
logger.Errorf("Got wrong signature from client.\n- Expected: %s\n- Got: %s\n", expectedSignature, clientSignature)
|
fmt.Printf("Got wrong signature from client.\n- Expected: %s\n- Got: %s\n", expectedSignature, clientSignature)
|
||||||
c.IndentedJSON(401, gin.H{
|
c.IndentedJSON(401, gin.H{
|
||||||
"code": "Unauthorized",
|
"code": "Unauthorized",
|
||||||
"message": "Wrong signature.",
|
"message": "Wrong signature.",
|
||||||
@ -40,43 +62,3 @@ func Authentication(config *config.ServerConfig) gin.HandlerFunc {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func urlToResourceType(requestUrl string) string {
|
|
||||||
var resourceType string
|
|
||||||
parts := strings.Split(requestUrl, "/")
|
|
||||||
switch len(parts) {
|
|
||||||
case 2, 3:
|
|
||||||
resourceType = parts[1]
|
|
||||||
case 4, 5:
|
|
||||||
resourceType = parts[3]
|
|
||||||
case 6, 7:
|
|
||||||
resourceType = parts[5]
|
|
||||||
}
|
|
||||||
|
|
||||||
return resourceType
|
|
||||||
}
|
|
||||||
|
|
||||||
func requestToResourceId(c *gin.Context) string {
|
|
||||||
databaseId, _ := c.Params.Get("databaseId")
|
|
||||||
collId, _ := c.Params.Get("collId")
|
|
||||||
docId, _ := c.Params.Get("docId")
|
|
||||||
resourceType := urlToResourceType(c.Request.URL.String())
|
|
||||||
|
|
||||||
var resourceId string
|
|
||||||
if databaseId != "" {
|
|
||||||
resourceId += "dbs/" + databaseId
|
|
||||||
}
|
|
||||||
if collId != "" {
|
|
||||||
resourceId += "/colls/" + collId
|
|
||||||
}
|
|
||||||
if docId != "" {
|
|
||||||
resourceId += "/docs/" + docId
|
|
||||||
}
|
|
||||||
|
|
||||||
isFeed := c.Request.Header.Get("A-Im") == "Incremental Feed"
|
|
||||||
if resourceType == "pkranges" && isFeed {
|
|
||||||
resourceId = collId
|
|
||||||
}
|
|
||||||
|
|
||||||
return resourceId
|
|
||||||
}
|
|
||||||
|
@ -2,10 +2,10 @@ package middleware
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func RequestLogger() gin.HandlerFunc {
|
func RequestLogger() gin.HandlerFunc {
|
||||||
@ -16,7 +16,7 @@ func RequestLogger() gin.HandlerFunc {
|
|||||||
|
|
||||||
bodyStr := readBody(rdr1)
|
bodyStr := readBody(rdr1)
|
||||||
if bodyStr != "" {
|
if bodyStr != "" {
|
||||||
logger.DebugLn(bodyStr)
|
fmt.Println(bodyStr)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Request.Body = rdr2
|
c.Request.Body = rdr2
|
||||||
|
@ -1,21 +0,0 @@
|
|||||||
package middleware
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
|
||||||
"github.com/pikami/cosmium/api/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func StripTrailingSlashes(r *gin.Engine, config *config.ServerConfig) gin.HandlerFunc {
|
|
||||||
return func(c *gin.Context) {
|
|
||||||
path := c.Request.URL.Path
|
|
||||||
if len(path) > 1 && path[len(path)-1] == '/' && !strings.Contains(path, config.ExplorerBaseUrlLocation) {
|
|
||||||
c.Request.URL.Path = path[:len(path)-1]
|
|
||||||
r.HandleContext(c)
|
|
||||||
c.Abort()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
c.Next()
|
|
||||||
}
|
|
||||||
}
|
|
@ -7,7 +7,6 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func GetOffers(c *gin.Context) {
|
func GetOffers(c *gin.Context) {
|
||||||
c.Header("x-ms-item-count", "0")
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
c.IndentedJSON(http.StatusOK, gin.H{
|
||||||
"_rid": "",
|
"_rid": "",
|
||||||
"_count": 0,
|
"_count": 0,
|
||||||
|
@ -5,12 +5,11 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
|
func GetPartitionKeyRanges(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
@ -19,33 +18,26 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
partitionKeyRanges, status := h.dataStore.GetPartitionKeyRanges(databaseId, collectionId)
|
partitionKeyRanges, status := repositories.GetPartitionKeyRanges(databaseId, collectionId)
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Header("etag", "\"420\"")
|
c.Header("etag", "\"420\"")
|
||||||
c.Header("lsn", "420")
|
c.Header("lsn", "420")
|
||||||
c.Header("x-ms-cosmos-llsn", "420")
|
c.Header("x-ms-cosmos-llsn", "420")
|
||||||
c.Header("x-ms-global-committed-lsn", "420")
|
c.Header("x-ms-global-committed-lsn", "420")
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
|
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
|
||||||
|
|
||||||
collectionRid := collectionId
|
|
||||||
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
|
|
||||||
if collection.ResourceID != "" {
|
|
||||||
collectionRid = collection.ResourceID
|
|
||||||
}
|
|
||||||
|
|
||||||
rid := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
c.IndentedJSON(http.StatusOK, gin.H{
|
||||||
"_rid": rid,
|
"_rid": "",
|
||||||
"_count": len(partitionKeyRanges),
|
"_count": len(partitionKeyRanges),
|
||||||
"PartitionKeyRanges": partitionKeyRanges,
|
"PartitionKeyRanges": partitionKeyRanges,
|
||||||
})
|
})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
if status == repositorymodels.StatusNotFound {
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
}
|
||||||
|
@ -1,43 +1,12 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/pikami/cosmium/internal/constants"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetServerInfo(c *gin.Context) {
|
func GetServerInfo(c *gin.Context) {
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
c.IndentedJSON(http.StatusOK, constants.ServerInfoResponse)
|
||||||
"_self": "",
|
|
||||||
"id": h.config.DatabaseAccount,
|
|
||||||
"_rid": fmt.Sprintf("%s.%s", h.config.DatabaseAccount, h.config.DatabaseDomain),
|
|
||||||
"media": "//media/",
|
|
||||||
"addresses": "//addresses/",
|
|
||||||
"_dbs": "//dbs/",
|
|
||||||
"writableLocations": []map[string]interface{}{
|
|
||||||
{
|
|
||||||
"name": "South Central US",
|
|
||||||
"databaseAccountEndpoint": h.config.DatabaseEndpoint,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"readableLocations": []map[string]interface{}{
|
|
||||||
{
|
|
||||||
"name": "South Central US",
|
|
||||||
"databaseAccountEndpoint": h.config.DatabaseEndpoint,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"enableMultipleWriteLocations": false,
|
|
||||||
"continuousBackupEnabled": false,
|
|
||||||
"enableNRegionSynchronousCommit": false,
|
|
||||||
"userReplicationPolicy": map[string]interface{}{
|
|
||||||
"asyncReplication": false,
|
|
||||||
"minReplicaSetSize": 1,
|
|
||||||
"maxReplicasetSize": 4,
|
|
||||||
},
|
|
||||||
"userConsistencyPolicy": map[string]interface{}{"defaultConsistencyLevel": "Session"},
|
|
||||||
"systemReplicationPolicy": map[string]interface{}{"minReplicaSetSize": 1, "maxReplicasetSize": 4},
|
|
||||||
"readPolicy": map[string]interface{}{"primaryReadCoefficient": 1, "secondaryReadCoefficient": 1},
|
|
||||||
"queryEngineConfiguration": "{\"allowNewKeywords\":true,\"maxJoinsPerSqlQuery\":10,\"maxQueryRequestTimeoutFraction\":0.9,\"maxSqlQueryInputLength\":524288,\"maxUdfRefPerSqlQuery\":10,\"queryMaxInMemorySortDocumentCount\":-1000,\"spatialMaxGeometryPointCount\":256,\"sqlAllowNonFiniteNumbers\":false,\"sqlDisableOptimizationFlags\":0,\"enableSpatialIndexing\":true,\"maxInExpressionItemsCount\":2147483647,\"maxLogicalAndPerSqlQuery\":2147483647,\"maxLogicalOrPerSqlQuery\":2147483647,\"maxSpatialQueryCells\":2147483647,\"sqlAllowAggregateFunctions\":true,\"sqlAllowGroupByClause\":true,\"sqlAllowLike\":true,\"sqlAllowSubQuery\":true,\"sqlAllowScalarSubQuery\":true,\"sqlAllowTop\":true}",
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
@ -1,119 +1,23 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetAllStoredProcedures(c *gin.Context) {
|
func GetAllStoredProcedures(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
sps, status := h.dataStore.GetAllStoredProcedures(databaseId, collectionId)
|
sps, status := repositories.GetAllStoredProcedures(databaseId, collectionId)
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps)))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) GetStoredProcedure(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
spId := c.Param("spId")
|
|
||||||
|
|
||||||
sp, status := h.dataStore.GetStoredProcedure(databaseId, collectionId, spId)
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusOK, sp)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
spId := c.Param("spId")
|
|
||||||
|
|
||||||
status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.Status(http.StatusNoContent)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
spId := c.Param("spId")
|
|
||||||
|
|
||||||
var sp datastore.StoredProcedure
|
|
||||||
if err := c.BindJSON(&sp); err != nil {
|
|
||||||
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusOK, createdSP)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) CreateStoredProcedure(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
|
|
||||||
var sp datastore.StoredProcedure
|
|
||||||
if err := c.BindJSON(&sp); err != nil {
|
|
||||||
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusCreated, createdSP)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
}
|
||||||
|
@ -1,119 +1,23 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetAllTriggers(c *gin.Context) {
|
func GetAllTriggers(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
triggers, status := h.dataStore.GetAllTriggers(databaseId, collectionId)
|
triggers, status := repositories.GetAllTriggers(databaseId, collectionId)
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers)))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) GetTrigger(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
triggerId := c.Param("triggerId")
|
|
||||||
|
|
||||||
trigger, status := h.dataStore.GetTrigger(databaseId, collectionId, triggerId)
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusOK, trigger)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) DeleteTrigger(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
triggerId := c.Param("triggerId")
|
|
||||||
|
|
||||||
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.Status(http.StatusNoContent)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) ReplaceTrigger(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
triggerId := c.Param("triggerId")
|
|
||||||
|
|
||||||
var trigger datastore.Trigger
|
|
||||||
if err := c.BindJSON(&trigger); err != nil {
|
|
||||||
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusOK, createdTrigger)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) CreateTrigger(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
|
|
||||||
var trigger datastore.Trigger
|
|
||||||
if err := c.BindJSON(&trigger); err != nil {
|
|
||||||
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusCreated, createdTrigger)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
}
|
||||||
|
@ -1,119 +1,23 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/internal/constants"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) {
|
func GetAllUserDefinedFunctions(c *gin.Context) {
|
||||||
databaseId := c.Param("databaseId")
|
databaseId := c.Param("databaseId")
|
||||||
collectionId := c.Param("collId")
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
udfs, status := h.dataStore.GetAllUserDefinedFunctions(databaseId, collectionId)
|
udfs, status := repositories.GetAllUserDefinedFunctions(databaseId, collectionId)
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
if status == repositorymodels.StatusOk {
|
||||||
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs)))
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
|
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
udfId := c.Param("udfId")
|
|
||||||
|
|
||||||
udf, status := h.dataStore.GetUserDefinedFunction(databaseId, collectionId, udfId)
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusOK, udf)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
udfId := c.Param("udfId")
|
|
||||||
|
|
||||||
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.Status(http.StatusNoContent)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
udfId := c.Param("udfId")
|
|
||||||
|
|
||||||
var udf datastore.UserDefinedFunction
|
|
||||||
if err := c.BindJSON(&udf); err != nil {
|
|
||||||
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
|
|
||||||
if status == datastore.StatusNotFound {
|
|
||||||
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusOK, createdUdf)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) {
|
|
||||||
databaseId := c.Param("databaseId")
|
|
||||||
collectionId := c.Param("collId")
|
|
||||||
|
|
||||||
var udf datastore.UserDefinedFunction
|
|
||||||
if err := c.BindJSON(&udf); err != nil {
|
|
||||||
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
|
|
||||||
if status == datastore.Conflict {
|
|
||||||
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
c.IndentedJSON(http.StatusCreated, createdUdf)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
|
||||||
}
|
}
|
||||||
|
149
api/router.go
149
api/router.go
@ -1,144 +1,45 @@
|
|||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/pikami/cosmium/api/handlers"
|
"github.com/pikami/cosmium/api/handlers"
|
||||||
"github.com/pikami/cosmium/api/handlers/middleware"
|
"github.com/pikami/cosmium/api/handlers/middleware"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
tlsprovider "github.com/pikami/cosmium/internal/tls_provider"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var ginMux sync.Mutex
|
func CreateRouter() *gin.Engine {
|
||||||
|
router := gin.Default()
|
||||||
|
|
||||||
func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
|
router.Use(middleware.RequestLogger())
|
||||||
routeHandlers := handlers.NewHandlers(dataStore, s.config)
|
router.Use(middleware.Authentication())
|
||||||
|
|
||||||
ginMux.Lock()
|
router.GET("/dbs/:databaseId/colls/:collId/pkranges", handlers.GetPartitionKeyRanges)
|
||||||
gin.DefaultWriter = logger.InfoWriter()
|
|
||||||
gin.DefaultErrorWriter = logger.ErrorWriter()
|
|
||||||
|
|
||||||
if s.config.LogLevel != "debug" {
|
router.POST("/dbs/:databaseId/colls/:collId/docs", handlers.DocumentsPost)
|
||||||
gin.SetMode(gin.ReleaseMode)
|
router.GET("/dbs/:databaseId/colls/:collId/docs", handlers.GetAllDocuments)
|
||||||
}
|
router.GET("/dbs/:databaseId/colls/:collId/docs/:docId", handlers.GetDocument)
|
||||||
ginMux.Unlock()
|
router.PUT("/dbs/:databaseId/colls/:collId/docs/:docId", handlers.ReplaceDocument)
|
||||||
|
router.DELETE("/dbs/:databaseId/colls/:collId/docs/:docId", handlers.DeleteDocument)
|
||||||
|
|
||||||
router := gin.Default(func(e *gin.Engine) {
|
router.POST("/dbs/:databaseId/colls", handlers.CreateCollection)
|
||||||
e.RedirectTrailingSlash = false
|
router.GET("/dbs/:databaseId/colls", handlers.GetAllCollections)
|
||||||
})
|
router.GET("/dbs/:databaseId/colls/:collId", handlers.GetCollection)
|
||||||
|
router.DELETE("/dbs/:databaseId/colls/:collId", handlers.DeleteCollection)
|
||||||
|
|
||||||
if s.config.LogLevel == "debug" {
|
router.POST("/dbs", handlers.CreateDatabase)
|
||||||
router.Use(middleware.RequestLogger())
|
router.GET("/dbs", handlers.GetAllDatabases)
|
||||||
}
|
router.GET("/dbs/:databaseId", handlers.GetDatabase)
|
||||||
|
router.DELETE("/dbs/:databaseId", handlers.DeleteDatabase)
|
||||||
|
|
||||||
router.Use(middleware.StripTrailingSlashes(router, s.config))
|
router.GET("/dbs/:databaseId/colls/:collId/udfs", handlers.GetAllUserDefinedFunctions)
|
||||||
router.Use(middleware.Authentication(s.config))
|
router.GET("/dbs/:databaseId/colls/:collId/sprocs", handlers.GetAllStoredProcedures)
|
||||||
|
router.GET("/dbs/:databaseId/colls/:collId/triggers", handlers.GetAllTriggers)
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/pkranges", routeHandlers.GetPartitionKeyRanges)
|
|
||||||
|
|
||||||
router.POST("/dbs/:databaseId/colls/:collId/docs", routeHandlers.DocumentsPost)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/docs", routeHandlers.GetAllDocuments)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.GetDocument)
|
|
||||||
router.PUT("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.ReplaceDocument)
|
|
||||||
router.PATCH("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.PatchDocument)
|
|
||||||
router.DELETE("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.DeleteDocument)
|
|
||||||
|
|
||||||
router.POST("/dbs/:databaseId/colls", routeHandlers.CreateCollection)
|
|
||||||
router.GET("/dbs/:databaseId/colls", routeHandlers.GetAllCollections)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId", routeHandlers.GetCollection)
|
|
||||||
router.DELETE("/dbs/:databaseId/colls/:collId", routeHandlers.DeleteCollection)
|
|
||||||
|
|
||||||
router.POST("/dbs", routeHandlers.CreateDatabase)
|
|
||||||
router.GET("/dbs", routeHandlers.GetAllDatabases)
|
|
||||||
router.GET("/dbs/:databaseId", routeHandlers.GetDatabase)
|
|
||||||
router.DELETE("/dbs/:databaseId", routeHandlers.DeleteDatabase)
|
|
||||||
|
|
||||||
router.POST("/dbs/:databaseId/colls/:collId/triggers", routeHandlers.CreateTrigger)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/triggers", routeHandlers.GetAllTriggers)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/triggers/:triggerId", routeHandlers.GetTrigger)
|
|
||||||
router.PUT("/dbs/:databaseId/colls/:collId/triggers/:triggerId", routeHandlers.ReplaceTrigger)
|
|
||||||
router.DELETE("/dbs/:databaseId/colls/:collId/triggers/:triggerId", routeHandlers.DeleteTrigger)
|
|
||||||
|
|
||||||
router.POST("/dbs/:databaseId/colls/:collId/sprocs", routeHandlers.CreateStoredProcedure)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/sprocs", routeHandlers.GetAllStoredProcedures)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/sprocs/:sprocId", routeHandlers.GetStoredProcedure)
|
|
||||||
router.PUT("/dbs/:databaseId/colls/:collId/sprocs/:sprocId", routeHandlers.ReplaceStoredProcedure)
|
|
||||||
router.DELETE("/dbs/:databaseId/colls/:collId/sprocs/:sprocId", routeHandlers.DeleteStoredProcedure)
|
|
||||||
|
|
||||||
router.POST("/dbs/:databaseId/colls/:collId/udfs", routeHandlers.CreateUserDefinedFunction)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/udfs", routeHandlers.GetAllUserDefinedFunctions)
|
|
||||||
router.GET("/dbs/:databaseId/colls/:collId/udfs/:udfId", routeHandlers.GetUserDefinedFunction)
|
|
||||||
router.PUT("/dbs/:databaseId/colls/:collId/udfs/:udfId", routeHandlers.ReplaceUserDefinedFunction)
|
|
||||||
router.DELETE("/dbs/:databaseId/colls/:collId/udfs/:udfId", routeHandlers.DeleteUserDefinedFunction)
|
|
||||||
|
|
||||||
router.GET("/offers", handlers.GetOffers)
|
router.GET("/offers", handlers.GetOffers)
|
||||||
router.GET("/", routeHandlers.GetServerInfo)
|
router.GET("/", handlers.GetServerInfo)
|
||||||
|
|
||||||
router.GET("/cosmium/export", routeHandlers.CosmiumExport)
|
router.GET("/cosmium/export", handlers.CosmiumExport)
|
||||||
|
|
||||||
routeHandlers.RegisterExplorerHandlers(router)
|
handlers.RegisterExplorerHandlers(router)
|
||||||
|
|
||||||
s.router = router
|
return router
|
||||||
}
|
|
||||||
|
|
||||||
func (s *ApiServer) Start() error {
|
|
||||||
listenAddress := fmt.Sprintf(":%d", s.config.Port)
|
|
||||||
s.isActive = true
|
|
||||||
|
|
||||||
server := &http.Server{
|
|
||||||
Addr: listenAddress,
|
|
||||||
Handler: s.router.Handler(),
|
|
||||||
}
|
|
||||||
|
|
||||||
errChan := make(chan error, 1)
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
<-s.stopServer
|
|
||||||
logger.InfoLn("Shutting down server...")
|
|
||||||
err := server.Shutdown(context.TODO())
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Failed to shutdown server:", err)
|
|
||||||
}
|
|
||||||
s.onServerShutdown <- true
|
|
||||||
}()
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
var err error
|
|
||||||
if s.config.DisableTls {
|
|
||||||
logger.Infof("Listening and serving HTTP on %s\n", server.Addr)
|
|
||||||
err = server.ListenAndServe()
|
|
||||||
} else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" {
|
|
||||||
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
|
|
||||||
err = server.ListenAndServeTLS(
|
|
||||||
s.config.TLS_CertificatePath,
|
|
||||||
s.config.TLS_CertificateKey)
|
|
||||||
} else {
|
|
||||||
tlsConfig := tlsprovider.GetDefaultTlsConfig()
|
|
||||||
server.TLSConfig = tlsConfig
|
|
||||||
|
|
||||||
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
|
|
||||||
err = server.ListenAndServeTLS("", "")
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil && err != http.ErrServerClosed {
|
|
||||||
logger.ErrorLn("Failed to start server:", err)
|
|
||||||
errChan <- err
|
|
||||||
} else {
|
|
||||||
errChan <- nil
|
|
||||||
}
|
|
||||||
s.isActive = false
|
|
||||||
}()
|
|
||||||
|
|
||||||
select {
|
|
||||||
case err := <-errChan:
|
|
||||||
return err
|
|
||||||
case <-time.After(50 * time.Millisecond):
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
@ -2,24 +2,27 @@ package tests_test
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Authentication(t *testing.T) {
|
func Test_Authentication(t *testing.T) {
|
||||||
ts := runTestServer()
|
ts := runTestServer()
|
||||||
defer ts.Server.Close()
|
defer ts.Close()
|
||||||
|
|
||||||
t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
|
t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
repositories.DeleteDatabase(testDatabaseName)
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
formatConnectionString(ts.URL, config.DefaultAccountKey),
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
||||||
&azcosmos.ClientOptions{},
|
&azcosmos.ClientOptions{},
|
||||||
)
|
)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -32,10 +35,28 @@ func Test_Authentication(t *testing.T) {
|
|||||||
assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
|
assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
|
t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
config.Config.DisableAuth = true
|
||||||
|
repositories.DeleteDatabase(testDatabaseName)
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
formatConnectionString(ts.URL, "AAAA"),
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
|
||||||
|
&azcosmos.ClientOptions{},
|
||||||
|
)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
createResponse, err := client.CreateDatabase(
|
||||||
|
context.TODO(),
|
||||||
|
azcosmos.DatabaseProperties{ID: testDatabaseName},
|
||||||
|
&azcosmos.CreateDatabaseOptions{})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
|
||||||
|
config.Config.DisableAuth = false
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
|
||||||
|
repositories.DeleteDatabase(testDatabaseName)
|
||||||
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
|
||||||
&azcosmos.ClientOptions{},
|
&azcosmos.ClientOptions{},
|
||||||
)
|
)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -45,7 +66,12 @@ func Test_Authentication(t *testing.T) {
|
|||||||
azcosmos.DatabaseProperties{ID: testDatabaseName},
|
azcosmos.DatabaseProperties{ID: testDatabaseName},
|
||||||
&azcosmos.CreateDatabaseOptions{})
|
&azcosmos.CreateDatabaseOptions{})
|
||||||
|
|
||||||
assert.Contains(t, err.Error(), "401 Unauthorized")
|
var respErr *azcore.ResponseError
|
||||||
|
if errors.As(err, &respErr) {
|
||||||
|
assert.Equal(t, respErr.StatusCode, http.StatusUnauthorized)
|
||||||
|
} else {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) {
|
t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) {
|
||||||
@ -59,33 +85,3 @@ func Test_Authentication(t *testing.T) {
|
|||||||
assert.Contains(t, string(responseBody), "BACKEND_ENDPOINT")
|
assert.Contains(t, string(responseBody), "BACKEND_ENDPOINT")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func Test_Authentication_Disabled(t *testing.T) {
|
|
||||||
ts := runTestServerCustomConfig(&config.ServerConfig{
|
|
||||||
AccountKey: config.DefaultAccountKey,
|
|
||||||
ExplorerPath: "/tmp/nothing",
|
|
||||||
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
|
|
||||||
DisableAuth: true,
|
|
||||||
})
|
|
||||||
defer ts.Server.Close()
|
|
||||||
|
|
||||||
t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
|
|
||||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
|
||||||
formatConnectionString(ts.URL, "AAAA"),
|
|
||||||
&azcosmos.ClientOptions{},
|
|
||||||
)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
createResponse, err := client.CreateDatabase(
|
|
||||||
context.TODO(),
|
|
||||||
azcosmos.DatabaseProperties{ID: testDatabaseName},
|
|
||||||
&azcosmos.CreateDatabaseOptions{})
|
|
||||||
assert.Nil(t, err)
|
|
||||||
assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func formatConnectionString(endpoint, key string) string {
|
|
||||||
return fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", endpoint, key)
|
|
||||||
}
|
|
||||||
|
@ -3,29 +3,33 @@ package tests_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Collections(t *testing.T) {
|
func Test_Collections(t *testing.T) {
|
||||||
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
ts := runTestServer()
|
||||||
|
defer ts.Close()
|
||||||
|
|
||||||
setUp := func(ts *TestServer, client *azcosmos.Client) *azcosmos.DatabaseClient {
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.Config.AccountKey),
|
||||||
databaseClient, err := client.NewDatabase(testDatabaseName)
|
&azcosmos.ClientOptions{},
|
||||||
assert.Nil(t, err)
|
)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
return databaseClient
|
repositories.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
|
||||||
}
|
databaseClient, err := client.NewDatabase(testDatabaseName)
|
||||||
|
assert.Nil(t, err)
|
||||||
runTestsWithPresets(t, "Collection Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
|
||||||
databaseClient := setUp(ts, client)
|
|
||||||
|
|
||||||
|
t.Run("Collection Create", func(t *testing.T) {
|
||||||
t.Run("Should create collection", func(t *testing.T) {
|
t.Run("Should create collection", func(t *testing.T) {
|
||||||
createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
|
createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
|
||||||
ID: testCollectionName,
|
ID: testCollectionName,
|
||||||
@ -36,7 +40,7 @@ func Test_Collections(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return conflict when collection exists", func(t *testing.T) {
|
t.Run("Should return conflict when collection exists", func(t *testing.T) {
|
||||||
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
|
repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
|
||||||
ID: testCollectionName,
|
ID: testCollectionName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -54,11 +58,9 @@ func Test_Collections(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
runTestsWithPresets(t, "Collection Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
t.Run("Collection Read", func(t *testing.T) {
|
||||||
databaseClient := setUp(ts, client)
|
|
||||||
|
|
||||||
t.Run("Should read collection", func(t *testing.T) {
|
t.Run("Should read collection", func(t *testing.T) {
|
||||||
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
|
repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
|
||||||
ID: testCollectionName,
|
ID: testCollectionName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -72,7 +74,7 @@ func Test_Collections(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return not found when collection does not exist", func(t *testing.T) {
|
t.Run("Should return not found when collection does not exist", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
|
repositories.DeleteCollection(testDatabaseName, testCollectionName)
|
||||||
|
|
||||||
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
|
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -89,11 +91,9 @@ func Test_Collections(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
runTestsWithPresets(t, "Collection Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
t.Run("Collection Delete", func(t *testing.T) {
|
||||||
databaseClient := setUp(ts, client)
|
|
||||||
|
|
||||||
t.Run("Should delete collection", func(t *testing.T) {
|
t.Run("Should delete collection", func(t *testing.T) {
|
||||||
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
|
repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
|
||||||
ID: testCollectionName,
|
ID: testCollectionName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -106,7 +106,7 @@ func Test_Collections(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return not found when collection does not exist", func(t *testing.T) {
|
t.Run("Should return not found when collection does not exist", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
|
repositories.DeleteCollection(testDatabaseName, testCollectionName)
|
||||||
|
|
||||||
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
|
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
@ -1,64 +1,17 @@
|
|||||||
package tests_test
|
package tests_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
|
||||||
"github.com/pikami/cosmium/api"
|
"github.com/pikami/cosmium/api"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/api/config"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
|
|
||||||
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type TestServer struct {
|
func runTestServer() *httptest.Server {
|
||||||
Server *httptest.Server
|
config.Config.AccountKey = config.DefaultAccountKey
|
||||||
DataStore datastore.DataStore
|
config.Config.ExplorerPath = "/tmp/nothing"
|
||||||
URL string
|
|
||||||
}
|
|
||||||
|
|
||||||
func getDefaultTestServerConfig() *config.ServerConfig {
|
return httptest.NewServer(api.CreateRouter())
|
||||||
return &config.ServerConfig{
|
|
||||||
AccountKey: config.DefaultAccountKey,
|
|
||||||
ExplorerPath: "/tmp/nothing",
|
|
||||||
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
|
|
||||||
DataStore: "json",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func runTestServerCustomConfig(configuration *config.ServerConfig) *TestServer {
|
|
||||||
var dataStore datastore.DataStore
|
|
||||||
switch configuration.DataStore {
|
|
||||||
case config.DataStoreBadger:
|
|
||||||
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{})
|
|
||||||
default:
|
|
||||||
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
|
|
||||||
}
|
|
||||||
|
|
||||||
api := api.NewApiServer(dataStore, configuration)
|
|
||||||
|
|
||||||
server := httptest.NewServer(api.GetRouter())
|
|
||||||
|
|
||||||
configuration.DatabaseEndpoint = server.URL
|
|
||||||
|
|
||||||
return &TestServer{
|
|
||||||
Server: server,
|
|
||||||
DataStore: dataStore,
|
|
||||||
URL: server.URL,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func runTestServer() *TestServer {
|
|
||||||
config := getDefaultTestServerConfig()
|
|
||||||
|
|
||||||
config.LogLevel = "debug"
|
|
||||||
logger.SetLogLevel(logger.LogLevelDebug)
|
|
||||||
|
|
||||||
return runTestServerCustomConfig(config)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -66,47 +19,3 @@ const (
|
|||||||
testDatabaseName = "test-db"
|
testDatabaseName = "test-db"
|
||||||
testCollectionName = "test-coll"
|
testCollectionName = "test-coll"
|
||||||
)
|
)
|
||||||
|
|
||||||
type testFunc func(t *testing.T, ts *TestServer, cosmosClient *azcosmos.Client)
|
|
||||||
type testPreset string
|
|
||||||
|
|
||||||
const (
|
|
||||||
PresetJsonStore testPreset = "JsonDS"
|
|
||||||
PresetBadgerStore testPreset = "BadgerDS"
|
|
||||||
)
|
|
||||||
|
|
||||||
func runTestsWithPreset(t *testing.T, name string, testPreset testPreset, f testFunc) {
|
|
||||||
serverConfig := getDefaultTestServerConfig()
|
|
||||||
|
|
||||||
serverConfig.LogLevel = "debug"
|
|
||||||
logger.SetLogLevel(logger.LogLevelDebug)
|
|
||||||
|
|
||||||
switch testPreset {
|
|
||||||
case PresetBadgerStore:
|
|
||||||
serverConfig.DataStore = config.DataStoreBadger
|
|
||||||
case PresetJsonStore:
|
|
||||||
serverConfig.DataStore = config.DataStoreJson
|
|
||||||
}
|
|
||||||
|
|
||||||
ts := runTestServerCustomConfig(serverConfig)
|
|
||||||
defer ts.Server.Close()
|
|
||||||
defer ts.DataStore.Close()
|
|
||||||
|
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
|
||||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
|
||||||
&azcosmos.ClientOptions{},
|
|
||||||
)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
testName := fmt.Sprintf("%s_%s", testPreset, name)
|
|
||||||
|
|
||||||
t.Run(testName, func(t *testing.T) {
|
|
||||||
f(t, ts, client)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func runTestsWithPresets(t *testing.T, name string, testPresets []testPreset, f testFunc) {
|
|
||||||
for _, testPreset := range testPresets {
|
|
||||||
runTestsWithPreset(t, name, testPreset, f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -3,21 +3,31 @@ package tests_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Databases(t *testing.T) {
|
func Test_Databases(t *testing.T) {
|
||||||
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
ts := runTestServer()
|
||||||
|
defer ts.Close()
|
||||||
|
|
||||||
runTestsWithPresets(t, "Database Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.Config.AccountKey),
|
||||||
|
&azcosmos.ClientOptions{},
|
||||||
|
)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
t.Run("Database Create", func(t *testing.T) {
|
||||||
t.Run("Should create database", func(t *testing.T) {
|
t.Run("Should create database", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
repositories.DeleteDatabase(testDatabaseName)
|
||||||
|
|
||||||
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
|
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
@ -28,7 +38,7 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return conflict when database exists", func(t *testing.T) {
|
t.Run("Should return conflict when database exists", func(t *testing.T) {
|
||||||
ts.DataStore.CreateDatabase(datastore.Database{
|
repositories.CreateDatabase(repositorymodels.Database{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -46,9 +56,9 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
runTestsWithPresets(t, "Database Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
t.Run("Database Read", func(t *testing.T) {
|
||||||
t.Run("Should read database", func(t *testing.T) {
|
t.Run("Should read database", func(t *testing.T) {
|
||||||
ts.DataStore.CreateDatabase(datastore.Database{
|
repositories.CreateDatabase(repositorymodels.Database{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -62,7 +72,7 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
repositories.DeleteDatabase(testDatabaseName)
|
||||||
|
|
||||||
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -79,9 +89,9 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
runTestsWithPresets(t, "Database Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
t.Run("Database Delete", func(t *testing.T) {
|
||||||
t.Run("Should delete database", func(t *testing.T) {
|
t.Run("Should delete database", func(t *testing.T) {
|
||||||
ts.DataStore.CreateDatabase(datastore.Database{
|
repositories.CreateDatabase(repositorymodels.Database{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -94,7 +104,7 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
||||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
repositories.DeleteDatabase(testDatabaseName)
|
||||||
|
|
||||||
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
@ -3,18 +3,14 @@ package tests_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
|
||||||
"reflect"
|
"reflect"
|
||||||
"sync"
|
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/api/config"
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -53,9 +49,9 @@ func testCosmosQuery(t *testing.T,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerClient {
|
func Test_Documents(t *testing.T) {
|
||||||
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
|
repositories.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
|
||||||
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
|
repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
|
||||||
ID: testCollectionName,
|
ID: testCollectionName,
|
||||||
PartitionKey: struct {
|
PartitionKey: struct {
|
||||||
Paths []string "json:\"paths\""
|
Paths []string "json:\"paths\""
|
||||||
@ -65,11 +61,14 @@ func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerCli
|
|||||||
Paths: []string{"/pk"},
|
Paths: []string{"/pk"},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
|
repositories.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false})
|
||||||
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
|
repositories.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true})
|
||||||
|
|
||||||
|
ts := runTestServer()
|
||||||
|
defer ts.Close()
|
||||||
|
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.Config.AccountKey),
|
||||||
&azcosmos.ClientOptions{},
|
&azcosmos.ClientOptions{},
|
||||||
)
|
)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -77,439 +76,64 @@ func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerCli
|
|||||||
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
|
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
return collectionClient
|
t.Run("Should query document", func(t *testing.T) {
|
||||||
}
|
testCosmosQuery(t, collectionClient,
|
||||||
|
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
|
||||||
func Test_Documents(t *testing.T) {
|
nil,
|
||||||
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "12345", "pk": "123"},
|
||||||
runTestsWithPresets(t, "Test_Documents", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
map[string]interface{}{"id": "67890", "pk": "456"},
|
||||||
collectionClient := documents_InitializeDb(t, ts)
|
},
|
||||||
|
)
|
||||||
t.Run("Should query document", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "12345", "pk": "123"},
|
|
||||||
map[string]interface{}{"id": "67890", "pk": "456"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query VALUE array", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
[]interface{}{"12345", "123"},
|
|
||||||
[]interface{}{"67890", "456"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query VALUE object", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "12345", "_pk": "123"},
|
|
||||||
map[string]interface{}{"id": "67890", "_pk": "456"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
`select c.id
|
|
||||||
FROM c
|
|
||||||
WHERE c.isCool=true
|
|
||||||
ORDER BY c.id`,
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "67890"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query document with query parameters", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
`select c.id
|
|
||||||
FROM c
|
|
||||||
WHERE c.id=@param_id
|
|
||||||
ORDER BY c.id`,
|
|
||||||
[]azcosmos.QueryParameter{
|
|
||||||
{Name: "@param_id", Value: "67890"},
|
|
||||||
},
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "67890"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
`select c.id
|
|
||||||
FROM c
|
|
||||||
WHERE c[@param]="67890"
|
|
||||||
ORDER BY c.id`,
|
|
||||||
[]azcosmos.QueryParameter{
|
|
||||||
{Name: "@param", Value: "id"},
|
|
||||||
},
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "67890"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query array accessor", func(t *testing.T) {
|
|
||||||
testCosmosQuery(t, collectionClient,
|
|
||||||
`SELECT c.id,
|
|
||||||
c["arr"][0] AS arr0,
|
|
||||||
c["arr"][1] AS arr1,
|
|
||||||
c["arr"][2] AS arr2,
|
|
||||||
c["arr"][3] AS arr3
|
|
||||||
FROM c ORDER BY c.id`,
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
|
|
||||||
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle parallel writes", func(t *testing.T) {
|
|
||||||
var wg sync.WaitGroup
|
|
||||||
rutineCount := 100
|
|
||||||
results := make(chan error, rutineCount)
|
|
||||||
|
|
||||||
createCall := func(i int) {
|
|
||||||
defer wg.Done()
|
|
||||||
item := map[string]interface{}{
|
|
||||||
"id": fmt.Sprintf("id-%d", i),
|
|
||||||
"pk": fmt.Sprintf("pk-%d", i),
|
|
||||||
"val": i,
|
|
||||||
}
|
|
||||||
bytes, err := json.Marshal(item)
|
|
||||||
if err != nil {
|
|
||||||
results <- err
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
|
||||||
defer cancel()
|
|
||||||
|
|
||||||
_, err = collectionClient.CreateItem(
|
|
||||||
ctx,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
bytes,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
results <- err
|
|
||||||
|
|
||||||
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
|
||||||
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
for i := 0; i < rutineCount; i++ {
|
|
||||||
wg.Add(1)
|
|
||||||
go createCall(i)
|
|
||||||
}
|
|
||||||
|
|
||||||
wg.Wait()
|
|
||||||
close(results)
|
|
||||||
|
|
||||||
for err := range results {
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("Error creating item: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
runTestsWithPresets(t, "Test_Documents_Patch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
t.Run("Should query VALUE array", func(t *testing.T) {
|
||||||
collectionClient := documents_InitializeDb(t, ts)
|
testCosmosQuery(t, collectionClient,
|
||||||
|
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
|
||||||
t.Run("Should PATCH document", func(t *testing.T) {
|
nil,
|
||||||
context := context.TODO()
|
[]interface{}{
|
||||||
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
|
[]interface{}{"12345", "123"},
|
||||||
|
[]interface{}{"67890", "456"},
|
||||||
patch := azcosmos.PatchOperations{}
|
},
|
||||||
patch.AppendAdd("/newField", "newValue")
|
)
|
||||||
patch.AppendIncrement("/incr", 15)
|
|
||||||
patch.AppendRemove("/isCool")
|
|
||||||
patch.AppendReplace("/pk", "666")
|
|
||||||
patch.AppendSet("/setted", "isSet")
|
|
||||||
|
|
||||||
itemResponse, err := collectionClient.PatchItem(
|
|
||||||
context,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
"67890",
|
|
||||||
patch,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
var itemResponseBody map[string]interface{}
|
|
||||||
json.Unmarshal(itemResponse.Value, &itemResponseBody)
|
|
||||||
|
|
||||||
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
|
|
||||||
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
|
|
||||||
assert.Empty(t, itemResponseBody["isCool"])
|
|
||||||
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
|
|
||||||
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
|
|
||||||
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
|
|
||||||
patch := azcosmos.PatchOperations{}
|
|
||||||
patch.AppendReplace("/id", "newValue")
|
|
||||||
|
|
||||||
_, err := collectionClient.PatchItem(
|
|
||||||
context,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
"67890",
|
|
||||||
patch,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert.NotNil(t, err)
|
|
||||||
|
|
||||||
var respErr *azcore.ResponseError
|
|
||||||
if errors.As(err, &respErr) {
|
|
||||||
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
|
|
||||||
} else {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("CreateItem", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
|
|
||||||
item := map[string]interface{}{
|
|
||||||
"Id": "6789011",
|
|
||||||
"pk": "456",
|
|
||||||
"newField": "newValue2",
|
|
||||||
}
|
|
||||||
bytes, err := json.Marshal(item)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
r, err2 := collectionClient.CreateItem(
|
|
||||||
context,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
bytes,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert.NotNil(t, r)
|
|
||||||
assert.Nil(t, err2)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("CreateItem that already exists", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
|
|
||||||
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
|
|
||||||
bytes, err := json.Marshal(item)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
r, err := collectionClient.CreateItem(
|
|
||||||
context,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
bytes,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert.NotNil(t, r)
|
|
||||||
assert.NotNil(t, err)
|
|
||||||
|
|
||||||
var respErr *azcore.ResponseError
|
|
||||||
if errors.As(err, &respErr) {
|
|
||||||
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
|
|
||||||
} else {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("UpsertItem new", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
|
|
||||||
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
|
|
||||||
bytes, err := json.Marshal(item)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
r, err2 := collectionClient.UpsertItem(
|
|
||||||
context,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
bytes,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert.NotNil(t, r)
|
|
||||||
assert.Nil(t, err2)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("UpsertItem that already exists", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
|
|
||||||
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
|
|
||||||
bytes, err := json.Marshal(item)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
r, err2 := collectionClient.UpsertItem(
|
|
||||||
context,
|
|
||||||
azcosmos.PartitionKey{},
|
|
||||||
bytes,
|
|
||||||
&azcosmos.ItemOptions{
|
|
||||||
EnableContentResponseOnWrite: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
assert.NotNil(t, r)
|
|
||||||
assert.Nil(t, err2)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
runTestsWithPresets(t, "Test_Documents_TransactionalBatch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
t.Run("Should query VALUE object", func(t *testing.T) {
|
||||||
collectionClient := documents_InitializeDb(t, ts)
|
testCosmosQuery(t, collectionClient,
|
||||||
|
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "12345", "_pk": "123"},
|
||||||
|
map[string]interface{}{"id": "67890", "_pk": "456"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
|
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
|
||||||
context := context.TODO()
|
testCosmosQuery(t, collectionClient,
|
||||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
`select c.id
|
||||||
|
FROM c
|
||||||
|
WHERE c.isCool=true
|
||||||
|
ORDER BY c.id`,
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "67890"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
newItem := map[string]interface{}{
|
t.Run("Should query document with query parameters", func(t *testing.T) {
|
||||||
"id": "678901",
|
testCosmosQuery(t, collectionClient,
|
||||||
}
|
`select c.id
|
||||||
bytes, err := json.Marshal(newItem)
|
FROM c
|
||||||
assert.Nil(t, err)
|
WHERE c.id=@param_id
|
||||||
|
ORDER BY c.id`,
|
||||||
batch.CreateItem(bytes, nil)
|
[]azcosmos.QueryParameter{
|
||||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
{Name: "@param_id", Value: "67890"},
|
||||||
assert.Nil(t, err)
|
},
|
||||||
assert.True(t, response.Success)
|
[]interface{}{
|
||||||
assert.Equal(t, 1, len(response.OperationResults))
|
map[string]interface{}{"id": "67890"},
|
||||||
|
},
|
||||||
operationResponse := response.OperationResults[0]
|
)
|
||||||
assert.NotNil(t, operationResponse)
|
|
||||||
assert.NotNil(t, operationResponse.ResourceBody)
|
|
||||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
|
||||||
|
|
||||||
var itemResponseBody map[string]interface{}
|
|
||||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
|
||||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
|
||||||
|
|
||||||
createdDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
|
||||||
assert.Equal(t, newItem["id"], createdDoc["id"])
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
|
||||||
|
|
||||||
batch.DeleteItem("12345", nil)
|
|
||||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
|
||||||
assert.Nil(t, err)
|
|
||||||
assert.True(t, response.Success)
|
|
||||||
assert.Equal(t, 1, len(response.OperationResults))
|
|
||||||
|
|
||||||
operationResponse := response.OperationResults[0]
|
|
||||||
assert.NotNil(t, operationResponse)
|
|
||||||
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
|
|
||||||
|
|
||||||
_, status := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, "12345")
|
|
||||||
assert.Equal(t, datastore.StatusNotFound, int(status))
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
|
||||||
|
|
||||||
newItem := map[string]interface{}{
|
|
||||||
"id": "67890",
|
|
||||||
"pk": "666",
|
|
||||||
}
|
|
||||||
bytes, err := json.Marshal(newItem)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
batch.ReplaceItem("67890", bytes, nil)
|
|
||||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
|
||||||
assert.Nil(t, err)
|
|
||||||
assert.True(t, response.Success)
|
|
||||||
assert.Equal(t, 1, len(response.OperationResults))
|
|
||||||
|
|
||||||
operationResponse := response.OperationResults[0]
|
|
||||||
assert.NotNil(t, operationResponse)
|
|
||||||
assert.NotNil(t, operationResponse.ResourceBody)
|
|
||||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
|
||||||
|
|
||||||
var itemResponseBody map[string]interface{}
|
|
||||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
|
||||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
|
||||||
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
|
||||||
|
|
||||||
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
|
||||||
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
|
||||||
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
|
||||||
|
|
||||||
newItem := map[string]interface{}{
|
|
||||||
"id": "678901",
|
|
||||||
"pk": "666",
|
|
||||||
}
|
|
||||||
bytes, err := json.Marshal(newItem)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
batch.UpsertItem(bytes, nil)
|
|
||||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
|
||||||
assert.Nil(t, err)
|
|
||||||
assert.True(t, response.Success)
|
|
||||||
assert.Equal(t, 1, len(response.OperationResults))
|
|
||||||
|
|
||||||
operationResponse := response.OperationResults[0]
|
|
||||||
assert.NotNil(t, operationResponse)
|
|
||||||
assert.NotNil(t, operationResponse.ResourceBody)
|
|
||||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
|
||||||
|
|
||||||
var itemResponseBody map[string]interface{}
|
|
||||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
|
||||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
|
||||||
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
|
||||||
|
|
||||||
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
|
||||||
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
|
||||||
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should execute READ transactional batch", func(t *testing.T) {
|
|
||||||
context := context.TODO()
|
|
||||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
|
||||||
|
|
||||||
batch.ReadItem("67890", nil)
|
|
||||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
|
||||||
assert.Nil(t, err)
|
|
||||||
assert.True(t, response.Success)
|
|
||||||
assert.Equal(t, 1, len(response.OperationResults))
|
|
||||||
|
|
||||||
operationResponse := response.OperationResults[0]
|
|
||||||
assert.NotNil(t, operationResponse)
|
|
||||||
assert.NotNil(t, operationResponse.ResourceBody)
|
|
||||||
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
|
|
||||||
|
|
||||||
var itemResponseBody map[string]interface{}
|
|
||||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
|
||||||
assert.Equal(t, "67890", itemResponseBody["id"])
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1,42 +0,0 @@
|
|||||||
package tests_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/api/config"
|
|
||||||
"github.com/pikami/cosmium/internal/authentication"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Request document with trailing slash like python cosmosdb client does.
|
|
||||||
func Test_Documents_Read_Trailing_Slash(t *testing.T) {
|
|
||||||
ts := runTestServer()
|
|
||||||
documents_InitializeDb(t, ts)
|
|
||||||
defer ts.Server.Close()
|
|
||||||
|
|
||||||
t.Run("Read doc with client that appends slash to path", func(t *testing.T) {
|
|
||||||
resourceIdTemplate := "dbs/%s/colls/%s/docs/%s"
|
|
||||||
path := fmt.Sprintf(resourceIdTemplate, testDatabaseName, testCollectionName, "12345")
|
|
||||||
testUrl := ts.URL + "/" + path + "/"
|
|
||||||
date := time.Now().Format(time.RFC1123)
|
|
||||||
signature := authentication.GenerateSignature("GET", "docs", path, date, config.DefaultAccountKey)
|
|
||||||
httpClient := &http.Client{}
|
|
||||||
req, _ := http.NewRequest("GET", testUrl, nil)
|
|
||||||
req.Header.Add("x-ms-date", date)
|
|
||||||
req.Header.Add("authorization", "sig="+url.QueryEscape(signature))
|
|
||||||
res, err := httpClient.Do(req)
|
|
||||||
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
if res != nil {
|
|
||||||
defer res.Body.Close()
|
|
||||||
assert.Equal(t, http.StatusOK, res.StatusCode, "Expected HTTP status 200 OK")
|
|
||||||
} else {
|
|
||||||
t.FailNow()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,54 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"os/signal"
|
|
||||||
"syscall"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/api"
|
|
||||||
"github.com/pikami/cosmium/api/config"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
|
|
||||||
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
configuration := config.ParseFlags()
|
|
||||||
|
|
||||||
var dataStore datastore.DataStore
|
|
||||||
switch configuration.DataStore {
|
|
||||||
case config.DataStoreBadger:
|
|
||||||
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
|
|
||||||
PersistDataFilePath: configuration.PersistDataFilePath,
|
|
||||||
})
|
|
||||||
logger.InfoLn("Using Badger data store")
|
|
||||||
default:
|
|
||||||
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
|
|
||||||
InitialDataFilePath: configuration.InitialDataFilePath,
|
|
||||||
PersistDataFilePath: configuration.PersistDataFilePath,
|
|
||||||
})
|
|
||||||
logger.InfoLn("Using in-memory data store")
|
|
||||||
}
|
|
||||||
|
|
||||||
server := api.NewApiServer(dataStore, &configuration)
|
|
||||||
err := server.Start()
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
waitForExit(server, dataStore)
|
|
||||||
}
|
|
||||||
|
|
||||||
func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
|
|
||||||
sigs := make(chan os.Signal, 1)
|
|
||||||
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
|
|
||||||
|
|
||||||
// Block until a exit signal is received
|
|
||||||
<-sigs
|
|
||||||
|
|
||||||
// Stop the server
|
|
||||||
server.Stop()
|
|
||||||
|
|
||||||
dataStore.Close()
|
|
||||||
}
|
|
@ -1,125 +0,0 @@
|
|||||||
# Contributor Covenant Code of Conduct
|
|
||||||
|
|
||||||
## Our Pledge
|
|
||||||
|
|
||||||
We as members, contributors, and leaders pledge to make participation in our
|
|
||||||
community a harassment-free experience for everyone, regardless of age, body
|
|
||||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
|
||||||
identity and expression, level of experience, education, socio-economic status,
|
|
||||||
nationality, personal appearance, race, caste, color, religion, or sexual
|
|
||||||
identity and orientation.
|
|
||||||
|
|
||||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
|
||||||
diverse, inclusive, and healthy community.
|
|
||||||
|
|
||||||
## Our Standards
|
|
||||||
|
|
||||||
Examples of behavior that contributes to a positive environment for our
|
|
||||||
community include:
|
|
||||||
|
|
||||||
* Demonstrating empathy and kindness toward other people
|
|
||||||
* Being respectful of differing opinions, viewpoints, and experiences
|
|
||||||
* Giving and gracefully accepting constructive feedback
|
|
||||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
|
||||||
and learning from the experience
|
|
||||||
* Focusing on what is best not just for us as individuals, but for the overall
|
|
||||||
community
|
|
||||||
|
|
||||||
Examples of unacceptable behavior include:
|
|
||||||
|
|
||||||
* The use of sexualized language or imagery, and sexual attention or advances of
|
|
||||||
any kind
|
|
||||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
|
||||||
* Public or private harassment
|
|
||||||
* Publishing others' private information, such as a physical or email address,
|
|
||||||
without their explicit permission
|
|
||||||
* Other conduct which could reasonably be considered inappropriate in a
|
|
||||||
professional setting
|
|
||||||
|
|
||||||
## Enforcement Responsibilities
|
|
||||||
|
|
||||||
Community leaders are responsible for clarifying and enforcing our standards of
|
|
||||||
acceptable behavior and will take appropriate and fair corrective action in
|
|
||||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
|
||||||
or harmful.
|
|
||||||
|
|
||||||
Community leaders have the right and responsibility to remove, edit, or reject
|
|
||||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
|
||||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
|
||||||
decisions when appropriate.
|
|
||||||
|
|
||||||
## Scope
|
|
||||||
|
|
||||||
This Code of Conduct applies within all community spaces, and also applies when
|
|
||||||
an individual is officially representing the community in public spaces.
|
|
||||||
Examples of representing our community include using an official email address,
|
|
||||||
posting via an official social media account, or acting as an appointed
|
|
||||||
representative at an online or offline event.
|
|
||||||
|
|
||||||
## Enforcement
|
|
||||||
|
|
||||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
|
||||||
reported to the community leaders responsible for enforcement at
|
|
||||||
cosmium@pikami.org.
|
|
||||||
All complaints will be reviewed and investigated promptly and fairly.
|
|
||||||
|
|
||||||
All community leaders are obligated to respect the privacy and security of the
|
|
||||||
reporter of any incident.
|
|
||||||
|
|
||||||
## Enforcement Guidelines
|
|
||||||
|
|
||||||
Community leaders will follow these Community Impact Guidelines in determining
|
|
||||||
the consequences for any action they deem in violation of this Code of Conduct:
|
|
||||||
|
|
||||||
### 1. Correction
|
|
||||||
|
|
||||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
|
||||||
unprofessional or unwelcome in the community.
|
|
||||||
|
|
||||||
**Consequence**: A private, written warning from community leaders, providing
|
|
||||||
clarity around the nature of the violation and an explanation of why the
|
|
||||||
behavior was inappropriate. A public apology may be requested.
|
|
||||||
|
|
||||||
### 2. Warning
|
|
||||||
|
|
||||||
**Community Impact**: A violation through a single incident or series of
|
|
||||||
actions.
|
|
||||||
|
|
||||||
**Consequence**: A warning with consequences for continued behavior. No
|
|
||||||
interaction with the people involved, including unsolicited interaction with
|
|
||||||
those enforcing the Code of Conduct, for a specified period of time. This
|
|
||||||
includes avoiding interactions in community spaces as well as external channels
|
|
||||||
like social media. Violating these terms may lead to a temporary or permanent
|
|
||||||
ban.
|
|
||||||
|
|
||||||
### 3. Temporary Ban
|
|
||||||
|
|
||||||
**Community Impact**: A serious violation of community standards, including
|
|
||||||
sustained inappropriate behavior.
|
|
||||||
|
|
||||||
**Consequence**: A temporary ban from any sort of interaction or public
|
|
||||||
communication with the community for a specified period of time. No public or
|
|
||||||
private interaction with the people involved, including unsolicited interaction
|
|
||||||
with those enforcing the Code of Conduct, is allowed during this period.
|
|
||||||
Violating these terms may lead to a permanent ban.
|
|
||||||
|
|
||||||
### 4. Permanent Ban
|
|
||||||
|
|
||||||
**Community Impact**: Demonstrating a pattern of violation of community
|
|
||||||
standards, including sustained inappropriate behavior, harassment of an
|
|
||||||
individual, or aggression toward or disparagement of classes of individuals.
|
|
||||||
|
|
||||||
**Consequence**: A permanent ban from any sort of public interaction within the
|
|
||||||
community.
|
|
||||||
|
|
||||||
## Attribution
|
|
||||||
|
|
||||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
|
||||||
version 2.1, available at
|
|
||||||
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
|
|
||||||
|
|
||||||
Community Impact Guidelines were inspired by
|
|
||||||
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
|
|
||||||
|
|
||||||
For answers to common questions about this code of conduct, see the FAQ at
|
|
||||||
[https://www.contributor-covenant.org/faq][FAQ].
|
|
@ -1,230 +0,0 @@
|
|||||||
# Cosmium Compatibility with Cosmos DB
|
|
||||||
|
|
||||||
## Introduction
|
|
||||||
|
|
||||||
Cosmium is designed to emulate the functionality of Cosmos DB, providing developers with a local development environment that closely mimics the behavior of Cosmos DB. While Cosmium aims to be compatible with Cosmos DB, there are certain differences and limitations to be aware of. This document provides an overview of Cosmium's compatibility with Cosmos DB and highlights areas where deviations may occur.
|
|
||||||
|
|
||||||
## Supported Features
|
|
||||||
|
|
||||||
Cosmium strives to support the core features of Cosmos DB, including:
|
|
||||||
|
|
||||||
- REST API
|
|
||||||
- SQL-like query language
|
|
||||||
- Document-based data model
|
|
||||||
|
|
||||||
## Compatibility Matrix
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
| Feature | Implemented |
|
|
||||||
| ----------------------------- | ----------- |
|
|
||||||
| Subqueries | Yes |
|
|
||||||
| Joins | Yes |
|
|
||||||
| Computed properties | No |
|
|
||||||
| Coalesce operators | No |
|
|
||||||
| Bitwise operators | No |
|
|
||||||
| GeoJSON location data | No |
|
|
||||||
| Parameterized queries | Yes |
|
|
||||||
| Stored procedures | No |
|
|
||||||
| Triggers | No |
|
|
||||||
| User-defined functions (UDFs) | No |
|
|
||||||
|
|
||||||
### Clauses
|
|
||||||
|
|
||||||
| Clause | Implemented |
|
|
||||||
| ------------ | ----------- |
|
|
||||||
| SELECT | Yes |
|
|
||||||
| FROM | Yes |
|
|
||||||
| WHERE | Yes |
|
|
||||||
| ORDER BY | Yes |
|
|
||||||
| GROUP BY | Yes |
|
|
||||||
| OFFSET LIMIT | Yes |
|
|
||||||
|
|
||||||
### Keywords
|
|
||||||
|
|
||||||
| Keyword | Implemented |
|
|
||||||
| -------- | ----------- |
|
|
||||||
| BETWEEN | No |
|
|
||||||
| DISTINCT | Yes |
|
|
||||||
| LIKE | No |
|
|
||||||
| IN | Yes |
|
|
||||||
| TOP | Yes |
|
|
||||||
|
|
||||||
### Aggregate Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| -------- | ----------- |
|
|
||||||
| AVG | Yes |
|
|
||||||
| COUNT | Yes |
|
|
||||||
| MAX | Yes |
|
|
||||||
| MIN | Yes |
|
|
||||||
| SUM | Yes |
|
|
||||||
|
|
||||||
### Array Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| ------------------ | ----------- |
|
|
||||||
| ARRAY_CONCAT | Yes |
|
|
||||||
| ARRAY_CONTAINS | Yes |
|
|
||||||
| ARRAY_CONTAINS_ANY | Yes |
|
|
||||||
| ARRAY_CONTAINS_ALL | Yes |
|
|
||||||
| ARRAY_LENGTH | Yes |
|
|
||||||
| ARRAY_SLICE | Yes |
|
|
||||||
| CHOOSE | No |
|
|
||||||
| ObjectToArray | No |
|
|
||||||
| SetIntersect | Yes |
|
|
||||||
| SetUnion | Yes |
|
|
||||||
|
|
||||||
### Conditional Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| -------- | ----------- |
|
|
||||||
| IIF | Yes |
|
|
||||||
|
|
||||||
### Date and time Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| ------------------------- | ----------- |
|
|
||||||
| DateTimeAdd | No |
|
|
||||||
| DateTimeBin | No |
|
|
||||||
| DateTimeDiff | No |
|
|
||||||
| DateTimeFromParts | No |
|
|
||||||
| DateTimePart | No |
|
|
||||||
| DateTimeToTicks | No |
|
|
||||||
| DateTimeToTimestamp | No |
|
|
||||||
| GetCurrentDateTime | No |
|
|
||||||
| GetCurrentDateTimeStatic | No |
|
|
||||||
| GetCurrentTicks | No |
|
|
||||||
| GetCurrentTicksStatic | No |
|
|
||||||
| GetCurrentTimestamp | No |
|
|
||||||
| GetCurrentTimestampStatic | No |
|
|
||||||
| TicksToDateTime | No |
|
|
||||||
| TimestampToDateTime | No |
|
|
||||||
|
|
||||||
### Item Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| ---------- | ----------- |
|
|
||||||
| DocumentId | No |
|
|
||||||
|
|
||||||
### Mathematical Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| ---------------- | ----------- |
|
|
||||||
| ABS | Yes |
|
|
||||||
| ACOS | Yes |
|
|
||||||
| ASIN | Yes |
|
|
||||||
| ATAN | Yes |
|
|
||||||
| ATN2 | Yes |
|
|
||||||
| CEILING | Yes |
|
|
||||||
| COS | Yes |
|
|
||||||
| COT | Yes |
|
|
||||||
| DEGREES | Yes |
|
|
||||||
| EXP | Yes |
|
|
||||||
| FLOOR | Yes |
|
|
||||||
| IntAdd | Yes |
|
|
||||||
| IntBitAnd | Yes |
|
|
||||||
| IntBitLeftShift | Yes |
|
|
||||||
| IntBitNot | Yes |
|
|
||||||
| IntBitOr | Yes |
|
|
||||||
| IntBitRightShift | Yes |
|
|
||||||
| IntBitXor | Yes |
|
|
||||||
| IntDiv | Yes |
|
|
||||||
| IntMod | Yes |
|
|
||||||
| IntMul | Yes |
|
|
||||||
| IntSub | Yes |
|
|
||||||
| LOG | Yes |
|
|
||||||
| LOG10 | Yes |
|
|
||||||
| NumberBin | Yes |
|
|
||||||
| PI | Yes |
|
|
||||||
| POWER | Yes |
|
|
||||||
| RADIANS | Yes |
|
|
||||||
| RAND | Yes |
|
|
||||||
| ROUND | Yes |
|
|
||||||
| SIGN | Yes |
|
|
||||||
| SIN | Yes |
|
|
||||||
| SQRT | Yes |
|
|
||||||
| SQUARE | Yes |
|
|
||||||
| TAN | Yes |
|
|
||||||
| TRUNC | Yes |
|
|
||||||
|
|
||||||
### Spatial Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| ------------------ | ----------- |
|
|
||||||
| ST_AREA | No |
|
|
||||||
| ST_DISTANCE | No |
|
|
||||||
| ST_WITHIN | No |
|
|
||||||
| ST_INTERSECTS | No |
|
|
||||||
| ST_ISVALID | No |
|
|
||||||
| ST_ISVALIDDETAILED | No |
|
|
||||||
|
|
||||||
### String Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| --------------- | ----------- |
|
|
||||||
| CONCAT | Yes |
|
|
||||||
| CONTAINS | Yes |
|
|
||||||
| ENDSWITH | Yes |
|
|
||||||
| INDEX_OF | Yes |
|
|
||||||
| LEFT | Yes |
|
|
||||||
| LENGTH | Yes |
|
|
||||||
| LOWER | Yes |
|
|
||||||
| LTRIM | Yes |
|
|
||||||
| REGEXMATCH | No |
|
|
||||||
| REPLACE | Yes |
|
|
||||||
| REPLICATE | Yes |
|
|
||||||
| REVERSE | Yes |
|
|
||||||
| RIGHT | Yes |
|
|
||||||
| RTRIM | Yes |
|
|
||||||
| STARTSWITH | Yes |
|
|
||||||
| STRINGEQUALS | Yes |
|
|
||||||
| StringToArray | No |
|
|
||||||
| StringToBoolean | No |
|
|
||||||
| StringToNull | No |
|
|
||||||
| StringToNumber | No |
|
|
||||||
| StringToObject | No |
|
|
||||||
| SUBSTRING | Yes |
|
|
||||||
| ToString | Yes |
|
|
||||||
| TRIM | Yes |
|
|
||||||
| UPPER | Yes |
|
|
||||||
|
|
||||||
### Type checking Functions
|
|
||||||
|
|
||||||
| Function | Implemented |
|
|
||||||
| ---------------- | ----------- |
|
|
||||||
| IS_ARRAY | Yes |
|
|
||||||
| IS_BOOL | Yes |
|
|
||||||
| IS_DEFINED | Yes |
|
|
||||||
| IS_FINITE_NUMBER | Yes |
|
|
||||||
| IS_INTEGER | Yes |
|
|
||||||
| IS_NULL | Yes |
|
|
||||||
| IS_NUMBER | Yes |
|
|
||||||
| IS_OBJECT | Yes |
|
|
||||||
| IS_PRIMITIVE | Yes |
|
|
||||||
| IS_STRING | Yes |
|
|
||||||
|
|
||||||
### Transactional batch operations
|
|
||||||
|
|
||||||
Note: There's actually no transaction here. Think of this as a 'bulk operation' that can partially succeed.
|
|
||||||
|
|
||||||
| Operation | Implemented |
|
|
||||||
| --------- | ----------- |
|
|
||||||
| Create | Yes |
|
|
||||||
| Delete | Yes |
|
|
||||||
| Replace | Yes |
|
|
||||||
| Upsert | Yes |
|
|
||||||
| Read | Yes |
|
|
||||||
| Patch | No |
|
|
||||||
|
|
||||||
## Known Differences
|
|
||||||
|
|
||||||
While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of:
|
|
||||||
|
|
||||||
1. **Performance**: Cosmium may exhibit different performance characteristics compared to Cosmos DB, especially under heavy load or large datasets.
|
|
||||||
2. **Consistency Levels**: The consistency model in Cosmium may differ slightly from Cosmos DB.
|
|
||||||
3. **Features**: Some advanced features or functionalities of Cosmos DB may not be fully supported or available in Cosmium.
|
|
||||||
|
|
||||||
## Future Development
|
|
||||||
|
|
||||||
Cosmium is actively developed and maintained, with ongoing efforts to improve compatibility with Cosmos DB and enhance its features and capabilities. Future updates may address known differences and limitations, as well as introduce new functionality to bring Cosmium closer to feature parity with Cosmos DB.
|
|
@ -1,37 +0,0 @@
|
|||||||
# Contributing to Cosmium
|
|
||||||
|
|
||||||
Thank you for considering contributing to Cosmium! We appreciate your interest in helping to improve our project.
|
|
||||||
|
|
||||||
Please note that by participating in this project, you agree to abide by our [Code of Conduct](/docs/CODE_OF_CONDUCT.md). We expect all contributors to uphold the principles of respect, inclusivity, and professionalism.
|
|
||||||
|
|
||||||
If you have any questions or need assistance with the contribution process, feel free to reach out to us by opening an issue or contacting the maintainers directly.
|
|
||||||
|
|
||||||
We look forward to your contributions! 🚀
|
|
||||||
|
|
||||||
## Finding ways to contribute
|
|
||||||
|
|
||||||
A great way to contribute is to scan the [Compatibility Matrix](/docs/compatibility.md) for unsupported features and improve compatibility with CosmosDB.
|
|
||||||
|
|
||||||
Apart from that, the [Issues page](https://github.com/pikami/cosmium/issues) might contain issues registered by other users. Fixing reported issues is a great way to contribute.
|
|
||||||
|
|
||||||
## How to Contribute
|
|
||||||
|
|
||||||
1. **Create an Issue**: Before starting work on a new feature or bug fix, please create an issue or look for existing ones on the [Issues page](https://github.com/pikami/cosmium/issues) to discuss your proposed changes. This allows us to provide feedback and ensure that your contribution aligns with the project goals.
|
|
||||||
|
|
||||||
2. **Fork the Repository**: Once you have identified an issue to work on, fork the repository to your own GitHub account.
|
|
||||||
|
|
||||||
3. **Create a Branch**: Create a new branch for your changes using a descriptive name that reflects the issue you are addressing.
|
|
||||||
|
|
||||||
4. **Commit Changes**: Commit your changes with clear and descriptive commit messages. Reference the issue number in the commit message. **Please write unit tests for your implemented feature!**
|
|
||||||
|
|
||||||
5. **Create a Pull Request**: Once your changes are ready, create a pull request from your forked repository to the main repository. Be sure to include a detailed description of your changes and reference the relevant issue.
|
|
||||||
|
|
||||||
6. **Review and Collaborate**: Participate in the code review process by addressing any feedback or comments from maintainers. Collaboration and constructive feedback help ensure the quality of contributions.
|
|
||||||
|
|
||||||
## Example Commits
|
|
||||||
|
|
||||||
To get an idea of how to implement new query functions, you can review the following example commits:
|
|
||||||
|
|
||||||
* [Implement IN function](https://github.com/pikami/cosmium/commit/f37c664c1aef39ee820106eaec1a3708ee7a93c8)
|
|
||||||
* [Implement ToString function](https://github.com/pikami/cosmium/commit/16f41a547956f54481605f0ce035eee978a5e74b)
|
|
||||||
* [Implement ARRAY_CONCAT, ARRAY_LENGTH, ARRAY_SLICE, SetIntersect, SetUnion functions](https://github.com/pikami/cosmium/commit/1c5e5ce85d70ed91e4b9be9e8f76d59e6eafc1b5)
|
|
67
go.mod
67
go.mod
@ -1,60 +1,43 @@
|
|||||||
module github.com/pikami/cosmium
|
module github.com/pikami/cosmium
|
||||||
|
|
||||||
go 1.24.0
|
go 1.21.6
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.2
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6
|
||||||
github.com/cosmiumdev/json-patch/v5 v5.9.11
|
github.com/gin-gonic/gin v1.9.1
|
||||||
github.com/dgraph-io/badger/v4 v4.7.0
|
github.com/google/uuid v1.1.1
|
||||||
github.com/gin-gonic/gin v1.10.0
|
github.com/stretchr/testify v1.8.4
|
||||||
github.com/google/uuid v1.6.0
|
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
|
||||||
github.com/stretchr/testify v1.10.0
|
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1
|
|
||||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
|
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 // indirect
|
||||||
github.com/bytedance/sonic v1.13.2 // indirect
|
github.com/bytedance/sonic v1.9.1 // indirect
|
||||||
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
|
||||||
github.com/cloudwego/base64x v0.1.5 // indirect
|
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect
|
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
|
||||||
github.com/gin-contrib/sse v1.1.0 // indirect
|
|
||||||
github.com/go-logr/logr v1.4.2 // indirect
|
|
||||||
github.com/go-logr/stdr v1.2.2 // indirect
|
|
||||||
github.com/go-playground/locales v0.14.1 // indirect
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
github.com/go-playground/validator/v10 v10.26.0 // indirect
|
github.com/go-playground/validator/v10 v10.14.0 // indirect
|
||||||
github.com/goccy/go-json v0.10.5 // indirect
|
github.com/goccy/go-json v0.10.2 // indirect
|
||||||
github.com/google/flatbuffers v25.2.10+incompatible // indirect
|
|
||||||
github.com/json-iterator/go v1.1.12 // indirect
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
github.com/klauspost/compress v1.18.0 // indirect
|
github.com/klauspost/cpuid/v2 v2.2.4 // indirect
|
||||||
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
github.com/leodido/go-urn v1.2.4 // indirect
|
||||||
github.com/leodido/go-urn v1.4.0 // indirect
|
github.com/mattn/go-isatty v0.0.19 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
|
||||||
github.com/pkg/errors v0.9.1 // indirect
|
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
github.com/ugorji/go/codec v1.2.11 // indirect
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
golang.org/x/arch v0.3.0 // indirect
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
golang.org/x/crypto v0.18.0 // indirect
|
||||||
go.opentelemetry.io/otel v1.35.0 // indirect
|
golang.org/x/net v0.20.0 // indirect
|
||||||
go.opentelemetry.io/otel/metric v1.35.0 // indirect
|
golang.org/x/sys v0.16.0 // indirect
|
||||||
go.opentelemetry.io/otel/trace v1.35.0 // indirect
|
golang.org/x/text v0.14.0 // indirect
|
||||||
golang.org/x/arch v0.17.0 // indirect
|
google.golang.org/protobuf v1.30.0 // indirect
|
||||||
golang.org/x/crypto v0.38.0 // indirect
|
|
||||||
golang.org/x/net v0.40.0 // indirect
|
|
||||||
golang.org/x/sys v0.33.0 // indirect
|
|
||||||
golang.org/x/text v0.25.0 // indirect
|
|
||||||
google.golang.org/protobuf v1.36.6 // indirect
|
|
||||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
)
|
)
|
||||||
|
215
go.sum
215
go.sum
@ -1,186 +1,109 @@
|
|||||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
|
||||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.2 h1:c4k2FIYIh4xtwqrQwV0Ct1v5+ehlNXj5NI/MWVsiTkQ=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.2/go.mod h1:5FDJtLEO/GxwNgUxbwrY3LP0pEoThTQJtk2oysdXHxM=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0 h1:Yoicul8bnVdQrhDMTHxdEckRGX01XvwXDHUT9zYZ3k0=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6 h1:oBqQLSI1pZwGOdXJAoJJSzmff9tlfD4KroVfjQQmd0g=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6/go.mod h1:Beh5cHIXJ0oWEDWk9lNFtuklCojLLQ5hl+LqSNTTs0I=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0 h1:RGcdpSElvcXCwxydI0xzOBu1Gvp88OoiTGfbtO/z1m0=
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0/go.mod h1:YwUyrNUtcZcibA99JcfCP6UUp95VVQKO2MJfBzgJDwA=
|
github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0 h1:WVsrXCnHlDDX8ls+tootqRE87/hL9S/g4ewig9RsD/c=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0 h1:TSaH6Lj0m8bDr4vX1+LC1KLQTnLzZb3tOxrx/PLqw+c=
|
github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0/go.mod h1:Krtog/7tz27z75TwM5cIS8bxEH4dcBUezcq+kGVeZEo=
|
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
|
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
|
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
|
||||||
github.com/bytedance/sonic v1.13.1 h1:Jyd5CIvdFnkOWuKXr+wm4Nyk2h0yAFsr8ucJgEasO3g=
|
|
||||||
github.com/bytedance/sonic v1.13.1/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
|
||||||
github.com/bytedance/sonic v1.13.2 h1:8/H1FempDZqC4VqjptGo14QQlJx8VdZJegxs6wwfqpQ=
|
|
||||||
github.com/bytedance/sonic v1.13.2/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
|
||||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
|
||||||
github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY=
|
|
||||||
github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
|
||||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
|
||||||
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
|
|
||||||
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
|
||||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
|
||||||
github.com/cosmiumdev/json-patch/v5 v5.9.11 h1:WD2Wqaz/vO987z2FFdqgkj15HgYZ/Y5TpqE3I4T/iOQ=
|
|
||||||
github.com/cosmiumdev/json-patch/v5 v5.9.11/go.mod h1:YPZmckmv4ZY+oxKIOjgq3sIudHVB6VEMcicCS9LtVLM=
|
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/dgraph-io/badger/v4 v4.6.0 h1:acOwfOOZ4p1dPRnYzvkVm7rUk2Y21TgPVepCy5dJdFQ=
|
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
|
||||||
github.com/dgraph-io/badger/v4 v4.6.0/go.mod h1:KSJ5VTuZNC3Sd+YhvVjk2nYua9UZnnTr/SkXvdtiPgI=
|
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
|
||||||
github.com/dgraph-io/badger/v4 v4.7.0 h1:Q+J8HApYAY7UMpL8d9owqiB+odzEc0zn/aqOD9jhc6Y=
|
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||||
github.com/dgraph-io/badger/v4 v4.7.0/go.mod h1:He7TzG3YBy3j4f5baj5B7Zl2XyfNe5bl4Udl0aPemVA=
|
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||||
github.com/dgraph-io/ristretto/v2 v2.1.0 h1:59LjpOJLNDULHh8MC4UaegN52lC4JnO2dITsie/Pa8I=
|
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
|
||||||
github.com/dgraph-io/ristretto/v2 v2.1.0/go.mod h1:uejeqfYXpUomfse0+lO+13ATz4TypQYLJZzBSAemuB4=
|
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
|
||||||
github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM=
|
|
||||||
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
|
|
||||||
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
|
|
||||||
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
|
|
||||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
|
|
||||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
|
||||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
|
||||||
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
|
||||||
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
|
||||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
|
||||||
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
|
|
||||||
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
|
|
||||||
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
|
|
||||||
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
|
|
||||||
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
|
|
||||||
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
|
|
||||||
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
|
||||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
|
||||||
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
|
|
||||||
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
|
||||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
|
||||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
|
||||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||||
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||||
github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8=
|
github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js=
|
||||||
github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
|
github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||||
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
|
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||||
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c=
|
||||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
|
||||||
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
|
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
|
||||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
|
||||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
|
||||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
|
||||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
|
||||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
|
||||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
|
github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk=
|
||||||
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
|
||||||
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
|
||||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
|
||||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
|
||||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
|
||||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
|
||||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
|
||||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
|
||||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
|
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
|
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4 h1:Qj1ukM4GlMWXNdMBuXcXfz/Kw9s1qm0CLY32QxuSImI=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ=
|
||||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
|
||||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
|
|
||||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
|
||||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
|
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
|
||||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ=
|
||||||
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc=
|
||||||
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
|
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
|
||||||
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
|
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||||
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
|
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
|
||||||
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
|
|
||||||
golang.org/x/arch v0.15.0 h1:QtOrQd0bTUnhNVNndMpLHNWrDmYzZ2KDqSrEymqInZw=
|
|
||||||
golang.org/x/arch v0.15.0/go.mod h1:JmwW7aLIoRUKgaTzhkiEFxvcEiQGyOg9BMonBJUS7EE=
|
|
||||||
golang.org/x/arch v0.17.0 h1:4O3dfLzd+lQewptAHqjewQZQDyEdejz3VwgeYwkZneU=
|
|
||||||
golang.org/x/arch v0.17.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
|
|
||||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
|
||||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
|
||||||
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
|
|
||||||
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
|
|
||||||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
|
|
||||||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
|
|
||||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
|
|
||||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
|
|
||||||
golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
|
|
||||||
golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
|
||||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
|
||||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
|
||||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
|
||||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
|
||||||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
|
||||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||||
|
@ -10,11 +10,6 @@ import (
|
|||||||
|
|
||||||
// https://learn.microsoft.com/en-us/rest/api/cosmos-db/access-control-on-cosmosdb-resources
|
// https://learn.microsoft.com/en-us/rest/api/cosmos-db/access-control-on-cosmosdb-resources
|
||||||
func GenerateSignature(verb string, resourceType string, resourceId string, date string, masterKey string) string {
|
func GenerateSignature(verb string, resourceType string, resourceId string, date string, masterKey string) string {
|
||||||
isNameBased := resourceId != "" && ((len(resourceId) > 4 && resourceId[3] == '/') || strings.HasPrefix(strings.ToLower(resourceId), "interopusers"))
|
|
||||||
if !isNameBased {
|
|
||||||
resourceId = strings.ToLower(resourceId)
|
|
||||||
}
|
|
||||||
|
|
||||||
payload := fmt.Sprintf(
|
payload := fmt.Sprintf(
|
||||||
"%s\n%s\n%s\n%s\n%s\n",
|
"%s\n%s\n%s\n%s\n%s\n",
|
||||||
strings.ToLower(verb),
|
strings.ToLower(verb),
|
||||||
|
@ -27,14 +27,4 @@ func Test_GenerateSignature(t *testing.T) {
|
|||||||
signature := authentication.GenerateSignature("DELETE", "dbs", "dbs/Test Database", testDate, config.DefaultAccountKey)
|
signature := authentication.GenerateSignature("DELETE", "dbs", "dbs/Test Database", testDate, config.DefaultAccountKey)
|
||||||
assert.Equal(t, "LcuXXg0TcXxZG0kUCj9tZIWRy2yCzim3oiqGiHpRqGs=", signature)
|
assert.Equal(t, "LcuXXg0TcXxZG0kUCj9tZIWRy2yCzim3oiqGiHpRqGs=", signature)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should generate PKRANGES signature", func(t *testing.T) {
|
|
||||||
signature := authentication.GenerateSignature("GET", "pkranges", "m4d+xG08uVM=", testDate, config.DefaultAccountKey)
|
|
||||||
assert.Equal(t, "6S5ceZsl2EXWB3Jo5bJcK7zv8NxXnsxWPWD9TH3nNMo=", signature)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should generate PATCH signature", func(t *testing.T) {
|
|
||||||
signature := authentication.GenerateSignature("PATCH", "docs", "dbs/test-db/colls/test-coll/docs/67890", testDate, config.DefaultAccountKey)
|
|
||||||
assert.Equal(t, "VR1ddfxKBXnoaT+b3WkhyYVc9JmGNpTnaRmyDM44398=", signature)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
@ -1,9 +1,43 @@
|
|||||||
package constants
|
package constants
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/pikami/cosmium/api/config"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var ServerInfoResponse = gin.H{
|
||||||
|
"_self": "",
|
||||||
|
"id": config.Config.DatabaseAccount,
|
||||||
|
"_rid": fmt.Sprintf("%s.%s", config.Config.DatabaseAccount, config.Config.DatabaseDomain),
|
||||||
|
"media": "//media/",
|
||||||
|
"addresses": "//addresses/",
|
||||||
|
"_dbs": "//dbs/",
|
||||||
|
"writableLocations": []map[string]interface{}{
|
||||||
|
{
|
||||||
|
"name": "South Central US",
|
||||||
|
"databaseAccountEndpoint": config.Config.DatabaseEndpoint,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"readableLocations": []map[string]interface{}{
|
||||||
|
{
|
||||||
|
"name": "South Central US",
|
||||||
|
"databaseAccountEndpoint": config.Config.DatabaseEndpoint,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"enableMultipleWriteLocations": false,
|
||||||
|
"userReplicationPolicy": map[string]interface{}{
|
||||||
|
"asyncReplication": false,
|
||||||
|
"minReplicaSetSize": 1,
|
||||||
|
"maxReplicasetSize": 4,
|
||||||
|
},
|
||||||
|
"userConsistencyPolicy": map[string]interface{}{"defaultConsistencyLevel": "Session"},
|
||||||
|
"systemReplicationPolicy": map[string]interface{}{"minReplicaSetSize": 1, "maxReplicasetSize": 4},
|
||||||
|
"readPolicy": map[string]interface{}{"primaryReadCoefficient": 1, "secondaryReadCoefficient": 1},
|
||||||
|
"queryEngineConfiguration": "{\"allowNewKeywords\":true,\"maxJoinsPerSqlQuery\":10,\"maxQueryRequestTimeoutFraction\":0.9,\"maxSqlQueryInputLength\":524288,\"maxUdfRefPerSqlQuery\":10,\"queryMaxInMemorySortDocumentCount\":-1000,\"spatialMaxGeometryPointCount\":256,\"sqlAllowNonFiniteNumbers\":false,\"sqlDisableOptimizationFlags\":0,\"enableSpatialIndexing\":true,\"maxInExpressionItemsCount\":2147483647,\"maxLogicalAndPerSqlQuery\":2147483647,\"maxLogicalOrPerSqlQuery\":2147483647,\"maxSpatialQueryCells\":2147483647,\"sqlAllowAggregateFunctions\":true,\"sqlAllowGroupByClause\":true,\"sqlAllowLike\":true,\"sqlAllowSubQuery\":true,\"sqlAllowScalarSubQuery\":true,\"sqlAllowTop\":true}",
|
||||||
|
}
|
||||||
|
|
||||||
var QueryPlanResponse = gin.H{
|
var QueryPlanResponse = gin.H{
|
||||||
"partitionedQueryExecutionInfoVersion": 2,
|
"partitionedQueryExecutionInfoVersion": 2,
|
||||||
"queryInfo": map[string]interface{}{
|
"queryInfo": map[string]interface{}{
|
||||||
@ -30,8 +64,3 @@ var QueryPlanResponse = gin.H{
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
var UnknownErrorResponse = gin.H{"message": "Unknown error"}
|
|
||||||
var NotFoundResponse = gin.H{"message": "NotFound"}
|
|
||||||
var ConflictResponse = gin.H{"message": "Conflict"}
|
|
||||||
var BadRequestResponse = gin.H{"message": "BadRequest"}
|
|
||||||
|
@ -1,20 +0,0 @@
|
|||||||
package converters
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
|
|
||||||
)
|
|
||||||
|
|
||||||
type DocumentToRowTypeIterator struct {
|
|
||||||
documents datastore.DocumentIterator
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewDocumentToRowTypeIterator(documents datastore.DocumentIterator) *DocumentToRowTypeIterator {
|
|
||||||
return &DocumentToRowTypeIterator{
|
|
||||||
documents: documents,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (di *DocumentToRowTypeIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
|
|
||||||
return di.documents.Next()
|
|
||||||
}
|
|
@ -1,66 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
|
||||||
|
|
||||||
type BadgerDataStore struct {
|
|
||||||
db *badger.DB
|
|
||||||
gcTicker *time.Ticker
|
|
||||||
}
|
|
||||||
|
|
||||||
type BadgerDataStoreOptions struct {
|
|
||||||
PersistDataFilePath string
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewBadgerDataStore(options BadgerDataStoreOptions) *BadgerDataStore {
|
|
||||||
badgerOpts := badger.DefaultOptions(options.PersistDataFilePath)
|
|
||||||
badgerOpts = badgerOpts.WithLogger(newBadgerLogger())
|
|
||||||
if options.PersistDataFilePath == "" {
|
|
||||||
badgerOpts = badgerOpts.WithInMemory(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
db, err := badger.Open(badgerOpts)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
gcTicker := time.NewTicker(5 * time.Minute)
|
|
||||||
|
|
||||||
ds := &BadgerDataStore{
|
|
||||||
db: db,
|
|
||||||
gcTicker: gcTicker,
|
|
||||||
}
|
|
||||||
|
|
||||||
go ds.runGarbageCollector()
|
|
||||||
|
|
||||||
return ds
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) Close() {
|
|
||||||
if r.gcTicker != nil {
|
|
||||||
r.gcTicker.Stop()
|
|
||||||
r.gcTicker = nil
|
|
||||||
}
|
|
||||||
|
|
||||||
r.db.Close()
|
|
||||||
r.db = nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DumpToJson() (string, error) {
|
|
||||||
logger.ErrorLn("Badger datastore does not support state export currently.")
|
|
||||||
return "{}", nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) runGarbageCollector() {
|
|
||||||
for range r.gcTicker.C {
|
|
||||||
again:
|
|
||||||
err := r.db.RunValueLogGC(0.7)
|
|
||||||
if err == nil {
|
|
||||||
goto again
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,28 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
|
||||||
|
|
||||||
type badgerLogger struct{}
|
|
||||||
|
|
||||||
func newBadgerLogger() badger.Logger {
|
|
||||||
return &badgerLogger{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *badgerLogger) Errorf(format string, v ...interface{}) {
|
|
||||||
logger.Errorf(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *badgerLogger) Warningf(format string, v ...interface{}) {
|
|
||||||
logger.Infof(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *badgerLogger) Infof(format string, v ...interface{}) {
|
|
||||||
logger.Infof(format, v...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *badgerLogger) Debugf(format string, v ...interface{}) {
|
|
||||||
logger.Debugf(format, v...)
|
|
||||||
}
|
|
@ -1,103 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
|
|
||||||
exists, err := keyExists(r.db.NewTransaction(false), generateDatabaseKey(databaseId))
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while checking if database exists:", err)
|
|
||||||
return nil, datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
if !exists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
colls, status := listByPrefix[datastore.Collection](r.db, generateKey(resourceid.ResourceTypeCollection, databaseId, "", ""))
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
return colls, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
|
|
||||||
collectionKey := generateCollectionKey(databaseId, collectionId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var collection datastore.Collection
|
|
||||||
status := getKey(txn, collectionKey, &collection)
|
|
||||||
|
|
||||||
return collection, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
|
|
||||||
collectionKey := generateCollectionKey(databaseId, collectionId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
prefixes := []string{
|
|
||||||
generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""),
|
|
||||||
generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""),
|
|
||||||
generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""),
|
|
||||||
generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""),
|
|
||||||
collectionKey,
|
|
||||||
}
|
|
||||||
for _, prefix := range prefixes {
|
|
||||||
if err := deleteKeysByPrefix(txn, prefix); err != nil {
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
err := txn.Commit()
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while committing transaction:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
|
|
||||||
collectionKey := generateCollectionKey(databaseId, newCollection.ID)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
collectionExists, err := keyExists(txn, collectionKey)
|
|
||||||
if err != nil || collectionExists {
|
|
||||||
return datastore.Collection{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
var database datastore.Database
|
|
||||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return datastore.Collection{}, status
|
|
||||||
}
|
|
||||||
|
|
||||||
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
|
|
||||||
|
|
||||||
newCollection.TimeStamp = time.Now().Unix()
|
|
||||||
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
|
|
||||||
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
|
|
||||||
|
|
||||||
status = insertKey(txn, collectionKey, newCollection)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return datastore.Collection{}, status
|
|
||||||
}
|
|
||||||
|
|
||||||
return newCollection, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,80 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
|
|
||||||
dbs, status := listByPrefix[datastore.Database](r.db, DatabaseKeyPrefix)
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
return dbs, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
|
|
||||||
databaseKey := generateDatabaseKey(id)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var database datastore.Database
|
|
||||||
status := getKey(txn, databaseKey, &database)
|
|
||||||
|
|
||||||
return database, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
|
|
||||||
databaseKey := generateDatabaseKey(id)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
prefixes := []string{
|
|
||||||
generateKey(resourceid.ResourceTypeCollection, id, "", ""),
|
|
||||||
generateKey(resourceid.ResourceTypeDocument, id, "", ""),
|
|
||||||
generateKey(resourceid.ResourceTypeTrigger, id, "", ""),
|
|
||||||
generateKey(resourceid.ResourceTypeStoredProcedure, id, "", ""),
|
|
||||||
generateKey(resourceid.ResourceTypeUserDefinedFunction, id, "", ""),
|
|
||||||
databaseKey,
|
|
||||||
}
|
|
||||||
for _, prefix := range prefixes {
|
|
||||||
if err := deleteKeysByPrefix(txn, prefix); err != nil {
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
err := txn.Commit()
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while committing transaction:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
|
|
||||||
databaseKey := generateDatabaseKey(newDatabase.ID)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
newDatabase.TimeStamp = time.Now().Unix()
|
|
||||||
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
|
|
||||||
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
|
|
||||||
|
|
||||||
status := insertKey(txn, databaseKey, newDatabase)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return datastore.Database{}, status
|
|
||||||
}
|
|
||||||
|
|
||||||
return newDatabase, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,204 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
"github.com/vmihailenco/msgpack/v5"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
DatabaseKeyPrefix = "DB:"
|
|
||||||
CollectionKeyPrefix = "COL:"
|
|
||||||
DocumentKeyPrefix = "DOC:"
|
|
||||||
TriggerKeyPrefix = "TRG:"
|
|
||||||
StoredProcedureKeyPrefix = "SP:"
|
|
||||||
UserDefinedFunctionKeyPrefix = "UDF:"
|
|
||||||
)
|
|
||||||
|
|
||||||
func generateKey(
|
|
||||||
resourceType resourceid.ResourceType,
|
|
||||||
databaseId string,
|
|
||||||
collectionId string,
|
|
||||||
resourceId string,
|
|
||||||
) string {
|
|
||||||
result := ""
|
|
||||||
|
|
||||||
switch resourceType {
|
|
||||||
case resourceid.ResourceTypeDatabase:
|
|
||||||
result += DatabaseKeyPrefix
|
|
||||||
case resourceid.ResourceTypeCollection:
|
|
||||||
result += CollectionKeyPrefix
|
|
||||||
case resourceid.ResourceTypeDocument:
|
|
||||||
result += DocumentKeyPrefix
|
|
||||||
case resourceid.ResourceTypeTrigger:
|
|
||||||
result += TriggerKeyPrefix
|
|
||||||
case resourceid.ResourceTypeStoredProcedure:
|
|
||||||
result += StoredProcedureKeyPrefix
|
|
||||||
case resourceid.ResourceTypeUserDefinedFunction:
|
|
||||||
result += UserDefinedFunctionKeyPrefix
|
|
||||||
}
|
|
||||||
|
|
||||||
if databaseId != "" {
|
|
||||||
result += databaseId
|
|
||||||
}
|
|
||||||
|
|
||||||
if collectionId != "" {
|
|
||||||
result += "/colls/" + collectionId
|
|
||||||
}
|
|
||||||
|
|
||||||
if resourceId != "" {
|
|
||||||
result += "/" + resourceId
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func generateDatabaseKey(databaseId string) string {
|
|
||||||
return generateKey(resourceid.ResourceTypeDatabase, databaseId, "", "")
|
|
||||||
}
|
|
||||||
|
|
||||||
func generateCollectionKey(databaseId string, collectionId string) string {
|
|
||||||
return generateKey(resourceid.ResourceTypeCollection, databaseId, collectionId, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
func generateDocumentKey(databaseId string, collectionId string, documentId string) string {
|
|
||||||
return generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, documentId)
|
|
||||||
}
|
|
||||||
|
|
||||||
func generateTriggerKey(databaseId string, collectionId string, triggerId string) string {
|
|
||||||
return generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, triggerId)
|
|
||||||
}
|
|
||||||
|
|
||||||
// generateStoredProcedureKey builds the storage key for a stored
// procedure, scoped under its database and collection.
func generateStoredProcedureKey(databaseId string, collectionId string, storedProcedureId string) string {
	return generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, storedProcedureId)
}
|
|
||||||
|
|
||||||
// generateUserDefinedFunctionKey builds the storage key for a user
// defined function, scoped under its database and collection.
func generateUserDefinedFunctionKey(databaseId string, collectionId string, udfId string) string {
	return generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, udfId)
}
|
|
||||||
|
|
||||||
// insertKey msgpack-encodes value and stores it under key.
// It returns Conflict when the key already exists and Unknown on any
// storage or encoding failure.
//
// NOTE: this helper commits the supplied transaction itself. Callers that
// defer txn.Discard() rely on Discard being a no-op after a Commit.
func insertKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
	// Only ErrKeyNotFound allows an insert; a successful Get is a conflict.
	_, err := txn.Get([]byte(key))
	if err == nil {
		return datastore.Conflict
	}

	if err != badger.ErrKeyNotFound {
		logger.ErrorLn("Error while checking if key exists:", err)
		return datastore.Unknown
	}

	buf, err := msgpack.Marshal(value)
	if err != nil {
		logger.ErrorLn("Error while encoding value:", err)
		return datastore.Unknown
	}

	err = txn.Set([]byte(key), buf)
	if err != nil {
		logger.ErrorLn("Error while setting key:", err)
		return datastore.Unknown
	}

	err = txn.Commit()
	if err != nil {
		logger.ErrorLn("Error while committing transaction:", err)
		return datastore.Unknown
	}

	return datastore.StatusOk
}
|
|
||||||
|
|
||||||
func getKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
|
|
||||||
item, err := txn.Get([]byte(key))
|
|
||||||
if err != nil {
|
|
||||||
if err == badger.ErrKeyNotFound {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
logger.ErrorLn("Error while getting key:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
val, err := item.ValueCopy(nil)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while copying value:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
if value == nil {
|
|
||||||
logger.ErrorLn("getKey called with nil value")
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
err = msgpack.Unmarshal(val, &value)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while decoding value:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func keyExists(txn *badger.Txn, key string) (bool, error) {
|
|
||||||
_, err := txn.Get([]byte(key))
|
|
||||||
if err == nil {
|
|
||||||
return true, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if err == badger.ErrKeyNotFound {
|
|
||||||
return false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func listByPrefix[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
|
|
||||||
results := make([]T, 0)
|
|
||||||
|
|
||||||
err := db.View(func(txn *badger.Txn) error {
|
|
||||||
opts := badger.DefaultIteratorOptions
|
|
||||||
opts.Prefix = []byte(prefix)
|
|
||||||
it := txn.NewIterator(opts)
|
|
||||||
defer it.Close()
|
|
||||||
|
|
||||||
for it.Rewind(); it.Valid(); it.Next() {
|
|
||||||
item := it.Item()
|
|
||||||
var entry T
|
|
||||||
|
|
||||||
status := getKey(txn, string(item.Key()), &entry)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
logger.ErrorLn("Failed to retrieve entry:", string(item.Key()))
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
results = append(results, entry)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while listing entries:", err)
|
|
||||||
return nil, datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return results, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
// deleteKeysByPrefix removes every key beginning with prefix inside txn.
// Committing (or discarding) the transaction is the caller's job.
func deleteKeysByPrefix(txn *badger.Txn, prefix string) error {
	opts := badger.DefaultIteratorOptions
	opts.Prefix = []byte(prefix)
	it := txn.NewIterator(opts)
	defer it.Close()

	for it.Rewind(); it.Valid(); it.Next() {
		// KeyCopy is required: the slice returned by Key() is only valid
		// until the iterator advances.
		key := it.Item().KeyCopy(nil)
		if err := txn.Delete(key); err != nil {
			logger.ErrorLn("Failed to delete key:", string(key), "Error:", err)
			return err
		}
	}

	return nil
}
|
|
@ -1,58 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/vmihailenco/msgpack/v5"
|
|
||||||
)
|
|
||||||
|
|
||||||
// BadgerDocumentIterator streams documents out of a Badger prefix scan.
// It owns both the underlying iterator and the read transaction it was
// created with; Close releases them.
type BadgerDocumentIterator struct {
	txn    *badger.Txn
	it     *badger.Iterator
	prefix string // key prefix the iteration is restricted to
}
|
|
||||||
|
|
||||||
// NewBadgerDocumentIterator creates an iterator over every key starting
// with prefix. The iterator takes ownership of txn: its Close discards
// the transaction.
func NewBadgerDocumentIterator(txn *badger.Txn, prefix string) *BadgerDocumentIterator {
	opts := badger.DefaultIteratorOptions
	opts.Prefix = []byte(prefix)

	it := txn.NewIterator(opts)
	// Position on the first matching key so Next can read immediately.
	it.Rewind()

	return &BadgerDocumentIterator{
		txn:    txn,
		it:     it,
		prefix: prefix,
	}
}
|
|
||||||
|
|
||||||
func (i *BadgerDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
if !i.it.Valid() {
|
|
||||||
i.it.Close()
|
|
||||||
return datastore.Document{}, datastore.IterEOF
|
|
||||||
}
|
|
||||||
|
|
||||||
item := i.it.Item()
|
|
||||||
val, err := item.ValueCopy(nil)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while copying value:", err)
|
|
||||||
return datastore.Document{}, datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
current := &datastore.Document{}
|
|
||||||
err = msgpack.Unmarshal(val, ¤t)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while decoding value:", err)
|
|
||||||
return datastore.Document{}, datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
i.it.Next()
|
|
||||||
|
|
||||||
return *current, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
// Close releases the underlying badger iterator and discards the read
// transaction the iterator owns.
// NOTE(review): Next already closes the iterator at EOF, so this may be
// the second Close — confirm badger's Iterator.Close is safe to repeat.
func (i *BadgerDocumentIterator) Close() {
	i.it.Close()
	i.txn.Discard()
}
|
|
@ -1,127 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
|
||||||
if err != nil || !dbExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
|
||||||
if err != nil || !collExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
docs, status := listByPrefix[datastore.Document](r.db, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
return docs, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
|
|
||||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
|
||||||
if err != nil || !dbExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
|
||||||
if err != nil || !collExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
iter := NewBadgerDocumentIterator(txn, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
|
|
||||||
return iter, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var document datastore.Document
|
|
||||||
status := getKey(txn, documentKey, &document)
|
|
||||||
|
|
||||||
return document, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
|
|
||||||
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
exists, err := keyExists(txn, documentKey)
|
|
||||||
if err != nil {
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
if !exists {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Delete([]byte(documentKey))
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while deleting document:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Commit()
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while committing transaction:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var database datastore.Database
|
|
||||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return datastore.Document{}, status
|
|
||||||
}
|
|
||||||
|
|
||||||
var collection datastore.Collection
|
|
||||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return datastore.Document{}, status
|
|
||||||
}
|
|
||||||
|
|
||||||
var ok bool
|
|
||||||
var documentId string
|
|
||||||
if documentId, ok = document["id"].(string); !ok || documentId == "" {
|
|
||||||
documentId = fmt.Sprint(uuid.New())
|
|
||||||
document["id"] = documentId
|
|
||||||
}
|
|
||||||
|
|
||||||
document["_ts"] = time.Now().Unix()
|
|
||||||
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
|
|
||||||
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
|
|
||||||
|
|
||||||
status = insertKey(txn, generateDocumentKey(databaseId, collectionId, documentId), document)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return datastore.Document{}, status
|
|
||||||
}
|
|
||||||
|
|
||||||
return document, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,53 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
// I have no idea what this is tbh
|
|
||||||
func (r *BadgerDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
|
|
||||||
databaseRid := databaseId
|
|
||||||
collectionRid := collectionId
|
|
||||||
var timestamp int64 = 0
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var database datastore.Database
|
|
||||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
databaseRid = database.ResourceID
|
|
||||||
}
|
|
||||||
|
|
||||||
var collection datastore.Collection
|
|
||||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
collectionRid = collection.ResourceID
|
|
||||||
timestamp = collection.TimeStamp
|
|
||||||
}
|
|
||||||
|
|
||||||
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
|
|
||||||
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
|
|
||||||
etag := fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
|
|
||||||
return []datastore.PartitionKeyRange{
|
|
||||||
{
|
|
||||||
ResourceID: pkrResourceId,
|
|
||||||
ID: "0",
|
|
||||||
Etag: etag,
|
|
||||||
MinInclusive: "",
|
|
||||||
MaxExclusive: "FF",
|
|
||||||
RidPrefix: 0,
|
|
||||||
Self: pkrSelf,
|
|
||||||
ThroughputFraction: 1,
|
|
||||||
Status: "online",
|
|
||||||
Parents: []interface{}{},
|
|
||||||
TimeStamp: timestamp,
|
|
||||||
Lsn: 17,
|
|
||||||
},
|
|
||||||
}, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,107 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
|
||||||
if err != nil || !dbExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
|
||||||
if err != nil || !collExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
storedProcedures, status := listByPrefix[datastore.StoredProcedure](r.db, generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""))
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
return storedProcedures, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
|
||||||
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var storedProcedure datastore.StoredProcedure
|
|
||||||
status := getKey(txn, storedProcedureKey, &storedProcedure)
|
|
||||||
|
|
||||||
return storedProcedure, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) datastore.DataStoreStatus {
|
|
||||||
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
exists, err := keyExists(txn, storedProcedureKey)
|
|
||||||
if err != nil {
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
if !exists {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Delete([]byte(storedProcedureKey))
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while deleting stored procedure:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Commit()
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while committing transaction:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateStoredProcedure stores a new stored procedure in the given
// collection, filling in the server-generated fields (_ts, _rid, _etag,
// _self). Returns BadRequest when no ID is supplied, the parent lookup
// status when the database/collection is missing, and Conflict (from
// insertKey) when the ID already exists.
func (r *BadgerDataStore) CreateStoredProcedure(databaseId string, collectionId string, storedProcedure datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
	txn := r.db.NewTransaction(true)
	// insertKey commits on success; Discard is then a no-op.
	defer txn.Discard()

	if storedProcedure.ID == "" {
		return datastore.StoredProcedure{}, datastore.BadRequest
	}

	// Parent metadata is loaded (not just existence-checked) because the
	// resource ids feed into ResourceID and Self below.
	var database datastore.Database
	status := getKey(txn, generateDatabaseKey(databaseId), &database)
	if status != datastore.StatusOk {
		return datastore.StoredProcedure{}, status
	}

	var collection datastore.Collection
	status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
	if status != datastore.StatusOk {
		return datastore.StoredProcedure{}, status
	}

	storedProcedure.TimeStamp = time.Now().Unix()
	storedProcedure.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
	storedProcedure.ETag = fmt.Sprintf("\"%s\"", uuid.New())
	storedProcedure.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, storedProcedure.ResourceID)

	status = insertKey(txn, generateStoredProcedureKey(databaseId, collectionId, storedProcedure.ID), storedProcedure)
	if status != datastore.StatusOk {
		return datastore.StoredProcedure{}, status
	}

	return storedProcedure, datastore.StatusOk
}
|
|
@ -1,107 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
|
||||||
if err != nil || !dbExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
|
||||||
if err != nil || !collExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
triggers, status := listByPrefix[datastore.Trigger](r.db, generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""))
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
return triggers, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
|
|
||||||
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var trigger datastore.Trigger
|
|
||||||
status := getKey(txn, triggerKey, &trigger)
|
|
||||||
|
|
||||||
return trigger, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
|
|
||||||
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
exists, err := keyExists(txn, triggerKey)
|
|
||||||
if err != nil {
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
if !exists {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Delete([]byte(triggerKey))
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while deleting trigger:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Commit()
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while committing transaction:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateTrigger stores a new trigger in the given collection, filling in
// the server-generated fields (_ts, _rid, _etag, _self). Returns
// BadRequest when no ID is supplied, the parent lookup status when the
// database/collection is missing, and Conflict (from insertKey) when the
// ID already exists.
func (r *BadgerDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
	txn := r.db.NewTransaction(true)
	// insertKey commits on success; Discard is then a no-op.
	defer txn.Discard()

	if trigger.ID == "" {
		return datastore.Trigger{}, datastore.BadRequest
	}

	// Parent metadata is loaded (not just existence-checked) because the
	// resource ids feed into ResourceID and Self below.
	var database datastore.Database
	status := getKey(txn, generateDatabaseKey(databaseId), &database)
	if status != datastore.StatusOk {
		return datastore.Trigger{}, status
	}

	var collection datastore.Collection
	status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
	if status != datastore.StatusOk {
		return datastore.Trigger{}, status
	}

	trigger.TimeStamp = time.Now().Unix()
	trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
	trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
	trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)

	status = insertKey(txn, generateTriggerKey(databaseId, collectionId, trigger.ID), trigger)
	if status != datastore.StatusOk {
		return datastore.Trigger{}, status
	}

	return trigger, datastore.StatusOk
}
|
|
@ -1,107 +0,0 @@
|
|||||||
package badgerdatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
|
||||||
if err != nil || !dbExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
|
||||||
if err != nil || !collExists {
|
|
||||||
return nil, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
udfs, status := listByPrefix[datastore.UserDefinedFunction](r.db, generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""))
|
|
||||||
if status == datastore.StatusOk {
|
|
||||||
return udfs, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
|
||||||
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(false)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
var udf datastore.UserDefinedFunction
|
|
||||||
status := getKey(txn, udfKey, &udf)
|
|
||||||
|
|
||||||
return udf, status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BadgerDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
|
|
||||||
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
|
|
||||||
|
|
||||||
txn := r.db.NewTransaction(true)
|
|
||||||
defer txn.Discard()
|
|
||||||
|
|
||||||
exists, err := keyExists(txn, udfKey)
|
|
||||||
if err != nil {
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
if !exists {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Delete([]byte(udfKey))
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while deleting user defined function:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
err = txn.Commit()
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Error while committing transaction:", err)
|
|
||||||
return datastore.Unknown
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateUserDefinedFunction stores a new UDF in the given collection,
// filling in the server-generated fields (_ts, _rid, _etag, _self).
// Returns BadRequest when no ID is supplied, the parent lookup status
// when the database/collection is missing, and Conflict (from insertKey)
// when the ID already exists.
func (r *BadgerDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
	txn := r.db.NewTransaction(true)
	// insertKey commits on success; Discard is then a no-op.
	defer txn.Discard()

	if udf.ID == "" {
		return datastore.UserDefinedFunction{}, datastore.BadRequest
	}

	// Parent metadata is loaded (not just existence-checked) because the
	// resource ids feed into ResourceID and Self below.
	var database datastore.Database
	status := getKey(txn, generateDatabaseKey(databaseId), &database)
	if status != datastore.StatusOk {
		return datastore.UserDefinedFunction{}, status
	}

	var collection datastore.Collection
	status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
	if status != datastore.StatusOk {
		return datastore.UserDefinedFunction{}, status
	}

	udf.TimeStamp = time.Now().Unix()
	udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
	udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
	udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)

	status = insertKey(txn, generateUserDefinedFunctionKey(databaseId, collectionId, udf.ID), udf)
	if status != datastore.StatusOk {
		return datastore.UserDefinedFunction{}, status
	}

	return udf, datastore.StatusOk
}
|
|
@ -1,44 +0,0 @@
|
|||||||
package datastore
|
|
||||||
|
|
||||||
// DataStore is the storage abstraction shared by the backends (e.g. the
// in-memory JSON store and the Badger-backed store). Methods report the
// outcome via DataStoreStatus values instead of Go errors.
type DataStore interface {
	// Database operations.
	GetAllDatabases() ([]Database, DataStoreStatus)
	GetDatabase(databaseId string) (Database, DataStoreStatus)
	DeleteDatabase(databaseId string) DataStoreStatus
	CreateDatabase(newDatabase Database) (Database, DataStoreStatus)

	// Collection operations, scoped to a database.
	GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
	GetCollection(databaseId string, collectionId string) (Collection, DataStoreStatus)
	DeleteCollection(databaseId string, collectionId string) DataStoreStatus
	CreateCollection(databaseId string, newCollection Collection) (Collection, DataStoreStatus)

	// Document operations, scoped to a collection.
	GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
	GetDocumentIterator(databaseId string, collectionId string) (DocumentIterator, DataStoreStatus)
	GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
	DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
	CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (Document, DataStoreStatus)

	// Trigger operations, scoped to a collection.
	GetAllTriggers(databaseId string, collectionId string) ([]Trigger, DataStoreStatus)
	GetTrigger(databaseId string, collectionId string, triggerId string) (Trigger, DataStoreStatus)
	DeleteTrigger(databaseId string, collectionId string, triggerId string) DataStoreStatus
	CreateTrigger(databaseId string, collectionId string, trigger Trigger) (Trigger, DataStoreStatus)

	// Stored procedure operations, scoped to a collection.
	GetAllStoredProcedures(databaseId string, collectionId string) ([]StoredProcedure, DataStoreStatus)
	GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (StoredProcedure, DataStoreStatus)
	DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) DataStoreStatus
	CreateStoredProcedure(databaseId string, collectionId string, storedProcedure StoredProcedure) (StoredProcedure, DataStoreStatus)

	// User defined function operations, scoped to a collection.
	GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]UserDefinedFunction, DataStoreStatus)
	GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (UserDefinedFunction, DataStoreStatus)
	DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) DataStoreStatus
	CreateUserDefinedFunction(databaseId string, collectionId string, udf UserDefinedFunction) (UserDefinedFunction, DataStoreStatus)

	GetPartitionKeyRanges(databaseId string, collectionId string) ([]PartitionKeyRange, DataStoreStatus)

	// Close releases backend resources; DumpToJson serializes the whole
	// store as a JSON string.
	Close()
	DumpToJson() (string, error)
}
|
|
||||||
|
|
||||||
// DocumentIterator yields documents one at a time. Callers must call
// Close when done so backends can release underlying resources.
type DocumentIterator interface {
	Next() (Document, DataStoreStatus)
	Close()
}
|
|
@ -1,21 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import "github.com/pikami/cosmium/internal/datastore"
|
|
||||||
|
|
||||||
// ArrayDocumentIterator iterates over an in-memory slice of documents.
type ArrayDocumentIterator struct {
	documents []datastore.Document
	// index is the position of the last element returned by Next.
	// NOTE(review): Next pre-increments, so this must be initialized to -1
	// by the constructor — a zero-value iterator would skip documents[0];
	// verify at the construction sites.
	index int
}
|
|
||||||
|
|
||||||
// Next returns the next document in the slice, or StatusNotFound once the
// slice is exhausted.
// NOTE(review): exhaustion is signaled with StatusNotFound here, while
// BadgerDocumentIterator uses IterEOF — confirm callers treat both as EOF.
func (i *ArrayDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
	// Pre-increment: assumes index starts at -1 (TODO confirm constructor).
	i.index++
	if i.index >= len(i.documents) {
		return datastore.Document{}, datastore.StatusNotFound
	}

	return i.documents[i.index], datastore.StatusOk
}
|
|
||||||
|
|
||||||
// Close drops the iterator's reference to the document slice so the
// backing array can be garbage collected.
func (i *ArrayDocumentIterator) Close() {
	i.documents = []datastore.Document{}
}
|
|
@ -1,89 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
|
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return make([]datastore.Collection, 0), datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
return maps.Values(r.storeState.Collections[databaseId]), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.Collection{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.Collection{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
return r.storeState.Collections[databaseId][collectionId], datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delete(r.storeState.Collections[databaseId], collectionId)
|
|
||||||
delete(r.storeState.Documents[databaseId], collectionId)
|
|
||||||
delete(r.storeState.Triggers[databaseId], collectionId)
|
|
||||||
delete(r.storeState.StoredProcedures[databaseId], collectionId)
|
|
||||||
delete(r.storeState.UserDefinedFunctions[databaseId], collectionId)
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
var ok bool
|
|
||||||
var database datastore.Database
|
|
||||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.Collection{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
|
|
||||||
return datastore.Collection{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
|
|
||||||
|
|
||||||
newCollection.TimeStamp = time.Now().Unix()
|
|
||||||
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
|
|
||||||
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
|
|
||||||
|
|
||||||
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
|
|
||||||
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]datastore.Document)
|
|
||||||
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]datastore.Trigger)
|
|
||||||
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]datastore.StoredProcedure)
|
|
||||||
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]datastore.UserDefinedFunction)
|
|
||||||
|
|
||||||
return newCollection, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,70 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
return maps.Values(r.storeState.Databases), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if database, ok := r.storeState.Databases[id]; ok {
|
|
||||||
return database, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.Database{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[id]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delete(r.storeState.Databases, id)
|
|
||||||
delete(r.storeState.Collections, id)
|
|
||||||
delete(r.storeState.Documents, id)
|
|
||||||
delete(r.storeState.Triggers, id)
|
|
||||||
delete(r.storeState.StoredProcedures, id)
|
|
||||||
delete(r.storeState.UserDefinedFunctions, id)
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
|
|
||||||
return datastore.Database{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
newDatabase.TimeStamp = time.Now().Unix()
|
|
||||||
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
|
|
||||||
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
|
|
||||||
|
|
||||||
r.storeState.Databases[newDatabase.ID] = newDatabase
|
|
||||||
r.storeState.Collections[newDatabase.ID] = make(map[string]datastore.Collection)
|
|
||||||
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]datastore.Document)
|
|
||||||
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]datastore.Trigger)
|
|
||||||
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]datastore.StoredProcedure)
|
|
||||||
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]datastore.UserDefinedFunction)
|
|
||||||
|
|
||||||
return newDatabase, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,113 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return make([]datastore.Document, 0), datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return make([]datastore.Document, 0), datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
return maps.Values(r.storeState.Documents[databaseId][collectionId]), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.Document{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.Document{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
|
||||||
return datastore.Document{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
return r.storeState.Documents[databaseId][collectionId][documentId], datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delete(r.storeState.Documents[databaseId][collectionId], documentId)
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
var ok bool
|
|
||||||
var documentId string
|
|
||||||
var database datastore.Database
|
|
||||||
var collection datastore.Collection
|
|
||||||
if documentId, ok = document["id"].(string); !ok || documentId == "" {
|
|
||||||
documentId = fmt.Sprint(uuid.New())
|
|
||||||
document["id"] = documentId
|
|
||||||
}
|
|
||||||
|
|
||||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.Document{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.Document{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
|
|
||||||
return datastore.Document{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
document["_ts"] = time.Now().Unix()
|
|
||||||
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
|
|
||||||
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
|
|
||||||
|
|
||||||
r.storeState.Documents[databaseId][collectionId][documentId] = document
|
|
||||||
|
|
||||||
return document, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
|
|
||||||
documents, status := r.GetAllDocuments(databaseId, collectionId)
|
|
||||||
if status != datastore.StatusOk {
|
|
||||||
return nil, status
|
|
||||||
}
|
|
||||||
|
|
||||||
return &ArrayDocumentIterator{
|
|
||||||
documents: documents,
|
|
||||||
index: -1,
|
|
||||||
}, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,34 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import "github.com/pikami/cosmium/internal/datastore"
|
|
||||||
|
|
||||||
type JsonDataStore struct {
|
|
||||||
storeState State
|
|
||||||
|
|
||||||
initialDataFilePath string
|
|
||||||
persistDataFilePath string
|
|
||||||
}
|
|
||||||
|
|
||||||
type JsonDataStoreOptions struct {
|
|
||||||
InitialDataFilePath string
|
|
||||||
PersistDataFilePath string
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewJsonDataStore(options JsonDataStoreOptions) *JsonDataStore {
|
|
||||||
dataStore := &JsonDataStore{
|
|
||||||
storeState: State{
|
|
||||||
Databases: make(map[string]datastore.Database),
|
|
||||||
Collections: make(map[string]map[string]datastore.Collection),
|
|
||||||
Documents: make(map[string]map[string]map[string]datastore.Document),
|
|
||||||
Triggers: make(map[string]map[string]map[string]datastore.Trigger),
|
|
||||||
StoredProcedures: make(map[string]map[string]map[string]datastore.StoredProcedure),
|
|
||||||
UserDefinedFunctions: make(map[string]map[string]map[string]datastore.UserDefinedFunction),
|
|
||||||
},
|
|
||||||
initialDataFilePath: options.InitialDataFilePath,
|
|
||||||
persistDataFilePath: options.PersistDataFilePath,
|
|
||||||
}
|
|
||||||
|
|
||||||
dataStore.InitializeDataStore()
|
|
||||||
|
|
||||||
return dataStore
|
|
||||||
}
|
|
@ -1,49 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
)
|
|
||||||
|
|
||||||
// I have no idea what this is tbh
|
|
||||||
func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
databaseRid := databaseId
|
|
||||||
collectionRid := collectionId
|
|
||||||
var timestamp int64 = 0
|
|
||||||
|
|
||||||
if database, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
databaseRid = database.ResourceID
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
collectionRid = collection.ResourceID
|
|
||||||
timestamp = collection.TimeStamp
|
|
||||||
}
|
|
||||||
|
|
||||||
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
|
|
||||||
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
|
|
||||||
etag := fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
|
|
||||||
return []datastore.PartitionKeyRange{
|
|
||||||
{
|
|
||||||
ResourceID: pkrResourceId,
|
|
||||||
ID: "0",
|
|
||||||
Etag: etag,
|
|
||||||
MinInclusive: "",
|
|
||||||
MaxExclusive: "FF",
|
|
||||||
RidPrefix: 0,
|
|
||||||
Self: pkrSelf,
|
|
||||||
ThroughputFraction: 1,
|
|
||||||
Status: "online",
|
|
||||||
Parents: []interface{}{},
|
|
||||||
TimeStamp: timestamp,
|
|
||||||
Lsn: 17,
|
|
||||||
},
|
|
||||||
}, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,236 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"reflect"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
|
||||||
|
|
||||||
// State is the full serializable contents of a JsonDataStore. The embedded
// RWMutex guards every map below; callers must hold it for all access.
// The JSON tags define the on-disk format read/written by
// LoadStateJSON and SaveStateFS.
type State struct {
	sync.RWMutex

	// Map databaseId -> Database
	Databases map[string]datastore.Database `json:"databases"`

	// Map databaseId -> collectionId -> Collection
	Collections map[string]map[string]datastore.Collection `json:"collections"`

	// Map databaseId -> collectionId -> documentId -> Documents
	Documents map[string]map[string]map[string]datastore.Document `json:"documents"`

	// Map databaseId -> collectionId -> triggerId -> Trigger
	Triggers map[string]map[string]map[string]datastore.Trigger `json:"triggers"`

	// Map databaseId -> collectionId -> spId -> StoredProcedure
	StoredProcedures map[string]map[string]map[string]datastore.StoredProcedure `json:"sprocs"`

	// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
	UserDefinedFunctions map[string]map[string]map[string]datastore.UserDefinedFunction `json:"udfs"`
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) InitializeDataStore() {
|
|
||||||
if r.initialDataFilePath != "" {
|
|
||||||
r.LoadStateFS(r.initialDataFilePath)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if r.persistDataFilePath != "" {
|
|
||||||
stat, err := os.Stat(r.persistDataFilePath)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if stat.IsDir() {
|
|
||||||
logger.ErrorLn("Argument '-Persist' must be a path to file, not a directory.")
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
r.LoadStateFS(r.persistDataFilePath)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) LoadStateFS(filePath string) {
|
|
||||||
data, err := os.ReadFile(filePath)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("Error reading state JSON file: %v", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
err = r.LoadStateJSON(string(data))
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("Error unmarshalling state JSON: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) LoadStateJSON(jsonData string) error {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
var state State
|
|
||||||
if err := json.Unmarshal([]byte(jsonData), &state); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
r.storeState.Collections = state.Collections
|
|
||||||
r.storeState.Databases = state.Databases
|
|
||||||
r.storeState.Documents = state.Documents
|
|
||||||
|
|
||||||
r.ensureStoreStateNoNullReferences()
|
|
||||||
|
|
||||||
logger.InfoLn("Loaded state:")
|
|
||||||
logger.Infof("Databases: %d\n", getLength(r.storeState.Databases))
|
|
||||||
logger.Infof("Collections: %d\n", getLength(r.storeState.Collections))
|
|
||||||
logger.Infof("Documents: %d\n", getLength(r.storeState.Documents))
|
|
||||||
logger.Infof("Triggers: %d\n", getLength(r.storeState.Triggers))
|
|
||||||
logger.Infof("Stored procedures: %d\n", getLength(r.storeState.StoredProcedures))
|
|
||||||
logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) SaveStateFS(filePath string) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
data, err := json.MarshalIndent(r.storeState, "", "\t")
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Failed to save state: %v\n", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
os.WriteFile(filePath, data, os.ModePerm)
|
|
||||||
|
|
||||||
logger.InfoLn("Saved state:")
|
|
||||||
logger.Infof("Databases: %d\n", getLength(r.storeState.Databases))
|
|
||||||
logger.Infof("Collections: %d\n", getLength(r.storeState.Collections))
|
|
||||||
logger.Infof("Documents: %d\n", getLength(r.storeState.Documents))
|
|
||||||
logger.Infof("Triggers: %d\n", getLength(r.storeState.Triggers))
|
|
||||||
logger.Infof("Stored procedures: %d\n", getLength(r.storeState.StoredProcedures))
|
|
||||||
logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DumpToJson() (string, error) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
data, err := json.MarshalIndent(r.storeState, "", "\t")
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Failed to serialize state: %v\n", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
return string(data), nil
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) Close() {
|
|
||||||
if r.persistDataFilePath != "" {
|
|
||||||
r.SaveStateFS(r.persistDataFilePath)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func getLength(v interface{}) int {
|
|
||||||
switch v.(type) {
|
|
||||||
case datastore.Database,
|
|
||||||
datastore.Collection,
|
|
||||||
datastore.Document,
|
|
||||||
datastore.Trigger,
|
|
||||||
datastore.StoredProcedure,
|
|
||||||
datastore.UserDefinedFunction:
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
rv := reflect.ValueOf(v)
|
|
||||||
if rv.Kind() != reflect.Map {
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
|
|
||||||
count := 0
|
|
||||||
for _, key := range rv.MapKeys() {
|
|
||||||
if rv.MapIndex(key).Kind() == reflect.Map {
|
|
||||||
count += getLength(rv.MapIndex(key).Interface())
|
|
||||||
} else {
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return count
|
|
||||||
}
|
|
||||||
|
|
||||||
// ensureStoreStateNoNullReferences repairs nil maps (and nil document
// entries) that can appear after unmarshalling partial or hand-edited state
// JSON, so the rest of the code can index the nested maps without nil checks.
// Caller must hold the state write lock.
func (r *JsonDataStore) ensureStoreStateNoNullReferences() {
	// Top-level maps.
	if r.storeState.Databases == nil {
		r.storeState.Databases = make(map[string]datastore.Database)
	}

	if r.storeState.Collections == nil {
		r.storeState.Collections = make(map[string]map[string]datastore.Collection)
	}

	if r.storeState.Documents == nil {
		r.storeState.Documents = make(map[string]map[string]map[string]datastore.Document)
	}

	if r.storeState.Triggers == nil {
		r.storeState.Triggers = make(map[string]map[string]map[string]datastore.Trigger)
	}

	if r.storeState.StoredProcedures == nil {
		r.storeState.StoredProcedures = make(map[string]map[string]map[string]datastore.StoredProcedure)
	}

	if r.storeState.UserDefinedFunctions == nil {
		r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]datastore.UserDefinedFunction)
	}

	// Per-database child maps.
	for database := range r.storeState.Databases {
		if r.storeState.Collections[database] == nil {
			r.storeState.Collections[database] = make(map[string]datastore.Collection)
		}

		if r.storeState.Documents[database] == nil {
			r.storeState.Documents[database] = make(map[string]map[string]datastore.Document)
		}

		if r.storeState.Triggers[database] == nil {
			r.storeState.Triggers[database] = make(map[string]map[string]datastore.Trigger)
		}

		if r.storeState.StoredProcedures[database] == nil {
			r.storeState.StoredProcedures[database] = make(map[string]map[string]datastore.StoredProcedure)
		}

		if r.storeState.UserDefinedFunctions[database] == nil {
			r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]datastore.UserDefinedFunction)
		}

		// Per-collection child maps.
		for collection := range r.storeState.Collections[database] {
			if r.storeState.Documents[database][collection] == nil {
				r.storeState.Documents[database][collection] = make(map[string]datastore.Document)
			}

			// Drop explicit JSON nulls stored under a document id.
			// (Deleting during range is safe in Go.)
			for document := range r.storeState.Documents[database][collection] {
				if r.storeState.Documents[database][collection][document] == nil {
					delete(r.storeState.Documents[database][collection], document)
				}
			}

			if r.storeState.Triggers[database][collection] == nil {
				r.storeState.Triggers[database][collection] = make(map[string]datastore.Trigger)
			}

			if r.storeState.StoredProcedures[database][collection] == nil {
				r.storeState.StoredProcedures[database][collection] = make(map[string]datastore.StoredProcedure)
			}

			if r.storeState.UserDefinedFunctions[database][collection] == nil {
				r.storeState.UserDefinedFunctions[database][collection] = make(map[string]datastore.UserDefinedFunction)
			}
		}
	}
}
|
|
@ -1,91 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetStoredProcedure(databaseId string, collectionId string, spId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
|
|
||||||
return sp, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DeleteStoredProcedure(databaseId string, collectionId string, spId string) datastore.DataStoreStatus {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) CreateStoredProcedure(databaseId string, collectionId string, sp datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
var ok bool
|
|
||||||
var database datastore.Database
|
|
||||||
var collection datastore.Collection
|
|
||||||
if sp.ID == "" {
|
|
||||||
return datastore.StoredProcedure{}, datastore.BadRequest
|
|
||||||
}
|
|
||||||
|
|
||||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
|
|
||||||
return datastore.StoredProcedure{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
sp.TimeStamp = time.Now().Unix()
|
|
||||||
sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
|
|
||||||
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
|
|
||||||
|
|
||||||
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
|
|
||||||
|
|
||||||
return sp, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,91 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.Trigger{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.Trigger{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
|
|
||||||
return trigger, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.Trigger{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
var ok bool
|
|
||||||
var database datastore.Database
|
|
||||||
var collection datastore.Collection
|
|
||||||
if trigger.ID == "" {
|
|
||||||
return datastore.Trigger{}, datastore.BadRequest
|
|
||||||
}
|
|
||||||
|
|
||||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.Trigger{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.Trigger{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
|
|
||||||
return datastore.Trigger{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
trigger.TimeStamp = time.Now().Unix()
|
|
||||||
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
|
|
||||||
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
|
|
||||||
|
|
||||||
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
|
|
||||||
|
|
||||||
return trigger, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,91 +0,0 @@
|
|||||||
package jsondatastore
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
|
||||||
"github.com/pikami/cosmium/internal/resourceid"
|
|
||||||
"golang.org/x/exp/maps"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.RLock()
|
|
||||||
defer r.storeState.RUnlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
|
|
||||||
return udf, datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
|
|
||||||
return datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
|
|
||||||
|
|
||||||
return datastore.StatusOk
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *JsonDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
|
||||||
r.storeState.Lock()
|
|
||||||
defer r.storeState.Unlock()
|
|
||||||
|
|
||||||
var ok bool
|
|
||||||
var database datastore.Database
|
|
||||||
var collection datastore.Collection
|
|
||||||
if udf.ID == "" {
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.BadRequest
|
|
||||||
}
|
|
||||||
|
|
||||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
|
|
||||||
return datastore.UserDefinedFunction{}, datastore.Conflict
|
|
||||||
}
|
|
||||||
|
|
||||||
udf.TimeStamp = time.Now().Unix()
|
|
||||||
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
|
|
||||||
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
|
||||||
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
|
|
||||||
|
|
||||||
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
|
|
||||||
|
|
||||||
return udf, datastore.StatusOk
|
|
||||||
}
|
|
@ -1,140 +0,0 @@
|
|||||||
package logger
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
)
|
|
||||||
|
|
||||||
type LogLevelType int
|
|
||||||
|
|
||||||
var (
|
|
||||||
LogLevelDebug LogLevelType = 0
|
|
||||||
LogLevelInfo LogLevelType = 1
|
|
||||||
LogLevelError LogLevelType = 2
|
|
||||||
LogLevelSilent LogLevelType = 10
|
|
||||||
)
|
|
||||||
|
|
||||||
type LogWriter struct {
|
|
||||||
WriterLevel LogLevelType
|
|
||||||
}
|
|
||||||
|
|
||||||
var logLevelMutex sync.RWMutex
|
|
||||||
var logLevel = LogLevelInfo
|
|
||||||
|
|
||||||
var DebugLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
|
|
||||||
var InfoLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
|
|
||||||
var ErrorLogger = log.New(os.Stderr, "", log.Ldate|log.Ltime)
|
|
||||||
|
|
||||||
func DebugLn(v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelDebug {
|
|
||||||
prefix := getCallerPrefix()
|
|
||||||
DebugLogger.Println(append([]interface{}{prefix}, v...)...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Debug(v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelDebug {
|
|
||||||
prefix := getCallerPrefix()
|
|
||||||
DebugLogger.Println(append([]interface{}{prefix}, v...)...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Debugf(format string, v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelDebug {
|
|
||||||
prefix := getCallerPrefix()
|
|
||||||
DebugLogger.Printf(prefix+format, v...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func InfoLn(v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelInfo {
|
|
||||||
InfoLogger.Println(v...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Info(v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelInfo {
|
|
||||||
InfoLogger.Print(v...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Infof(format string, v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelInfo {
|
|
||||||
InfoLogger.Printf(format, v...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func ErrorLn(v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelError {
|
|
||||||
prefix := getCallerPrefix()
|
|
||||||
ErrorLogger.Println(append([]interface{}{prefix}, v...)...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Error(v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelError {
|
|
||||||
prefix := getCallerPrefix()
|
|
||||||
ErrorLogger.Print(append([]interface{}{prefix}, v...)...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Errorf(format string, v ...any) {
|
|
||||||
if GetLogLevel() <= LogLevelError {
|
|
||||||
prefix := getCallerPrefix()
|
|
||||||
ErrorLogger.Printf(prefix+format, v...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lw *LogWriter) Write(p []byte) (n int, err error) {
|
|
||||||
switch lw.WriterLevel {
|
|
||||||
case LogLevelDebug:
|
|
||||||
Debug(string(p))
|
|
||||||
case LogLevelInfo:
|
|
||||||
Info(string(p))
|
|
||||||
case LogLevelError:
|
|
||||||
Error(string(p))
|
|
||||||
}
|
|
||||||
|
|
||||||
return len(p), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func ErrorWriter() *LogWriter {
|
|
||||||
return &LogWriter{WriterLevel: LogLevelError}
|
|
||||||
}
|
|
||||||
|
|
||||||
func InfoWriter() *LogWriter {
|
|
||||||
return &LogWriter{WriterLevel: LogLevelInfo}
|
|
||||||
}
|
|
||||||
|
|
||||||
func DebugWriter() *LogWriter {
|
|
||||||
return &LogWriter{WriterLevel: LogLevelDebug}
|
|
||||||
}
|
|
||||||
|
|
||||||
func SetLogLevel(level LogLevelType) {
|
|
||||||
logLevelMutex.Lock()
|
|
||||||
defer logLevelMutex.Unlock()
|
|
||||||
logLevel = level
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetLogLevel() LogLevelType {
|
|
||||||
logLevelMutex.RLock()
|
|
||||||
defer logLevelMutex.RUnlock()
|
|
||||||
return logLevel
|
|
||||||
}
|
|
||||||
|
|
||||||
func getCallerPrefix() string {
|
|
||||||
_, file, line, ok := runtime.Caller(2)
|
|
||||||
if ok {
|
|
||||||
parts := strings.Split(file, "/")
|
|
||||||
if len(parts) > 0 {
|
|
||||||
file = parts[len(parts)-1]
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%s:%d - ", file, line)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
66
internal/repositories/collections.go
Normal file
66
internal/repositories/collections.go
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
|
||||||
|
"golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
func GetAllCollections(databaseId string) ([]repositorymodels.Collection, repositorymodels.RepositoryStatus) {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return make([]repositorymodels.Collection, 0), repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return maps.Values(storeState.Collections[databaseId]), repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetCollection(databaseId string, collectionId string) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
|
||||||
|
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return storeState.Collections[databaseId][collectionId], repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func DeleteCollection(databaseId string, collectionId string) repositorymodels.RepositoryStatus {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
|
||||||
|
return repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(storeState.Collections[databaseId], collectionId)
|
||||||
|
|
||||||
|
return repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Collections[databaseId][newCollection.ID]; ok {
|
||||||
|
return repositorymodels.Collection{}, repositorymodels.Conflict
|
||||||
|
}
|
||||||
|
|
||||||
|
newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
|
||||||
|
|
||||||
|
newCollection.TimeStamp = time.Now().Unix()
|
||||||
|
newCollection.UniqueID = uuid.New().String()
|
||||||
|
newCollection.ETag = fmt.Sprintf("\"%s\"", newCollection.UniqueID)
|
||||||
|
|
||||||
|
storeState.Collections[databaseId][newCollection.ID] = newCollection
|
||||||
|
storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
|
||||||
|
|
||||||
|
return newCollection, repositorymodels.StatusOk
|
||||||
|
}
|
47
internal/repositories/databases.go
Normal file
47
internal/repositories/databases.go
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
"golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
func GetAllDatabases() ([]repositorymodels.Database, repositorymodels.RepositoryStatus) {
|
||||||
|
return maps.Values(storeState.Databases), repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetDatabase(id string) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
|
||||||
|
if database, ok := storeState.Databases[id]; ok {
|
||||||
|
return database, repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
return repositorymodels.Database{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func DeleteDatabase(id string) repositorymodels.RepositoryStatus {
|
||||||
|
if _, ok := storeState.Databases[id]; !ok {
|
||||||
|
return repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(storeState.Databases, id)
|
||||||
|
|
||||||
|
return repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
|
||||||
|
if _, ok := storeState.Databases[newDatabase.ID]; ok {
|
||||||
|
return repositorymodels.Database{}, repositorymodels.Conflict
|
||||||
|
}
|
||||||
|
|
||||||
|
newDatabase.TimeStamp = time.Now().Unix()
|
||||||
|
newDatabase.UniqueID = uuid.New().String()
|
||||||
|
newDatabase.ETag = fmt.Sprintf("\"%s\"", newDatabase.UniqueID)
|
||||||
|
storeState.Databases[newDatabase.ID] = newDatabase
|
||||||
|
storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
|
||||||
|
storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
|
||||||
|
|
||||||
|
return newDatabase, repositorymodels.StatusOk
|
||||||
|
}
|
113
internal/repositories/documents.go
Normal file
113
internal/repositories/documents.go
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
"github.com/pikami/cosmium/parsers"
|
||||||
|
"github.com/pikami/cosmium/parsers/nosql"
|
||||||
|
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
|
||||||
|
"golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
func GetAllDocuments(databaseId string, collectionId string) ([]repositorymodels.Document, repositorymodels.RepositoryStatus) {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
|
||||||
|
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return maps.Values(storeState.Documents[databaseId][collectionId]), repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetDocument(databaseId string, collectionId string, documentId string) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return storeState.Documents[databaseId][collectionId][documentId], repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func DeleteDocument(databaseId string, collectionId string, documentId string) repositorymodels.RepositoryStatus {
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
|
||||||
|
return repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
||||||
|
return repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(storeState.Documents[databaseId][collectionId], documentId)
|
||||||
|
|
||||||
|
return repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
|
||||||
|
var documentId string
|
||||||
|
var ok bool
|
||||||
|
if documentId, ok = document["id"].(string); !ok || documentId == "" {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.BadRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Databases[databaseId]; !ok {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok = storeState.Collections[databaseId][collectionId]; !ok {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := storeState.Documents[databaseId][collectionId][documentId]; ok {
|
||||||
|
return repositorymodels.Document{}, repositorymodels.Conflict
|
||||||
|
}
|
||||||
|
|
||||||
|
document["_ts"] = time.Now().Unix()
|
||||||
|
document["_rid"] = uuid.New().String()
|
||||||
|
document["_etag"] = fmt.Sprintf("\"%s\"", document["_rid"])
|
||||||
|
|
||||||
|
storeState.Documents[databaseId][collectionId][documentId] = document
|
||||||
|
|
||||||
|
return document, repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExecuteQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, repositorymodels.RepositoryStatus) {
|
||||||
|
parsedQuery, err := nosql.Parse("", []byte(query))
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Failed to parse query: %s\nerr: %v", query, err)
|
||||||
|
return nil, repositorymodels.BadRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
collectionDocuments, status := GetAllDocuments(databaseId, collectionId)
|
||||||
|
if status != repositorymodels.StatusOk {
|
||||||
|
return nil, status
|
||||||
|
}
|
||||||
|
|
||||||
|
covDocs := make([]memoryexecutor.RowType, 0)
|
||||||
|
for _, doc := range collectionDocuments {
|
||||||
|
covDocs = append(covDocs, map[string]interface{}(doc))
|
||||||
|
}
|
||||||
|
|
||||||
|
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
|
||||||
|
typedQuery.Parameters = queryParameters
|
||||||
|
return memoryexecutor.Execute(typedQuery, covDocs), repositorymodels.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, repositorymodels.BadRequest
|
||||||
|
}
|
23
internal/repositories/partition_key_ranges.go
Normal file
23
internal/repositories/partition_key_ranges.go
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
|
||||||
|
func GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
|
||||||
|
// I have no idea what this is tbh
|
||||||
|
return []repositorymodels.PartitionKeyRange{
|
||||||
|
{
|
||||||
|
Rid: "ZxlyAP7rKwACAAAAAAAAUA==",
|
||||||
|
ID: "0",
|
||||||
|
Etag: "\"00005504-0000-0100-0000-65c555490000\"",
|
||||||
|
MinInclusive: "",
|
||||||
|
MaxExclusive: "FF",
|
||||||
|
RidPrefix: 0,
|
||||||
|
Self: "dbs/ZxlyAA==/colls/ZxlyAP7rKwA=/pkranges/ZxlyAP7rKwACAAAAAAAAUA==/",
|
||||||
|
ThroughputFraction: 1,
|
||||||
|
Status: "online",
|
||||||
|
Parents: []interface{}{},
|
||||||
|
Ts: 1707431241,
|
||||||
|
Lsn: 17,
|
||||||
|
},
|
||||||
|
}, repositorymodels.StatusOk
|
||||||
|
}
|
121
internal/repositories/state.go
Normal file
121
internal/repositories/state.go
Normal file
@ -0,0 +1,121 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
)
|
||||||
|
|
||||||
|
var storedProcedures = []repositorymodels.StoredProcedure{}
|
||||||
|
var triggers = []repositorymodels.Trigger{}
|
||||||
|
var userDefinedFunctions = []repositorymodels.UserDefinedFunction{}
|
||||||
|
var storeState = repositorymodels.State{
|
||||||
|
Databases: make(map[string]repositorymodels.Database),
|
||||||
|
Collections: make(map[string]map[string]repositorymodels.Collection),
|
||||||
|
Documents: make(map[string]map[string]map[string]repositorymodels.Document),
|
||||||
|
}
|
||||||
|
|
||||||
|
func LoadStateFS(filePath string) {
|
||||||
|
data, err := os.ReadFile(filePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error reading state JSON file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var state repositorymodels.State
|
||||||
|
if err := json.Unmarshal(data, &state); err != nil {
|
||||||
|
log.Fatalf("Error unmarshalling state JSON: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Loaded state:")
|
||||||
|
fmt.Printf("Databases: %d\n", getLength(state.Databases))
|
||||||
|
fmt.Printf("Collections: %d\n", getLength(state.Collections))
|
||||||
|
fmt.Printf("Documents: %d\n", getLength(state.Documents))
|
||||||
|
|
||||||
|
storeState = state
|
||||||
|
|
||||||
|
ensureStoreStateNoNullReferences()
|
||||||
|
}
|
||||||
|
|
||||||
|
func SaveStateFS(filePath string) {
|
||||||
|
data, err := json.MarshalIndent(storeState, "", "\t")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Failed to save state: %v\n", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
os.WriteFile(filePath, data, os.ModePerm)
|
||||||
|
|
||||||
|
fmt.Println("Saved state:")
|
||||||
|
fmt.Printf("Databases: %d\n", getLength(storeState.Databases))
|
||||||
|
fmt.Printf("Collections: %d\n", getLength(storeState.Collections))
|
||||||
|
fmt.Printf("Documents: %d\n", getLength(storeState.Documents))
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetState() repositorymodels.State {
|
||||||
|
return storeState
|
||||||
|
}
|
||||||
|
|
||||||
|
func getLength(v interface{}) int {
|
||||||
|
switch v.(type) {
|
||||||
|
case repositorymodels.Database,
|
||||||
|
repositorymodels.Collection,
|
||||||
|
repositorymodels.Document:
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
rv := reflect.ValueOf(v)
|
||||||
|
if rv.Kind() != reflect.Map {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for _, key := range rv.MapKeys() {
|
||||||
|
if rv.MapIndex(key).Kind() == reflect.Map {
|
||||||
|
count += getLength(rv.MapIndex(key).Interface())
|
||||||
|
} else {
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
func ensureStoreStateNoNullReferences() {
|
||||||
|
if storeState.Databases == nil {
|
||||||
|
storeState.Databases = make(map[string]repositorymodels.Database)
|
||||||
|
}
|
||||||
|
|
||||||
|
if storeState.Collections == nil {
|
||||||
|
storeState.Collections = make(map[string]map[string]repositorymodels.Collection)
|
||||||
|
}
|
||||||
|
|
||||||
|
if storeState.Documents == nil {
|
||||||
|
storeState.Documents = make(map[string]map[string]map[string]repositorymodels.Document)
|
||||||
|
}
|
||||||
|
|
||||||
|
for database := range storeState.Databases {
|
||||||
|
if storeState.Collections[database] == nil {
|
||||||
|
storeState.Collections[database] = make(map[string]repositorymodels.Collection)
|
||||||
|
}
|
||||||
|
|
||||||
|
if storeState.Documents[database] == nil {
|
||||||
|
storeState.Documents[database] = make(map[string]map[string]repositorymodels.Document)
|
||||||
|
}
|
||||||
|
|
||||||
|
for collection := range storeState.Collections[database] {
|
||||||
|
if storeState.Documents[database][collection] == nil {
|
||||||
|
storeState.Documents[database][collection] = make(map[string]repositorymodels.Document)
|
||||||
|
}
|
||||||
|
|
||||||
|
for document := range storeState.Documents[database][collection] {
|
||||||
|
if storeState.Documents[database][collection][document] == nil {
|
||||||
|
delete(storeState.Documents[database][collection], document)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
7
internal/repositories/stored_procedures.go
Normal file
7
internal/repositories/stored_procedures.go
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
|
||||||
|
func GetAllStoredProcedures(databaseId string, collectionId string) ([]repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
|
||||||
|
return storedProcedures, repositorymodels.StatusOk
|
||||||
|
}
|
7
internal/repositories/triggers.go
Normal file
7
internal/repositories/triggers.go
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
|
||||||
|
func GetAllTriggers(databaseId string, collectionId string) ([]repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
|
||||||
|
return triggers, repositorymodels.StatusOk
|
||||||
|
}
|
7
internal/repositories/user_defined_functions.go
Normal file
7
internal/repositories/user_defined_functions.go
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
|
||||||
|
func GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
|
||||||
|
return userDefinedFunctions, repositorymodels.StatusOk
|
||||||
|
}
|
@ -1,45 +1,26 @@
|
|||||||
package datastore
|
package repositorymodels
|
||||||
|
|
||||||
type Database struct {
|
type Database struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
TimeStamp int64 `json:"_ts"`
|
TimeStamp int64 `json:"_ts"`
|
||||||
ResourceID string `json:"_rid"`
|
UniqueID string `json:"_rid"`
|
||||||
ETag string `json:"_etag"`
|
ETag string `json:"_etag"`
|
||||||
Self string `json:"_self"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type DataStoreStatus int
|
type RepositoryStatus int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
StatusOk = 1
|
StatusOk = 1
|
||||||
StatusNotFound = 2
|
StatusNotFound = 2
|
||||||
Conflict = 3
|
Conflict = 3
|
||||||
BadRequest = 4
|
BadRequest = 4
|
||||||
IterEOF = 5
|
|
||||||
Unknown = 6
|
|
||||||
)
|
|
||||||
|
|
||||||
type TriggerOperation string
|
|
||||||
|
|
||||||
const (
|
|
||||||
All TriggerOperation = "All"
|
|
||||||
Create TriggerOperation = "Create"
|
|
||||||
Delete TriggerOperation = "Delete"
|
|
||||||
Replace TriggerOperation = "Replace"
|
|
||||||
)
|
|
||||||
|
|
||||||
type TriggerType string
|
|
||||||
|
|
||||||
const (
|
|
||||||
Pre TriggerType = "Pre"
|
|
||||||
Post TriggerType = "Post"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type Collection struct {
|
type Collection struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
IndexingPolicy CollectionIndexingPolicy `json:"indexingPolicy"`
|
IndexingPolicy CollectionIndexingPolicy `json:"indexingPolicy"`
|
||||||
PartitionKey CollectionPartitionKey `json:"partitionKey"`
|
PartitionKey CollectionPartitionKey `json:"partitionKey"`
|
||||||
ResourceID string `json:"_rid"`
|
UniqueID string `json:"_rid"`
|
||||||
TimeStamp int64 `json:"_ts"`
|
TimeStamp int64 `json:"_ts"`
|
||||||
Self string `json:"_self"`
|
Self string `json:"_self"`
|
||||||
ETag string `json:"_etag"`
|
ETag string `json:"_etag"`
|
||||||
@ -73,38 +54,38 @@ type CollectionPartitionKey struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type UserDefinedFunction struct {
|
type UserDefinedFunction struct {
|
||||||
Body string `json:"body"`
|
Body string `json:"body"`
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
ResourceID string `json:"_rid"`
|
Rid string `json:"_rid"`
|
||||||
TimeStamp int64 `json:"_ts"`
|
Ts int `json:"_ts"`
|
||||||
Self string `json:"_self"`
|
Self string `json:"_self"`
|
||||||
ETag string `json:"_etag"`
|
Etag string `json:"_etag"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type StoredProcedure struct {
|
type StoredProcedure struct {
|
||||||
Body string `json:"body"`
|
Body string `json:"body"`
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
ResourceID string `json:"_rid"`
|
Rid string `json:"_rid"`
|
||||||
TimeStamp int64 `json:"_ts"`
|
Ts int `json:"_ts"`
|
||||||
Self string `json:"_self"`
|
Self string `json:"_self"`
|
||||||
ETag string `json:"_etag"`
|
Etag string `json:"_etag"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Trigger struct {
|
type Trigger struct {
|
||||||
Body string `json:"body"`
|
Body string `json:"body"`
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
TriggerOperation TriggerOperation `json:"triggerOperation"`
|
TriggerOperation string `json:"triggerOperation"`
|
||||||
TriggerType TriggerType `json:"triggerType"`
|
TriggerType string `json:"triggerType"`
|
||||||
ResourceID string `json:"_rid"`
|
Rid string `json:"_rid"`
|
||||||
TimeStamp int64 `json:"_ts"`
|
Ts int `json:"_ts"`
|
||||||
Self string `json:"_self"`
|
Self string `json:"_self"`
|
||||||
ETag string `json:"_etag"`
|
Etag string `json:"_etag"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Document map[string]interface{}
|
type Document map[string]interface{}
|
||||||
|
|
||||||
type PartitionKeyRange struct {
|
type PartitionKeyRange struct {
|
||||||
ResourceID string `json:"_rid"`
|
Rid string `json:"_rid"`
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
Etag string `json:"_etag"`
|
Etag string `json:"_etag"`
|
||||||
MinInclusive string `json:"minInclusive"`
|
MinInclusive string `json:"minInclusive"`
|
||||||
@ -114,6 +95,17 @@ type PartitionKeyRange struct {
|
|||||||
ThroughputFraction int `json:"throughputFraction"`
|
ThroughputFraction int `json:"throughputFraction"`
|
||||||
Status string `json:"status"`
|
Status string `json:"status"`
|
||||||
Parents []any `json:"parents"`
|
Parents []any `json:"parents"`
|
||||||
TimeStamp int64 `json:"_ts"`
|
Ts int `json:"_ts"`
|
||||||
Lsn int `json:"lsn"`
|
Lsn int `json:"lsn"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type State struct {
|
||||||
|
// Map databaseId -> Database
|
||||||
|
Databases map[string]Database `json:"databases"`
|
||||||
|
|
||||||
|
// Map databaseId -> collectionId -> Collection
|
||||||
|
Collections map[string]map[string]Collection `json:"collections"`
|
||||||
|
|
||||||
|
// Map databaseId -> collectionId -> documentId -> Documents
|
||||||
|
Documents map[string]map[string]map[string]Document `json:"documents"`
|
||||||
|
}
|
@ -1,95 +0,0 @@
|
|||||||
package resourceid
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/base64"
|
|
||||||
"math/rand"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/google/uuid"
|
|
||||||
)
|
|
||||||
|
|
||||||
type ResourceType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
ResourceTypeDatabase ResourceType = iota
|
|
||||||
ResourceTypeCollection
|
|
||||||
ResourceTypeDocument
|
|
||||||
ResourceTypeStoredProcedure
|
|
||||||
ResourceTypeTrigger
|
|
||||||
ResourceTypeUserDefinedFunction
|
|
||||||
ResourceTypeConflict
|
|
||||||
ResourceTypePartitionKeyRange
|
|
||||||
ResourceTypeSchema
|
|
||||||
)
|
|
||||||
|
|
||||||
func New(resourceType ResourceType) string {
|
|
||||||
var idBytes []byte
|
|
||||||
switch resourceType {
|
|
||||||
case ResourceTypeDatabase:
|
|
||||||
idBytes = randomBytes(4)
|
|
||||||
case ResourceTypeCollection:
|
|
||||||
idBytes = randomBytes(4)
|
|
||||||
// first byte should be bigger than 0x80 for collection ids
|
|
||||||
// clients classify this id as "user" otherwise
|
|
||||||
if (idBytes[0] & 0x80) <= 0 {
|
|
||||||
idBytes[0] = byte(rand.Intn(0x80) + 0x80)
|
|
||||||
}
|
|
||||||
case ResourceTypeDocument:
|
|
||||||
idBytes = randomBytes(8)
|
|
||||||
idBytes[7] = byte(rand.Intn(0x10)) // Upper 4 bits = 0
|
|
||||||
case ResourceTypeStoredProcedure:
|
|
||||||
idBytes = randomBytes(8)
|
|
||||||
idBytes[7] = byte(rand.Intn(0x10)) | 0x08 // Upper 4 bits = 0x08
|
|
||||||
case ResourceTypeTrigger:
|
|
||||||
idBytes = randomBytes(8)
|
|
||||||
idBytes[7] = byte(rand.Intn(0x10)) | 0x07 // Upper 4 bits = 0x07
|
|
||||||
case ResourceTypeUserDefinedFunction:
|
|
||||||
idBytes = randomBytes(8)
|
|
||||||
idBytes[7] = byte(rand.Intn(0x10)) | 0x06 // Upper 4 bits = 0x06
|
|
||||||
case ResourceTypeConflict:
|
|
||||||
idBytes = randomBytes(8)
|
|
||||||
idBytes[7] = byte(rand.Intn(0x10)) | 0x04 // Upper 4 bits = 0x04
|
|
||||||
case ResourceTypePartitionKeyRange:
|
|
||||||
// we don't do partitions yet, so just use a fixed id
|
|
||||||
idBytes = []byte{0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x50}
|
|
||||||
case ResourceTypeSchema:
|
|
||||||
idBytes = randomBytes(8)
|
|
||||||
idBytes[7] = byte(rand.Intn(0x10)) | 0x09 // Upper 4 bits = 0x09
|
|
||||||
default:
|
|
||||||
idBytes = randomBytes(4)
|
|
||||||
}
|
|
||||||
|
|
||||||
encoded := base64.StdEncoding.EncodeToString(idBytes)
|
|
||||||
return strings.ReplaceAll(encoded, "/", "-")
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewCombined(ids ...string) string {
|
|
||||||
combinedIdBytes := make([]byte, 0)
|
|
||||||
|
|
||||||
for _, id := range ids {
|
|
||||||
idBytes, _ := base64.StdEncoding.DecodeString(strings.ReplaceAll(id, "-", "/"))
|
|
||||||
combinedIdBytes = append(combinedIdBytes, idBytes...)
|
|
||||||
}
|
|
||||||
|
|
||||||
encoded := base64.StdEncoding.EncodeToString(combinedIdBytes)
|
|
||||||
return strings.ReplaceAll(encoded, "/", "-")
|
|
||||||
}
|
|
||||||
|
|
||||||
func uintToBytes(id uint32) []byte {
|
|
||||||
buf := make([]byte, 4)
|
|
||||||
for i := 0; i < 4; i++ {
|
|
||||||
buf[i] = byte(id >> (i * 8))
|
|
||||||
}
|
|
||||||
|
|
||||||
return buf
|
|
||||||
}
|
|
||||||
|
|
||||||
func randomBytes(count int) []byte {
|
|
||||||
buf := make([]byte, count)
|
|
||||||
for i := 0; i < count; i += 4 {
|
|
||||||
id := uuid.New().ID()
|
|
||||||
idBytes := uintToBytes(id)
|
|
||||||
copy(buf[i:], idBytes)
|
|
||||||
}
|
|
||||||
return buf
|
|
||||||
}
|
|
@ -1,30 +1,32 @@
|
|||||||
package structhidrators
|
package structhidrators
|
||||||
|
|
||||||
import "github.com/pikami/cosmium/internal/datastore"
|
import (
|
||||||
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
|
)
|
||||||
|
|
||||||
var defaultCollection datastore.Collection = datastore.Collection{
|
var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
|
||||||
IndexingPolicy: datastore.CollectionIndexingPolicy{
|
IndexingPolicy: repositorymodels.CollectionIndexingPolicy{
|
||||||
IndexingMode: "consistent",
|
IndexingMode: "consistent",
|
||||||
Automatic: true,
|
Automatic: true,
|
||||||
IncludedPaths: []datastore.CollectionIndexingPolicyPath{
|
IncludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
|
||||||
{Path: "/*"},
|
{Path: "/*"},
|
||||||
},
|
},
|
||||||
ExcludedPaths: []datastore.CollectionIndexingPolicyPath{
|
ExcludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
|
||||||
{Path: "/\"_etag\"/?"},
|
{Path: "/\"_etag\"/?"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
PartitionKey: datastore.CollectionPartitionKey{
|
PartitionKey: repositorymodels.CollectionPartitionKey{
|
||||||
Paths: []string{"/_partitionKey"},
|
Paths: []string{"/_partitionKey"},
|
||||||
Kind: "Hash",
|
Kind: "Hash",
|
||||||
Version: 2,
|
Version: 2,
|
||||||
},
|
},
|
||||||
ResourceID: "nFFFFFFFFFF=",
|
UniqueID: "nFFFFFFFFFF=",
|
||||||
TimeStamp: 0,
|
TimeStamp: 0,
|
||||||
Self: "",
|
Self: "",
|
||||||
ETag: "\"00000000-0000-0000-0000-000000000000\"",
|
ETag: "\"00000000-0000-0000-0000-000000000000\"",
|
||||||
Docs: "docs/",
|
Docs: "docs/",
|
||||||
Sprocs: "sprocs/",
|
Sprocs: "sprocs/",
|
||||||
Triggers: "triggers/",
|
Triggers: "triggers/",
|
||||||
Udfs: "udfs/",
|
Udfs: "udfs/",
|
||||||
Conflicts: "conflicts/",
|
Conflicts: "conflicts/",
|
||||||
}
|
}
|
||||||
|
@ -3,11 +3,11 @@ package structhidrators
|
|||||||
import (
|
import (
|
||||||
"reflect"
|
"reflect"
|
||||||
|
|
||||||
"github.com/pikami/cosmium/internal/datastore"
|
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Hidrate(input interface{}) interface{} {
|
func Hidrate(input interface{}) interface{} {
|
||||||
if reflect.TypeOf(input) == reflect.TypeOf(datastore.Collection{}) {
|
if reflect.TypeOf(input) == reflect.TypeOf(repositorymodels.Collection{}) {
|
||||||
return hidrate(input, defaultCollection)
|
return hidrate(input, defaultCollection)
|
||||||
}
|
}
|
||||||
return input
|
return input
|
||||||
|
@ -1,61 +0,0 @@
|
|||||||
package tlsprovider
|
|
||||||
|
|
||||||
const certificate = `
|
|
||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIIEaDCCAlCgAwIBAgIUAY7ito1IQfbIi52C0evhqHWgEvQwDQYJKoZIhvcNAQEL
|
|
||||||
BQAwMzELMAkGA1UEBhMCTFQxEjAQBgNVBAgMCUxpdGh1YW5pYTEQMA4GA1UECgwH
|
|
||||||
Q29zbWl1bTAeFw0yNDAyMjcxOTE4NThaFw0zNDAyMjYxOTE4NThaMD8xCzAJBgNV
|
|
||||||
BAYTAkxUMRIwEAYDVQQIDAlMaXRodWFuaWExEDAOBgNVBAoMB0Nvc21pdW0xCjAI
|
|
||||||
BgNVBAMMASowggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCZxGz5clcf
|
|
||||||
fvE6wS9Q2xPsUjeKdwotRCfKRu9kT7o1cZOSRBp7DgdeLvZ7BzqU1tk5wiLLiZwB
|
|
||||||
gI6amQAd6z6EwUcUH0mHtFiWU0y/FROz0QUojbbYp0PMUhWjlPAxAGaiwgF/82z7
|
|
||||||
/lmgMjf5v32XsMfa4U+FaaNYs7gu7aCQBQTAHmOIPnEAeFk9xQ2VzntRUWwzDYOV
|
|
||||||
SimtPZk2O2X18V8KTgTLMQF1KErIyznIwEPB/BLi+ihLkh/8BaaxoIeOPIhRLNFr
|
|
||||||
ecZrc/8+S4dUSUQDfmV3JFYFFheG0XIPEwXIaXiDAphpkCGhMIC2pDL8r14sntvn
|
|
||||||
juHFZxmSP4V5AgMBAAGjaDBmMB8GA1UdIwQYMBaAFEbQ/7hV4FWrptdOk540R2lF
|
|
||||||
SB1BMAkGA1UdEwQCMAAwCwYDVR0PBAQDAgTwMAwGA1UdEQQFMAOCASowHQYDVR0O
|
|
||||||
BBYEFGv5XvoFFzrG54GQ+WMFm6UO36BJMA0GCSqGSIb3DQEBCwUAA4ICAQBZh/vZ
|
|
||||||
PBamebTEpiQz6cgf8+GcTi++ebYUGQ3YJj82pqVBdipOhYQOZJ0fOlT1qRGNglut
|
|
||||||
+m5zn0iuXsNucP/32xdf1aJBnsU/aGrlf5ohJpGNxYfNPsewxeqQI23Yj22ec1gy
|
|
||||||
WL2pFDYNyTZMM7Wgys7m3i9lb6TYOF2lNO3WbNuuuETsDAPa0rD0R8QsQOfYOSNJ
|
|
||||||
YuWE4qZu+ySvTWsMZwlcqs7QL3Sd91UjItIS/AgqbnLvgt4z5ckGCIvickUfAZuQ
|
|
||||||
6x592hTz4OZ+WIYDejtb5MMXRaKEXgfF6o1idrD7YgVutm+2+mYpN1v9aLbCs7QW
|
|
||||||
9RkJoTXFQRNGq6j/cO0ZrCKFkttduziMWRz5X9QWADME1NsL53DfDkaxp9Nh+CCu
|
|
||||||
0S9OF9nVLJVigdXe4O1cQ0Qh633O6k+F/xWYcmMyVt3V2bs7FPfygGUx60tfIbpi
|
|
||||||
cBK3BsuzUrId3ozvYPsmfxYlzmyspyS6G+f7zLFOakm3fuqDJpnFNXmRY2Ljd3Cp
|
|
||||||
punuMT6zSctHAxpgJm1g9R6PcaGr+b/n6zkbxyK9+SFzwN3Lb18WFj5OcslNM/g5
|
|
||||||
ERE5Ws+Vae6MleSmsxSytgH4qn0ormPWuouBLaW0Rv2ZHdkt3myq8kTqtqdw3LRR
|
|
||||||
ogcLQ3cL6I5FKGjm2TOF72DQHvOol8ck0uMz/w==
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
`
|
|
||||||
|
|
||||||
const certificateKey = `
|
|
||||||
-----BEGIN PRIVATE KEY-----
|
|
||||||
MIIEuwIBADANBgkqhkiG9w0BAQEFAASCBKUwggShAgEAAoIBAQCZxGz5clcffvE6
|
|
||||||
wS9Q2xPsUjeKdwotRCfKRu9kT7o1cZOSRBp7DgdeLvZ7BzqU1tk5wiLLiZwBgI6a
|
|
||||||
mQAd6z6EwUcUH0mHtFiWU0y/FROz0QUojbbYp0PMUhWjlPAxAGaiwgF/82z7/lmg
|
|
||||||
Mjf5v32XsMfa4U+FaaNYs7gu7aCQBQTAHmOIPnEAeFk9xQ2VzntRUWwzDYOVSimt
|
|
||||||
PZk2O2X18V8KTgTLMQF1KErIyznIwEPB/BLi+ihLkh/8BaaxoIeOPIhRLNFrecZr
|
|
||||||
c/8+S4dUSUQDfmV3JFYFFheG0XIPEwXIaXiDAphpkCGhMIC2pDL8r14sntvnjuHF
|
|
||||||
ZxmSP4V5AgMBAAECgf89wcgjpZnzoWoiM3Z6QDJnkiUdXQumHQracBnRFXnMy8p9
|
|
||||||
wCd4ecnu9ptd8OArXgVMiaILWZeGXlqtW872m6Lej6DrJkpOt3NG9CvscdaHdthW
|
|
||||||
9dzv8d7IEtuRN4/WWOm7Tke7eD7763ta9i9/niR2q7DazPVw8vYhkyoNe864qVrq
|
|
||||||
Vw6+MMetz3TDHZ68p17yJJ9FJ0z0vHj3KJFrxnJonMe+/LcQX490y4zZw+zeyCkh
|
|
||||||
y/bsgvFGhnUhJ+mOz+qv0KL7HyUR69p9/+mjQH+AQH+j24xgd1IL0Dror9Cy1kxY
|
|
||||||
uKmi8pN1y288GmjkWosGMb0p3Pse1OkOyYFIbxECgYEA2ED3PSPoHWLHfKhg2BFw
|
|
||||||
yMPtern06rjKuwMNlD+mKS66Z+OsQi2EBsqomGnr1HGvYgQik0jwMcx0+Sup9/Zp
|
|
||||||
az8ebH6S4Tdxmnlwn34lhTIAF1KJS19AYvbhOydV+M+hq7Y7QxTqYsJAgEYwsozQ
|
|
||||||
0XeAzRBIiRxdcMFHP40zZIkCgYEAtgdiwo5d5iyvXEqx/5+NdM4b/ImrbaFIAb0v
|
|
||||||
MqiPpOA/+7EKlx72gJKVKh2iv4jvEUfduNEUXt77Yqo66HhfiTBVYxYwThK8E0Mq
|
|
||||||
TSKKdJsdPSThLS3qjeARpzQpWLiBZH90GxbfFL3ogIOa/UcgwRrqPc5a/yq8adSs
|
|
||||||
KGrfvXECgYEAmSMAMbqgn1aY32y5D6jiDjm4jMTsa98qKN5TmlysRNODSxhNnptu
|
|
||||||
uASA+VVgnBNZV/aHqXboKMuZNe22shI7uqd62ueTCYtiljpTB46j8TtkFx/qe4Zb
|
|
||||||
KPmcq3ACkGwwF1G3i5xfEkputKd/yqCvKvYOLqjORNHiVXt5Acby0skCgYBYkZ9s
|
|
||||||
KvllVbi9n1qclnWtr9vONO5EmYT/051zeLDr+HEpditA/L/UL36Ez4awy2AHeIBZ
|
|
||||||
vOG8h6Kpj0q6cleJ2Qqy+8jlNBhvBu8+OOBFfHPtnFQ0N3M5NR1hze+QS7YpwBou
|
|
||||||
VCKXZRAL9/0h38oAK6huCkocfh7PH7vkrpvPAQKBgCFDDtk7aBJsNcOW+aq4IEvf
|
|
||||||
nZ5hhhdelNLeN29RrJ71GwJrCG3NbhopWlCDqZ/Dd6QoEUpebqvlMGvQJBuz/QKb
|
|
||||||
ilcZlmaCS9pqIXAFK9GQ89V/xa8OibOuJUiBgShnfSQqAwQrfX1vYjtKErnjoRFs
|
|
||||||
9+zaWugLCC47Hw6QlMDa
|
|
||||||
-----END PRIVATE KEY-----
|
|
||||||
`
|
|
@ -1,19 +0,0 @@
|
|||||||
package tlsprovider
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/tls"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/internal/logger"
|
|
||||||
)
|
|
||||||
|
|
||||||
func GetDefaultTlsConfig() *tls.Config {
|
|
||||||
cert, err := tls.X509KeyPair([]byte(certificate), []byte(certificateKey))
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorLn("Failed to parse certificate and key:", err)
|
|
||||||
return &tls.Config{}
|
|
||||||
}
|
|
||||||
|
|
||||||
return &tls.Config{
|
|
||||||
Certificates: []tls.Certificate{cert},
|
|
||||||
}
|
|
||||||
}
|
|
45
main.go
Normal file
45
main.go
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
|
||||||
|
"github.com/pikami/cosmium/api"
|
||||||
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/repositories"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
config.ParseFlags()
|
||||||
|
|
||||||
|
if config.Config.InitialDataFilePath != "" {
|
||||||
|
repositories.LoadStateFS(config.Config.InitialDataFilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
router := api.CreateRouter()
|
||||||
|
if config.Config.TLS_CertificatePath == "" ||
|
||||||
|
config.Config.TLS_CertificateKey == "" {
|
||||||
|
go router.Run(fmt.Sprintf(":%d", config.Config.Port))
|
||||||
|
} else {
|
||||||
|
go router.RunTLS(
|
||||||
|
fmt.Sprintf(":%d", config.Config.Port),
|
||||||
|
config.Config.TLS_CertificatePath,
|
||||||
|
config.Config.TLS_CertificateKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
waitForExit()
|
||||||
|
}
|
||||||
|
|
||||||
|
func waitForExit() {
|
||||||
|
sigs := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
|
||||||
|
// Block until a exit signal is received
|
||||||
|
<-sigs
|
||||||
|
|
||||||
|
if config.Config.PersistDataFilePath != "" {
|
||||||
|
repositories.SaveStateFS(config.Config.PersistDataFilePath)
|
||||||
|
}
|
||||||
|
}
|
@ -3,26 +3,14 @@ package parsers
|
|||||||
type SelectStmt struct {
|
type SelectStmt struct {
|
||||||
SelectItems []SelectItem
|
SelectItems []SelectItem
|
||||||
Table Table
|
Table Table
|
||||||
JoinItems []JoinItem
|
|
||||||
Filters interface{}
|
Filters interface{}
|
||||||
Exists bool
|
|
||||||
Distinct bool
|
|
||||||
Count int
|
Count int
|
||||||
Offset int
|
|
||||||
Parameters map[string]interface{}
|
Parameters map[string]interface{}
|
||||||
OrderExpressions []OrderExpression
|
OrderExpressions []OrderExpression
|
||||||
GroupBy []SelectItem
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Table struct {
|
type Table struct {
|
||||||
Value string
|
Value string
|
||||||
SelectItem SelectItem
|
|
||||||
IsInSelect bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type JoinItem struct {
|
|
||||||
Table Table
|
|
||||||
SelectItem SelectItem
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type SelectItemType int
|
type SelectItemType int
|
||||||
@ -33,9 +21,6 @@ const (
|
|||||||
SelectItemTypeArray
|
SelectItemTypeArray
|
||||||
SelectItemTypeConstant
|
SelectItemTypeConstant
|
||||||
SelectItemTypeFunctionCall
|
SelectItemTypeFunctionCall
|
||||||
SelectItemTypeSubQuery
|
|
||||||
SelectItemTypeExpression
|
|
||||||
SelectItemTypeBinaryExpression
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type SelectItem struct {
|
type SelectItem struct {
|
||||||
@ -44,7 +29,6 @@ type SelectItem struct {
|
|||||||
SelectItems []SelectItem
|
SelectItems []SelectItem
|
||||||
Type SelectItemType
|
Type SelectItemType
|
||||||
Value interface{}
|
Value interface{}
|
||||||
Invert bool
|
|
||||||
IsTopLevel bool
|
IsTopLevel bool
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -66,12 +50,6 @@ type ComparisonExpression struct {
|
|||||||
Operation string
|
Operation string
|
||||||
}
|
}
|
||||||
|
|
||||||
type BinaryExpression struct {
|
|
||||||
Left interface{}
|
|
||||||
Right interface{}
|
|
||||||
Operation string
|
|
||||||
}
|
|
||||||
|
|
||||||
type ConstantType int
|
type ConstantType int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -133,71 +111,15 @@ const (
|
|||||||
FunctionCallIsPrimitive FunctionCallType = "IsPrimitive"
|
FunctionCallIsPrimitive FunctionCallType = "IsPrimitive"
|
||||||
FunctionCallIsString FunctionCallType = "IsString"
|
FunctionCallIsString FunctionCallType = "IsString"
|
||||||
|
|
||||||
FunctionCallArrayConcat FunctionCallType = "ArrayConcat"
|
FunctionCallArrayConcat FunctionCallType = "ArrayConcat"
|
||||||
FunctionCallArrayContains FunctionCallType = "ArrayContains"
|
FunctionCallArrayLength FunctionCallType = "ArrayLength"
|
||||||
FunctionCallArrayContainsAny FunctionCallType = "ArrayContainsAny"
|
FunctionCallArraySlice FunctionCallType = "ArraySlice"
|
||||||
FunctionCallArrayContainsAll FunctionCallType = "ArrayContainsAll"
|
FunctionCallSetIntersect FunctionCallType = "SetIntersect"
|
||||||
FunctionCallArrayLength FunctionCallType = "ArrayLength"
|
FunctionCallSetUnion FunctionCallType = "SetUnion"
|
||||||
FunctionCallArraySlice FunctionCallType = "ArraySlice"
|
|
||||||
FunctionCallSetIntersect FunctionCallType = "SetIntersect"
|
|
||||||
FunctionCallSetUnion FunctionCallType = "SetUnion"
|
|
||||||
|
|
||||||
FunctionCallIif FunctionCallType = "Iif"
|
|
||||||
|
|
||||||
FunctionCallMathAbs FunctionCallType = "MathAbs"
|
|
||||||
FunctionCallMathAcos FunctionCallType = "MathAcos"
|
|
||||||
FunctionCallMathAsin FunctionCallType = "MathAsin"
|
|
||||||
FunctionCallMathAtan FunctionCallType = "MathAtan"
|
|
||||||
FunctionCallMathAtn2 FunctionCallType = "MathAtn2"
|
|
||||||
FunctionCallMathCeiling FunctionCallType = "MathCeiling"
|
|
||||||
FunctionCallMathCos FunctionCallType = "MathCos"
|
|
||||||
FunctionCallMathCot FunctionCallType = "MathCot"
|
|
||||||
FunctionCallMathDegrees FunctionCallType = "MathDegrees"
|
|
||||||
FunctionCallMathExp FunctionCallType = "MathExp"
|
|
||||||
FunctionCallMathFloor FunctionCallType = "MathFloor"
|
|
||||||
FunctionCallMathIntAdd FunctionCallType = "MathIntAdd"
|
|
||||||
FunctionCallMathIntBitAnd FunctionCallType = "MathIntBitAnd"
|
|
||||||
FunctionCallMathIntBitLeftShift FunctionCallType = "MathIntBitLeftShift"
|
|
||||||
FunctionCallMathIntBitNot FunctionCallType = "MathIntBitNot"
|
|
||||||
FunctionCallMathIntBitOr FunctionCallType = "MathIntBitOr"
|
|
||||||
FunctionCallMathIntBitRightShift FunctionCallType = "MathIntBitRightShift"
|
|
||||||
FunctionCallMathIntBitXor FunctionCallType = "MathIntBitXor"
|
|
||||||
FunctionCallMathIntDiv FunctionCallType = "MathIntDiv"
|
|
||||||
FunctionCallMathIntMod FunctionCallType = "MathIntMod"
|
|
||||||
FunctionCallMathIntMul FunctionCallType = "MathIntMul"
|
|
||||||
FunctionCallMathIntSub FunctionCallType = "MathIntSub"
|
|
||||||
FunctionCallMathLog FunctionCallType = "MathLog"
|
|
||||||
FunctionCallMathLog10 FunctionCallType = "MathLog10"
|
|
||||||
FunctionCallMathNumberBin FunctionCallType = "MathNumberBin"
|
|
||||||
FunctionCallMathPi FunctionCallType = "MathPi"
|
|
||||||
FunctionCallMathPower FunctionCallType = "MathPower"
|
|
||||||
FunctionCallMathRadians FunctionCallType = "MathRadians"
|
|
||||||
FunctionCallMathRand FunctionCallType = "MathRand"
|
|
||||||
FunctionCallMathRound FunctionCallType = "MathRound"
|
|
||||||
FunctionCallMathSign FunctionCallType = "MathSign"
|
|
||||||
FunctionCallMathSin FunctionCallType = "MathSin"
|
|
||||||
FunctionCallMathSqrt FunctionCallType = "MathSqrt"
|
|
||||||
FunctionCallMathSquare FunctionCallType = "MathSquare"
|
|
||||||
FunctionCallMathTan FunctionCallType = "MathTan"
|
|
||||||
FunctionCallMathTrunc FunctionCallType = "MathTrunc"
|
|
||||||
|
|
||||||
FunctionCallAggregateAvg FunctionCallType = "AggregateAvg"
|
|
||||||
FunctionCallAggregateCount FunctionCallType = "AggregateCount"
|
|
||||||
FunctionCallAggregateMax FunctionCallType = "AggregateMax"
|
|
||||||
FunctionCallAggregateMin FunctionCallType = "AggregateMin"
|
|
||||||
FunctionCallAggregateSum FunctionCallType = "AggregateSum"
|
|
||||||
|
|
||||||
FunctionCallIn FunctionCallType = "In"
|
FunctionCallIn FunctionCallType = "In"
|
||||||
)
|
)
|
||||||
|
|
||||||
var AggregateFunctions = []FunctionCallType{
|
|
||||||
FunctionCallAggregateAvg,
|
|
||||||
FunctionCallAggregateCount,
|
|
||||||
FunctionCallAggregateMax,
|
|
||||||
FunctionCallAggregateMin,
|
|
||||||
FunctionCallAggregateSum,
|
|
||||||
}
|
|
||||||
|
|
||||||
type FunctionCall struct {
|
type FunctionCall struct {
|
||||||
Arguments []interface{}
|
Arguments []interface{}
|
||||||
Type FunctionCallType
|
Type FunctionCallType
|
||||||
|
@ -1,131 +0,0 @@
|
|||||||
package nosql_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Test_Parse_AggregateFunctions(t *testing.T) {
|
|
||||||
|
|
||||||
t.Run("Should parse function AVG()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT AVG(c.a1) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallAggregateAvg,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "a1"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function COUNT()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT COUNT(c.a1) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallAggregateCount,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "a1"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function MAX()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT MAX(c.a1) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallAggregateMax,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "a1"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function MIN()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT MIN(c.a1) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallAggregateMin,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "a1"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function SUM()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT SUM(c.a1) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallAggregateSum,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "a1"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,366 +0,0 @@
|
|||||||
package nosql_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Test_Parse_Arithmetics(t *testing.T) {
|
|
||||||
t.Run("Should parse multiplication before addition", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.a + c.b * c.c FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Path("c", "b"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "c"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse division before subtraction", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.x - c.y / c.z FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "-",
|
|
||||||
Left: testutils.SelectItem_Path("c", "x"),
|
|
||||||
Right: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "/",
|
|
||||||
Left: testutils.SelectItem_Path("c", "y"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "z"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle complex mixed operations", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.a + c.b * c.c - c.d / c.e FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "-",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Path("c", "b"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "c"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "/",
|
|
||||||
Left: testutils.SelectItem_Path("c", "d"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "e"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should respect parentheses overriding precedence", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT (c.a + c.b) * c.c FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Path("c", "c"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle nested parentheses", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT ((c.a + c.b) * c.c) - c.d FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "-",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Path("c", "c"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Path("c", "d"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should be left associative for same precedence operators", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.a - c.b - c.c FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "-",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "-",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Path("c", "c"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should be left associative with multiplication and division", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.a * c.b / c.c FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "/",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Path("c", "c"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle math with constants", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT 10 + 20 * 5 FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: testutils.SelectItem_Constant_Int(10),
|
|
||||||
Right: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Constant_Int(20),
|
|
||||||
Right: testutils.SelectItem_Constant_Int(5),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle math with floating point numbers", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.price * 1.08 FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Path("c", "price"),
|
|
||||||
Right: testutils.SelectItem_Constant_Float(1.08),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle parentheses around single value", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT (c.value) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Path("c", "value"),
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle function calls in math expressions", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT LENGTH(c.name) * 2 + 10 FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallLength,
|
|
||||||
Arguments: []interface{}{testutils.SelectItem_Path("c", "name")},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Constant_Int(2),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Constant_Int(10),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle multiple select items with math", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.a + c.b, c.x * c.y FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "+",
|
|
||||||
Left: testutils.SelectItem_Path("c", "a"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Path("c", "x"),
|
|
||||||
Right: testutils.SelectItem_Path("c", "y"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should handle math in WHERE clause", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id FROM c WHERE c.price * 1.08 > 100`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Path("c", "id"),
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
Filters: parsers.ComparisonExpression{
|
|
||||||
Operation: ">",
|
|
||||||
Left: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Operation: "*",
|
|
||||||
Left: testutils.SelectItem_Path("c", "price"),
|
|
||||||
Right: testutils.SelectItem_Constant_Float(1.08),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Right: testutils.SelectItem_Constant_Int(100),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
@ -4,7 +4,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
"github.com/pikami/cosmium/parsers"
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Parse_ArrayFunctions(t *testing.T) {
|
func Test_Parse_ArrayFunctions(t *testing.T) {
|
||||||
@ -32,120 +31,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ARRAY_CONTAINS()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT ARRAY_CONTAINS(c.a1, "value") FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallArrayContains,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "a1"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
testutils.SelectItem_Constant_String("value"),
|
|
||||||
nil,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ARRAY_CONTAINS() with partial match", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT ARRAY_CONTAINS(["a", "b"], "value", true) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallArrayContains,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeArray,
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Constant_String("a"),
|
|
||||||
testutils.SelectItem_Constant_String("b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
testutils.SelectItem_Constant_String("value"),
|
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ARRAY_CONTAINS_ANY()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT ARRAY_CONTAINS_ANY(["a", "b"], "value", true) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallArrayContainsAny,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeArray,
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Constant_String("a"),
|
|
||||||
testutils.SelectItem_Constant_String("b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
testutils.SelectItem_Constant_String("value"),
|
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ARRAY_CONTAINS_ALL()", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT ARRAY_CONTAINS_ALL(["a", "b"], "value", true) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallArrayContainsAll,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeArray,
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Constant_String("a"),
|
|
||||||
testutils.SelectItem_Constant_String("b"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
testutils.SelectItem_Constant_String("value"),
|
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -169,7 +55,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -189,13 +75,25 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "array"},
|
Path: []string{"c", "array"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_Int(0),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Int(2),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 2,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -223,7 +121,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -251,7 +149,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -1,58 +0,0 @@
|
|||||||
package nosql_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Test_Parse_Join(t *testing.T) {
|
|
||||||
|
|
||||||
t.Run("Should parse simple JOIN", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id, c["pk"] FROM c JOIN cc IN c["tags"]`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
{Path: []string{"c", "pk"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
JoinItems: []parsers.JoinItem{
|
|
||||||
{
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "cc",
|
|
||||||
},
|
|
||||||
SelectItem: parsers.SelectItem{
|
|
||||||
Path: []string{"c", "tags"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse JOIN VALUE", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT VALUE cc FROM c JOIN cc IN c["tags"]`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"cc"}, IsTopLevel: true},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
JoinItems: []parsers.JoinItem{
|
|
||||||
{
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "cc",
|
|
||||||
},
|
|
||||||
SelectItem: parsers.SelectItem{
|
|
||||||
Path: []string{"c", "tags"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,651 +0,0 @@
|
|||||||
package nosql_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Test_Execute_MathFunctions(t *testing.T) {
|
|
||||||
t.Run("Should parse function ABS(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT ABS(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathAbs,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ACOS(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT ACOS(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathAcos,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ASIN(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT ASIN(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathAsin,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ATAN(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT ATAN(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathAtan,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function CEILING(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT CEILING(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathCeiling,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function COS(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT COS(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathCos,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function COT(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT COT(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathCot,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function DEGREES(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT DEGREES(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathDegrees,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function EXP(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT EXP(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathExp,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function FLOOR(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT FLOOR(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathFloor,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntBitNot(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntBitNot(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntBitNot,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function LOG10(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT LOG10(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathLog10,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function RADIANS(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT RADIANS(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathRadians,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ROUND(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT ROUND(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathRound,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function SIGN(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT SIGN(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathSign,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function SIN(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT SIN(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathSin,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function SQRT(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT SQRT(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathSqrt,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function SQUARE(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT SQUARE(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathSquare,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function TAN(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT TAN(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathTan,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function TRUNC(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT TRUNC(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathTrunc,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function ATN2(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT ATN2(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathAtn2,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntAdd(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntAdd(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntAdd,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntBitAnd(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntBitAnd(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntBitAnd,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntBitLeftShift(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntBitLeftShift(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntBitLeftShift,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntBitOr(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntBitOr(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntBitOr,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntBitRightShift(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntBitRightShift(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntBitRightShift,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntBitXor(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntBitXor(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntBitXor,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntDiv(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntDiv(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntDiv,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntMod(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntMod(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntMod,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntMul(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntMul(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntMul,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function IntSub(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT IntSub(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathIntSub,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function POWER(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT POWER(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathPower,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function LOG(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT LOG(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathLog,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function LOG(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT LOG(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathLog,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function NumberBin(ex)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT NumberBin(c.value) FROM c`,
|
|
||||||
parsers.FunctionCallMathNumberBin,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function NumberBin(ex1, ex2)", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT NumberBin(c.value, c.secondValue) FROM c`,
|
|
||||||
parsers.FunctionCallMathNumberBin,
|
|
||||||
[]interface{}{
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "value"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
parsers.SelectItem{
|
|
||||||
Path: []string{"c", "secondValue"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function PI()", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT PI() FROM c`,
|
|
||||||
parsers.FunctionCallMathPi,
|
|
||||||
[]interface{}{},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse function RAND()", func(t *testing.T) {
|
|
||||||
testMathFunctionParse(
|
|
||||||
t,
|
|
||||||
`SELECT RAND() FROM c`,
|
|
||||||
parsers.FunctionCallMathRand,
|
|
||||||
[]interface{}{},
|
|
||||||
"c",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func testMathFunctionParse(
|
|
||||||
t *testing.T,
|
|
||||||
query string,
|
|
||||||
expectedFunctionType parsers.FunctionCallType,
|
|
||||||
expectedArguments []interface{},
|
|
||||||
expectedTable string,
|
|
||||||
) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
query,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: expectedFunctionType,
|
|
||||||
Arguments: expectedArguments,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path(expectedTable)},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
@ -7,7 +7,6 @@ import (
|
|||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
"github.com/pikami/cosmium/parsers"
|
||||||
"github.com/pikami/cosmium/parsers/nosql"
|
"github.com/pikami/cosmium/parsers/nosql"
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// For Parser Debugging
|
// For Parser Debugging
|
||||||
@ -49,7 +48,7 @@ func Test_Parse(t *testing.T) {
|
|||||||
{Path: []string{"c", "id"}},
|
{Path: []string{"c", "id"}},
|
||||||
{Path: []string{"c", "pk"}},
|
{Path: []string{"c", "pk"}},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
OrderExpressions: []parsers.OrderExpression{
|
OrderExpressions: []parsers.OrderExpression{
|
||||||
{
|
{
|
||||||
SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
|
SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
|
||||||
@ -64,24 +63,6 @@ func Test_Parse(t *testing.T) {
|
|||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should parse SELECT with GROUP BY", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id, c["pk"] FROM c GROUP BY c.id, c.pk`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
{Path: []string{"c", "pk"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
GroupBy: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
{Path: []string{"c", "pk"}},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse IN function", func(t *testing.T) {
|
t.Run("Should parse IN function", func(t *testing.T) {
|
||||||
testQueryParse(
|
testQueryParse(
|
||||||
t,
|
t,
|
||||||
@ -93,7 +74,7 @@ func Test_Parse(t *testing.T) {
|
|||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -103,86 +84,23 @@ func Test_Parse(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
|
||||||
testutils.SelectItem_Constant_String("456"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse IN function with function call", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`Select c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Path("c", "id"),
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
Filters: parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallIn,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
parsers.SelectItem{
|
parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeConstant,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.Constant{
|
||||||
Type: parsers.FunctionCallToString,
|
Type: parsers.ConstantTypeString,
|
||||||
Arguments: []interface{}{
|
Value: "123",
|
||||||
testutils.SelectItem_Path("c", "id"),
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_String("456"),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
},
|
Value: parsers.Constant{
|
||||||
},
|
Type: parsers.ConstantTypeString,
|
||||||
},
|
Value: "456",
|
||||||
},
|
},
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse IN selector", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id FROM c IN c.tags`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Path: []string{"c", "id"},
|
|
||||||
Type: parsers.SelectItemTypeField,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "c",
|
|
||||||
SelectItem: testutils.SelectItem_Path("c", "tags"),
|
|
||||||
IsInSelect: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse IIF function", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT IIF(true, c.pk, c.id) FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
|
||||||
Value: parsers.FunctionCall{
|
|
||||||
Type: parsers.FunctionCallIif,
|
|
||||||
Arguments: []interface{}{
|
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
|
||||||
testutils.SelectItem_Path("c", "pk"),
|
|
||||||
testutils.SelectItem_Path("c", "id"),
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -3,138 +3,90 @@ package nosql
|
|||||||
|
|
||||||
import "github.com/pikami/cosmium/parsers"
|
import "github.com/pikami/cosmium/parsers"
|
||||||
|
|
||||||
func makeSelectStmt(
|
func makeSelectStmt(columns, table, whereClause interface{}, count interface{}, orderList interface{}) (parsers.SelectStmt, error) {
|
||||||
columns, fromClause, joinItems,
|
selectStmt := parsers.SelectStmt{
|
||||||
whereClause interface{}, distinctClause interface{},
|
SelectItems: columns.([]parsers.SelectItem),
|
||||||
count interface{}, groupByClause interface{}, orderList interface{},
|
Table: table.(parsers.Table),
|
||||||
offsetClause interface{},
|
}
|
||||||
) (parsers.SelectStmt, error) {
|
|
||||||
selectStmt := parsers.SelectStmt{
|
|
||||||
SelectItems: columns.([]parsers.SelectItem),
|
|
||||||
}
|
|
||||||
|
|
||||||
if fromTable, ok := fromClause.(parsers.Table); ok {
|
|
||||||
selectStmt.Table = fromTable
|
|
||||||
}
|
|
||||||
|
|
||||||
if joinItemsArray, ok := joinItems.([]interface{}); ok && len(joinItemsArray) > 0 {
|
|
||||||
selectStmt.JoinItems = make([]parsers.JoinItem, len(joinItemsArray))
|
|
||||||
for i, joinItem := range joinItemsArray {
|
|
||||||
selectStmt.JoinItems[i] = joinItem.(parsers.JoinItem)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
switch v := whereClause.(type) {
|
switch v := whereClause.(type) {
|
||||||
case parsers.ComparisonExpression, parsers.LogicalExpression, parsers.Constant, parsers.SelectItem:
|
case parsers.ComparisonExpression, parsers.LogicalExpression, parsers.Constant, parsers.SelectItem:
|
||||||
selectStmt.Filters = v
|
selectStmt.Filters = v
|
||||||
}
|
}
|
||||||
|
|
||||||
if distinctClause != nil {
|
if n, ok := count.(int); ok {
|
||||||
selectStmt.Distinct = true
|
selectStmt.Count = n
|
||||||
}
|
}
|
||||||
|
|
||||||
if n, ok := count.(int); ok {
|
|
||||||
selectStmt.Count = n
|
|
||||||
}
|
|
||||||
|
|
||||||
if offsetArr, ok := offsetClause.([]interface{}); ok && len(offsetArr) == 2 {
|
|
||||||
if n, ok := offsetArr[0].(int); ok {
|
|
||||||
selectStmt.Offset = n
|
|
||||||
}
|
|
||||||
|
|
||||||
if n, ok := offsetArr[1].(int); ok {
|
|
||||||
selectStmt.Count = n
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if orderExpressions, ok := orderList.([]parsers.OrderExpression); ok {
|
if orderExpressions, ok := orderList.([]parsers.OrderExpression); ok {
|
||||||
selectStmt.OrderExpressions = orderExpressions
|
selectStmt.OrderExpressions = orderExpressions
|
||||||
}
|
}
|
||||||
|
|
||||||
if groupByClause != nil {
|
return selectStmt, nil
|
||||||
selectStmt.GroupBy = groupByClause.([]parsers.SelectItem)
|
|
||||||
}
|
|
||||||
|
|
||||||
return selectStmt, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func makeJoin(table interface{}, column interface{}) (parsers.JoinItem, error) {
|
|
||||||
joinItem := parsers.JoinItem{}
|
|
||||||
|
|
||||||
if selectItem, isSelectItem := column.(parsers.SelectItem); isSelectItem {
|
|
||||||
joinItem.SelectItem = selectItem
|
|
||||||
joinItem.Table.Value = selectItem.Alias
|
|
||||||
}
|
|
||||||
|
|
||||||
if tableTyped, isTable := table.(parsers.Table); isTable {
|
|
||||||
joinItem.Table = tableTyped
|
|
||||||
}
|
|
||||||
|
|
||||||
return joinItem, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeSelectItem(name interface{}, path interface{}, selectItemType parsers.SelectItemType) (parsers.SelectItem, error) {
|
func makeSelectItem(name interface{}, path interface{}, selectItemType parsers.SelectItemType) (parsers.SelectItem, error) {
|
||||||
ps := path.([]interface{})
|
ps := path.([]interface{})
|
||||||
|
|
||||||
paths := make([]string, 1)
|
paths := make([]string, 1)
|
||||||
paths[0] = name.(string)
|
paths[0] = name.(string)
|
||||||
for _, p := range ps {
|
for _, p := range ps {
|
||||||
paths = append(paths, p.(string))
|
paths = append(paths, p.(string))
|
||||||
}
|
}
|
||||||
|
|
||||||
return parsers.SelectItem{Path: paths, Type: selectItemType}, nil
|
return parsers.SelectItem{Path: paths, Type: selectItemType}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeColumnList(column interface{}, other_columns interface{}) ([]parsers.SelectItem, error) {
|
func makeColumnList(column interface{}, other_columns interface{}) ([]parsers.SelectItem, error) {
|
||||||
collsAsArray := other_columns.([]interface{})
|
collsAsArray := other_columns.([]interface{})
|
||||||
columnList := make([]parsers.SelectItem, len(collsAsArray) + 1)
|
columnList := make([]parsers.SelectItem, len(collsAsArray) + 1)
|
||||||
columnList[0] = column.(parsers.SelectItem)
|
columnList[0] = column.(parsers.SelectItem)
|
||||||
|
|
||||||
for i, v := range collsAsArray {
|
for i, v := range collsAsArray {
|
||||||
if col, ok := v.(parsers.SelectItem); ok {
|
if col, ok := v.(parsers.SelectItem); ok {
|
||||||
columnList[i+1] = col
|
columnList[i+1] = col
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return columnList, nil
|
return columnList, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeSelectArray(columns interface{}) (parsers.SelectItem, error) {
|
func makeSelectArray(columns interface{}) (parsers.SelectItem, error) {
|
||||||
return parsers.SelectItem{
|
return parsers.SelectItem{
|
||||||
SelectItems: columns.([]parsers.SelectItem),
|
SelectItems: columns.([]parsers.SelectItem),
|
||||||
Type: parsers.SelectItemTypeArray,
|
Type: parsers.SelectItemTypeArray,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeSelectObject(field interface{}, other_fields interface{}) (parsers.SelectItem, error) {
|
func makeSelectObject(field interface{}, other_fields interface{}) (parsers.SelectItem, error) {
|
||||||
fieldsAsArray := other_fields.([]interface{})
|
fieldsAsArray := other_fields.([]interface{})
|
||||||
fieldsList := make([]parsers.SelectItem, len(fieldsAsArray)+1)
|
fieldsList := make([]parsers.SelectItem, len(fieldsAsArray)+1)
|
||||||
fieldsList[0] = field.(parsers.SelectItem)
|
fieldsList[0] = field.(parsers.SelectItem)
|
||||||
|
|
||||||
for i, v := range fieldsAsArray {
|
for i, v := range fieldsAsArray {
|
||||||
if col, ok := v.(parsers.SelectItem); ok {
|
if col, ok := v.(parsers.SelectItem); ok {
|
||||||
fieldsList[i+1] = col
|
fieldsList[i+1] = col
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return parsers.SelectItem{
|
return parsers.SelectItem{
|
||||||
SelectItems: fieldsList,
|
SelectItems: fieldsList,
|
||||||
Type: parsers.SelectItemTypeObject,
|
Type: parsers.SelectItemTypeObject,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeOrderByClause(ex1 interface{}, others interface{}) ([]parsers.OrderExpression, error) {
|
func makeOrderByClause(ex1 interface{}, others interface{}) ([]parsers.OrderExpression, error) {
|
||||||
othersArray := others.([]interface{})
|
othersArray := others.([]interface{})
|
||||||
orderList := make([]parsers.OrderExpression, len(othersArray)+1)
|
orderList := make([]parsers.OrderExpression, len(othersArray)+1)
|
||||||
orderList[0] = ex1.(parsers.OrderExpression)
|
orderList[0] = ex1.(parsers.OrderExpression)
|
||||||
|
|
||||||
for i, v := range othersArray {
|
for i, v := range othersArray {
|
||||||
if col, ok := v.(parsers.OrderExpression); ok {
|
if col, ok := v.(parsers.OrderExpression); ok {
|
||||||
orderList[i+1] = col
|
orderList[i+1] = col
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return orderList, nil
|
return orderList, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExpression, error) {
|
func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExpression, error) {
|
||||||
@ -144,8 +96,8 @@ func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExp
|
|||||||
}
|
}
|
||||||
|
|
||||||
if orderValue, ok := order.(parsers.OrderDirection); ok {
|
if orderValue, ok := order.(parsers.OrderDirection); ok {
|
||||||
value.Direction = orderValue
|
value.Direction = orderValue
|
||||||
}
|
}
|
||||||
|
|
||||||
return value, nil
|
return value, nil
|
||||||
}
|
}
|
||||||
@ -169,39 +121,13 @@ func joinStrings(array []interface{}) string {
|
|||||||
|
|
||||||
func combineExpressions(ex1 interface{}, exs interface{}, operation parsers.LogicalExpressionType) (interface{}, error) {
|
func combineExpressions(ex1 interface{}, exs interface{}, operation parsers.LogicalExpressionType) (interface{}, error) {
|
||||||
if exs == nil || len(exs.([]interface{})) < 1 {
|
if exs == nil || len(exs.([]interface{})) < 1 {
|
||||||
return ex1, nil
|
return ex1, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return parsers.LogicalExpression{
|
return parsers.LogicalExpression{
|
||||||
Expressions: append([]interface{}{ex1}, exs.([]interface{})...),
|
Expressions: append([]interface{}{ex1}, exs.([]interface{})...),
|
||||||
Operation: operation,
|
Operation: operation,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
|
||||||
|
|
||||||
func makeMathExpression(left interface{}, operations interface{}) (interface{}, error) {
|
|
||||||
if operations == nil || len(operations.([]interface{})) == 0 {
|
|
||||||
return left, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
result := left.(parsers.SelectItem)
|
|
||||||
ops := operations.([]interface{})
|
|
||||||
|
|
||||||
for _, op := range ops {
|
|
||||||
opData := op.([]interface{})
|
|
||||||
operation := opData[0].(string)
|
|
||||||
right := opData[1].(parsers.SelectItem)
|
|
||||||
|
|
||||||
result = parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeBinaryExpression,
|
|
||||||
Value: parsers.BinaryExpression{
|
|
||||||
Left: result,
|
|
||||||
Right: right,
|
|
||||||
Operation: operation,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -210,121 +136,32 @@ Input <- selectStmt:SelectStmt {
|
|||||||
return selectStmt, nil
|
return selectStmt, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
SelectStmt <- Select ws
|
SelectStmt <- Select ws topClause:TopClause? ws columns:Selection ws
|
||||||
distinctClause:DistinctClause? ws
|
From ws table:TableName ws
|
||||||
topClause:TopClause? ws
|
|
||||||
columns:Selection ws
|
|
||||||
fromClause:FromClause? ws
|
|
||||||
joinClauses:(ws join:JoinClause { return join, nil })* ws
|
|
||||||
whereClause:(ws Where ws condition:Condition { return condition, nil })?
|
whereClause:(ws Where ws condition:Condition { return condition, nil })?
|
||||||
groupByClause:(ws GroupBy ws columns:ColumnList { return columns, nil })?
|
orderByClause:OrderByClause? {
|
||||||
orderByClause:(ws order:OrderByClause { return order, nil })?
|
return makeSelectStmt(columns, table, whereClause, topClause, orderByClause)
|
||||||
offsetClause:(ws offset:OffsetClause { return offset, nil })? {
|
|
||||||
return makeSelectStmt(columns, fromClause, joinClauses, whereClause,
|
|
||||||
distinctClause, topClause, groupByClause, orderByClause, offsetClause)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
DistinctClause <- "DISTINCT"i
|
|
||||||
|
|
||||||
TopClause <- Top ws count:Integer {
|
TopClause <- Top ws count:Integer {
|
||||||
return count, nil
|
return count, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItemWithAlias { return column, nil }) {
|
|
||||||
tableTyped := table.(parsers.Table)
|
|
||||||
|
|
||||||
if selectItem != nil {
|
|
||||||
tableTyped.SelectItem = selectItem.(parsers.SelectItem)
|
|
||||||
tableTyped.IsInSelect = true
|
|
||||||
}
|
|
||||||
|
|
||||||
return tableTyped, nil
|
|
||||||
} / From ws column:SelectItemWithAlias {
|
|
||||||
tableSelectItem := column.(parsers.SelectItem)
|
|
||||||
table := parsers.Table{
|
|
||||||
Value: tableSelectItem.Alias,
|
|
||||||
SelectItem: tableSelectItem,
|
|
||||||
}
|
|
||||||
return table, nil
|
|
||||||
} / From ws subQuery:SubQuerySelectItem {
|
|
||||||
subQueryTyped := subQuery.(parsers.SelectItem)
|
|
||||||
table := parsers.Table{
|
|
||||||
Value: subQueryTyped.Alias,
|
|
||||||
SelectItem: subQueryTyped,
|
|
||||||
}
|
|
||||||
return table, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
SubQuery <- exists:(exists:Exists ws { return exists, nil })? "(" ws selectStmt:SelectStmt ws ")" {
|
|
||||||
if selectStatement, isGoodValue := selectStmt.(parsers.SelectStmt); isGoodValue {
|
|
||||||
selectStatement.Exists = exists != nil
|
|
||||||
return selectStatement, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return selectStmt, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
SubQuerySelectItem <- subQuery:SubQuery asClause:(ws alias:AsClause { return alias, nil })? {
|
|
||||||
selectItem := parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeSubQuery,
|
|
||||||
Value: subQuery,
|
|
||||||
}
|
|
||||||
|
|
||||||
if tableName, isString := asClause.(string); isString {
|
|
||||||
selectItem.Alias = tableName
|
|
||||||
}
|
|
||||||
|
|
||||||
return selectItem, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
JoinClause <- Join ws table:TableName ws In ws column:SelectItemWithAlias {
|
|
||||||
return makeJoin(table, column)
|
|
||||||
} / Join ws subQuery:SubQuerySelectItem {
|
|
||||||
return makeJoin(nil, subQuery)
|
|
||||||
}
|
|
||||||
|
|
||||||
OffsetClause <- Offset ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
|
|
||||||
return []interface{}{offset.(parsers.Constant).Value, limit.(parsers.Constant).Value}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
Selection <- SelectValueSpec / ColumnList / SelectAsterisk
|
Selection <- SelectValueSpec / ColumnList / SelectAsterisk
|
||||||
|
|
||||||
SelectAsterisk <- "*" {
|
SelectAsterisk <- "*" {
|
||||||
selectItem, _ := makeSelectItem("c", make([]interface{}, 0), parsers.SelectItemTypeField)
|
selectItem, _ := makeSelectItem("c", make([]interface{}, 0), parsers.SelectItemTypeField)
|
||||||
selectItem.IsTopLevel = true
|
selectItem.IsTopLevel = true
|
||||||
return makeColumnList(selectItem, make([]interface{}, 0))
|
return makeColumnList(selectItem, make([]interface{}, 0))
|
||||||
}
|
}
|
||||||
|
|
||||||
ColumnList <- column:ExpressionOrSelectItem other_columns:(ws "," ws coll:ExpressionOrSelectItem {return coll, nil })* {
|
ColumnList <- column:SelectItem other_columns:(ws "," ws coll:SelectItem {return coll, nil })* {
|
||||||
return makeColumnList(column, other_columns)
|
return makeColumnList(column, other_columns)
|
||||||
}
|
}
|
||||||
|
|
||||||
ExpressionOrSelectItem <- expression:OrExpression asClause:AsClause? {
|
SelectValueSpec <- "VALUE"i ws column:SelectItem {
|
||||||
switch typedValue := expression.(type) {
|
|
||||||
case parsers.ComparisonExpression, parsers.LogicalExpression:
|
|
||||||
selectItem := parsers.SelectItem{
|
|
||||||
Type: parsers.SelectItemTypeExpression,
|
|
||||||
Value: typedValue,
|
|
||||||
}
|
|
||||||
|
|
||||||
if aliasValue, ok := asClause.(string); ok {
|
|
||||||
selectItem.Alias = aliasValue
|
|
||||||
}
|
|
||||||
|
|
||||||
return selectItem, nil
|
|
||||||
case parsers.SelectItem:
|
|
||||||
if aliasValue, ok := asClause.(string); ok {
|
|
||||||
typedValue.Alias = aliasValue
|
|
||||||
}
|
|
||||||
return typedValue, nil
|
|
||||||
default:
|
|
||||||
return typedValue, nil
|
|
||||||
}
|
|
||||||
} / item:SelectItemWithAlias { return item, nil }
|
|
||||||
|
|
||||||
SelectValueSpec <- "VALUE"i ws column:SelectItemWithAlias {
|
|
||||||
selectItem := column.(parsers.SelectItem)
|
selectItem := column.(parsers.SelectItem)
|
||||||
selectItem.IsTopLevel = true
|
selectItem.IsTopLevel = true
|
||||||
return makeColumnList(selectItem, make([]interface{}, 0))
|
return makeColumnList(selectItem, make([]interface{}, 0))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -338,32 +175,19 @@ SelectArray <- "[" ws columns:ColumnList ws "]" {
|
|||||||
|
|
||||||
SelectObject <- "{" ws field:SelectObjectField ws other_fields:(ws "," ws coll:SelectObjectField {return coll, nil })* ws "}" {
|
SelectObject <- "{" ws field:SelectObjectField ws other_fields:(ws "," ws coll:SelectObjectField {return coll, nil })* ws "}" {
|
||||||
return makeSelectObject(field, other_fields)
|
return makeSelectObject(field, other_fields)
|
||||||
} / "{" ws "}" {
|
|
||||||
return parsers.SelectItem{
|
|
||||||
SelectItems: []parsers.SelectItem{},
|
|
||||||
Type: parsers.SelectItemTypeObject,
|
|
||||||
}, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
SelectObjectField <- name:(Identifier / "\"" key:Identifier "\"" { return key, nil }) ws ":" ws selectItem:SelectItem {
|
SelectObjectField <- name:(Identifier / "\"" key:Identifier "\"" { return key, nil }) ws ":" ws selectItem:SelectItem {
|
||||||
item := selectItem.(parsers.SelectItem)
|
item := selectItem.(parsers.SelectItem)
|
||||||
item.Alias = name.(string)
|
item.Alias = name.(string)
|
||||||
return item, nil
|
return item, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
SelectProperty <- name:Identifier path:(DotFieldAccess / ArrayFieldAccess)* {
|
SelectProperty <- name:Identifier path:(DotFieldAccess / ArrayFieldAccess)* {
|
||||||
return makeSelectItem(name, path, parsers.SelectItemTypeField)
|
return makeSelectItem(name, path, parsers.SelectItemTypeField)
|
||||||
}
|
}
|
||||||
|
|
||||||
SelectItemWithAlias <- selectItem:SelectItem asClause:AsClause? {
|
SelectItem <- selectItem:(Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) asClause:AsClause? {
|
||||||
item := selectItem.(parsers.SelectItem)
|
|
||||||
if aliasValue, ok := asClause.(string); ok {
|
|
||||||
item.Alias = aliasValue
|
|
||||||
}
|
|
||||||
return item, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) {
|
|
||||||
var itemResult parsers.SelectItem
|
var itemResult parsers.SelectItem
|
||||||
switch typedValue := selectItem.(type) {
|
switch typedValue := selectItem.(type) {
|
||||||
case parsers.SelectItem:
|
case parsers.SelectItem:
|
||||||
@ -380,22 +204,22 @@ SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectAr
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return itemResult, nil
|
if aliasValue, ok := asClause.(string); ok {
|
||||||
|
itemResult.Alias = aliasValue
|
||||||
|
}
|
||||||
|
|
||||||
|
return itemResult, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
AsClause <- (ws As)? ws !ExcludedKeywords alias:Identifier {
|
AsClause <- ws As ws alias:Identifier { return alias, nil }
|
||||||
return alias, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
ExcludedKeywords <- Select / Top / As / From / In / Join / Exists / Where / And / Or / Not / GroupBy / OrderBy / Offset
|
|
||||||
|
|
||||||
DotFieldAccess <- "." id:Identifier {
|
DotFieldAccess <- "." id:Identifier {
|
||||||
return id, nil
|
return id, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
ArrayFieldAccess <- "[\"" id:Identifier "\"]" { return id, nil }
|
ArrayFieldAccess <- "[\"" id:Identifier "\"]" {
|
||||||
/ "[" id:Integer "]" { return strconv.Itoa(id.(int)), nil }
|
return id, nil
|
||||||
/ "[" id:ParameterConstant "]" { return id.(parsers.Constant).Value.(string), nil }
|
}
|
||||||
|
|
||||||
Identifier <- [a-zA-Z_][a-zA-Z0-9_]* {
|
Identifier <- [a-zA-Z_][a-zA-Z0-9_]* {
|
||||||
return string(c.text), nil
|
return string(c.text), nil
|
||||||
@ -413,27 +237,11 @@ AndExpression <- ex1:ComparisonExpression ex2:(ws And ws ex:ComparisonExpression
|
|||||||
return combineExpressions(ex1, ex2, parsers.LogicalExpressionTypeAnd)
|
return combineExpressions(ex1, ex2, parsers.LogicalExpressionTypeAnd)
|
||||||
}
|
}
|
||||||
|
|
||||||
ComparisonExpression <- left:AddSubExpression ws op:ComparisonOperator ws right:AddSubExpression {
|
ComparisonExpression <- "(" ws ex:OrExpression ws ")" { return ex, nil }
|
||||||
|
/ left:SelectItem ws op:ComparisonOperator ws right:SelectItem {
|
||||||
return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
|
return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
|
||||||
} / ex:AddSubExpression { return ex, nil }
|
|
||||||
|
|
||||||
AddSubExpression <- left:MulDivExpression operations:(ws op:AddOrSubtractOperation ws right:MulDivExpression { return []interface{}{op, right}, nil })* {
|
|
||||||
return makeMathExpression(left, operations)
|
|
||||||
}
|
|
||||||
|
|
||||||
MulDivExpression <- left:SelectItemWithParentheses operations:(ws op:MultiplyOrDivideOperation ws right:SelectItemWithParentheses { return []interface{}{op, right}, nil })* {
|
|
||||||
return makeMathExpression(left, operations)
|
|
||||||
}
|
|
||||||
|
|
||||||
SelectItemWithParentheses <- "(" ws ex:OrExpression ws ")" { return ex, nil }
|
|
||||||
/ inv:(Not ws)? ex:SelectItem {
|
|
||||||
if inv != nil {
|
|
||||||
ex1 := ex.(parsers.SelectItem)
|
|
||||||
ex1.Invert = true
|
|
||||||
return ex1, nil
|
|
||||||
}
|
|
||||||
return ex, nil
|
|
||||||
} / ex:BooleanLiteral { return ex, nil }
|
} / ex:BooleanLiteral { return ex, nil }
|
||||||
|
/ ex:SelectItem { return ex, nil }
|
||||||
|
|
||||||
OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* {
|
OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* {
|
||||||
return makeOrderByClause(ex1, others)
|
return makeOrderByClause(ex1, others)
|
||||||
@ -445,10 +253,10 @@ OrderExpression <- field:SelectProperty ws order:OrderDirection? {
|
|||||||
|
|
||||||
OrderDirection <- ("ASC"i / "DESC"i) {
|
OrderDirection <- ("ASC"i / "DESC"i) {
|
||||||
if strings.EqualFold(string(c.text), "DESC") {
|
if strings.EqualFold(string(c.text), "DESC") {
|
||||||
return parsers.OrderDirectionDesc, nil
|
return parsers.OrderDirectionDesc, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return parsers.OrderDirectionAsc, nil
|
return parsers.OrderDirectionAsc, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
Select <- "SELECT"i
|
Select <- "SELECT"i
|
||||||
@ -459,34 +267,18 @@ As <- "AS"i
|
|||||||
|
|
||||||
From <- "FROM"i
|
From <- "FROM"i
|
||||||
|
|
||||||
In <- "IN"i
|
|
||||||
|
|
||||||
Join <- "JOIN"i
|
|
||||||
|
|
||||||
Exists <- "EXISTS"i
|
|
||||||
|
|
||||||
Where <- "WHERE"i
|
Where <- "WHERE"i
|
||||||
|
|
||||||
And <- "AND"i
|
And <- "AND"i
|
||||||
|
|
||||||
Or <- "OR"i wss
|
Or <- "OR"i
|
||||||
|
|
||||||
Not <- "NOT"i
|
|
||||||
|
|
||||||
GroupBy <- "GROUP"i ws "BY"i
|
|
||||||
|
|
||||||
OrderBy <- "ORDER"i ws "BY"i
|
OrderBy <- "ORDER"i ws "BY"i
|
||||||
|
|
||||||
Offset <- "OFFSET"i
|
ComparisonOperator <- ("=" / "!=" / "<" / "<=" / ">" / ">=") {
|
||||||
|
|
||||||
ComparisonOperator <- ("<=" / ">=" / "=" / "!=" / "<" / ">") {
|
|
||||||
return string(c.text), nil
|
return string(c.text), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
AddOrSubtractOperation <- ("+" / "-") { return string(c.text), nil }
|
|
||||||
|
|
||||||
MultiplyOrDivideOperation <- ("*" / "/") { return string(c.text), nil }
|
|
||||||
|
|
||||||
Literal <- FloatLiteral / IntegerLiteral / StringLiteral / BooleanLiteral / ParameterConstant / NullConstant
|
Literal <- FloatLiteral / IntegerLiteral / StringLiteral / BooleanLiteral / ParameterConstant / NullConstant
|
||||||
|
|
||||||
ParameterConstant <- "@" Identifier {
|
ParameterConstant <- "@" Identifier {
|
||||||
@ -514,10 +306,7 @@ BooleanLiteral <- ("true"i / "false"i) {
|
|||||||
FunctionCall <- StringFunctions
|
FunctionCall <- StringFunctions
|
||||||
/ TypeCheckingFunctions
|
/ TypeCheckingFunctions
|
||||||
/ ArrayFunctions
|
/ ArrayFunctions
|
||||||
/ ConditionalFunctions
|
|
||||||
/ InFunction
|
/ InFunction
|
||||||
/ AggregateFunctions
|
|
||||||
/ MathFunctions
|
|
||||||
|
|
||||||
StringFunctions <- StringEqualsExpression
|
StringFunctions <- StringEqualsExpression
|
||||||
/ ToStringExpression
|
/ ToStringExpression
|
||||||
@ -547,60 +336,12 @@ TypeCheckingFunctions <- IsDefined
|
|||||||
/ IsPrimitive
|
/ IsPrimitive
|
||||||
/ IsString
|
/ IsString
|
||||||
|
|
||||||
AggregateFunctions <- AvgAggregateExpression
|
|
||||||
/ CountAggregateExpression
|
|
||||||
/ MaxAggregateExpression
|
|
||||||
/ MinAggregateExpression
|
|
||||||
/ SumAggregateExpression
|
|
||||||
|
|
||||||
ArrayFunctions <- ArrayConcatExpression
|
ArrayFunctions <- ArrayConcatExpression
|
||||||
/ ArrayContainsExpression
|
|
||||||
/ ArrayContainsAnyExpression
|
|
||||||
/ ArrayContainsAllExpression
|
|
||||||
/ ArrayLengthExpression
|
/ ArrayLengthExpression
|
||||||
/ ArraySliceExpression
|
/ ArraySliceExpression
|
||||||
/ SetIntersectExpression
|
/ SetIntersectExpression
|
||||||
/ SetUnionExpression
|
/ SetUnionExpression
|
||||||
|
|
||||||
ConditionalFunctions <- IifExpression
|
|
||||||
|
|
||||||
MathFunctions <- MathAbsExpression
|
|
||||||
/ MathAcosExpression
|
|
||||||
/ MathAsinExpression
|
|
||||||
/ MathAtanExpression
|
|
||||||
/ MathCeilingExpression
|
|
||||||
/ MathCosExpression
|
|
||||||
/ MathCotExpression
|
|
||||||
/ MathDegreesExpression
|
|
||||||
/ MathExpExpression
|
|
||||||
/ MathFloorExpression
|
|
||||||
/ MathIntBitNotExpression
|
|
||||||
/ MathLog10Expression
|
|
||||||
/ MathRadiansExpression
|
|
||||||
/ MathRoundExpression
|
|
||||||
/ MathSignExpression
|
|
||||||
/ MathSinExpression
|
|
||||||
/ MathSqrtExpression
|
|
||||||
/ MathSquareExpression
|
|
||||||
/ MathTanExpression
|
|
||||||
/ MathTruncExpression
|
|
||||||
/ MathAtn2Expression
|
|
||||||
/ MathIntAddExpression
|
|
||||||
/ MathIntBitAndExpression
|
|
||||||
/ MathIntBitLeftShiftExpression
|
|
||||||
/ MathIntBitOrExpression
|
|
||||||
/ MathIntBitRightShiftExpression
|
|
||||||
/ MathIntBitXorExpression
|
|
||||||
/ MathIntDivExpression
|
|
||||||
/ MathIntModExpression
|
|
||||||
/ MathIntMulExpression
|
|
||||||
/ MathIntSubExpression
|
|
||||||
/ MathPowerExpression
|
|
||||||
/ MathLogExpression
|
|
||||||
/ MathNumberBinExpression
|
|
||||||
/ MathPiExpression
|
|
||||||
/ MathRandExpression
|
|
||||||
|
|
||||||
UpperExpression <- "UPPER"i ws "(" ex:SelectItem ")" {
|
UpperExpression <- "UPPER"i ws "(" ex:SelectItem ")" {
|
||||||
return createFunctionCall(parsers.FunctionCallUpper, []interface{}{ex})
|
return createFunctionCall(parsers.FunctionCallUpper, []interface{}{ex})
|
||||||
}
|
}
|
||||||
@ -728,18 +469,6 @@ ArrayConcatExpression <- "ARRAY_CONCAT"i ws "(" ws arrays:SelectItem others:(ws
|
|||||||
return createFunctionCall(parsers.FunctionCallArrayConcat, append([]interface{}{arrays}, others.([]interface{})...))
|
return createFunctionCall(parsers.FunctionCallArrayConcat, append([]interface{}{arrays}, others.([]interface{})...))
|
||||||
}
|
}
|
||||||
|
|
||||||
ArrayContainsExpression <- "ARRAY_CONTAINS"i ws "(" ws array:SelectItem ws "," ws item:SelectItem partialMatch:(ws "," ws ex:SelectItem { return ex, nil })? ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallArrayContains, []interface{}{array, item, partialMatch})
|
|
||||||
}
|
|
||||||
|
|
||||||
ArrayContainsAnyExpression <- "ARRAY_CONTAINS_ANY"i ws "(" ws array:SelectItem items:(ws "," ws ex:SelectItem { return ex, nil })+ ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallArrayContainsAny, append([]interface{}{array}, items.([]interface{})...))
|
|
||||||
}
|
|
||||||
|
|
||||||
ArrayContainsAllExpression <- "ARRAY_CONTAINS_ALL"i ws "(" ws array:SelectItem items:(ws "," ws ex:SelectItem { return ex, nil })+ ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallArrayContainsAll, append([]interface{}{array}, items.([]interface{})...))
|
|
||||||
}
|
|
||||||
|
|
||||||
ArrayLengthExpression <- "ARRAY_LENGTH"i ws "(" ws array:SelectItem ws ")" {
|
ArrayLengthExpression <- "ARRAY_LENGTH"i ws "(" ws array:SelectItem ws ")" {
|
||||||
return createFunctionCall(parsers.FunctionCallArrayLength, []interface{}{array})
|
return createFunctionCall(parsers.FunctionCallArrayLength, []interface{}{array})
|
||||||
}
|
}
|
||||||
@ -756,77 +485,8 @@ SetUnionExpression <- "SetUnion"i ws "(" ws set1:SelectItem ws "," ws set2:Selec
|
|||||||
return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2})
|
return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2})
|
||||||
}
|
}
|
||||||
|
|
||||||
IifExpression <- "IIF"i ws "(" ws condition:SelectItem ws "," ws trueValue:SelectItem ws "," ws falseValue:SelectItem ws ")" {
|
InFunction <- ex1:SelectProperty ws "IN"i ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
|
||||||
return createFunctionCall(parsers.FunctionCallIif, []interface{}{condition, trueValue, falseValue})
|
|
||||||
}
|
|
||||||
|
|
||||||
MathAbsExpression <- "ABS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAbs, []interface{}{ex}) }
|
|
||||||
MathAcosExpression <- "ACOS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAcos, []interface{}{ex}) }
|
|
||||||
MathAsinExpression <- "ASIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAsin, []interface{}{ex}) }
|
|
||||||
MathAtanExpression <- "ATAN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAtan, []interface{}{ex}) }
|
|
||||||
MathCeilingExpression <- "CEILING"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathCeiling, []interface{}{ex}) }
|
|
||||||
MathCosExpression <- "COS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathCos, []interface{}{ex}) }
|
|
||||||
MathCotExpression <- "COT"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathCot, []interface{}{ex}) }
|
|
||||||
MathDegreesExpression <- "DEGREES"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathDegrees, []interface{}{ex}) }
|
|
||||||
MathExpExpression <- "EXP"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathExp, []interface{}{ex}) }
|
|
||||||
MathFloorExpression <- "FLOOR"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathFloor, []interface{}{ex}) }
|
|
||||||
MathIntBitNotExpression <- "IntBitNot"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitNot, []interface{}{ex}) }
|
|
||||||
MathLog10Expression <- "LOG10"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathLog10, []interface{}{ex}) }
|
|
||||||
MathRadiansExpression <- "RADIANS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathRadians, []interface{}{ex}) }
|
|
||||||
MathRoundExpression <- "ROUND"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathRound, []interface{}{ex}) }
|
|
||||||
MathSignExpression <- "SIGN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSign, []interface{}{ex}) }
|
|
||||||
MathSinExpression <- "SIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSin, []interface{}{ex}) }
|
|
||||||
MathSqrtExpression <- "SQRT"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSqrt, []interface{}{ex}) }
|
|
||||||
MathSquareExpression <- "SQUARE"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSquare, []interface{}{ex}) }
|
|
||||||
MathTanExpression <- "TAN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathTan, []interface{}{ex}) }
|
|
||||||
MathTruncExpression <- "TRUNC"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathTrunc, []interface{}{ex}) }
|
|
||||||
|
|
||||||
MathAtn2Expression <- "ATN2"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAtn2, []interface{}{set1, set2}) }
|
|
||||||
MathIntAddExpression <- "IntAdd"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntAdd, []interface{}{set1, set2}) }
|
|
||||||
MathIntBitAndExpression <- "IntBitAnd"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitAnd, []interface{}{set1, set2}) }
|
|
||||||
MathIntBitLeftShiftExpression <- "IntBitLeftShift"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitLeftShift, []interface{}{set1, set2}) }
|
|
||||||
MathIntBitOrExpression <- "IntBitOr"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitOr, []interface{}{set1, set2}) }
|
|
||||||
MathIntBitRightShiftExpression <- "IntBitRightShift"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitRightShift, []interface{}{set1, set2}) }
|
|
||||||
MathIntBitXorExpression <- "IntBitXor"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitXor, []interface{}{set1, set2}) }
|
|
||||||
MathIntDivExpression <- "IntDiv"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntDiv, []interface{}{set1, set2}) }
|
|
||||||
MathIntModExpression <- "IntMod"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntMod, []interface{}{set1, set2}) }
|
|
||||||
MathIntMulExpression <- "IntMul"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntMul, []interface{}{set1, set2}) }
|
|
||||||
MathIntSubExpression <- "IntSub"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntSub, []interface{}{set1, set2}) }
|
|
||||||
MathPowerExpression <- "POWER"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathPower, []interface{}{set1, set2}) }
|
|
||||||
|
|
||||||
MathLogExpression <- "LOG"i ws "(" ws ex1:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallMathLog, append([]interface{}{ex1}, others.([]interface{})...))
|
|
||||||
}
|
|
||||||
MathNumberBinExpression <- "NumberBin"i ws "(" ws ex1:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallMathNumberBin, append([]interface{}{ex1}, others.([]interface{})...))
|
|
||||||
}
|
|
||||||
MathPiExpression <- "PI"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathPi, []interface{}{}) }
|
|
||||||
MathRandExpression <- "RAND"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathRand, []interface{}{}) }
|
|
||||||
|
|
||||||
InFunction <- ex1:SelectProperty ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
|
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
|
||||||
} / "(" ws ex1:SelectItem ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
|
|
||||||
}
|
|
||||||
|
|
||||||
AvgAggregateExpression <- "AVG"i "(" ws ex:SelectItem ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallAggregateAvg, []interface{}{ex})
|
|
||||||
}
|
|
||||||
|
|
||||||
CountAggregateExpression <- "COUNT"i "(" ws ex:SelectItem ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallAggregateCount, []interface{}{ex})
|
|
||||||
}
|
|
||||||
|
|
||||||
MaxAggregateExpression <- "MAX"i "(" ws ex:SelectItem ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallAggregateMax, []interface{}{ex})
|
|
||||||
}
|
|
||||||
|
|
||||||
MinAggregateExpression <- "MIN"i "(" ws ex:SelectItem ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallAggregateMin, []interface{}{ex})
|
|
||||||
}
|
|
||||||
|
|
||||||
SumAggregateExpression <- "SUM"i "(" ws ex:SelectItem ws ")" {
|
|
||||||
return createFunctionCall(parsers.FunctionCallAggregateSum, []interface{}{ex})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Integer <- [0-9]+ {
|
Integer <- [0-9]+ {
|
||||||
@ -852,6 +512,4 @@ non_escape_character <- !(escape_character) char:.
|
|||||||
|
|
||||||
ws <- [ \t\n\r]*
|
ws <- [ \t\n\r]*
|
||||||
|
|
||||||
wss <- [ \t\n\r]+
|
|
||||||
|
|
||||||
EOF <- !.
|
EOF <- !.
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
"github.com/pikami/cosmium/parsers"
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Parse_Select(t *testing.T) {
|
func Test_Parse_Select(t *testing.T) {
|
||||||
@ -18,35 +17,7 @@ func Test_Parse_Select(t *testing.T) {
|
|||||||
{Path: []string{"c", "id"}},
|
{Path: []string{"c", "id"}},
|
||||||
{Path: []string{"c", "pk"}},
|
{Path: []string{"c", "pk"}},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse SELECT with query parameters as accessor", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id, c[@param] FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
{Path: []string{"c", "@param"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse SELECT DISTINCT", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT DISTINCT c.id FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
Distinct: true,
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -59,27 +30,12 @@ func Test_Parse_Select(t *testing.T) {
|
|||||||
SelectItems: []parsers.SelectItem{
|
SelectItems: []parsers.SelectItem{
|
||||||
{Path: []string{"c", "id"}},
|
{Path: []string{"c", "id"}},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Count: 1,
|
Count: 1,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should parse SELECT OFFSET", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id FROM c OFFSET 3 LIMIT 5`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
Count: 5,
|
|
||||||
Offset: 3,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse SELECT VALUE", func(t *testing.T) {
|
t.Run("Should parse SELECT VALUE", func(t *testing.T) {
|
||||||
testQueryParse(
|
testQueryParse(
|
||||||
t,
|
t,
|
||||||
@ -88,7 +44,7 @@ func Test_Parse_Select(t *testing.T) {
|
|||||||
SelectItems: []parsers.SelectItem{
|
SelectItems: []parsers.SelectItem{
|
||||||
{Path: []string{"c", "id"}, IsTopLevel: true},
|
{Path: []string{"c", "id"}, IsTopLevel: true},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -101,20 +57,7 @@ func Test_Parse_Select(t *testing.T) {
|
|||||||
SelectItems: []parsers.SelectItem{
|
SelectItems: []parsers.SelectItem{
|
||||||
{Path: []string{"c"}, IsTopLevel: true},
|
{Path: []string{"c"}, IsTopLevel: true},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse SELECT c", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c"}, IsTopLevel: false},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -134,27 +77,7 @@ func Test_Parse_Select(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse SELECT with alias", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT
|
|
||||||
c.id AS aliasWithAs,
|
|
||||||
c.pk aliasWithoutAs
|
|
||||||
FROM root c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Alias: "aliasWithAs", Path: []string{"c", "id"}},
|
|
||||||
{Alias: "aliasWithoutAs", Path: []string{"c", "pk"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "c",
|
|
||||||
SelectItem: parsers.SelectItem{Alias: "c", Path: []string{"root"}},
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -174,93 +97,7 @@ func Test_Parse_Select(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse SELECT empty object", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT {} AS obj FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Alias: "obj",
|
|
||||||
Type: parsers.SelectItemTypeObject,
|
|
||||||
SelectItems: []parsers.SelectItem{},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse comparison expressions in SELECT", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c["id"] = "123", c["pk"] > 456 FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeExpression,
|
|
||||||
Value: parsers.ComparisonExpression{
|
|
||||||
Operation: "=",
|
|
||||||
Left: testutils.SelectItem_Path("c", "id"),
|
|
||||||
Right: testutils.SelectItem_Constant_String("123"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeExpression,
|
|
||||||
Value: parsers.ComparisonExpression{
|
|
||||||
Operation: ">",
|
|
||||||
Left: testutils.SelectItem_Path("c", "pk"),
|
|
||||||
Right: testutils.SelectItem_Constant_Int(456),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse logical expressions in SELECT", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c["id"] = "123" OR c["pk"] > 456, c["isCool"] AND c["hasRizz"] AS isRizzler FROM c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeExpression,
|
|
||||||
Value: parsers.LogicalExpression{
|
|
||||||
Operation: parsers.LogicalExpressionTypeOr,
|
|
||||||
Expressions: []interface{}{
|
|
||||||
parsers.ComparisonExpression{
|
|
||||||
Operation: "=",
|
|
||||||
Left: testutils.SelectItem_Path("c", "id"),
|
|
||||||
Right: testutils.SelectItem_Constant_String("123"),
|
|
||||||
},
|
|
||||||
parsers.ComparisonExpression{
|
|
||||||
Operation: ">",
|
|
||||||
Left: testutils.SelectItem_Path("c", "pk"),
|
|
||||||
Right: testutils.SelectItem_Constant_Int(456),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Type: parsers.SelectItemTypeExpression,
|
|
||||||
Alias: "isRizzler",
|
|
||||||
Value: parsers.LogicalExpression{
|
|
||||||
Operation: parsers.LogicalExpressionTypeAnd,
|
|
||||||
Expressions: []interface{}{
|
|
||||||
testutils.SelectItem_Path("c", "isCool"),
|
|
||||||
testutils.SelectItem_Path("c", "hasRizz"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
"github.com/pikami/cosmium/parsers"
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Execute_StringFunctions(t *testing.T) {
|
func Test_Execute_StringFunctions(t *testing.T) {
|
||||||
@ -24,13 +23,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "123",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeBoolean,
|
||||||
|
Value: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -50,13 +61,19 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "123",
|
||||||
|
},
|
||||||
|
},
|
||||||
nil,
|
nil,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -76,7 +93,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "123",
|
||||||
|
},
|
||||||
|
},
|
||||||
parsers.SelectItem{
|
parsers.SelectItem{
|
||||||
Path: []string{"c", "pk"},
|
Path: []string{"c", "pk"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
@ -85,7 +108,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -105,13 +128,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "123",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeBoolean,
|
||||||
|
Value: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -131,13 +166,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "123",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeBoolean,
|
||||||
|
Value: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -157,13 +204,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("123"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Bool(true),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "123",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeBoolean,
|
||||||
|
Value: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -183,13 +242,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("2"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Int(1),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "2",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -213,7 +284,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -237,7 +308,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -261,7 +332,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -281,12 +352,18 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_Int(5),
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 5,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -310,7 +387,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -334,7 +411,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -354,13 +431,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_String("old"),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_String("new"),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "old",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeString,
|
||||||
|
Value: "new",
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -380,12 +469,18 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_Int(3),
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 3,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -409,7 +504,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -429,12 +524,18 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_Int(3),
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 3,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -458,7 +559,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -478,13 +579,25 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
Path: []string{"c", "id"},
|
Path: []string{"c", "id"},
|
||||||
Type: parsers.SelectItemTypeField,
|
Type: parsers.SelectItemTypeField,
|
||||||
},
|
},
|
||||||
testutils.SelectItem_Constant_Int(1),
|
parsers.SelectItem{
|
||||||
testutils.SelectItem_Constant_Int(5),
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{
|
||||||
|
Type: parsers.ConstantTypeInteger,
|
||||||
|
Value: 5,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
@ -508,7 +621,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -1,122 +0,0 @@
|
|||||||
package nosql_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Test_Parse_SubQuery(t *testing.T) {
|
|
||||||
|
|
||||||
t.Run("Should parse FROM subquery", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id FROM (SELECT VALUE cc["info"] FROM cc) AS c`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "c",
|
|
||||||
SelectItem: parsers.SelectItem{
|
|
||||||
Alias: "c",
|
|
||||||
Type: parsers.SelectItemTypeSubQuery,
|
|
||||||
Value: parsers.SelectStmt{
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("cc")},
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"cc", "info"}, IsTopLevel: true},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse JOIN subquery", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id, cc.name FROM c JOIN (SELECT tag.name FROM tag IN c.tags) AS cc`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{Path: []string{"c", "id"}},
|
|
||||||
{Path: []string{"cc", "name"}},
|
|
||||||
},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
JoinItems: []parsers.JoinItem{
|
|
||||||
{
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "cc",
|
|
||||||
},
|
|
||||||
SelectItem: parsers.SelectItem{
|
|
||||||
Alias: "cc",
|
|
||||||
Type: parsers.SelectItemTypeSubQuery,
|
|
||||||
Value: parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Path("tag", "name"),
|
|
||||||
},
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "tag",
|
|
||||||
SelectItem: testutils.SelectItem_Path("c", "tags"),
|
|
||||||
IsInSelect: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should parse JOIN EXISTS subquery", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`SELECT c.id
|
|
||||||
FROM c
|
|
||||||
JOIN (
|
|
||||||
SELECT VALUE EXISTS(SELECT tag.name FROM tag IN c.tags)
|
|
||||||
) AS hasTags
|
|
||||||
WHERE hasTags`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Path("c", "id"),
|
|
||||||
},
|
|
||||||
Table: parsers.Table{
|
|
||||||
SelectItem: testutils.SelectItem_Path("c"),
|
|
||||||
},
|
|
||||||
JoinItems: []parsers.JoinItem{
|
|
||||||
{
|
|
||||||
Table: parsers.Table{Value: "hasTags"},
|
|
||||||
SelectItem: parsers.SelectItem{
|
|
||||||
Alias: "hasTags",
|
|
||||||
Type: parsers.SelectItemTypeSubQuery,
|
|
||||||
Value: parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
{
|
|
||||||
IsTopLevel: true,
|
|
||||||
Type: parsers.SelectItemTypeSubQuery,
|
|
||||||
Value: parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{
|
|
||||||
testutils.SelectItem_Path("tag", "name"),
|
|
||||||
},
|
|
||||||
Table: parsers.Table{
|
|
||||||
Value: "tag",
|
|
||||||
SelectItem: testutils.SelectItem_Path("c", "tags"),
|
|
||||||
IsInSelect: true,
|
|
||||||
},
|
|
||||||
Exists: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Filters: parsers.SelectItem{
|
|
||||||
Path: []string{"hasTags"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
@ -4,7 +4,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
"github.com/pikami/cosmium/parsers"
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
||||||
@ -28,7 +27,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -64,7 +63,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -100,7 +99,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -136,7 +135,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -172,7 +171,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -208,7 +207,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -244,7 +243,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -280,7 +279,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -316,7 +315,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
@ -352,7 +351,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.SelectItem{
|
Filters: parsers.SelectItem{
|
||||||
Type: parsers.SelectItemTypeFunctionCall,
|
Type: parsers.SelectItemTypeFunctionCall,
|
||||||
Value: parsers.FunctionCall{
|
Value: parsers.FunctionCall{
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
"github.com/pikami/cosmium/parsers"
|
||||||
testutils "github.com/pikami/cosmium/test_utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Parse_Were(t *testing.T) {
|
func Test_Parse_Were(t *testing.T) {
|
||||||
@ -19,11 +18,14 @@ func Test_Parse_Were(t *testing.T) {
|
|||||||
SelectItems: []parsers.SelectItem{
|
SelectItems: []parsers.SelectItem{
|
||||||
{Path: []string{"c", "id"}},
|
{Path: []string{"c", "id"}},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.ComparisonExpression{
|
Filters: parsers.ComparisonExpression{
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
|
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
|
||||||
Right: testutils.SelectItem_Constant_Bool(true),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeBoolean, Value: true},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
@ -42,19 +44,25 @@ func Test_Parse_Were(t *testing.T) {
|
|||||||
{Path: []string{"c", "_rid"}},
|
{Path: []string{"c", "_rid"}},
|
||||||
{Path: []string{"c", "_ts"}},
|
{Path: []string{"c", "_ts"}},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.LogicalExpression{
|
Filters: parsers.LogicalExpression{
|
||||||
Operation: parsers.LogicalExpressionTypeOr,
|
Operation: parsers.LogicalExpressionTypeOr,
|
||||||
Expressions: []interface{}{
|
Expressions: []interface{}{
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "id"}},
|
Left: parsers.SelectItem{Path: []string{"c", "id"}},
|
||||||
Right: testutils.SelectItem_Constant_String("12345"),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "12345"},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "pk"}},
|
Left: parsers.SelectItem{Path: []string{"c", "pk"}},
|
||||||
Right: testutils.SelectItem_Constant_Int(123),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeInteger, Value: 123},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -67,19 +75,22 @@ func Test_Parse_Were(t *testing.T) {
|
|||||||
t,
|
t,
|
||||||
`select c.id
|
`select c.id
|
||||||
FROM c
|
FROM c
|
||||||
WHERE c.isCool=true AND (c.id = "123" OR c.id <= "456")`,
|
WHERE c.isCool=true AND (c.id = "123" OR c.id = "456")`,
|
||||||
parsers.SelectStmt{
|
parsers.SelectStmt{
|
||||||
SelectItems: []parsers.SelectItem{
|
SelectItems: []parsers.SelectItem{
|
||||||
{Path: []string{"c", "id"}},
|
{Path: []string{"c", "id"}},
|
||||||
},
|
},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.LogicalExpression{
|
Filters: parsers.LogicalExpression{
|
||||||
Operation: parsers.LogicalExpressionTypeAnd,
|
Operation: parsers.LogicalExpressionTypeAnd,
|
||||||
Expressions: []interface{}{
|
Expressions: []interface{}{
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
|
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
|
||||||
Right: testutils.SelectItem_Constant_Bool(true),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeBoolean, Value: true},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
parsers.LogicalExpression{
|
parsers.LogicalExpression{
|
||||||
Operation: parsers.LogicalExpressionTypeOr,
|
Operation: parsers.LogicalExpressionTypeOr,
|
||||||
@ -87,12 +98,18 @@ func Test_Parse_Were(t *testing.T) {
|
|||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "id"}},
|
Left: parsers.SelectItem{Path: []string{"c", "id"}},
|
||||||
Right: testutils.SelectItem_Constant_String("123"),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "123"},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Operation: "<=",
|
Operation: "=",
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "id"}},
|
Left: parsers.SelectItem{Path: []string{"c", "id"}},
|
||||||
Right: testutils.SelectItem_Constant_String("456"),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "456"},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -114,32 +131,47 @@ func Test_Parse_Were(t *testing.T) {
|
|||||||
AND c.param=@param_id1`,
|
AND c.param=@param_id1`,
|
||||||
parsers.SelectStmt{
|
parsers.SelectStmt{
|
||||||
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
|
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
Table: parsers.Table{Value: "c"},
|
||||||
Filters: parsers.LogicalExpression{
|
Filters: parsers.LogicalExpression{
|
||||||
Expressions: []interface{}{
|
Expressions: []interface{}{
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "boolean"}},
|
Left: parsers.SelectItem{Path: []string{"c", "boolean"}},
|
||||||
Right: testutils.SelectItem_Constant_Bool(true),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeBoolean, Value: true},
|
||||||
|
},
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
},
|
},
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "integer"}},
|
Left: parsers.SelectItem{Path: []string{"c", "integer"}},
|
||||||
Right: testutils.SelectItem_Constant_Int(1),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeInteger, Value: 1},
|
||||||
|
},
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
},
|
},
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "float"}},
|
Left: parsers.SelectItem{Path: []string{"c", "float"}},
|
||||||
Right: testutils.SelectItem_Constant_Float(6.9),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeFloat, Value: 6.9},
|
||||||
|
},
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
},
|
},
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "string"}},
|
Left: parsers.SelectItem{Path: []string{"c", "string"}},
|
||||||
Right: testutils.SelectItem_Constant_String("hello"),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "hello"},
|
||||||
|
},
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
},
|
},
|
||||||
parsers.ComparisonExpression{
|
parsers.ComparisonExpression{
|
||||||
Left: parsers.SelectItem{Path: []string{"c", "param"}},
|
Left: parsers.SelectItem{Path: []string{"c", "param"}},
|
||||||
Right: testutils.SelectItem_Constant_Parameter("@param_id1"),
|
Right: parsers.SelectItem{
|
||||||
|
Type: parsers.SelectItemTypeConstant,
|
||||||
|
Value: parsers.Constant{Type: parsers.ConstantTypeParameterConstant, Value: "@param_id1"},
|
||||||
|
},
|
||||||
Operation: "=",
|
Operation: "=",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -148,21 +180,4 @@ func Test_Parse_Were(t *testing.T) {
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should correctly parse NOT conditions", func(t *testing.T) {
|
|
||||||
testQueryParse(
|
|
||||||
t,
|
|
||||||
`select c.id
|
|
||||||
FROM c
|
|
||||||
WHERE NOT c.boolean`,
|
|
||||||
parsers.SelectStmt{
|
|
||||||
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
|
|
||||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
|
||||||
Filters: parsers.SelectItem{
|
|
||||||
Path: []string{"c", "boolean"},
|
|
||||||
Invert: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
@ -1,121 +0,0 @@
|
|||||||
package memoryexecutor
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math"
|
|
||||||
|
|
||||||
"github.com/pikami/cosmium/parsers"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (r rowContext) aggregate_Avg(arguments []interface{}) interface{} {
|
|
||||||
selectExpression := arguments[0].(parsers.SelectItem)
|
|
||||||
sum := 0.0
|
|
||||||
count := 0
|
|
||||||
|
|
||||||
for _, item := range r.grouppedRows {
|
|
||||||
value := item.resolveSelectItem(selectExpression)
|
|
||||||
if numericValue, ok := value.(float64); ok {
|
|
||||||
sum += numericValue
|
|
||||||
count++
|
|
||||||
} else if numericValue, ok := value.(int); ok {
|
|
||||||
sum += float64(numericValue)
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if count > 0 {
|
|
||||||
return sum / float64(count)
|
|
||||||
} else {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r rowContext) aggregate_Count(arguments []interface{}) interface{} {
|
|
||||||
selectExpression := arguments[0].(parsers.SelectItem)
|
|
||||||
count := 0
|
|
||||||
|
|
||||||
for _, item := range r.grouppedRows {
|
|
||||||
value := item.resolveSelectItem(selectExpression)
|
|
||||||
if value != nil {
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return count
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r rowContext) aggregate_Max(arguments []interface{}) interface{} {
|
|
||||||
selectExpression := arguments[0].(parsers.SelectItem)
|
|
||||||
max := 0.0
|
|
||||||
count := 0
|
|
||||||
|
|
||||||
for _, item := range r.grouppedRows {
|
|
||||||
value := item.resolveSelectItem(selectExpression)
|
|
||||||
if numericValue, ok := value.(float64); ok {
|
|
||||||
if numericValue > max {
|
|
||||||
max = numericValue
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
} else if numericValue, ok := value.(int); ok {
|
|
||||||
if float64(numericValue) > max {
|
|
||||||
max = float64(numericValue)
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if count > 0 {
|
|
||||||
return max
|
|
||||||
} else {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r rowContext) aggregate_Min(arguments []interface{}) interface{} {
|
|
||||||
selectExpression := arguments[0].(parsers.SelectItem)
|
|
||||||
min := math.MaxFloat64
|
|
||||||
count := 0
|
|
||||||
|
|
||||||
for _, item := range r.grouppedRows {
|
|
||||||
value := item.resolveSelectItem(selectExpression)
|
|
||||||
if numericValue, ok := value.(float64); ok {
|
|
||||||
if numericValue < min {
|
|
||||||
min = numericValue
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
} else if numericValue, ok := value.(int); ok {
|
|
||||||
if float64(numericValue) < min {
|
|
||||||
min = float64(numericValue)
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if count > 0 {
|
|
||||||
return min
|
|
||||||
} else {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r rowContext) aggregate_Sum(arguments []interface{}) interface{} {
|
|
||||||
selectExpression := arguments[0].(parsers.SelectItem)
|
|
||||||
sum := 0.0
|
|
||||||
count := 0
|
|
||||||
|
|
||||||
for _, item := range r.grouppedRows {
|
|
||||||
value := item.resolveSelectItem(selectExpression)
|
|
||||||
if numericValue, ok := value.(float64); ok {
|
|
||||||
sum += numericValue
|
|
||||||
count++
|
|
||||||
} else if numericValue, ok := value.(int); ok {
|
|
||||||
sum += float64(numericValue)
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if count > 0 {
|
|
||||||
return sum
|
|
||||||
} else {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user