Mirror of https://github.com/pikami/cosmium.git (synced 2025-06-08 00:20:28 +01:00)
Compare commits
76 Commits
Commits (abbreviated SHA1s):

fba9b3df5f, b743e23ff9, 11851297f5, 560ea5296d, e20a6ca7cd, 7e0c10479b, 30195fae96, 598f2837af,
28e3c0c3d8, 97eea30c97, 5fe60d831a, d309d99906, b2516eda9f, 813b9faeaa, e526b2269e, 221f029a1d,
bd4fe5abec, f062e03f0c, 058b3271b7, 1711c8fb5c, 851b3ca3a8, d27c633e1d, 3987df89c0, 6e3f4169a1,
14c5400d23, 1cf5ae92f4, 5d99b653cc, 787cdb33cf, 5caa829ac1, 887d456ad4, da1566875b, 3fee3bc816,
8657c48fc8, e080888c20, b8d79fd945, f25cb7fb03, 125f10d8a2, d6b816b55a, 12215fba76, a1793c17ab,
96d3a0a7ae, 8b8b087aab, c2c9dc03b3, d86bac7d79, 69b76c1c3e, 8e3db3e44d, f5b8453995, 928ca29fe4,
39cd9e2357, bcf4b513b6, 363f822e5a, be7a615931, 83f086a2dc, 777034181f, 84c33e3c8e, 5e677431a3,
a4659d90a9, 503e6bb8ad, e5ddc143f0, 66ea859f34, 3584f9b5ce, c7d01b4593, 2834f3f641, a6b5d32ff7,
0e98e3481a, 827046f634, 475d586dc5, 9abef691d6, 62dcbc1f2b, 2f42651fb7, 20af73ee9c, 3bdff9b643,
b808e97c72, e623a563f4, 2cd61aa620, 0cec7816c1
.github/workflows/compile-shared-libraries.yml | 31 (vendored, new file)

@@ -0,0 +1,31 @@
+name: Cross-Compile Shared Libraries
+
+on:
+  workflow_dispatch:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Cross-Compile with xgo
+        uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
+        with:
+          xgo_version: latest
+          go_version: 1.24.0
+          dest: dist
+          pkg: sharedlibrary
+          prefix: cosmium
+          targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
+          v: true
+          buildmode: c-shared
+          buildvcs: true
+
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: shared-libraries
+          path: dist/*
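The `c-shared` build mode used by this workflow only produces a usable library when the compiled package exports C-callable symbols through cgo. The repository's actual `sharedlibrary` package is not shown in this comparison; the sketch below only illustrates the general shape of such an entry point, and the exported function name is a made-up placeholder, not Cosmium's real API.

```go
package main

// Minimal buildmode=c-shared sketch. The exported name below is illustrative
// only; it is not the real cosmium sharedlibrary surface.

import "C"

//export StartInstance
func StartInstance(configurationJSON *C.char) C.int {
	// A real implementation would parse the JSON configuration and start a
	// server instance, returning a status code to the native caller.
	_ = C.GoString(configurationJSON)
	return 0
}

// main is required by buildmode=c-shared even though it never runs when the
// library is loaded from C.
func main() {}
```

Building it with `go build -buildmode=c-shared` (or via xgo, as above) yields a `.so`/`.dll`/`.dylib` plus a generated C header for the exported symbols.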
.github/workflows/release.yml | 20 (vendored)

@@ -17,18 +17,34 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 0
 
       - name: Set up Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.21.6
+          go-version: 1.24.0
 
+      - name: Cross-Compile with xgo
+        uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
+        with:
+          xgo_version: latest
+          go_version: 1.24.0
+          dest: sharedlibrary_dist
+          pkg: sharedlibrary
+          prefix: cosmium
+          targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
+          v: true
+          buildmode: c-shared
+          buildvcs: true
+
       - name: Docker Login
         uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Run GoReleaser
-        uses: goreleaser/goreleaser-action@v5
+        uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5
        with:
           distribution: goreleaser
           version: ${{ env.GITHUB_REF_NAME }}
.gitignore | 5 (vendored)

@@ -1,2 +1,7 @@
 dist/
+sharedlibrary_dist/
 ignored/
+explorer_www/
+main
+save.json
+.vscode/
.goreleaser.yaml | 112

@@ -1,5 +1,6 @@
 builds:
   - binary: cosmium
+    main: ./cmd/server
     goos:
       - darwin
       - linux
@@ -9,11 +10,6 @@ builds:
       - arm64
     env:
       - CGO_ENABLED=0
-    ignore:
-      - goos: linux
-        goarch: arm64
-      - goos: windows
-        goarch: arm64
 
 release:
   prerelease: auto
@@ -30,13 +26,32 @@ brews:
     commit_author:
       name: pikami
       email: git@pikami.org
+    skip_upload: auto
+
+archives:
+  - id: bundle
+    format: tar.gz
+    format_overrides:
+      - goos: windows
+        format: zip
+  - id: shared-libraries
+    meta: true
+    format: "tar.gz"
+    wrap_in_directory: true
+    name_template: "{{ .ProjectName }}_{{ .Version }}_shared-libraries"
+    files:
+      - LICENSE
+      - README.md
+      - sharedlibrary_dist/**
 
 dockers:
-  - image_templates:
-      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}"
-      - "ghcr.io/pikami/{{ .ProjectName }}:latest"
+  - id: docker-linux-amd64
+    goos: linux
+    goarch: amd64
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
     dockerfile: Dockerfile
-    use: docker
+    use: buildx
     build_flag_templates:
       - "--platform=linux/amd64"
       - "--pull"
@@ -47,6 +62,85 @@ dockers:
       - "--label=org.opencontainers.image.created={{.Date}}"
      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
      - "--label=org.opencontainers.image.version={{.Version}}"
+  - id: docker-linux-arm64
+    goos: linux
+    goarch: arm64
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
+    dockerfile: Dockerfile
+    use: buildx
+    build_flag_templates:
+      - "--platform=linux/arm64"
+      - "--pull"
+      - "--label=org.opencontainers.image.title={{.ProjectName}}"
+      - "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
+      - "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
+      - "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
+      - "--label=org.opencontainers.image.created={{.Date}}"
+      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
+      - "--label=org.opencontainers.image.version={{.Version}}"
+  - id: docker-explorer-linux-amd64
+    goos: linux
+    goarch: amd64
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
+    dockerfile: Explorer.Dockerfile
+    use: buildx
+    build_flag_templates:
+      - "--platform=linux/amd64"
+      - "--pull"
+      - "--label=org.opencontainers.image.title={{.ProjectName}}"
+      - "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
+      - "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
+      - "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
+      - "--label=org.opencontainers.image.created={{.Date}}"
+      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
+      - "--label=org.opencontainers.image.version={{.Version}}"
+  - id: docker-explorer-linux-arm64
+    goos: linux
+    goarch: arm64
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
+    dockerfile: Explorer.Dockerfile
+    use: buildx
+    build_flag_templates:
+      - "--platform=linux/arm64"
+      - "--pull"
+      - "--label=org.opencontainers.image.title={{.ProjectName}}"
+      - "--label=org.opencontainers.image.description=Lightweight Cosmos DB emulator"
+      - "--label=org.opencontainers.image.url=https://github.com/pikami/cosmium"
+      - "--label=org.opencontainers.image.source=https://github.com/pikami/cosmium"
+      - "--label=org.opencontainers.image.created={{.Date}}"
+      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
+      - "--label=org.opencontainers.image.version={{.Version}}"
+
+docker_manifests:
+  - name_template: 'ghcr.io/pikami/{{ .ProjectName }}:latest'
+    skip_push: auto
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
+  - name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}'
+    skip_push: auto
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
+  - name_template: 'ghcr.io/pikami/{{ .ProjectName }}:explorer'
+    skip_push: auto
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
+  - name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer'
+    skip_push: auto
+    image_templates:
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
+      - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
 
 checksum:
   name_template: 'checksums.txt'
@@ -1,4 +1,4 @@
-FROM scratch
+FROM alpine:latest
 
 WORKDIR /app
 COPY cosmium /app/cosmium
Explorer.Dockerfile | 9 (new file)

@@ -0,0 +1,9 @@
+FROM ghcr.io/cosmiumdev/cosmos-explorer-base:latest AS explorer-base
+FROM alpine:latest
+
+COPY --from=explorer-base /cosmos-explorer /cosmos-explorer
+
+WORKDIR /app
+COPY cosmium /app/cosmium
+
+ENTRYPOINT ["/app/cosmium", "-ExplorerDir", "/cosmos-explorer"]
Makefile | 51

@@ -4,28 +4,69 @@ GOTEST=$(GOCMD) test
 GOCLEAN=$(GOCMD) clean
 
 BINARY_NAME=cosmium
+SERVER_LOCATION=./cmd/server
+
+SHARED_LIB_LOCATION=./sharedlibrary
+SHARED_LIB_OPT=-buildmode=c-shared
+XGO_TARGETS=linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
+GOVERSION=1.24.0
 
 DIST_DIR=dist
 
+SHARED_LIB_TEST_CC=gcc
+SHARED_LIB_TEST_CFLAGS=-Wall -ldl
+SHARED_LIB_TEST_TARGET=$(DIST_DIR)/sharedlibrary_test
+SHARED_LIB_TEST_DIR=./sharedlibrary/tests
+SHARED_LIB_TEST_SOURCES=$(wildcard $(SHARED_LIB_TEST_DIR)/*.c)
+
 all: test build-all
 
-build-all: build-darwin-arm64 build-darwin-amd64 build-linux-amd64 build-windows-amd64
+build-all: build-darwin-arm64 build-darwin-amd64 build-linux-amd64 build-linux-arm64 build-windows-amd64 build-windows-arm64
 
 build-darwin-arm64:
 	@echo "Building macOS ARM binary..."
-	@GOOS=darwin GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64 .
+	@GOOS=darwin GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64 $(SERVER_LOCATION)
 
 build-darwin-amd64:
 	@echo "Building macOS x64 binary..."
-	@GOOS=darwin GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-amd64 .
+	@GOOS=darwin GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-amd64 $(SERVER_LOCATION)
 
 build-linux-amd64:
 	@echo "Building Linux x64 binary..."
-	@GOOS=linux GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64 .
+	@GOOS=linux GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64 $(SERVER_LOCATION)
 
+build-linux-arm64:
+	@echo "Building Linux ARM binary..."
+	@GOOS=linux GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-linux-arm64 $(SERVER_LOCATION)
+
 build-windows-amd64:
 	@echo "Building Windows x64 binary..."
-	@GOOS=windows GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-amd64.exe .
+	@GOOS=windows GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-amd64.exe $(SERVER_LOCATION)
 
+build-windows-arm64:
+	@echo "Building Windows ARM binary..."
+	@GOOS=windows GOARCH=arm64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-arm64.exe $(SERVER_LOCATION)
+
+build-sharedlib-linux-amd64:
+	@echo "Building shared library for Linux x64..."
+	@GOOS=linux GOARCH=amd64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so $(SHARED_LIB_LOCATION)
+
+build-sharedlib-darwin-arm64:
+	@echo "Building shared library for macOS ARM..."
+	@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64.so $(SHARED_LIB_LOCATION)
+
+build-sharedlib-tests: build-sharedlib-linux-amd64
+	@echo "Building shared library tests..."
+	@$(SHARED_LIB_TEST_CC) $(SHARED_LIB_TEST_CFLAGS) -o $(SHARED_LIB_TEST_TARGET) $(SHARED_LIB_TEST_SOURCES)
+
+run-sharedlib-tests: build-sharedlib-tests
+	@echo "Running shared library tests..."
+	@$(SHARED_LIB_TEST_TARGET) $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so
+
+xgo-compile-sharedlib:
+	@echo "Building shared libraries using xgo..."
+	@mkdir -p $(DIST_DIR)
+	@xgo -targets=$(XGO_TARGETS) -go $(GOVERSION) -buildmode=c-shared -dest=$(DIST_DIR) -out=$(BINARY_NAME) -pkg=$(SHARED_LIB_LOCATION) .
+
 generate-parser-nosql:
 	pigeon -o ./parsers/nosql/nosql.go ./parsers/nosql/nosql.peg
README.md | 72

@@ -1,11 +1,13 @@
 # Cosmium
 
-Cosmium is a lightweight Cosmos DB emulator designed to facilitate local development and testing. While it aims to provide developers with a solution for running a local database during development, it's important to note that it's not 100% compatible with Cosmos DB. However, it serves as a convenient tool for E2E or integration tests during the CI/CD pipeline. Read more about compatibility [here](docs/compatibility.md).
+Cosmium is a lightweight Cosmos DB emulator designed to facilitate local development and testing. While it aims to provide developers with a solution for running a local database during development, it's important to note that it's not 100% compatible with Cosmos DB. However, it serves as a convenient tool for E2E or integration tests during the CI/CD pipeline. Read more about compatibility [here](./docs/COMPATIBILITY.md).
 
 One of Cosmium's notable features is its ability to save and load state to a single JSON file. This feature makes it easy to load different test cases or share state with other developers, enhancing collaboration and efficiency in development workflows.
 
 # Getting Started
 
 ### Installation via Homebrew
 
 You can install Cosmium using Homebrew by adding the `pikami/brew` tap and then installing the package.
 
 ```sh
@@ -23,10 +25,12 @@ You can download the latest version of Cosmium from the [GitHub Releases page](h
 
 Cosmium is available for the following platforms:
 
-* **Linux**: cosmium-linux-amd64
-* **macOS**: cosmium-darwin-amd64
-* **macOS on Apple Silicon**: cosmium-darwin-arm64
-* **Windows**: cosmium-windows-amd64.exe
+- **Linux**: cosmium-linux-amd64
+- **Linux on ARM**: cosmium-linux-arm64
+- **macOS**: cosmium-darwin-amd64
+- **macOS on Apple Silicon**: cosmium-darwin-arm64
+- **Windows**: cosmium-windows-amd64.exe
+- **Windows on ARM**: cosmium-windows-arm64.exe
 
 ### Running Cosmium
 
@@ -37,25 +41,34 @@ cosmium -Persist "./save.json"
 ```
 
 Connection String Example:
 
 ```
 AccountEndpoint=https://localhost:8081/;AccountKey=C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==;
 ```
 
 ### Running Cosmos DB Explorer
 
 If you want to run Cosmos DB Explorer alongside Cosmium, you'll need to build it yourself and point the `-ExplorerDir` argument to the dist directory. Please refer to the [Cosmos DB Explorer repository](https://github.com/Azure/cosmos-explorer) for instructions on building the application.
 
+There's also a prebuilt docker image that includes the explorer: `ghcr.io/pikami/cosmium:explorer`
+
 Once running, the explorer can be reached by navigating following URL: `https://127.0.0.1:8081/_explorer/` (might be different depending on your configuration).
 
 ### Running with docker (optional)
 
+There are two docker tags available:
+
+- ghcr.io/pikami/cosmium:latest - Cosmium core service
+- ghcr.io/pikami/cosmium:explorer - Cosmium with database explorer available on `https://127.0.0.1:8081/_explorer/`
+
 If you wan to run the application using docker, configure it using environment variables see example:
 
 ```sh
 docker run --rm \
-    -e Persist=/save.json \
+    -e COSMIUM_PERSIST=/save.json \
     -v ./save.json:/save.json \
     -p 8081:8081 \
-    ghcr.io/pikami/cosmium
+    ghcr.io/pikami/cosmium # or `ghcr.io/pikami/cosmium:explorer`
 ```
 
 ### SSL Certificate
@@ -66,24 +79,39 @@ To disable SSL and run Cosmium on HTTP instead, you can use the `-DisableTls` fl
 
 ### Other Available Arguments
 
-* **-AccountKey**: Account key for authentication (default "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==")
-* **-DisableAuth**: Disable authentication
-* **-Host**: Hostname (default "localhost")
-* **-InitialData**: Path to JSON containing initial state
-* **-Persist**: Saves data to the given path on application exit (When `-InitialData` argument is not supplied, it will try to load data from path supplied in `-Persist`)
-* **-Port**: Listen port (default 8081)
-* **-Debug**: Runs application in debug mode, this provides additional logging
+- **-AccountKey**: Account key for authentication (default "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==")
+- **-DisableAuth**: Disable authentication
+- **-Host**: Hostname (default "localhost")
+- **-InitialData**: Path to JSON containing initial state
+- **-Persist**: Saves data to the given path on application exit (When `-InitialData` argument is not supplied, it will try to load data from path supplied in `-Persist`)
+- **-Port**: Listen port (default 8081)
+- **-LogLevel**: Sets the logging level (one of: debug, info, error, silent) (default info)
+- **-DataStore**: Allows selecting [storage backend](#data-storage-backends) (default "json")
 
 These arguments allow you to configure various aspects of Cosmium's behavior according to your requirements.
 
 All mentioned arguments can also be set using environment variables:
-* **COSMIUM_ACCOUNTKEY** for `-AccountKey`
-* **COSMIUM_DISABLEAUTH** for `-DisableAuth`
-* **COSMIUM_HOST** for `-Host`
-* **COSMIUM_INITIALDATA** for `-InitialData`
-* **COSMIUM_PERSIST** for `-Persist`
-* **COSMIUM_PORT** for `-Port`
-* **COSMIUM_DEBUG** for `-Debug`
+
+- **COSMIUM_ACCOUNTKEY** for `-AccountKey`
+- **COSMIUM_DISABLEAUTH** for `-DisableAuth`
+- **COSMIUM_HOST** for `-Host`
+- **COSMIUM_INITIALDATA** for `-InitialData`
+- **COSMIUM_PERSIST** for `-Persist`
+- **COSMIUM_PORT** for `-Port`
+- **COSMIUM_LOGLEVEL** for `-LogLevel`
+
+### Data Storage Backends
+
+Cosmium supports multiple storage backends for saving, loading, and managing data at runtime.
+
+| Backend | Storage Location | Write Behavior | Memory Usage | Supports Initial JSON Load |
+|----------|--------------------------|--------------------------|----------------------|----------------------------|
+| `json` (default) | JSON file on disk 📄 | On application exit ⏳ | 🛑 More than Badger | ✅ Yes |
+| `badger` | BadgerDB database on disk ⚡ | Immediately on write 🚀 | ✅ Less than JSON | ❌ No |
+
+The `badger` backend is generally recommended as it uses less memory and writes data to disk immediately. However, if you need to load initial data from a JSON file, use the `json` backend.
+
 # License
 
 This project is [MIT licensed](./LICENSE).
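The connection string and account key documented in the README changes above can be exercised from Go. The sketch below is an assumption, not part of this change set: it uses the Azure `azcosmos` SDK against an emulator started with `-DisableTls`, so the endpoint is plain HTTP and no certificate handling is needed.

```go
package main

// Minimal sketch of connecting to a locally running Cosmium instance with the
// Azure Cosmos Go SDK (github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos).
// The SDK is an assumption; it is not part of this repository, and Cosmium is
// assumed to have been started with -DisableTls.

import (
	"context"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
)

const accountKey = "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="

func main() {
	cred, err := azcosmos.NewKeyCredential(accountKey)
	if err != nil {
		log.Fatal(err)
	}

	// -DisableTls makes Cosmium serve over plain HTTP on the default port.
	client, err := azcosmos.NewClientWithKey("http://localhost:8081/", cred, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Create a database to verify the emulator is reachable.
	if _, err := client.CreateDatabase(context.Background(), azcosmos.DatabaseProperties{ID: "db1"}, nil); err != nil {
		log.Fatal(err)
	}
}
```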
api/api_models/models.go | 24 (new file)

@@ -0,0 +1,24 @@
+package apimodels
+
+const (
+	BatchOperationTypeCreate  = "Create"
+	BatchOperationTypeDelete  = "Delete"
+	BatchOperationTypeReplace = "Replace"
+	BatchOperationTypeUpsert  = "Upsert"
+	BatchOperationTypeRead    = "Read"
+	BatchOperationTypePatch   = "Patch"
+)
+
+type BatchOperation struct {
+	OperationType string                 `json:"operationType"`
+	Id            string                 `json:"id"`
+	ResourceBody  map[string]interface{} `json:"resourceBody"`
+}
+
+type BatchOperationResult struct {
+	StatusCode    int                    `json:"statusCode"`
+	RequestCharge float64                `json:"requestCharge"`
+	ResourceBody  map[string]interface{} `json:"resourceBody"`
+	Etag          string                 `json:"etag"`
+	Message       string                 `json:"message"`
+}
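These structs describe the payload of a batch request, which the document handlers later in this change set accept when the `x-ms-cosmos-is-batch-request` header is set. A small sketch of building such a payload from Go, using only the types introduced above (the target route is not shown here and is left out on purpose):

```go
package main

// Sketch of building a batch request body from the apimodels types added in
// this change set; only the struct shapes come from api/api_models/models.go.

import (
	"encoding/json"
	"fmt"

	apimodels "github.com/pikami/cosmium/api/api_models"
)

func main() {
	operations := []apimodels.BatchOperation{
		{
			OperationType: apimodels.BatchOperationTypeCreate,
			ResourceBody:  map[string]interface{}{"id": "doc1", "value": 42},
		},
		{
			OperationType: apimodels.BatchOperationTypeRead,
			Id:            "doc2",
		},
	}

	body, err := json.Marshal(operations)
	if err != nil {
		panic(err)
	}

	// POST this body to the collection's documents endpoint with the
	// x-ms-cosmos-is-batch-request: true header set.
	fmt.Println(string(body))
}
```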
api/api_server.go | 39 (new file)

@@ -0,0 +1,39 @@
+package api
+
+import (
+	"github.com/gin-gonic/gin"
+	"github.com/pikami/cosmium/api/config"
+	"github.com/pikami/cosmium/internal/datastore"
+)
+
+type ApiServer struct {
+	stopServer       chan interface{}
+	onServerShutdown chan interface{}
+	isActive         bool
+	router           *gin.Engine
+	config           *config.ServerConfig
+}
+
+func NewApiServer(dataStore datastore.DataStore, config *config.ServerConfig) *ApiServer {
+	stopChan := make(chan interface{})
+	onServerShutdownChan := make(chan interface{})
+
+	apiServer := &ApiServer{
+		stopServer:       stopChan,
+		onServerShutdown: onServerShutdownChan,
+		config:           config,
+	}
+
+	apiServer.CreateRouter(dataStore)
+
+	return apiServer
+}
+
+func (s *ApiServer) GetRouter() *gin.Engine {
+	return s.router
+}
+
+func (s *ApiServer) Stop() {
+	s.stopServer <- true
+	<-s.onServerShutdown
+}
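`Stop()` relies on a two-channel handshake: the send on `stopServer` asks the serving goroutine to shut down, and the receive on `onServerShutdown` blocks until that goroutine confirms it has finished. A stripped-down, stand-alone sketch of the same pattern (the worker goroutine below stands in for the HTTP serving loop, which is not shown in this hunk):

```go
package main

// Stand-alone sketch of the stopServer/onServerShutdown handshake used by
// ApiServer.Stop.

import "fmt"

func main() {
	stopServer := make(chan interface{})
	onServerShutdown := make(chan interface{})

	// The "server" goroutine waits for a stop signal, cleans up, then
	// confirms the shutdown so the caller can safely return.
	go func() {
		<-stopServer
		fmt.Println("shutting down")
		onServerShutdown <- true
	}()

	// Equivalent of ApiServer.Stop(): request shutdown, wait for confirmation.
	stopServer <- true
	<-onServerShutdown
	fmt.Println("stopped")
}
```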
@@ -5,16 +5,22 @@ import (
 	"fmt"
 	"os"
 	"strings"
 
+	"github.com/pikami/cosmium/internal/logger"
 )
 
 const (
 	DefaultAccountKey = "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw=="
 	EnvPrefix         = "COSMIUM_"
+	ExplorerBaseUrlLocation = "/_explorer"
 )
 
-var Config = ServerConfig{}
+const (
+	DataStoreJson   = "json"
+	DataStoreBadger = "badger"
+)
 
-func ParseFlags() {
+func ParseFlags() ServerConfig {
 	host := flag.String("Host", "localhost", "Hostname")
 	port := flag.Int("Port", 8081, "Listen port")
 	explorerPath := flag.String("ExplorerDir", "", "Path to cosmos-explorer files")
@@ -25,26 +31,86 @@ func ParseFlags() {
 	disableAuthentication := flag.Bool("DisableAuth", false, "Disable authentication")
 	disableTls := flag.Bool("DisableTls", false, "Disable TLS, serve over HTTP")
 	persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
-	debug := flag.Bool("Debug", false, "Runs application in debug mode, this provides additional logging")
+	logLevel := NewEnumValue("info", []string{"debug", "info", "error", "silent"})
+	flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))
+	dataStore := NewEnumValue("json", []string{DataStoreJson, DataStoreBadger})
+	flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %s", dataStore.AllowedValuesList()))
 
 	flag.Parse()
 	setFlagsFromEnvironment()
 
-	Config.Host = *host
-	Config.Port = *port
-	Config.ExplorerPath = *explorerPath
-	Config.TLS_CertificatePath = *tlsCertificatePath
-	Config.TLS_CertificateKey = *tlsCertificateKey
-	Config.InitialDataFilePath = *initialDataPath
-	Config.PersistDataFilePath = *persistDataPath
-	Config.DisableAuth = *disableAuthentication
-	Config.DisableTls = *disableTls
-	Config.Debug = *debug
-
-	Config.DatabaseAccount = Config.Host
-	Config.DatabaseDomain = Config.Host
-	Config.DatabaseEndpoint = fmt.Sprintf("https://%s:%d/", Config.Host, Config.Port)
-	Config.AccountKey = *accountKey
+	config := ServerConfig{}
+	config.Host = *host
+	config.Port = *port
+	config.ExplorerPath = *explorerPath
+	config.TLS_CertificatePath = *tlsCertificatePath
+	config.TLS_CertificateKey = *tlsCertificateKey
+	config.InitialDataFilePath = *initialDataPath
+	config.PersistDataFilePath = *persistDataPath
+	config.DisableAuth = *disableAuthentication
+	config.DisableTls = *disableTls
+	config.AccountKey = *accountKey
+	config.LogLevel = logLevel.value
+	config.DataStore = dataStore.value
+
+	config.PopulateCalculatedFields()
+
+	return config
+}
+
+func (c *ServerConfig) PopulateCalculatedFields() {
+	c.DatabaseAccount = c.Host
+	c.DatabaseDomain = c.Host
+	c.DatabaseEndpoint = fmt.Sprintf("https://%s:%d/", c.Host, c.Port)
+	c.ExplorerBaseUrlLocation = ExplorerBaseUrlLocation
+
+	switch c.LogLevel {
+	case "debug":
+		logger.SetLogLevel(logger.LogLevelDebug)
+	case "info":
+		logger.SetLogLevel(logger.LogLevelInfo)
+	case "error":
+		logger.SetLogLevel(logger.LogLevelError)
+	case "silent":
+		logger.SetLogLevel(logger.LogLevelSilent)
+	default:
+		logger.SetLogLevel(logger.LogLevelInfo)
+	}
+
+	fileInfo, err := os.Stat(c.PersistDataFilePath)
+	if c.PersistDataFilePath != "" && !os.IsNotExist(err) {
+		if err != nil {
+			logger.ErrorLn("Failed to get file info for persist path:", err)
+			os.Exit(1)
+		}
+
+		if c.DataStore == DataStoreJson && fileInfo.IsDir() {
+			logger.ErrorLn("--Persist cannot be a directory when using json data store")
+			os.Exit(1)
+		}
+
+		if c.DataStore == DataStoreBadger && !fileInfo.IsDir() {
+			logger.ErrorLn("--Persist must be a directory when using Badger data store")
+			os.Exit(1)
+		}
+	}
+
+	if c.DataStore == DataStoreBadger && c.InitialDataFilePath != "" {
+		logger.ErrorLn("InitialData option is currently not supported with Badger data store")
+		os.Exit(1)
+	}
+}
+
+func (c *ServerConfig) ApplyDefaultsToEmptyFields() {
+	if c.Host == "" {
+		c.Host = "localhost"
+	}
+	if c.Port == 0 {
+		c.Port = 8081
+	}
+	if c.AccountKey == "" {
+		c.AccountKey = DefaultAccountKey
+	}
 }
 
 func setFlagsFromEnvironment() (err error) {
api/config/enumFlag.go | 36 (new file)

@@ -0,0 +1,36 @@
+package config
+
+import (
+	"fmt"
+	"strings"
+)
+
+type EnumValue struct {
+	allowedValues []string
+	value         string
+}
+
+func (e *EnumValue) String() string {
+	return e.value
+}
+
+func (e *EnumValue) Set(v string) error {
+	for _, allowed := range e.allowedValues {
+		if v == allowed {
+			e.value = v
+			return nil
+		}
+	}
+	return fmt.Errorf("invalid value %q, must be one of: %s", v, strings.Join(e.allowedValues, ", "))
+}
+
+func NewEnumValue(defaultValue string, allowedValues []string) *EnumValue {
+	return &EnumValue{
+		allowedValues: allowedValues,
+		value:         defaultValue,
+	}
+}
+
+func (e *EnumValue) AllowedValuesList() string {
+	return fmt.Sprintf("(one of: %s)", strings.Join(e.allowedValues, ", "))
+}
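Because `EnumValue` implements `String` and `Set`, it satisfies the standard library's `flag.Value` interface and plugs straight into `flag.Var`, which is exactly how the new `ParseFlags` uses it. A small stand-alone usage sketch:

```go
package main

// Usage sketch for the EnumValue flag type added in api/config/enumFlag.go.

import (
	"flag"
	"fmt"

	"github.com/pikami/cosmium/api/config"
)

func main() {
	logLevel := config.NewEnumValue("info", []string{"debug", "info", "error", "silent"})
	flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))

	// An invalid value such as -LogLevel=verbose makes flag.Parse report the
	// "invalid value ... must be one of ..." error returned by Set.
	flag.Parse()

	fmt.Println("log level:", logLevel.String())
}
```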
@@ -1,19 +1,22 @@
 package config
 
 type ServerConfig struct {
-	DatabaseAccount     string
-	DatabaseDomain      string
-	DatabaseEndpoint    string
-	AccountKey          string
+	DatabaseAccount  string `json:"databaseAccount"`
+	DatabaseDomain   string `json:"databaseDomain"`
+	DatabaseEndpoint string `json:"databaseEndpoint"`
+	AccountKey       string `json:"accountKey"`
 
-	ExplorerPath        string
-	Port                int
-	Host                string
-	TLS_CertificatePath string
-	TLS_CertificateKey  string
-	InitialDataFilePath string
-	PersistDataFilePath string
-	DisableAuth         bool
-	DisableTls          bool
-	Debug               bool
+	ExplorerPath            string `json:"explorerPath"`
+	Port                    int    `json:"port"`
+	Host                    string `json:"host"`
+	TLS_CertificatePath     string `json:"tlsCertificatePath"`
+	TLS_CertificateKey      string `json:"tlsCertificateKey"`
+	InitialDataFilePath     string `json:"initialDataFilePath"`
+	PersistDataFilePath     string `json:"persistDataFilePath"`
+	DisableAuth             bool   `json:"disableAuth"`
+	DisableTls              bool   `json:"disableTls"`
+	LogLevel                string `json:"logLevel"`
+	ExplorerBaseUrlLocation string `json:"explorerBaseUrlLocation"`
+
+	DataStore string `json:"dataStore"`
 }
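With the JSON tags added above, a `ServerConfig` now round-trips cleanly through `encoding/json`. A minimal sketch (the field values are arbitrary examples):

```go
package main

// Sketch of serializing ServerConfig now that it carries JSON tags.

import (
	"encoding/json"
	"fmt"

	"github.com/pikami/cosmium/api/config"
)

func main() {
	cfg := config.ServerConfig{
		Host:      "localhost",
		Port:      8081,
		DataStore: "json",
	}

	data, err := json.Marshal(cfg)
	if err != nil {
		panic(err)
	}

	// Keys follow the json tags, e.g. "host", "port", "dataStore".
	fmt.Println(string(data))
}
```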
@@ -1,20 +1,22 @@
 package handlers
 
 import (
+	"fmt"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
-	"github.com/pikami/cosmium/internal/repositories"
-	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/constants"
+	"github.com/pikami/cosmium/internal/datastore"
 )
 
-func GetAllCollections(c *gin.Context) {
+func (h *Handlers) GetAllCollections(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 
-	collections, status := repositories.GetAllCollections(databaseId)
-	if status == repositorymodels.StatusOk {
-		database, _ := repositories.GetDatabase(databaseId)
+	collections, status := h.dataStore.GetAllCollections(databaseId)
+	if status == datastore.StatusOk {
+		database, _ := h.dataStore.GetDatabase(databaseId)
+
+		c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections)))
 		c.IndentedJSON(http.StatusOK, gin.H{
 			"_rid": database.ResourceID,
 			"DocumentCollections": collections,
@@ -23,48 +25,48 @@ func GetAllCollections(c *gin.Context) {
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func GetCollection(c *gin.Context) {
+func (h *Handlers) GetCollection(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	id := c.Param("collId")
 
-	collection, status := repositories.GetCollection(databaseId, id)
-	if status == repositorymodels.StatusOk {
+	collection, status := h.dataStore.GetCollection(databaseId, id)
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusOK, collection)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func DeleteCollection(c *gin.Context) {
+func (h *Handlers) DeleteCollection(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	id := c.Param("collId")
 
-	status := repositories.DeleteCollection(databaseId, id)
-	if status == repositorymodels.StatusOk {
+	status := h.dataStore.DeleteCollection(databaseId, id)
+	if status == datastore.StatusOk {
 		c.Status(http.StatusNoContent)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func CreateCollection(c *gin.Context) {
+func (h *Handlers) CreateCollection(c *gin.Context) {
 	databaseId := c.Param("databaseId")
-	var newCollection repositorymodels.Collection
+	var newCollection datastore.Collection
 
 	if err := c.BindJSON(&newCollection); err != nil {
 		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -72,20 +74,20 @@ func CreateCollection(c *gin.Context) {
 	}
 
 	if newCollection.ID == "" {
-		c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
+		c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
 		return
 	}
 
-	createdCollection, status := repositories.CreateCollection(databaseId, newCollection)
-	if status == repositorymodels.Conflict {
-		c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
+	createdCollection, status := h.dataStore.CreateCollection(databaseId, newCollection)
+	if status == datastore.Conflict {
+		c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
 		return
 	}
 
-	if status == repositorymodels.StatusOk {
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusCreated, createdCollection)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
@@ -4,9 +4,14 @@ import (
 	"net/http"
 
 	"github.com/gin-gonic/gin"
-	"github.com/pikami/cosmium/internal/repositories"
 )
 
-func CosmiumExport(c *gin.Context) {
-	c.IndentedJSON(http.StatusOK, repositories.GetState())
+func (h *Handlers) CosmiumExport(c *gin.Context) {
+	dataStoreState, err := h.dataStore.DumpToJson()
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
+		return
+	}
+
+	c.Data(http.StatusOK, "application/json", []byte(dataStoreState))
 }
@@ -1,16 +1,18 @@
 package handlers
 
 import (
+	"fmt"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
-	"github.com/pikami/cosmium/internal/repositories"
-	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/constants"
+	"github.com/pikami/cosmium/internal/datastore"
 )
 
-func GetAllDatabases(c *gin.Context) {
-	databases, status := repositories.GetAllDatabases()
-	if status == repositorymodels.StatusOk {
+func (h *Handlers) GetAllDatabases(c *gin.Context) {
+	databases, status := h.dataStore.GetAllDatabases()
+	if status == datastore.StatusOk {
+		c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases)))
 		c.IndentedJSON(http.StatusOK, gin.H{
 			"_rid": "",
 			"Databases": databases,
@@ -19,45 +21,45 @@ func GetAllDatabases(c *gin.Context) {
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func GetDatabase(c *gin.Context) {
+func (h *Handlers) GetDatabase(c *gin.Context) {
 	id := c.Param("databaseId")
 
-	database, status := repositories.GetDatabase(id)
-	if status == repositorymodels.StatusOk {
+	database, status := h.dataStore.GetDatabase(id)
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusOK, database)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func DeleteDatabase(c *gin.Context) {
+func (h *Handlers) DeleteDatabase(c *gin.Context) {
 	id := c.Param("databaseId")
 
-	status := repositories.DeleteDatabase(id)
-	if status == repositorymodels.StatusOk {
+	status := h.dataStore.DeleteDatabase(id)
+	if status == datastore.StatusOk {
 		c.Status(http.StatusNoContent)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func CreateDatabase(c *gin.Context) {
-	var newDatabase repositorymodels.Database
+func (h *Handlers) CreateDatabase(c *gin.Context) {
+	var newDatabase datastore.Database
 
 	if err := c.BindJSON(&newDatabase); err != nil {
 		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -65,20 +67,20 @@ func CreateDatabase(c *gin.Context) {
 	}
 
 	if newDatabase.ID == "" {
-		c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
+		c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
 		return
 	}
 
-	createdDatabase, status := repositories.CreateDatabase(newDatabase)
-	if status == repositorymodels.Conflict {
-		c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
+	createdDatabase, status := h.dataStore.CreateDatabase(newDatabase)
+	if status == datastore.Conflict {
+		c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
 		return
 	}
 
-	if status == repositorymodels.StatusOk {
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusCreated, createdDatabase)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
@@ -1,22 +1,32 @@
 package handlers
 
 import (
+	"encoding/json"
+	"fmt"
 	"net/http"
+	"strconv"
 
+	jsonpatch "github.com/cosmiumdev/json-patch/v5"
 	"github.com/gin-gonic/gin"
+	apimodels "github.com/pikami/cosmium/api/api_models"
 	"github.com/pikami/cosmium/internal/constants"
-	"github.com/pikami/cosmium/internal/repositories"
-	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/converters"
+	"github.com/pikami/cosmium/internal/datastore"
+	"github.com/pikami/cosmium/internal/logger"
+	"github.com/pikami/cosmium/parsers"
+	"github.com/pikami/cosmium/parsers/nosql"
+	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
 )
 
-func GetAllDocuments(c *gin.Context) {
+func (h *Handlers) GetAllDocuments(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
 
-	documents, status := repositories.GetAllDocuments(databaseId, collectionId)
-	if status == repositorymodels.StatusOk {
-		collection, _ := repositories.GetCollection(databaseId, collectionId)
+	documents, status := h.dataStore.GetAllDocuments(databaseId, collectionId)
+	if status == datastore.StatusOk {
+		collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
+
+		c.Header("x-ms-item-count", fmt.Sprintf("%d", len(documents)))
 		c.IndentedJSON(http.StatusOK, gin.H{
 			"_rid": collection.ID,
 			"Documents": documents,
@@ -25,49 +35,49 @@ func GetAllDocuments(c *gin.Context) {
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func GetDocument(c *gin.Context) {
+func (h *Handlers) GetDocument(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
 	documentId := c.Param("docId")
 
-	document, status := repositories.GetDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusOk {
+	document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusOK, document)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func DeleteDocument(c *gin.Context) {
+func (h *Handlers) DeleteDocument(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
 	documentId := c.Param("docId")
 
-	status := repositories.DeleteDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusOk {
+	status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusOk {
 		c.Status(http.StatusNoContent)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-// TODO: Maybe move "replace" logic to repository
-func ReplaceDocument(c *gin.Context) {
+// TODO: Maybe move "replace" logic to data store
+func (h *Handlers) ReplaceDocument(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
 	documentId := c.Param("docId")
@@ -78,29 +88,112 @@ func ReplaceDocument(c *gin.Context) {
 		return
 	}
 
-	status := repositories.DeleteDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusNotFound {
-		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+	status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
 		return
 	}
 
-	createdDocument, status := repositories.CreateDocument(databaseId, collectionId, requestBody)
-	if status == repositorymodels.Conflict {
-		c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
+	createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
+	if status == datastore.Conflict {
+		c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
 		return
 	}
 
-	if status == repositorymodels.StatusOk {
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusCreated, createdDocument)
 		return
 	}
 
-	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
 
-func DocumentsPost(c *gin.Context) {
+func (h *Handlers) PatchDocument(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
+	documentId := c.Param("docId")
+
+	document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+		return
+	}
+
+	var requestBody map[string]interface{}
+	if err := c.BindJSON(&requestBody); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
+		return
+	}
+
+	operations := requestBody["operations"]
+	operationsBytes, err := json.Marshal(operations)
+	if err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"message": "Could not decode operations"})
+		return
+	}
+
+	patch, err := jsonpatch.DecodePatch(operationsBytes)
+	if err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
+		return
+	}
+
+	currentDocumentBytes, err := json.Marshal(document)
+	if err != nil {
+		logger.ErrorLn("Failed to marshal existing document:", err)
+		c.JSON(http.StatusInternalServerError, gin.H{"message": "Failed to marshal existing document"})
+		return
+	}
+
+	modifiedDocumentBytes, err := patch.Apply(currentDocumentBytes)
+	if err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
+		return
+	}
+
+	var modifiedDocument map[string]interface{}
+	err = json.Unmarshal(modifiedDocumentBytes, &modifiedDocument)
+	if err != nil {
+		logger.ErrorLn("Failed to unmarshal modified document:", err)
+		c.JSON(http.StatusInternalServerError, gin.H{"message": "Failed to unmarshal modified document"})
+		return
+	}
+
+	if modifiedDocument["id"] != document["id"] {
+		c.JSON(http.StatusUnprocessableEntity, gin.H{"message": "The ID field cannot be modified"})
+		return
+	}
+
+	status = h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusNotFound {
+		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+		return
+	}
+
+	createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, modifiedDocument)
+	if status == datastore.Conflict {
+		c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+		return
+	}
+
+	if status == datastore.StatusOk {
+		c.IndentedJSON(http.StatusCreated, createdDocument)
+		return
+	}
+
+	c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) DocumentsPost(c *gin.Context) {
+	databaseId := c.Param("databaseId")
+	collectionId := c.Param("collId")
+
+	// Handle batch requests
+	isBatchRequest, _ := strconv.ParseBool(c.GetHeader("x-ms-cosmos-is-batch-request"))
+	if isBatchRequest {
+		h.handleBatchRequest(c)
+		return
+	}
+
 	var requestBody map[string]interface{}
 	if err := c.BindJSON(&requestBody); err != nil {
@ -110,49 +203,32 @@ func DocumentsPost(c *gin.Context) {
|
|||||||
|
|
||||||
query := requestBody["query"]
|
query := requestBody["query"]
|
||||||
if query != nil {
|
if query != nil {
|
||||||
if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
|
h.handleDocumentQuery(c, requestBody)
|
||||||
c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var queryParameters map[string]interface{}
|
|
||||||
if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
|
|
||||||
queryParameters = parametersToMap(paramsArray)
|
|
||||||
}
|
|
||||||
|
|
||||||
docs, status := repositories.ExecuteQueryDocuments(databaseId, collectionId, query.(string), queryParameters)
|
|
||||||
if status != repositorymodels.StatusOk {
|
|
||||||
// TODO: Currently we return everything if the query fails
|
|
||||||
GetAllDocuments(c)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
collection, _ := repositories.GetCollection(databaseId, collectionId)
|
|
||||||
c.IndentedJSON(http.StatusOK, gin.H{
|
|
||||||
"_rid": collection.ResourceID,
|
|
||||||
"Documents": docs,
|
|
||||||
"_count": len(docs),
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if requestBody["id"] == "" {
|
if requestBody["id"] == "" {
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
|
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
createdDocument, status := repositories.CreateDocument(databaseId, collectionId, requestBody)
|
isUpsert, _ := strconv.ParseBool(c.GetHeader("x-ms-documentdb-is-upsert"))
|
||||||
if status == repositorymodels.Conflict {
|
if isUpsert {
|
||||||
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
|
h.dataStore.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
|
||||||
|
}
|
||||||
|
|
||||||
|
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
|
||||||
|
if status == datastore.Conflict {
|
||||||
|
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == repositorymodels.StatusOk {
|
if status == datastore.StatusOk {
|
||||||
c.IndentedJSON(http.StatusCreated, createdDocument)
|
c.IndentedJSON(http.StatusCreated, createdDocument)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
|
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
|
||||||
}
|
}
|
||||||
|
|
||||||
func parametersToMap(pairs []interface{}) map[string]interface{} {
|
func parametersToMap(pairs []interface{}) map[string]interface{} {
|
||||||
@ -166,3 +242,155 @@ func parametersToMap(pairs []interface{}) map[string]interface{} {
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (h *Handlers) handleDocumentQuery(c *gin.Context, requestBody map[string]interface{}) {
|
||||||
|
databaseId := c.Param("databaseId")
|
||||||
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
|
if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
|
||||||
|
c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var queryParameters map[string]interface{}
|
||||||
|
if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
|
||||||
|
queryParameters = parametersToMap(paramsArray)
|
||||||
|
}
|
||||||
|
|
||||||
|
queryText := requestBody["query"].(string)
|
||||||
|
docs, status := h.executeQueryDocuments(databaseId, collectionId, queryText, queryParameters)
|
||||||
|
if status != datastore.StatusOk {
|
||||||
|
// TODO: Currently we return everything if the query fails
|
||||||
|
h.GetAllDocuments(c)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
|
||||||
|
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
|
||||||
|
c.IndentedJSON(http.StatusOK, gin.H{
|
||||||
|
"_rid": collection.ResourceID,
|
||||||
|
"Documents": docs,
|
||||||
|
"_count": len(docs),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handlers) handleBatchRequest(c *gin.Context) {
|
||||||
|
databaseId := c.Param("databaseId")
|
||||||
|
collectionId := c.Param("collId")
|
||||||
|
|
||||||
|
batchOperations := make([]apimodels.BatchOperation, 0)
|
||||||
|
if err := c.BindJSON(&batchOperations); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
batchOperationResults := make([]apimodels.BatchOperationResult, len(batchOperations))
|
||||||
|
for idx, operation := range batchOperations {
|
||||||
|
switch operation.OperationType {
|
||||||
|
case apimodels.BatchOperationTypeCreate:
|
||||||
|
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
|
||||||
|
responseCode := dataStoreStatusToResponseCode(status)
|
||||||
|
if status == datastore.StatusOk {
|
||||||
|
responseCode = http.StatusCreated
|
||||||
|
}
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: responseCode,
|
||||||
|
ResourceBody: createdDocument,
|
||||||
|
}
|
||||||
|
case apimodels.BatchOperationTypeDelete:
|
||||||
|
status := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
|
||||||
|
responseCode := dataStoreStatusToResponseCode(status)
|
||||||
|
if status == datastore.StatusOk {
|
||||||
|
responseCode = http.StatusNoContent
|
||||||
|
}
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: responseCode,
|
||||||
|
}
|
||||||
|
case apimodels.BatchOperationTypeReplace:
|
||||||
|
deleteStatus := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
|
||||||
|
if deleteStatus == datastore.StatusNotFound {
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: http.StatusNotFound,
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
|
||||||
|
responseCode := dataStoreStatusToResponseCode(createStatus)
|
||||||
|
if createStatus == datastore.StatusOk {
|
||||||
|
responseCode = http.StatusCreated
|
||||||
|
}
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: responseCode,
|
||||||
|
ResourceBody: createdDocument,
|
||||||
|
}
|
||||||
|
case apimodels.BatchOperationTypeUpsert:
|
||||||
|
documentId := operation.ResourceBody["id"].(string)
|
||||||
|
h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
|
||||||
|
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
|
||||||
|
responseCode := dataStoreStatusToResponseCode(createStatus)
|
||||||
|
if createStatus == datastore.StatusOk {
|
||||||
|
responseCode = http.StatusCreated
|
||||||
|
}
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: responseCode,
|
||||||
|
ResourceBody: createdDocument,
|
||||||
|
}
|
||||||
|
case apimodels.BatchOperationTypeRead:
|
||||||
|
document, status := h.dataStore.GetDocument(databaseId, collectionId, operation.Id)
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: dataStoreStatusToResponseCode(status),
|
||||||
|
ResourceBody: document,
|
||||||
|
}
|
||||||
|
case apimodels.BatchOperationTypePatch:
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: http.StatusNotImplemented,
|
||||||
|
Message: "Patch operation is not implemented",
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
batchOperationResults[idx] = apimodels.BatchOperationResult{
|
||||||
|
StatusCode: http.StatusBadRequest,
|
||||||
|
Message: "Unknown operation type",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, batchOperationResults)
|
||||||
|
}
|
||||||
|
|
||||||
|
func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
|
||||||
|
switch status {
|
||||||
|
case datastore.StatusOk:
|
||||||
|
return http.StatusOK
|
||||||
|
case datastore.StatusNotFound:
|
||||||
|
return http.StatusNotFound
|
||||||
|
case datastore.Conflict:
|
||||||
|
return http.StatusConflict
|
||||||
|
case datastore.BadRequest:
|
||||||
|
return http.StatusBadRequest
|
||||||
|
default:
|
||||||
|
return http.StatusInternalServerError
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handlers) executeQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, datastore.DataStoreStatus) {
|
||||||
|
parsedQuery, err := nosql.Parse("", []byte(query))
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("Failed to parse query: %s\nerr: %v", query, err)
|
||||||
|
return nil, datastore.BadRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
allDocumentsIterator, status := h.dataStore.GetDocumentIterator(databaseId, collectionId)
|
||||||
|
if status != datastore.StatusOk {
|
||||||
|
return nil, status
|
||||||
|
}
|
||||||
|
defer allDocumentsIterator.Close()
|
||||||
|
|
||||||
|
rowsIterator := converters.NewDocumentToRowTypeIterator(allDocumentsIterator)
|
||||||
|
|
||||||
|
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
|
||||||
|
typedQuery.Parameters = queryParameters
|
||||||
|
return memoryexecutor.ExecuteQuery(typedQuery, rowsIterator), datastore.StatusOk
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, datastore.BadRequest
|
||||||
|
}
|
||||||
|
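The PatchDocument handler above marshals the stored document, decodes the request's "operations" array as a JSON Patch, applies it, checks that the id was not modified, and then deletes and re-creates the document. Here is a minimal stand-alone sketch of that patch flow; it assumes the jsonpatch package referenced in the diff is github.com/evanphx/json-patch (only the package alias is visible here), and the document and operations are made-up examples.

package main

import (
    "encoding/json"
    "fmt"

    jsonpatch "github.com/evanphx/json-patch" // assumed library; the diff only shows the package alias
)

func main() {
    // Existing document, as the handler would receive it from the data store.
    current := map[string]interface{}{"id": "doc1", "isCool": false}

    // Cosmos-style patch request body carries an "operations" array (RFC 6902 shape).
    operations := []map[string]interface{}{
        {"op": "replace", "path": "/isCool", "value": true},
    }

    opsBytes, _ := json.Marshal(operations)
    patch, err := jsonpatch.DecodePatch(opsBytes)
    if err != nil {
        panic(err)
    }

    currentBytes, _ := json.Marshal(current)
    modifiedBytes, err := patch.Apply(currentBytes)
    if err != nil {
        panic(err)
    }

    var modified map[string]interface{}
    _ = json.Unmarshal(modifiedBytes, &modified)
    fmt.Println(modified["isCool"]) // true; the real handler additionally rejects changes to "id"
}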
@@ -4,15 +4,14 @@ import (
     "fmt"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/api/config"
 )
 
-func RegisterExplorerHandlers(router *gin.Engine) {
-    explorer := router.Group("/_explorer")
+func (h *Handlers) RegisterExplorerHandlers(router *gin.Engine) {
+    explorer := router.Group(h.config.ExplorerBaseUrlLocation)
     {
         explorer.Use(func(ctx *gin.Context) {
             if ctx.Param("filepath") == "/config.json" {
-                endpoint := fmt.Sprintf("https://%s:%d", config.Config.Host, config.Config.Port)
+                endpoint := fmt.Sprintf("https://%s:%d", h.config.Host, h.config.Port)
                 ctx.JSON(200, gin.H{
                     "BACKEND_ENDPOINT": endpoint,
                     "MONGO_BACKEND_ENDPOINT": endpoint,

@@ -25,8 +24,8 @@ func RegisterExplorerHandlers(router *gin.Engine) {
             }
         })
 
-        if config.Config.ExplorerPath != "" {
-            explorer.Static("/", config.Config.ExplorerPath)
+        if h.config.ExplorerPath != "" {
+            explorer.Static("/", h.config.ExplorerPath)
         }
     }
 }
api/handlers/handlers.go (new file)
@@ -0,0 +1,18 @@
+package handlers
+
+import (
+    "github.com/pikami/cosmium/api/config"
+    "github.com/pikami/cosmium/internal/datastore"
+)
+
+type Handlers struct {
+    dataStore datastore.DataStore
+    config    *config.ServerConfig
+}
+
+func NewHandlers(dataStore datastore.DataStore, config *config.ServerConfig) *Handlers {
+    return &Handlers{
+        dataStore: dataStore,
+        config:    config,
+    }
+}
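For orientation, a hypothetical sketch of how a Handlers instance is constructed and bound to a gin route outside of CreateRouter. The constructor, handler method, and data-store names all come from this diff; the JSON data-store options and the port are illustrative assumptions, not values from the repository.

package main

import (
    "github.com/gin-gonic/gin"
    "github.com/pikami/cosmium/api/config"
    "github.com/pikami/cosmium/api/handlers"
    jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
)

func main() {
    // The data store and config are injected instead of read from package-level globals.
    store := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
    cfg := &config.ServerConfig{AccountKey: config.DefaultAccountKey}

    h := handlers.NewHandlers(store, cfg)

    router := gin.Default()
    router.GET("/", h.GetServerInfo) // same binding style CreateRouter uses later in this diff
    _ = router.Run(":8081")          // port chosen for the example
}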
@@ -10,44 +10,22 @@ import (
     "github.com/pikami/cosmium/internal/logger"
 )
 
-func Authentication() gin.HandlerFunc {
+func Authentication(config *config.ServerConfig) gin.HandlerFunc {
     return func(c *gin.Context) {
         requestUrl := c.Request.URL.String()
-        if config.Config.DisableAuth ||
-            strings.HasPrefix(requestUrl, "/_explorer") ||
+        if config.DisableAuth ||
+            strings.HasPrefix(requestUrl, config.ExplorerBaseUrlLocation) ||
             strings.HasPrefix(requestUrl, "/cosmium") {
             return
         }
 
-        var resourceType string
-        parts := strings.Split(requestUrl, "/")
-        switch len(parts) {
-        case 2, 3:
-            resourceType = parts[1]
-        case 4, 5:
-            resourceType = parts[3]
-        case 6, 7:
-            resourceType = parts[5]
-        }
-
-        databaseId, _ := c.Params.Get("databaseId")
-        collId, _ := c.Params.Get("collId")
-        docId, _ := c.Params.Get("docId")
-        var resourceId string
-        if databaseId != "" {
-            resourceId += "dbs/" + databaseId
-        }
-        if collId != "" {
-            resourceId += "/colls/" + collId
-        }
-        if docId != "" {
-            resourceId += "/docs/" + docId
-        }
-
+        resourceType := urlToResourceType(requestUrl)
+        resourceId := requestToResourceId(c)
 
         authHeader := c.Request.Header.Get("authorization")
         date := c.Request.Header.Get("x-ms-date")
         expectedSignature := authentication.GenerateSignature(
-            c.Request.Method, resourceType, resourceId, date, config.Config.AccountKey)
+            c.Request.Method, resourceType, resourceId, date, config.AccountKey)
 
         decoded, _ := url.QueryUnescape(authHeader)
         params, _ := url.ParseQuery(decoded)

@@ -62,3 +40,43 @@ func Authentication() gin.HandlerFunc {
         }
     }
 }
+
+func urlToResourceType(requestUrl string) string {
+    var resourceType string
+    parts := strings.Split(requestUrl, "/")
+    switch len(parts) {
+    case 2, 3:
+        resourceType = parts[1]
+    case 4, 5:
+        resourceType = parts[3]
+    case 6, 7:
+        resourceType = parts[5]
+    }
+
+    return resourceType
+}
+
+func requestToResourceId(c *gin.Context) string {
+    databaseId, _ := c.Params.Get("databaseId")
+    collId, _ := c.Params.Get("collId")
+    docId, _ := c.Params.Get("docId")
+    resourceType := urlToResourceType(c.Request.URL.String())
+
+    var resourceId string
+    if databaseId != "" {
+        resourceId += "dbs/" + databaseId
+    }
+    if collId != "" {
+        resourceId += "/colls/" + collId
+    }
+    if docId != "" {
+        resourceId += "/docs/" + docId
+    }
+
+    isFeed := c.Request.Header.Get("A-Im") == "Incremental Feed"
+    if resourceType == "pkranges" && isFeed {
+        resourceId = collId
+    }
+
+    return resourceId
+}
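To make the resource-type extraction above concrete, the snippet below re-implements urlToResourceType exactly as in the middleware and prints what it returns for a few typical Cosmos DB request paths. It is a stand-alone illustration only.

package main

import (
    "fmt"
    "strings"
)

// Copy of urlToResourceType from the middleware above, kept here for illustration.
func urlToResourceType(requestUrl string) string {
    var resourceType string
    parts := strings.Split(requestUrl, "/")
    switch len(parts) {
    case 2, 3:
        resourceType = parts[1]
    case 4, 5:
        resourceType = parts[3]
    case 6, 7:
        resourceType = parts[5]
    }
    return resourceType
}

func main() {
    for _, u := range []string{
        "/dbs",                           // -> "dbs"
        "/dbs/db1/colls/coll1",           // -> "colls"
        "/dbs/db1/colls/coll1/docs/doc1", // -> "docs"
    } {
        fmt.Printf("%-34s -> %s\n", u, urlToResourceType(u))
    }
}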
@@ -16,7 +16,7 @@ func RequestLogger() gin.HandlerFunc {
 
         bodyStr := readBody(rdr1)
         if bodyStr != "" {
-            logger.Debug(bodyStr)
+            logger.DebugLn(bodyStr)
         }
 
         c.Request.Body = rdr2
api/handlers/middleware/strip_trailing_slashes.go (new file)
@@ -0,0 +1,21 @@
+package middleware
+
+import (
+    "strings"
+
+    "github.com/gin-gonic/gin"
+    "github.com/pikami/cosmium/api/config"
+)
+
+func StripTrailingSlashes(r *gin.Engine, config *config.ServerConfig) gin.HandlerFunc {
+    return func(c *gin.Context) {
+        path := c.Request.URL.Path
+        if len(path) > 1 && path[len(path)-1] == '/' && !strings.Contains(path, config.ExplorerBaseUrlLocation) {
+            c.Request.URL.Path = path[:len(path)-1]
+            r.HandleContext(c)
+            c.Abort()
+            return
+        }
+        c.Next()
+    }
+}
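The middleware above rewrites a trailing-slash path and re-dispatches the request through the same engine. Below is a self-contained sketch of that behaviour, with the explorer-path exception dropped for brevity and RedirectTrailingSlash disabled, as CreateRouter does later in this diff; the route is an illustrative assumption.

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"

    "github.com/gin-gonic/gin"
)

// Simplified version of the trailing-slash rewrite, without the config dependency.
func stripTrailingSlashes(r *gin.Engine) gin.HandlerFunc {
    return func(c *gin.Context) {
        path := c.Request.URL.Path
        if len(path) > 1 && path[len(path)-1] == '/' {
            c.Request.URL.Path = path[:len(path)-1]
            r.HandleContext(c) // re-run routing against the rewritten path
            c.Abort()
            return
        }
        c.Next()
    }
}

func main() {
    gin.SetMode(gin.ReleaseMode)
    r := gin.New()
    r.RedirectTrailingSlash = false // let the middleware handle it instead of a 301
    r.Use(stripTrailingSlashes(r))
    r.GET("/dbs", func(c *gin.Context) { c.String(http.StatusOK, "ok") })

    w := httptest.NewRecorder()
    req := httptest.NewRequest(http.MethodGet, "/dbs/", nil)
    r.ServeHTTP(w, req)
    fmt.Println(w.Code) // 200: "/dbs/" was rewritten to "/dbs"
}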
@@ -7,6 +7,7 @@ import (
 )
 
 func GetOffers(c *gin.Context) {
+    c.Header("x-ms-item-count", "0")
     c.IndentedJSON(http.StatusOK, gin.H{
         "_rid": "",
         "_count": 0,
@@ -5,11 +5,12 @@ import (
     "net/http"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/internal/repositories"
-    repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+    "github.com/pikami/cosmium/internal/constants"
+    "github.com/pikami/cosmium/internal/datastore"
+    "github.com/pikami/cosmium/internal/resourceid"
 )
 
-func GetPartitionKeyRanges(c *gin.Context) {
+func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
     databaseId := c.Param("databaseId")
     collectionId := c.Param("collId")
 

@@ -18,8 +19,8 @@ func GetPartitionKeyRanges(c *gin.Context) {
         return
     }
 
-    partitionKeyRanges, status := repositories.GetPartitionKeyRanges(databaseId, collectionId)
-    if status == repositorymodels.StatusOk {
+    partitionKeyRanges, status := h.dataStore.GetPartitionKeyRanges(databaseId, collectionId)
+    if status == datastore.StatusOk {
         c.Header("etag", "\"420\"")
         c.Header("lsn", "420")
         c.Header("x-ms-cosmos-llsn", "420")

@@ -27,23 +28,24 @@ func GetPartitionKeyRanges(c *gin.Context) {
         c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
 
         collectionRid := collectionId
-        collection, _ := repositories.GetCollection(databaseId, collectionId)
+        collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
         if collection.ResourceID != "" {
             collectionRid = collection.ResourceID
         }
 
+        rid := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
         c.IndentedJSON(http.StatusOK, gin.H{
-            "_rid": collectionRid,
+            "_rid": rid,
             "_count": len(partitionKeyRanges),
             "PartitionKeyRanges": partitionKeyRanges,
         })
         return
     }
 
-    if status == repositorymodels.StatusNotFound {
-        c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
         return
     }
 
-    c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
@@ -5,30 +5,31 @@ import (
     "net/http"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/api/config"
 )
 
-func GetServerInfo(c *gin.Context) {
+func (h *Handlers) GetServerInfo(c *gin.Context) {
     c.IndentedJSON(http.StatusOK, gin.H{
         "_self": "",
-        "id": config.Config.DatabaseAccount,
-        "_rid": fmt.Sprintf("%s.%s", config.Config.DatabaseAccount, config.Config.DatabaseDomain),
+        "id": h.config.DatabaseAccount,
+        "_rid": fmt.Sprintf("%s.%s", h.config.DatabaseAccount, h.config.DatabaseDomain),
         "media": "//media/",
         "addresses": "//addresses/",
         "_dbs": "//dbs/",
         "writableLocations": []map[string]interface{}{
             {
                 "name": "South Central US",
-                "databaseAccountEndpoint": config.Config.DatabaseEndpoint,
+                "databaseAccountEndpoint": h.config.DatabaseEndpoint,
             },
         },
         "readableLocations": []map[string]interface{}{
             {
                 "name": "South Central US",
-                "databaseAccountEndpoint": config.Config.DatabaseEndpoint,
+                "databaseAccountEndpoint": h.config.DatabaseEndpoint,
             },
         },
         "enableMultipleWriteLocations": false,
+        "continuousBackupEnabled": false,
+        "enableNRegionSynchronousCommit": false,
         "userReplicationPolicy": map[string]interface{}{
             "asyncReplication": false,
             "minReplicaSetSize": 1,
@@ -1,23 +1,119 @@
 package handlers
 
 import (
+    "fmt"
     "net/http"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/internal/repositories"
-    repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+    "github.com/pikami/cosmium/internal/constants"
+    "github.com/pikami/cosmium/internal/datastore"
 )
 
-func GetAllStoredProcedures(c *gin.Context) {
+func (h *Handlers) GetAllStoredProcedures(c *gin.Context) {
     databaseId := c.Param("databaseId")
     collectionId := c.Param("collId")
 
-    sps, status := repositories.GetAllStoredProcedures(databaseId, collectionId)
+    sps, status := h.dataStore.GetAllStoredProcedures(databaseId, collectionId)
 
-    if status == repositorymodels.StatusOk {
+    if status == datastore.StatusOk {
+        c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps)))
         c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
         return
     }
 
-    c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) GetStoredProcedure(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    spId := c.Param("spId")
+
+    sp, status := h.dataStore.GetStoredProcedure(databaseId, collectionId, spId)
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusOK, sp)
+        return
+    }
+
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    spId := c.Param("spId")
+
+    status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
+    if status == datastore.StatusOk {
+        c.Status(http.StatusNoContent)
+        return
+    }
+
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    spId := c.Param("spId")
+
+    var sp datastore.StoredProcedure
+    if err := c.BindJSON(&sp); err != nil {
+        c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+        return
+    }
+
+    status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
+    if status == datastore.Conflict {
+        c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+        return
+    }
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusOK, createdSP)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) CreateStoredProcedure(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+
+    var sp datastore.StoredProcedure
+    if err := c.BindJSON(&sp); err != nil {
+        c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+        return
+    }
+
+    createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
+    if status == datastore.Conflict {
+        c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+        return
+    }
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusCreated, createdSP)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
@@ -1,23 +1,119 @@
 package handlers
 
 import (
+    "fmt"
     "net/http"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/internal/repositories"
-    repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+    "github.com/pikami/cosmium/internal/constants"
+    "github.com/pikami/cosmium/internal/datastore"
 )
 
-func GetAllTriggers(c *gin.Context) {
+func (h *Handlers) GetAllTriggers(c *gin.Context) {
     databaseId := c.Param("databaseId")
     collectionId := c.Param("collId")
 
-    triggers, status := repositories.GetAllTriggers(databaseId, collectionId)
+    triggers, status := h.dataStore.GetAllTriggers(databaseId, collectionId)
 
-    if status == repositorymodels.StatusOk {
+    if status == datastore.StatusOk {
+        c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers)))
         c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
         return
     }
 
-    c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) GetTrigger(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    triggerId := c.Param("triggerId")
+
+    trigger, status := h.dataStore.GetTrigger(databaseId, collectionId, triggerId)
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusOK, trigger)
+        return
+    }
+
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) DeleteTrigger(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    triggerId := c.Param("triggerId")
+
+    status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
+    if status == datastore.StatusOk {
+        c.Status(http.StatusNoContent)
+        return
+    }
+
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) ReplaceTrigger(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    triggerId := c.Param("triggerId")
+
+    var trigger datastore.Trigger
+    if err := c.BindJSON(&trigger); err != nil {
+        c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+        return
+    }
+
+    status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
+    if status == datastore.Conflict {
+        c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+        return
+    }
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusOK, createdTrigger)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) CreateTrigger(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+
+    var trigger datastore.Trigger
+    if err := c.BindJSON(&trigger); err != nil {
+        c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+        return
+    }
+
+    createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
+    if status == datastore.Conflict {
+        c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+        return
+    }
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusCreated, createdTrigger)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
@@ -1,23 +1,119 @@
 package handlers
 
 import (
+    "fmt"
     "net/http"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/internal/repositories"
-    repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+    "github.com/pikami/cosmium/internal/constants"
+    "github.com/pikami/cosmium/internal/datastore"
 )
 
-func GetAllUserDefinedFunctions(c *gin.Context) {
+func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) {
     databaseId := c.Param("databaseId")
     collectionId := c.Param("collId")
 
-    udfs, status := repositories.GetAllUserDefinedFunctions(databaseId, collectionId)
+    udfs, status := h.dataStore.GetAllUserDefinedFunctions(databaseId, collectionId)
 
-    if status == repositorymodels.StatusOk {
+    if status == datastore.StatusOk {
+        c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs)))
         c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
         return
     }
 
-    c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    udfId := c.Param("udfId")
+
+    udf, status := h.dataStore.GetUserDefinedFunction(databaseId, collectionId, udfId)
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusOK, udf)
+        return
+    }
+
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    udfId := c.Param("udfId")
+
+    status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
+    if status == datastore.StatusOk {
+        c.Status(http.StatusNoContent)
+        return
+    }
+
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+    udfId := c.Param("udfId")
+
+    var udf datastore.UserDefinedFunction
+    if err := c.BindJSON(&udf); err != nil {
+        c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+        return
+    }
+
+    status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
+    if status == datastore.StatusNotFound {
+        c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+        return
+    }
+
+    createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
+    if status == datastore.Conflict {
+        c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+        return
+    }
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusOK, createdUdf)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+}
+
+func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) {
+    databaseId := c.Param("databaseId")
+    collectionId := c.Param("collId")
+
+    var udf datastore.UserDefinedFunction
+    if err := c.BindJSON(&udf); err != nil {
+        c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+        return
+    }
+
+    createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
+    if status == datastore.Conflict {
+        c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+        return
+    }
+
+    if status == datastore.StatusOk {
+        c.IndentedJSON(http.StatusCreated, createdUdf)
+        return
+    }
+
+    c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
 }
api/router.go
@@ -1,94 +1,144 @@
 package api
 
 import (
+    "context"
     "fmt"
     "net/http"
+    "sync"
+    "time"
 
     "github.com/gin-gonic/gin"
-    "github.com/pikami/cosmium/api/config"
     "github.com/pikami/cosmium/api/handlers"
     "github.com/pikami/cosmium/api/handlers/middleware"
+    "github.com/pikami/cosmium/internal/datastore"
     "github.com/pikami/cosmium/internal/logger"
     tlsprovider "github.com/pikami/cosmium/internal/tls_provider"
 )
 
-func CreateRouter() *gin.Engine {
-    router := gin.Default()
+var ginMux sync.Mutex
 
-    if config.Config.Debug {
+func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
+    routeHandlers := handlers.NewHandlers(dataStore, s.config)
+
+    ginMux.Lock()
+    gin.DefaultWriter = logger.InfoWriter()
+    gin.DefaultErrorWriter = logger.ErrorWriter()
+
+    if s.config.LogLevel != "debug" {
+        gin.SetMode(gin.ReleaseMode)
+    }
+    ginMux.Unlock()
+
+    router := gin.Default(func(e *gin.Engine) {
+        e.RedirectTrailingSlash = false
+    })
+
+    if s.config.LogLevel == "debug" {
         router.Use(middleware.RequestLogger())
     }
 
-    router.Use(middleware.Authentication())
+    router.Use(middleware.StripTrailingSlashes(router, s.config))
+    router.Use(middleware.Authentication(s.config))
 
-    router.GET("/dbs/:databaseId/colls/:collId/pkranges", handlers.GetPartitionKeyRanges)
+    router.GET("/dbs/:databaseId/colls/:collId/pkranges", routeHandlers.GetPartitionKeyRanges)
 
-    router.POST("/dbs/:databaseId/colls/:collId/docs", handlers.DocumentsPost)
-    router.GET("/dbs/:databaseId/colls/:collId/docs", handlers.GetAllDocuments)
-    router.GET("/dbs/:databaseId/colls/:collId/docs/:docId", handlers.GetDocument)
-    router.PUT("/dbs/:databaseId/colls/:collId/docs/:docId", handlers.ReplaceDocument)
-    router.DELETE("/dbs/:databaseId/colls/:collId/docs/:docId", handlers.DeleteDocument)
+    router.POST("/dbs/:databaseId/colls/:collId/docs", routeHandlers.DocumentsPost)
+    router.GET("/dbs/:databaseId/colls/:collId/docs", routeHandlers.GetAllDocuments)
+    router.GET("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.GetDocument)
+    router.PUT("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.ReplaceDocument)
+    router.PATCH("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.PatchDocument)
+    router.DELETE("/dbs/:databaseId/colls/:collId/docs/:docId", routeHandlers.DeleteDocument)
 
-    router.POST("/dbs/:databaseId/colls", handlers.CreateCollection)
-    router.GET("/dbs/:databaseId/colls", handlers.GetAllCollections)
-    router.GET("/dbs/:databaseId/colls/:collId", handlers.GetCollection)
-    router.DELETE("/dbs/:databaseId/colls/:collId", handlers.DeleteCollection)
+    router.POST("/dbs/:databaseId/colls", routeHandlers.CreateCollection)
+    router.GET("/dbs/:databaseId/colls", routeHandlers.GetAllCollections)
+    router.GET("/dbs/:databaseId/colls/:collId", routeHandlers.GetCollection)
+    router.DELETE("/dbs/:databaseId/colls/:collId", routeHandlers.DeleteCollection)
 
-    router.POST("/dbs", handlers.CreateDatabase)
-    router.GET("/dbs", handlers.GetAllDatabases)
-    router.GET("/dbs/:databaseId", handlers.GetDatabase)
-    router.DELETE("/dbs/:databaseId", handlers.DeleteDatabase)
+    router.POST("/dbs", routeHandlers.CreateDatabase)
+    router.GET("/dbs", routeHandlers.GetAllDatabases)
+    router.GET("/dbs/:databaseId", routeHandlers.GetDatabase)
+    router.DELETE("/dbs/:databaseId", routeHandlers.DeleteDatabase)
 
-    router.GET("/dbs/:databaseId/colls/:collId/udfs", handlers.GetAllUserDefinedFunctions)
-    router.GET("/dbs/:databaseId/colls/:collId/sprocs", handlers.GetAllStoredProcedures)
-    router.GET("/dbs/:databaseId/colls/:collId/triggers", handlers.GetAllTriggers)
+    router.POST("/dbs/:databaseId/colls/:collId/triggers", routeHandlers.CreateTrigger)
+    router.GET("/dbs/:databaseId/colls/:collId/triggers", routeHandlers.GetAllTriggers)
+    router.GET("/dbs/:databaseId/colls/:collId/triggers/:triggerId", routeHandlers.GetTrigger)
+    router.PUT("/dbs/:databaseId/colls/:collId/triggers/:triggerId", routeHandlers.ReplaceTrigger)
+    router.DELETE("/dbs/:databaseId/colls/:collId/triggers/:triggerId", routeHandlers.DeleteTrigger)
+
+    router.POST("/dbs/:databaseId/colls/:collId/sprocs", routeHandlers.CreateStoredProcedure)
+    router.GET("/dbs/:databaseId/colls/:collId/sprocs", routeHandlers.GetAllStoredProcedures)
+    router.GET("/dbs/:databaseId/colls/:collId/sprocs/:sprocId", routeHandlers.GetStoredProcedure)
+    router.PUT("/dbs/:databaseId/colls/:collId/sprocs/:sprocId", routeHandlers.ReplaceStoredProcedure)
+    router.DELETE("/dbs/:databaseId/colls/:collId/sprocs/:sprocId", routeHandlers.DeleteStoredProcedure)
+
+    router.POST("/dbs/:databaseId/colls/:collId/udfs", routeHandlers.CreateUserDefinedFunction)
+    router.GET("/dbs/:databaseId/colls/:collId/udfs", routeHandlers.GetAllUserDefinedFunctions)
+    router.GET("/dbs/:databaseId/colls/:collId/udfs/:udfId", routeHandlers.GetUserDefinedFunction)
+    router.PUT("/dbs/:databaseId/colls/:collId/udfs/:udfId", routeHandlers.ReplaceUserDefinedFunction)
+    router.DELETE("/dbs/:databaseId/colls/:collId/udfs/:udfId", routeHandlers.DeleteUserDefinedFunction)
 
     router.GET("/offers", handlers.GetOffers)
-    router.GET("/", handlers.GetServerInfo)
+    router.GET("/", routeHandlers.GetServerInfo)
 
-    router.GET("/cosmium/export", handlers.CosmiumExport)
+    router.GET("/cosmium/export", routeHandlers.CosmiumExport)
 
-    handlers.RegisterExplorerHandlers(router)
+    routeHandlers.RegisterExplorerHandlers(router)
 
-    return router
+    s.router = router
 }
 
-func StartAPI() {
-    if !config.Config.Debug {
-        gin.SetMode(gin.ReleaseMode)
-    }
-
-    router := CreateRouter()
-    listenAddress := fmt.Sprintf(":%d", config.Config.Port)
+func (s *ApiServer) Start() error {
+    listenAddress := fmt.Sprintf(":%d", s.config.Port)
+    s.isActive = true
 
-    if config.Config.TLS_CertificatePath != "" && config.Config.TLS_CertificateKey != "" {
-        err := router.RunTLS(
-            listenAddress,
-            config.Config.TLS_CertificatePath,
-            config.Config.TLS_CertificateKey)
-        if err != nil {
-            logger.Error("Failed to start HTTPS server:", err)
-        }
-        return
-    }
+    server := &http.Server{
+        Addr: listenAddress,
+        Handler: s.router.Handler(),
+    }
 
-    if config.Config.DisableTls {
-        router.Run(listenAddress)
-    }
+    errChan := make(chan error, 1)
 
-    tlsConfig := tlsprovider.GetDefaultTlsConfig()
-    server := &http.Server{
-        Addr: listenAddress,
-        Handler: router.Handler(),
-        TLSConfig: tlsConfig,
-    }
+    go func() {
+        <-s.stopServer
+        logger.InfoLn("Shutting down server...")
+        err := server.Shutdown(context.TODO())
+        if err != nil {
+            logger.ErrorLn("Failed to shutdown server:", err)
+        }
+        s.onServerShutdown <- true
+    }()
 
-    logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
-    err := server.ListenAndServeTLS("", "")
-    if err != nil {
-        logger.Error("Failed to start HTTPS server:", err)
-    }
+    go func() {
+        var err error
+        if s.config.DisableTls {
+            logger.Infof("Listening and serving HTTP on %s\n", server.Addr)
+            err = server.ListenAndServe()
+        } else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" {
+            logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
+            err = server.ListenAndServeTLS(
+                s.config.TLS_CertificatePath,
+                s.config.TLS_CertificateKey)
+        } else {
+            tlsConfig := tlsprovider.GetDefaultTlsConfig()
+            server.TLSConfig = tlsConfig
+
+            logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
+            err = server.ListenAndServeTLS("", "")
+        }
 
-    router.Run()
+        if err != nil && err != http.ErrServerClosed {
+            logger.ErrorLn("Failed to start server:", err)
+            errChan <- err
+        } else {
+            errChan <- nil
+        }
+        s.isActive = false
+    }()
+
+    select {
+    case err := <-errChan:
+        return err
+    case <-time.After(50 * time.Millisecond):
+        return nil
+    }
 }
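Based on the names visible in this diff (api.NewApiServer, ApiServer.Start, and ServerConfig fields such as Port and DisableTls), a hypothetical embedding sketch might look as follows. The ApiServer struct definition and the exact behaviour of NewApiServer are not part of this excerpt, so treat this as an assumption-laden illustration rather than the project's documented API.

package main

import (
    "github.com/pikami/cosmium/api"
    "github.com/pikami/cosmium/api/config"
    jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
)

func main() {
    store := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
    cfg := &config.ServerConfig{
        AccountKey: config.DefaultAccountKey,
        Port:       8081, // illustrative value
        DisableTls: true,
    }

    // Assumed to wire CreateRouter with a Handlers instance, as the tests below suggest.
    server := api.NewApiServer(store, cfg)

    // Start returns once the listener is up, or with an error if startup fails quickly.
    if err := server.Start(); err != nil {
        panic(err)
    }

    select {} // keep the emulator running; the HTTP goroutines do the work
}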
@@ -2,27 +2,24 @@ package tests_test
 
 import (
     "context"
-    "errors"
     "fmt"
     "io"
     "net/http"
     "testing"
 
-    "github.com/Azure/azure-sdk-for-go/sdk/azcore"
     "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
     "github.com/pikami/cosmium/api/config"
-    "github.com/pikami/cosmium/internal/repositories"
     "github.com/stretchr/testify/assert"
 )
 
 func Test_Authentication(t *testing.T) {
     ts := runTestServer()
-    defer ts.Close()
+    defer ts.Server.Close()
 
     t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
-        repositories.DeleteDatabase(testDatabaseName)
+        ts.DataStore.DeleteDatabase(testDatabaseName)
         client, err := azcosmos.NewClientFromConnectionString(
-            fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
+            formatConnectionString(ts.URL, config.DefaultAccountKey),
            &azcosmos.ClientOptions{},
         )
         assert.Nil(t, err)

@@ -35,28 +32,10 @@ func Test_Authentication(t *testing.T) {
         assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
     })
 
-    t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
-        config.Config.DisableAuth = true
-        repositories.DeleteDatabase(testDatabaseName)
-        client, err := azcosmos.NewClientFromConnectionString(
-            fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
-            &azcosmos.ClientOptions{},
-        )
-        assert.Nil(t, err)
-
-        createResponse, err := client.CreateDatabase(
-            context.TODO(),
-            azcosmos.DatabaseProperties{ID: testDatabaseName},
-            &azcosmos.CreateDatabaseOptions{})
-        assert.Nil(t, err)
-        assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
-        config.Config.DisableAuth = false
-    })
-
     t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
-        repositories.DeleteDatabase(testDatabaseName)
+        ts.DataStore.DeleteDatabase(testDatabaseName)
         client, err := azcosmos.NewClientFromConnectionString(
-            fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
+            formatConnectionString(ts.URL, "AAAA"),
             &azcosmos.ClientOptions{},
         )
         assert.Nil(t, err)

@@ -66,12 +45,7 @@ func Test_Authentication(t *testing.T) {
             azcosmos.DatabaseProperties{ID: testDatabaseName},
             &azcosmos.CreateDatabaseOptions{})
 
-        var respErr *azcore.ResponseError
-        if errors.As(err, &respErr) {
-            assert.Equal(t, respErr.StatusCode, http.StatusUnauthorized)
-        } else {
-            panic(err)
-        }
+        assert.Contains(t, err.Error(), "401 Unauthorized")
     })
 
     t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) {

@@ -85,3 +59,33 @@ func Test_Authentication(t *testing.T) {
         assert.Contains(t, string(responseBody), "BACKEND_ENDPOINT")
     })
 }
+
+func Test_Authentication_Disabled(t *testing.T) {
+    ts := runTestServerCustomConfig(&config.ServerConfig{
+        AccountKey: config.DefaultAccountKey,
+        ExplorerPath: "/tmp/nothing",
+        ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
+        DisableAuth: true,
+    })
+    defer ts.Server.Close()
+
+    t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
+        ts.DataStore.DeleteDatabase(testDatabaseName)
+        client, err := azcosmos.NewClientFromConnectionString(
+            formatConnectionString(ts.URL, "AAAA"),
+            &azcosmos.ClientOptions{},
+        )
+        assert.Nil(t, err)
+
+        createResponse, err := client.CreateDatabase(
+            context.TODO(),
+            azcosmos.DatabaseProperties{ID: testDatabaseName},
+            &azcosmos.CreateDatabaseOptions{})
+        assert.Nil(t, err)
+        assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
+    })
+}
+
+func formatConnectionString(endpoint, key string) string {
+    return fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", endpoint, key)
+}
@@ -3,33 +3,29 @@ package tests_test
 import (
     "context"
     "errors"
-    "fmt"
     "net/http"
     "testing"
 
     "github.com/Azure/azure-sdk-for-go/sdk/azcore"
     "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
-    "github.com/pikami/cosmium/api/config"
-    "github.com/pikami/cosmium/internal/repositories"
-    repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+    "github.com/pikami/cosmium/internal/datastore"
     "github.com/stretchr/testify/assert"
 )
 
 func Test_Collections(t *testing.T) {
-    ts := runTestServer()
-    defer ts.Close()
-
-    client, err := azcosmos.NewClientFromConnectionString(
-        fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.Config.AccountKey),
-        &azcosmos.ClientOptions{},
-    )
-    assert.Nil(t, err)
-
-    repositories.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
-    databaseClient, err := client.NewDatabase(testDatabaseName)
-    assert.Nil(t, err)
-
-    t.Run("Collection Create", func(t *testing.T) {
+    presets := []testPreset{PresetJsonStore, PresetBadgerStore}
+
+    setUp := func(ts *TestServer, client *azcosmos.Client) *azcosmos.DatabaseClient {
+        ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
+        databaseClient, err := client.NewDatabase(testDatabaseName)
+        assert.Nil(t, err)
+
+        return databaseClient
+    }
+
+    runTestsWithPresets(t, "Collection Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
+        databaseClient := setUp(ts, client)
+
         t.Run("Should create collection", func(t *testing.T) {
             createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
                 ID: testCollectionName,

@@ -40,7 +36,7 @@ func Test_Collections(t *testing.T) {
         })
 
         t.Run("Should return conflict when collection exists", func(t *testing.T) {
-            repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
+            ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
                 ID: testCollectionName,
             })
 

@@ -58,9 +54,11 @@ func Test_Collections(t *testing.T) {
         })
     })
 
-    t.Run("Collection Read", func(t *testing.T) {
+    runTestsWithPresets(t, "Collection Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
+        databaseClient := setUp(ts, client)
+
         t.Run("Should read collection", func(t *testing.T) {
-            repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
+            ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
                 ID: testCollectionName,
             })
 

@@ -74,7 +72,7 @@ func Test_Collections(t *testing.T) {
         })
 
         t.Run("Should return not found when collection does not exist", func(t *testing.T) {
-            repositories.DeleteCollection(testDatabaseName, testCollectionName)
+            ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
 
             collectionResponse, err := databaseClient.NewContainer(testCollectionName)
             assert.Nil(t, err)

@@ -91,9 +89,11 @@ func Test_Collections(t *testing.T) {
         })
     })
 
-    t.Run("Collection Delete", func(t *testing.T) {
+    runTestsWithPresets(t, "Collection Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
+        databaseClient := setUp(ts, client)
+
         t.Run("Should delete collection", func(t *testing.T) {
-            repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
+            ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
                 ID: testCollectionName,
             })
 

@@ -106,7 +106,7 @@ func Test_Collections(t *testing.T) {
         })
 
         t.Run("Should return not found when collection does not exist", func(t *testing.T) {
-            repositories.DeleteCollection(testDatabaseName, testCollectionName)
+            ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
 
             collectionResponse, err := databaseClient.NewContainer(testCollectionName)
             assert.Nil(t, err)
@ -1,17 +1,64 @@
|
|||||||
package tests_test
|
package tests_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/api"
|
"github.com/pikami/cosmium/api"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/datastore"
|
||||||
|
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
|
||||||
|
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
|
||||||
|
"github.com/pikami/cosmium/internal/logger"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func runTestServer() *httptest.Server {
|
type TestServer struct {
|
||||||
config.Config.AccountKey = config.DefaultAccountKey
|
Server *httptest.Server
|
||||||
config.Config.ExplorerPath = "/tmp/nothing"
|
DataStore datastore.DataStore
|
||||||
|
URL string
|
||||||
|
}
|
||||||
|
|
||||||
return httptest.NewServer(api.CreateRouter())
|
func getDefaultTestServerConfig() *config.ServerConfig {
|
||||||
|
return &config.ServerConfig{
|
||||||
|
AccountKey: config.DefaultAccountKey,
|
||||||
|
ExplorerPath: "/tmp/nothing",
|
||||||
|
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
|
||||||
|
DataStore: "json",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func runTestServerCustomConfig(configuration *config.ServerConfig) *TestServer {
|
||||||
|
var dataStore datastore.DataStore
|
||||||
|
switch configuration.DataStore {
|
||||||
|
case config.DataStoreBadger:
|
||||||
|
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{})
|
||||||
|
default:
|
||||||
|
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
|
||||||
|
}
|
||||||
|
|
||||||
|
api := api.NewApiServer(dataStore, configuration)
|
||||||
|
|
||||||
|
server := httptest.NewServer(api.GetRouter())
|
||||||
|
|
||||||
|
configuration.DatabaseEndpoint = server.URL
|
||||||
|
|
||||||
|
return &TestServer{
|
||||||
|
Server: server,
|
||||||
|
DataStore: dataStore,
|
||||||
|
URL: server.URL,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func runTestServer() *TestServer {
|
||||||
|
config := getDefaultTestServerConfig()
|
||||||
|
|
||||||
|
config.LogLevel = "debug"
|
||||||
|
logger.SetLogLevel(logger.LogLevelDebug)
|
||||||
|
|
||||||
|
return runTestServerCustomConfig(config)
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -19,3 +66,47 @@ const (
|
|||||||
testDatabaseName = "test-db"
|
testDatabaseName = "test-db"
|
||||||
testCollectionName = "test-coll"
|
testCollectionName = "test-coll"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type testFunc func(t *testing.T, ts *TestServer, cosmosClient *azcosmos.Client)
|
||||||
|
type testPreset string
|
||||||
|
|
||||||
|
const (
|
||||||
|
PresetJsonStore testPreset = "JsonDS"
|
||||||
|
PresetBadgerStore testPreset = "BadgerDS"
|
||||||
|
)
|
||||||
|
|
||||||
|
func runTestsWithPreset(t *testing.T, name string, testPreset testPreset, f testFunc) {
|
||||||
|
serverConfig := getDefaultTestServerConfig()
|
||||||
|
|
||||||
|
serverConfig.LogLevel = "debug"
|
||||||
|
logger.SetLogLevel(logger.LogLevelDebug)
|
||||||
|
|
||||||
|
switch testPreset {
|
||||||
|
case PresetBadgerStore:
|
||||||
|
serverConfig.DataStore = config.DataStoreBadger
|
||||||
|
case PresetJsonStore:
|
||||||
|
serverConfig.DataStore = config.DataStoreJson
|
||||||
|
}
|
||||||
|
|
||||||
|
ts := runTestServerCustomConfig(serverConfig)
|
||||||
|
defer ts.Server.Close()
|
||||||
|
defer ts.DataStore.Close()
|
||||||
|
|
||||||
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
||||||
|
&azcosmos.ClientOptions{},
|
||||||
|
)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
testName := fmt.Sprintf("%s_%s", testPreset, name)
|
||||||
|
|
||||||
|
t.Run(testName, func(t *testing.T) {
|
||||||
|
f(t, ts, client)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func runTestsWithPresets(t *testing.T, name string, testPresets []testPreset, f testFunc) {
|
||||||
|
for _, testPreset := range testPresets {
|
||||||
|
runTestsWithPreset(t, name, testPreset, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -3,31 +3,21 @@ package tests_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/internal/datastore"
|
||||||
"github.com/pikami/cosmium/internal/repositories"
|
|
||||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_Databases(t *testing.T) {
|
func Test_Databases(t *testing.T) {
|
||||||
ts := runTestServer()
|
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
||||||
defer ts.Close()
|
|
||||||
|
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
runTestsWithPresets(t, "Database Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.Config.AccountKey),
|
|
||||||
&azcosmos.ClientOptions{},
|
|
||||||
)
|
|
||||||
assert.Nil(t, err)
|
|
||||||
|
|
||||||
t.Run("Database Create", func(t *testing.T) {
|
|
||||||
t.Run("Should create database", func(t *testing.T) {
|
t.Run("Should create database", func(t *testing.T) {
|
||||||
repositories.DeleteDatabase(testDatabaseName)
|
ts.DataStore.DeleteDatabase(testDatabaseName)
|
||||||
|
|
||||||
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
|
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
@ -38,7 +28,7 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return conflict when database exists", func(t *testing.T) {
|
t.Run("Should return conflict when database exists", func(t *testing.T) {
|
||||||
repositories.CreateDatabase(repositorymodels.Database{
|
ts.DataStore.CreateDatabase(datastore.Database{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -56,9 +46,9 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Database Read", func(t *testing.T) {
|
runTestsWithPresets(t, "Database Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||||
t.Run("Should read database", func(t *testing.T) {
|
t.Run("Should read database", func(t *testing.T) {
|
||||||
repositories.CreateDatabase(repositorymodels.Database{
|
ts.DataStore.CreateDatabase(datastore.Database{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -72,7 +62,7 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
||||||
repositories.DeleteDatabase(testDatabaseName)
|
ts.DataStore.DeleteDatabase(testDatabaseName)
|
||||||
|
|
||||||
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -89,9 +79,9 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Database Delete", func(t *testing.T) {
|
runTestsWithPresets(t, "Database Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||||
t.Run("Should delete database", func(t *testing.T) {
|
t.Run("Should delete database", func(t *testing.T) {
|
||||||
repositories.CreateDatabase(repositorymodels.Database{
|
ts.DataStore.CreateDatabase(datastore.Database{
|
||||||
ID: testDatabaseName,
|
ID: testDatabaseName,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -104,7 +94,7 @@ func Test_Databases(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
||||||
repositories.DeleteDatabase(testDatabaseName)
|
ts.DataStore.DeleteDatabase(testDatabaseName)
|
||||||
|
|
||||||
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
@ -3,14 +3,18 @@ package tests_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"net/http"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"sync"
|
||||||
"testing"
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||||
"github.com/pikami/cosmium/api/config"
|
"github.com/pikami/cosmium/api/config"
|
||||||
"github.com/pikami/cosmium/internal/repositories"
|
"github.com/pikami/cosmium/internal/datastore"
|
||||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -49,9 +53,9 @@ func testCosmosQuery(t *testing.T,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func Test_Documents(t *testing.T) {
|
func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerClient {
|
||||||
repositories.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
|
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
|
||||||
repositories.CreateCollection(testDatabaseName, repositorymodels.Collection{
|
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
|
||||||
ID: testCollectionName,
|
ID: testCollectionName,
|
||||||
PartitionKey: struct {
|
PartitionKey: struct {
|
||||||
Paths []string "json:\"paths\""
|
Paths []string "json:\"paths\""
|
||||||
@ -61,14 +65,11 @@ func Test_Documents(t *testing.T) {
|
|||||||
Paths: []string{"/pk"},
|
Paths: []string{"/pk"},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
repositories.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false})
|
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
|
||||||
repositories.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true})
|
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
|
||||||
|
|
||||||
ts := runTestServer()
|
|
||||||
defer ts.Close()
|
|
||||||
|
|
||||||
client, err := azcosmos.NewClientFromConnectionString(
|
client, err := azcosmos.NewClientFromConnectionString(
|
||||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.Config.AccountKey),
|
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
||||||
&azcosmos.ClientOptions{},
|
&azcosmos.ClientOptions{},
|
||||||
)
|
)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
@ -76,64 +77,439 @@ func Test_Documents(t *testing.T) {
|
|||||||
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
|
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
|
||||||
assert.Nil(t, err)
|
assert.Nil(t, err)
|
||||||
|
|
||||||
t.Run("Should query document", func(t *testing.T) {
|
return collectionClient
|
||||||
testCosmosQuery(t, collectionClient,
|
}
|
||||||
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
|
|
||||||
nil,
|
func Test_Documents(t *testing.T) {
|
||||||
[]interface{}{
|
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
||||||
map[string]interface{}{"id": "12345", "pk": "123"},
|
|
||||||
map[string]interface{}{"id": "67890", "pk": "456"},
|
runTestsWithPresets(t, "Test_Documents", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||||
},
|
collectionClient := documents_InitializeDb(t, ts)
|
||||||
)
|
|
||||||
|
t.Run("Should query document", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "12345", "pk": "123"},
|
||||||
|
map[string]interface{}{"id": "67890", "pk": "456"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should query VALUE array", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
[]interface{}{"12345", "123"},
|
||||||
|
[]interface{}{"67890", "456"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should query VALUE object", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "12345", "_pk": "123"},
|
||||||
|
map[string]interface{}{"id": "67890", "_pk": "456"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
`select c.id
|
||||||
|
FROM c
|
||||||
|
WHERE c.isCool=true
|
||||||
|
ORDER BY c.id`,
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "67890"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should query document with query parameters", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
`select c.id
|
||||||
|
FROM c
|
||||||
|
WHERE c.id=@param_id
|
||||||
|
ORDER BY c.id`,
|
||||||
|
[]azcosmos.QueryParameter{
|
||||||
|
{Name: "@param_id", Value: "67890"},
|
||||||
|
},
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "67890"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
`select c.id
|
||||||
|
FROM c
|
||||||
|
WHERE c[@param]="67890"
|
||||||
|
ORDER BY c.id`,
|
||||||
|
[]azcosmos.QueryParameter{
|
||||||
|
{Name: "@param", Value: "id"},
|
||||||
|
},
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "67890"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should query array accessor", func(t *testing.T) {
|
||||||
|
testCosmosQuery(t, collectionClient,
|
||||||
|
`SELECT c.id,
|
||||||
|
c["arr"][0] AS arr0,
|
||||||
|
c["arr"][1] AS arr1,
|
||||||
|
c["arr"][2] AS arr2,
|
||||||
|
c["arr"][3] AS arr3
|
||||||
|
FROM c ORDER BY c.id`,
|
||||||
|
nil,
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
|
||||||
|
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should handle parallel writes", func(t *testing.T) {
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
rutineCount := 100
|
||||||
|
results := make(chan error, rutineCount)
|
||||||
|
|
||||||
|
createCall := func(i int) {
|
||||||
|
defer wg.Done()
|
||||||
|
item := map[string]interface{}{
|
||||||
|
"id": fmt.Sprintf("id-%d", i),
|
||||||
|
"pk": fmt.Sprintf("pk-%d", i),
|
||||||
|
"val": i,
|
||||||
|
}
|
||||||
|
bytes, err := json.Marshal(item)
|
||||||
|
if err != nil {
|
||||||
|
results <- err
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
_, err = collectionClient.CreateItem(
|
||||||
|
ctx,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
bytes,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
results <- err
|
||||||
|
|
||||||
|
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
||||||
|
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < rutineCount; i++ {
|
||||||
|
wg.Add(1)
|
||||||
|
go createCall(i)
|
||||||
|
}
|
||||||
|
|
||||||
|
wg.Wait()
|
||||||
|
close(results)
|
||||||
|
|
||||||
|
for err := range results {
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Error creating item: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should query VALUE array", func(t *testing.T) {
|
runTestsWithPresets(t, "Test_Documents_Patch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||||
testCosmosQuery(t, collectionClient,
|
collectionClient := documents_InitializeDb(t, ts)
|
||||||
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
|
|
||||||
nil,
|
t.Run("Should PATCH document", func(t *testing.T) {
|
||||||
[]interface{}{
|
context := context.TODO()
|
||||||
[]interface{}{"12345", "123"},
|
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
|
||||||
[]interface{}{"67890", "456"},
|
|
||||||
},
|
patch := azcosmos.PatchOperations{}
|
||||||
)
|
patch.AppendAdd("/newField", "newValue")
|
||||||
|
patch.AppendIncrement("/incr", 15)
|
||||||
|
patch.AppendRemove("/isCool")
|
||||||
|
patch.AppendReplace("/pk", "666")
|
||||||
|
patch.AppendSet("/setted", "isSet")
|
||||||
|
|
||||||
|
itemResponse, err := collectionClient.PatchItem(
|
||||||
|
context,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
"67890",
|
||||||
|
patch,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
var itemResponseBody map[string]interface{}
|
||||||
|
json.Unmarshal(itemResponse.Value, &itemResponseBody)
|
||||||
|
|
||||||
|
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
|
||||||
|
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
|
||||||
|
assert.Empty(t, itemResponseBody["isCool"])
|
||||||
|
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
|
||||||
|
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
|
||||||
|
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
|
||||||
|
patch := azcosmos.PatchOperations{}
|
||||||
|
patch.AppendReplace("/id", "newValue")
|
||||||
|
|
||||||
|
_, err := collectionClient.PatchItem(
|
||||||
|
context,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
"67890",
|
||||||
|
patch,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert.NotNil(t, err)
|
||||||
|
|
||||||
|
var respErr *azcore.ResponseError
|
||||||
|
if errors.As(err, &respErr) {
|
||||||
|
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
|
||||||
|
} else {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("CreateItem", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
|
||||||
|
item := map[string]interface{}{
|
||||||
|
"Id": "6789011",
|
||||||
|
"pk": "456",
|
||||||
|
"newField": "newValue2",
|
||||||
|
}
|
||||||
|
bytes, err := json.Marshal(item)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
r, err2 := collectionClient.CreateItem(
|
||||||
|
context,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
bytes,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert.NotNil(t, r)
|
||||||
|
assert.Nil(t, err2)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("CreateItem that already exists", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
|
||||||
|
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
|
||||||
|
bytes, err := json.Marshal(item)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
r, err := collectionClient.CreateItem(
|
||||||
|
context,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
bytes,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert.NotNil(t, r)
|
||||||
|
assert.NotNil(t, err)
|
||||||
|
|
||||||
|
var respErr *azcore.ResponseError
|
||||||
|
if errors.As(err, &respErr) {
|
||||||
|
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
|
||||||
|
} else {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("UpsertItem new", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
|
||||||
|
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
|
||||||
|
bytes, err := json.Marshal(item)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
r, err2 := collectionClient.UpsertItem(
|
||||||
|
context,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
bytes,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert.NotNil(t, r)
|
||||||
|
assert.Nil(t, err2)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("UpsertItem that already exists", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
|
||||||
|
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
|
||||||
|
bytes, err := json.Marshal(item)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
r, err2 := collectionClient.UpsertItem(
|
||||||
|
context,
|
||||||
|
azcosmos.PartitionKey{},
|
||||||
|
bytes,
|
||||||
|
&azcosmos.ItemOptions{
|
||||||
|
EnableContentResponseOnWrite: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
assert.NotNil(t, r)
|
||||||
|
assert.Nil(t, err2)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should query VALUE object", func(t *testing.T) {
|
runTestsWithPresets(t, "Test_Documents_TransactionalBatch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||||
testCosmosQuery(t, collectionClient,
|
collectionClient := documents_InitializeDb(t, ts)
|
||||||
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "12345", "_pk": "123"},
|
|
||||||
map[string]interface{}{"id": "67890", "_pk": "456"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
|
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
|
||||||
testCosmosQuery(t, collectionClient,
|
context := context.TODO()
|
||||||
`select c.id
|
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||||
FROM c
|
|
||||||
WHERE c.isCool=true
|
|
||||||
ORDER BY c.id`,
|
|
||||||
nil,
|
|
||||||
[]interface{}{
|
|
||||||
map[string]interface{}{"id": "67890"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Should query document with query parameters", func(t *testing.T) {
|
newItem := map[string]interface{}{
|
||||||
testCosmosQuery(t, collectionClient,
|
"id": "678901",
|
||||||
`select c.id
|
}
|
||||||
FROM c
|
bytes, err := json.Marshal(newItem)
|
||||||
WHERE c.id=@param_id
|
assert.Nil(t, err)
|
||||||
ORDER BY c.id`,
|
|
||||||
[]azcosmos.QueryParameter{
|
batch.CreateItem(bytes, nil)
|
||||||
{Name: "@param_id", Value: "67890"},
|
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||||
},
|
assert.Nil(t, err)
|
||||||
[]interface{}{
|
assert.True(t, response.Success)
|
||||||
map[string]interface{}{"id": "67890"},
|
assert.Equal(t, 1, len(response.OperationResults))
|
||||||
},
|
|
||||||
)
|
operationResponse := response.OperationResults[0]
|
||||||
|
assert.NotNil(t, operationResponse)
|
||||||
|
assert.NotNil(t, operationResponse.ResourceBody)
|
||||||
|
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||||
|
|
||||||
|
var itemResponseBody map[string]interface{}
|
||||||
|
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||||
|
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||||
|
|
||||||
|
createdDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||||
|
assert.Equal(t, newItem["id"], createdDoc["id"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||||
|
|
||||||
|
batch.DeleteItem("12345", nil)
|
||||||
|
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.True(t, response.Success)
|
||||||
|
assert.Equal(t, 1, len(response.OperationResults))
|
||||||
|
|
||||||
|
operationResponse := response.OperationResults[0]
|
||||||
|
assert.NotNil(t, operationResponse)
|
||||||
|
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
|
||||||
|
|
||||||
|
_, status := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, "12345")
|
||||||
|
assert.Equal(t, datastore.StatusNotFound, int(status))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||||
|
|
||||||
|
newItem := map[string]interface{}{
|
||||||
|
"id": "67890",
|
||||||
|
"pk": "666",
|
||||||
|
}
|
||||||
|
bytes, err := json.Marshal(newItem)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
batch.ReplaceItem("67890", bytes, nil)
|
||||||
|
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.True(t, response.Success)
|
||||||
|
assert.Equal(t, 1, len(response.OperationResults))
|
||||||
|
|
||||||
|
operationResponse := response.OperationResults[0]
|
||||||
|
assert.NotNil(t, operationResponse)
|
||||||
|
assert.NotNil(t, operationResponse.ResourceBody)
|
||||||
|
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||||
|
|
||||||
|
var itemResponseBody map[string]interface{}
|
||||||
|
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||||
|
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||||
|
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
||||||
|
|
||||||
|
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||||
|
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
||||||
|
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||||
|
|
||||||
|
newItem := map[string]interface{}{
|
||||||
|
"id": "678901",
|
||||||
|
"pk": "666",
|
||||||
|
}
|
||||||
|
bytes, err := json.Marshal(newItem)
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
batch.UpsertItem(bytes, nil)
|
||||||
|
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.True(t, response.Success)
|
||||||
|
assert.Equal(t, 1, len(response.OperationResults))
|
||||||
|
|
||||||
|
operationResponse := response.OperationResults[0]
|
||||||
|
assert.NotNil(t, operationResponse)
|
||||||
|
assert.NotNil(t, operationResponse.ResourceBody)
|
||||||
|
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||||
|
|
||||||
|
var itemResponseBody map[string]interface{}
|
||||||
|
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||||
|
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||||
|
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
||||||
|
|
||||||
|
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||||
|
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
||||||
|
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("Should execute READ transactional batch", func(t *testing.T) {
|
||||||
|
context := context.TODO()
|
||||||
|
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||||
|
|
||||||
|
batch.ReadItem("67890", nil)
|
||||||
|
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||||
|
assert.Nil(t, err)
|
||||||
|
assert.True(t, response.Success)
|
||||||
|
assert.Equal(t, 1, len(response.OperationResults))
|
||||||
|
|
||||||
|
operationResponse := response.OperationResults[0]
|
||||||
|
assert.NotNil(t, operationResponse)
|
||||||
|
assert.NotNil(t, operationResponse.ResourceBody)
|
||||||
|
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
|
||||||
|
|
||||||
|
var itemResponseBody map[string]interface{}
|
||||||
|
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||||
|
assert.Equal(t, "67890", itemResponseBody["id"])
|
||||||
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
42
api/tests/documents_trailingslash_test.go
Normal file
42
api/tests/documents_trailingslash_test.go
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
package tests_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/authentication"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Request document with trailing slash like python cosmosdb client does.
|
||||||
|
func Test_Documents_Read_Trailing_Slash(t *testing.T) {
|
||||||
|
ts := runTestServer()
|
||||||
|
documents_InitializeDb(t, ts)
|
||||||
|
defer ts.Server.Close()
|
||||||
|
|
||||||
|
t.Run("Read doc with client that appends slash to path", func(t *testing.T) {
|
||||||
|
resourceIdTemplate := "dbs/%s/colls/%s/docs/%s"
|
||||||
|
path := fmt.Sprintf(resourceIdTemplate, testDatabaseName, testCollectionName, "12345")
|
||||||
|
testUrl := ts.URL + "/" + path + "/"
|
||||||
|
date := time.Now().Format(time.RFC1123)
|
||||||
|
signature := authentication.GenerateSignature("GET", "docs", path, date, config.DefaultAccountKey)
|
||||||
|
httpClient := &http.Client{}
|
||||||
|
req, _ := http.NewRequest("GET", testUrl, nil)
|
||||||
|
req.Header.Add("x-ms-date", date)
|
||||||
|
req.Header.Add("authorization", "sig="+url.QueryEscape(signature))
|
||||||
|
res, err := httpClient.Do(req)
|
||||||
|
|
||||||
|
assert.Nil(t, err)
|
||||||
|
|
||||||
|
if res != nil {
|
||||||
|
defer res.Body.Close()
|
||||||
|
assert.Equal(t, http.StatusOK, res.StatusCode, "Expected HTTP status 200 OK")
|
||||||
|
} else {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
54
cmd/server/server.go
Normal file
54
cmd/server/server.go
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
|
||||||
|
"github.com/pikami/cosmium/api"
|
||||||
|
"github.com/pikami/cosmium/api/config"
|
||||||
|
"github.com/pikami/cosmium/internal/datastore"
|
||||||
|
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
|
||||||
|
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
|
||||||
|
"github.com/pikami/cosmium/internal/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
configuration := config.ParseFlags()
|
||||||
|
|
||||||
|
var dataStore datastore.DataStore
|
||||||
|
switch configuration.DataStore {
|
||||||
|
case config.DataStoreBadger:
|
||||||
|
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
|
||||||
|
PersistDataFilePath: configuration.PersistDataFilePath,
|
||||||
|
})
|
||||||
|
logger.InfoLn("Using Badger data store")
|
||||||
|
default:
|
||||||
|
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
|
||||||
|
InitialDataFilePath: configuration.InitialDataFilePath,
|
||||||
|
PersistDataFilePath: configuration.PersistDataFilePath,
|
||||||
|
})
|
||||||
|
logger.InfoLn("Using in-memory data store")
|
||||||
|
}
|
||||||
|
|
||||||
|
server := api.NewApiServer(dataStore, &configuration)
|
||||||
|
err := server.Start()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
waitForExit(server, dataStore)
|
||||||
|
}
|
||||||
|
|
||||||
|
func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
|
||||||
|
sigs := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
|
||||||
|
// Block until a exit signal is received
|
||||||
|
<-sigs
|
||||||
|
|
||||||
|
// Stop the server
|
||||||
|
server.Stop()
|
||||||
|
|
||||||
|
dataStore.Close()
|
||||||
|
}
|
@ -15,10 +15,11 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
## Compatibility Matrix
|
## Compatibility Matrix
|
||||||
|
|
||||||
### Features
|
### Features
|
||||||
|
|
||||||
| Feature | Implemented |
|
| Feature | Implemented |
|
||||||
|-------------------------------|-------------|
|
| ----------------------------- | ----------- |
|
||||||
| Subqueries | No |
|
| Subqueries | Yes |
|
||||||
| Joins | No |
|
| Joins | Yes |
|
||||||
| Computed properties | No |
|
| Computed properties | No |
|
||||||
| Coalesce operators | No |
|
| Coalesce operators | No |
|
||||||
| Bitwise operators | No |
|
| Bitwise operators | No |
|
||||||
@ -29,8 +30,9 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| User-defined functions (UDFs) | No |
|
| User-defined functions (UDFs) | No |
|
||||||
|
|
||||||
### Clauses
|
### Clauses
|
||||||
|
|
||||||
| Clause | Implemented |
|
| Clause | Implemented |
|
||||||
|--------------|-------------|
|
| ------------ | ----------- |
|
||||||
| SELECT | Yes |
|
| SELECT | Yes |
|
||||||
| FROM | Yes |
|
| FROM | Yes |
|
||||||
| WHERE | Yes |
|
| WHERE | Yes |
|
||||||
@ -39,8 +41,9 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| OFFSET LIMIT | Yes |
|
| OFFSET LIMIT | Yes |
|
||||||
|
|
||||||
### Keywords
|
### Keywords
|
||||||
|
|
||||||
| Keyword | Implemented |
|
| Keyword | Implemented |
|
||||||
|----------|-------------|
|
| -------- | ----------- |
|
||||||
| BETWEEN | No |
|
| BETWEEN | No |
|
||||||
| DISTINCT | Yes |
|
| DISTINCT | Yes |
|
||||||
| LIKE | No |
|
| LIKE | No |
|
||||||
@ -48,8 +51,9 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| TOP | Yes |
|
| TOP | Yes |
|
||||||
|
|
||||||
### Aggregate Functions
|
### Aggregate Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|----------|-------------|
|
| -------- | ----------- |
|
||||||
| AVG | Yes |
|
| AVG | Yes |
|
||||||
| COUNT | Yes |
|
| COUNT | Yes |
|
||||||
| MAX | Yes |
|
| MAX | Yes |
|
||||||
@ -57,25 +61,30 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| SUM | Yes |
|
| SUM | Yes |
|
||||||
|
|
||||||
### Array Functions
|
### Array Functions
|
||||||
| Function | Implemented |
|
|
||||||
|----------------|-------------|
|
| Function | Implemented |
|
||||||
| ARRAY_CONCAT | Yes |
|
| ------------------ | ----------- |
|
||||||
| ARRAY_CONTAINS | No |
|
| ARRAY_CONCAT | Yes |
|
||||||
| ARRAY_LENGTH | Yes |
|
| ARRAY_CONTAINS | Yes |
|
||||||
| ARRAY_SLICE | Yes |
|
| ARRAY_CONTAINS_ANY | Yes |
|
||||||
| CHOOSE | No |
|
| ARRAY_CONTAINS_ALL | Yes |
|
||||||
| ObjectToArray | No |
|
| ARRAY_LENGTH | Yes |
|
||||||
| SetIntersect | Yes |
|
| ARRAY_SLICE | Yes |
|
||||||
| SetUnion | Yes |
|
| CHOOSE | No |
|
||||||
|
| ObjectToArray | No |
|
||||||
|
| SetIntersect | Yes |
|
||||||
|
| SetUnion | Yes |
|
||||||
|
|
||||||
### Conditional Functions
|
### Conditional Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|----------|-------------|
|
| -------- | ----------- |
|
||||||
| IIF | No |
|
| IIF | Yes |
|
||||||
|
|
||||||
### Date and time Functions
|
### Date and time Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|---------------------------|-------------|
|
| ------------------------- | ----------- |
|
||||||
| DateTimeAdd | No |
|
| DateTimeAdd | No |
|
||||||
| DateTimeBin | No |
|
| DateTimeBin | No |
|
||||||
| DateTimeDiff | No |
|
| DateTimeDiff | No |
|
||||||
@ -93,53 +102,56 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| TimestampToDateTime | No |
|
| TimestampToDateTime | No |
|
||||||
|
|
||||||
### Item Functions
|
### Item Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|------------|-------------|
|
| ---------- | ----------- |
|
||||||
| DocumentId | No |
|
| DocumentId | No |
|
||||||
|
|
||||||
### Mathematical Functions
|
### Mathematical Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|------------------|-------------|
|
| ---------------- | ----------- |
|
||||||
| ABS | No |
|
| ABS | Yes |
|
||||||
| ACOS | No |
|
| ACOS | Yes |
|
||||||
| ASIN | No |
|
| ASIN | Yes |
|
||||||
| ATAN | No |
|
| ATAN | Yes |
|
||||||
| ATN2 | No |
|
| ATN2 | Yes |
|
||||||
| CEILING | No |
|
| CEILING | Yes |
|
||||||
| COS | No |
|
| COS | Yes |
|
||||||
| COT | No |
|
| COT | Yes |
|
||||||
| DEGREES | No |
|
| DEGREES | Yes |
|
||||||
| EXP | No |
|
| EXP | Yes |
|
||||||
| FLOOR | No |
|
| FLOOR | Yes |
|
||||||
| IntAdd | No |
|
| IntAdd | Yes |
|
||||||
| IntBitAnd | No |
|
| IntBitAnd | Yes |
|
||||||
| IntBitLeftShift | No |
|
| IntBitLeftShift | Yes |
|
||||||
| IntBitNot | No |
|
| IntBitNot | Yes |
|
||||||
| IntBitOr | No |
|
| IntBitOr | Yes |
|
||||||
| IntBitRightShift | No |
|
| IntBitRightShift | Yes |
|
||||||
| IntBitXor | No |
|
| IntBitXor | Yes |
|
||||||
| IntDiv | No |
|
| IntDiv | Yes |
|
||||||
| IntMod | No |
|
| IntMod | Yes |
|
||||||
| IntMul | No |
|
| IntMul | Yes |
|
||||||
| IntSub | No |
|
| IntSub | Yes |
|
||||||
| LOG | No |
|
| LOG | Yes |
|
||||||
| LOG10 | No |
|
| LOG10 | Yes |
|
||||||
| NumberBin | No |
|
| NumberBin | Yes |
|
||||||
| PI | No |
|
| PI | Yes |
|
||||||
| POWER | No |
|
| POWER | Yes |
|
||||||
| RADIANS | No |
|
| RADIANS | Yes |
|
||||||
| RAND | No |
|
| RAND | Yes |
|
||||||
| ROUND | No |
|
| ROUND | Yes |
|
||||||
| SIGN | No |
|
| SIGN | Yes |
|
||||||
| SIN | No |
|
| SIN | Yes |
|
||||||
| SQRT | No |
|
| SQRT | Yes |
|
||||||
| SQUARE | No |
|
| SQUARE | Yes |
|
||||||
| TAN | No |
|
| TAN | Yes |
|
||||||
| TRUNC | No |
|
| TRUNC | Yes |
|
||||||
|
|
||||||
### Spatial Functions
|
### Spatial Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|--------------------|-------------|
|
| ------------------ | ----------- |
|
||||||
| ST_AREA | No |
|
| ST_AREA | No |
|
||||||
| ST_DISTANCE | No |
|
| ST_DISTANCE | No |
|
||||||
| ST_WITHIN | No |
|
| ST_WITHIN | No |
|
||||||
@ -148,8 +160,9 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| ST_ISVALIDDETAILED | No |
|
| ST_ISVALIDDETAILED | No |
|
||||||
|
|
||||||
### String Functions
|
### String Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|-----------------|-------------|
|
| --------------- | ----------- |
|
||||||
| CONCAT | Yes |
|
| CONCAT | Yes |
|
||||||
| CONTAINS | Yes |
|
| CONTAINS | Yes |
|
||||||
| ENDSWITH | Yes |
|
| ENDSWITH | Yes |
|
||||||
@ -177,8 +190,9 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| UPPER | Yes |
|
| UPPER | Yes |
|
||||||
|
|
||||||
### Type checking Functions
|
### Type checking Functions
|
||||||
|
|
||||||
| Function | Implemented |
|
| Function | Implemented |
|
||||||
|------------------|-------------|
|
| ---------------- | ----------- |
|
||||||
| IS_ARRAY | Yes |
|
| IS_ARRAY | Yes |
|
||||||
| IS_BOOL | Yes |
|
| IS_BOOL | Yes |
|
||||||
| IS_DEFINED | Yes |
|
| IS_DEFINED | Yes |
|
||||||
@ -190,6 +204,19 @@ Cosmium strives to support the core features of Cosmos DB, including:
|
|||||||
| IS_PRIMITIVE | Yes |
|
| IS_PRIMITIVE | Yes |
|
||||||
| IS_STRING | Yes |
|
| IS_STRING | Yes |
|
||||||
|
|
||||||
|
### Transactional batch operations
|
||||||
|
|
||||||
|
Note: There's actually no transaction here. Think of this as a 'bulk operation' that can partially succeed.
|
||||||
|
|
||||||
|
| Operation | Implemented |
|
||||||
|
| --------- | ----------- |
|
||||||
|
| Create | Yes |
|
||||||
|
| Delete | Yes |
|
||||||
|
| Replace | Yes |
|
||||||
|
| Upsert | Yes |
|
||||||
|
| Read | Yes |
|
||||||
|
| Patch | No |
|
||||||
|
|
||||||
## Known Differences
|
## Known Differences
|
||||||
|
|
||||||
While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of:
|
While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of:
|
||||||
|
67
go.mod
67
go.mod
@ -1,43 +1,60 @@
|
|||||||
module github.com/pikami/cosmium
|
module github.com/pikami/cosmium
|
||||||
|
|
||||||
go 1.21.6
|
go 1.24.0
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.2
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0
|
||||||
github.com/gin-gonic/gin v1.9.1
|
github.com/cosmiumdev/json-patch/v5 v5.9.11
|
||||||
github.com/google/uuid v1.1.1
|
github.com/dgraph-io/badger/v4 v4.7.0
|
||||||
github.com/stretchr/testify v1.8.4
|
github.com/gin-gonic/gin v1.10.0
|
||||||
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
|
github.com/google/uuid v1.6.0
|
||||||
|
github.com/stretchr/testify v1.10.0
|
||||||
|
github.com/vmihailenco/msgpack/v5 v5.4.1
|
||||||
|
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||||
github.com/bytedance/sonic v1.9.1 // indirect
|
github.com/bytedance/sonic v1.13.2 // indirect
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
|
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
||||||
|
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||||
|
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
|
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect
|
||||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||||
|
github.com/gin-contrib/sse v1.1.0 // indirect
|
||||||
|
github.com/go-logr/logr v1.4.2 // indirect
|
||||||
|
github.com/go-logr/stdr v1.2.2 // indirect
|
||||||
github.com/go-playground/locales v0.14.1 // indirect
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
github.com/go-playground/validator/v10 v10.14.0 // indirect
|
github.com/go-playground/validator/v10 v10.26.0 // indirect
|
||||||
github.com/goccy/go-json v0.10.2 // indirect
|
github.com/goccy/go-json v0.10.5 // indirect
|
||||||
|
github.com/google/flatbuffers v25.2.10+incompatible // indirect
|
||||||
github.com/json-iterator/go v1.1.12 // indirect
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
github.com/klauspost/cpuid/v2 v2.2.4 // indirect
|
github.com/klauspost/compress v1.18.0 // indirect
|
||||||
github.com/leodido/go-urn v1.2.4 // indirect
|
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.19 // indirect
|
github.com/leodido/go-urn v1.4.0 // indirect
|
||||||
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
|
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||||
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||||
github.com/ugorji/go/codec v1.2.11 // indirect
|
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||||
golang.org/x/arch v0.3.0 // indirect
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
golang.org/x/crypto v0.18.0 // indirect
|
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||||
golang.org/x/net v0.20.0 // indirect
|
go.opentelemetry.io/otel v1.35.0 // indirect
|
||||||
golang.org/x/sys v0.16.0 // indirect
|
go.opentelemetry.io/otel/metric v1.35.0 // indirect
|
||||||
golang.org/x/text v0.14.0 // indirect
|
go.opentelemetry.io/otel/trace v1.35.0 // indirect
|
||||||
google.golang.org/protobuf v1.30.0 // indirect
|
golang.org/x/arch v0.17.0 // indirect
|
||||||
|
golang.org/x/crypto v0.38.0 // indirect
|
||||||
|
golang.org/x/net v0.40.0 // indirect
|
||||||
|
golang.org/x/sys v0.33.0 // indirect
|
||||||
|
golang.org/x/text v0.25.0 // indirect
|
||||||
|
google.golang.org/protobuf v1.36.6 // indirect
|
||||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
)
|
)
|
||||||
|
215
go.sum
215
go.sum
@ -1,109 +1,186 @@
|
|||||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
|
||||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.2 h1:c4k2FIYIh4xtwqrQwV0Ct1v5+ehlNXj5NI/MWVsiTkQ=
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.2/go.mod h1:5FDJtLEO/GxwNgUxbwrY3LP0pEoThTQJtk2oysdXHxM=
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0 h1:Yoicul8bnVdQrhDMTHxdEckRGX01XvwXDHUT9zYZ3k0=
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM=
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6 h1:oBqQLSI1pZwGOdXJAoJJSzmff9tlfD4KroVfjQQmd0g=
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6/go.mod h1:Beh5cHIXJ0oWEDWk9lNFtuklCojLLQ5hl+LqSNTTs0I=
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ=
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc=
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0 h1:RGcdpSElvcXCwxydI0xzOBu1Gvp88OoiTGfbtO/z1m0=
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0 h1:WVsrXCnHlDDX8ls+tootqRE87/hL9S/g4ewig9RsD/c=
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0/go.mod h1:YwUyrNUtcZcibA99JcfCP6UUp95VVQKO2MJfBzgJDwA=
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4=
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0 h1:TSaH6Lj0m8bDr4vX1+LC1KLQTnLzZb3tOxrx/PLqw+c=
|
||||||
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0/go.mod h1:Krtog/7tz27z75TwM5cIS8bxEH4dcBUezcq+kGVeZEo=
|
||||||
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
|
||||||
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||||
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
|
||||||
|
go.sum: dependency and checksum updates across these commits (summary of the changed modules).
New modules: github.com/dgraph-io/badger/v4 v4.7.0, github.com/dgraph-io/ristretto/v2 v2.2.0, github.com/dgryski/go-farm, github.com/dustin/go-humanize v1.0.1, github.com/google/flatbuffers v25.2.10+incompatible, github.com/klauspost/compress v1.18.0, github.com/vmihailenco/msgpack/v5 v5.4.1, github.com/vmihailenco/tagparser/v2 v2.0.0, github.com/golang-jwt/jwt/v5 v5.2.2 (replacing github.com/golang-jwt/jwt v3.2.1+incompatible), github.com/pkg/errors v0.9.1, the go.opentelemetry.io/otel v1.35.0 family (otel, otel/metric, otel/trace, auto/sdk), and test/log helpers github.com/go-logr/logr, github.com/kr/pretty, github.com/rogpeppe/go-internal.
Version bumps: github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2, github.com/bytedance/sonic v1.13.2, github.com/gabriel-vasile/mimetype v1.4.9, github.com/gin-contrib/sse v1.1.0, github.com/gin-gonic/gin v1.10.0, github.com/go-playground/validator/v10 v10.26.0, github.com/goccy/go-json v0.10.5, github.com/google/go-cmp v0.7.0, github.com/google/uuid v1.6.0, github.com/klauspost/cpuid/v2 v2.2.10, github.com/leodido/go-urn v1.4.0, github.com/mattn/go-isatty v0.0.20, github.com/pelletier/go-toml/v2 v2.2.4, github.com/pkg/browser v0.0.0-20240102092130, github.com/stretchr/testify v1.10.0, github.com/ugorji/go/codec v1.2.12, google.golang.org/protobuf v1.36.6, and golang.org/x/{arch v0.17.0, crypto v0.38.0, exp 20250506, net v0.40.0, sys v0.33.0, text v0.25.0}.
@@ -10,6 +10,11 @@ import (
 
 // https://learn.microsoft.com/en-us/rest/api/cosmos-db/access-control-on-cosmosdb-resources
 func GenerateSignature(verb string, resourceType string, resourceId string, date string, masterKey string) string {
+    isNameBased := resourceId != "" && ((len(resourceId) > 4 && resourceId[3] == '/') || strings.HasPrefix(strings.ToLower(resourceId), "interopusers"))
+    if !isNameBased {
+        resourceId = strings.ToLower(resourceId)
+    }
+
     payload := fmt.Sprintf(
         "%s\n%s\n%s\n%s\n%s\n",
         strings.ToLower(verb),
@@ -27,4 +27,14 @@ func Test_GenerateSignature(t *testing.T) {
         signature := authentication.GenerateSignature("DELETE", "dbs", "dbs/Test Database", testDate, config.DefaultAccountKey)
         assert.Equal(t, "LcuXXg0TcXxZG0kUCj9tZIWRy2yCzim3oiqGiHpRqGs=", signature)
     })
+
+    t.Run("Should generate PKRANGES signature", func(t *testing.T) {
+        signature := authentication.GenerateSignature("GET", "pkranges", "m4d+xG08uVM=", testDate, config.DefaultAccountKey)
+        assert.Equal(t, "6S5ceZsl2EXWB3Jo5bJcK7zv8NxXnsxWPWD9TH3nNMo=", signature)
+    })
+
+    t.Run("Should generate PATCH signature", func(t *testing.T) {
+        signature := authentication.GenerateSignature("PATCH", "docs", "dbs/test-db/colls/test-coll/docs/67890", testDate, config.DefaultAccountKey)
+        assert.Equal(t, "VR1ddfxKBXnoaT+b3WkhyYVc9JmGNpTnaRmyDM44398=", signature)
+    })
 }
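The hard-coded base64 strings in these tests come from the master-key scheme described at the Microsoft link above: verb, resource type, resource id and date are joined into a newline-terminated payload (lowercased where the diff shows it) and signed with HMAC-SHA256 using the base64-decoded account key. A minimal standalone sketch of that scheme, assuming the documented payload layout; the key, date and resource link below are hypothetical, and the emulator's own helper is the GenerateSignature function in its authentication package:

package main

import (
    "crypto/hmac"
    "crypto/sha256"
    "encoding/base64"
    "fmt"
    "strings"
)

// sign follows the documented master-key scheme: HMAC-SHA256 over a
// newline-terminated payload, keyed with the base64-decoded account key.
func sign(verb, resourceType, resourceId, date, masterKey string) string {
    key, err := base64.StdEncoding.DecodeString(masterKey)
    if err != nil {
        panic(err)
    }
    payload := fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n",
        strings.ToLower(verb), strings.ToLower(resourceType), resourceId, strings.ToLower(date), "")
    mac := hmac.New(sha256.New, key)
    mac.Write([]byte(payload))
    return base64.StdEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
    // Hypothetical key and date, for illustration only.
    key := base64.StdEncoding.EncodeToString([]byte("hypothetical-master-key"))
    fmt.Println(sign("GET", "docs", "dbs/testdb/colls/testcoll/docs/doc1", "Tue, 01 Apr 2025 13:37:00 GMT", key))
}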
@@ -30,3 +30,8 @@ var QueryPlanResponse = gin.H{
         },
     },
 }
+
+var UnknownErrorResponse = gin.H{"message": "Unknown error"}
+var NotFoundResponse = gin.H{"message": "NotFound"}
+var ConflictResponse = gin.H{"message": "Conflict"}
+var BadRequestResponse = gin.H{"message": "BadRequest"}
internal/converters/document_to_rowtype.go (new file, 20 lines)
@@ -0,0 +1,20 @@
package converters

import (
    "github.com/pikami/cosmium/internal/datastore"
    memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)

type DocumentToRowTypeIterator struct {
    documents datastore.DocumentIterator
}

func NewDocumentToRowTypeIterator(documents datastore.DocumentIterator) *DocumentToRowTypeIterator {
    return &DocumentToRowTypeIterator{
        documents: documents,
    }
}

func (di *DocumentToRowTypeIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
    return di.documents.Next()
}
internal/datastore/badger_datastore/badger_datastore.go (new file, 66 lines)
@@ -0,0 +1,66 @@
package badgerdatastore

import (
    "time"

    "github.com/dgraph-io/badger/v4"
    "github.com/pikami/cosmium/internal/logger"
)

type BadgerDataStore struct {
    db       *badger.DB
    gcTicker *time.Ticker
}

type BadgerDataStoreOptions struct {
    PersistDataFilePath string
}

func NewBadgerDataStore(options BadgerDataStoreOptions) *BadgerDataStore {
    badgerOpts := badger.DefaultOptions(options.PersistDataFilePath)
    badgerOpts = badgerOpts.WithLogger(newBadgerLogger())
    if options.PersistDataFilePath == "" {
        badgerOpts = badgerOpts.WithInMemory(true)
    }

    db, err := badger.Open(badgerOpts)
    if err != nil {
        panic(err)
    }

    gcTicker := time.NewTicker(5 * time.Minute)

    ds := &BadgerDataStore{
        db:       db,
        gcTicker: gcTicker,
    }

    go ds.runGarbageCollector()

    return ds
}

func (r *BadgerDataStore) Close() {
    if r.gcTicker != nil {
        r.gcTicker.Stop()
        r.gcTicker = nil
    }

    r.db.Close()
    r.db = nil
}

func (r *BadgerDataStore) DumpToJson() (string, error) {
    logger.ErrorLn("Badger datastore does not support state export currently.")
    return "{}", nil
}

func (r *BadgerDataStore) runGarbageCollector() {
    for range r.gcTicker.C {
    again:
        err := r.db.RunValueLogGC(0.7)
        if err == nil {
            goto again
        }
    }
}
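NewBadgerDataStore above switches between an on-disk and an in-memory Badger instance based solely on whether a persistence path is configured, and runs value-log GC on a ticker because Badger does not reclaim value-log space on its own. A small sketch of the same open-and-GC pattern written directly against the badger/v4 API; the path is hypothetical:

package main

import (
    "log"
    "time"

    "github.com/dgraph-io/badger/v4"
)

func main() {
    // In-memory when no path is given, on-disk otherwise (hypothetical path).
    path := "" // e.g. "./cosmium-data"
    opts := badger.DefaultOptions(path)
    if path == "" {
        opts = opts.WithInMemory(true)
    }

    db, err := badger.Open(opts)
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // Periodic value-log GC; RunValueLogGC returns nil only when it rewrote a
    // log file, so it is retried until there is nothing left to collect.
    ticker := time.NewTicker(5 * time.Minute)
    defer ticker.Stop()
    go func() {
        for range ticker.C {
            for db.RunValueLogGC(0.7) == nil {
            }
        }
    }()

    // ... use db.Update / db.View here ...
}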
internal/datastore/badger_datastore/badger_logger.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package badgerdatastore

import (
    "github.com/dgraph-io/badger/v4"
    "github.com/pikami/cosmium/internal/logger"
)

type badgerLogger struct{}

func newBadgerLogger() badger.Logger {
    return &badgerLogger{}
}

func (l *badgerLogger) Errorf(format string, v ...interface{}) {
    logger.Errorf(format, v...)
}

func (l *badgerLogger) Warningf(format string, v ...interface{}) {
    logger.Infof(format, v...)
}

func (l *badgerLogger) Infof(format string, v ...interface{}) {
    logger.Infof(format, v...)
}

func (l *badgerLogger) Debugf(format string, v ...interface{}) {
    logger.Debugf(format, v...)
}
internal/datastore/badger_datastore/collections.go (new file, 103 lines)
@@ -0,0 +1,103 @@
package badgerdatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
    structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
)

func (r *BadgerDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
    exists, err := keyExists(r.db.NewTransaction(false), generateDatabaseKey(databaseId))
    if err != nil {
        logger.ErrorLn("Error while checking if database exists:", err)
        return nil, datastore.Unknown
    }

    if !exists {
        return nil, datastore.StatusNotFound
    }

    colls, status := listByPrefix[datastore.Collection](r.db, generateKey(resourceid.ResourceTypeCollection, databaseId, "", ""))
    if status == datastore.StatusOk {
        return colls, datastore.StatusOk
    }

    return nil, status
}

func (r *BadgerDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
    collectionKey := generateCollectionKey(databaseId, collectionId)

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var collection datastore.Collection
    status := getKey(txn, collectionKey, &collection)

    return collection, status
}

func (r *BadgerDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
    collectionKey := generateCollectionKey(databaseId, collectionId)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    prefixes := []string{
        generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""),
        generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""),
        generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""),
        generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""),
        collectionKey,
    }
    for _, prefix := range prefixes {
        if err := deleteKeysByPrefix(txn, prefix); err != nil {
            return datastore.Unknown
        }
    }

    err := txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func (r *BadgerDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
    collectionKey := generateCollectionKey(databaseId, newCollection.ID)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    collectionExists, err := keyExists(txn, collectionKey)
    if err != nil || collectionExists {
        return datastore.Collection{}, datastore.Conflict
    }

    var database datastore.Database
    status := getKey(txn, generateDatabaseKey(databaseId), &database)
    if status != datastore.StatusOk {
        return datastore.Collection{}, status
    }

    newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)

    newCollection.TimeStamp = time.Now().Unix()
    newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
    newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)

    status = insertKey(txn, collectionKey, newCollection)
    if status != datastore.StatusOk {
        return datastore.Collection{}, status
    }

    return newCollection, datastore.StatusOk
}
internal/datastore/badger_datastore/databases.go (new file, 80 lines)
@@ -0,0 +1,80 @@
package badgerdatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
)

func (r *BadgerDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
    dbs, status := listByPrefix[datastore.Database](r.db, DatabaseKeyPrefix)
    if status == datastore.StatusOk {
        return dbs, datastore.StatusOk
    }

    return nil, status
}

func (r *BadgerDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
    databaseKey := generateDatabaseKey(id)

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var database datastore.Database
    status := getKey(txn, databaseKey, &database)

    return database, status
}

func (r *BadgerDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
    databaseKey := generateDatabaseKey(id)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    prefixes := []string{
        generateKey(resourceid.ResourceTypeCollection, id, "", ""),
        generateKey(resourceid.ResourceTypeDocument, id, "", ""),
        generateKey(resourceid.ResourceTypeTrigger, id, "", ""),
        generateKey(resourceid.ResourceTypeStoredProcedure, id, "", ""),
        generateKey(resourceid.ResourceTypeUserDefinedFunction, id, "", ""),
        databaseKey,
    }
    for _, prefix := range prefixes {
        if err := deleteKeysByPrefix(txn, prefix); err != nil {
            return datastore.Unknown
        }
    }

    err := txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func (r *BadgerDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
    databaseKey := generateDatabaseKey(newDatabase.ID)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    newDatabase.TimeStamp = time.Now().Unix()
    newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
    newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)

    status := insertKey(txn, databaseKey, newDatabase)
    if status != datastore.StatusOk {
        return datastore.Database{}, status
    }

    return newDatabase, datastore.StatusOk
}
internal/datastore/badger_datastore/db_abstractions.go (new file, 204 lines)
@@ -0,0 +1,204 @@
package badgerdatastore

import (
    "github.com/dgraph-io/badger/v4"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
    "github.com/vmihailenco/msgpack/v5"
)

const (
    DatabaseKeyPrefix            = "DB:"
    CollectionKeyPrefix          = "COL:"
    DocumentKeyPrefix            = "DOC:"
    TriggerKeyPrefix             = "TRG:"
    StoredProcedureKeyPrefix     = "SP:"
    UserDefinedFunctionKeyPrefix = "UDF:"
)

func generateKey(
    resourceType resourceid.ResourceType,
    databaseId string,
    collectionId string,
    resourceId string,
) string {
    result := ""

    switch resourceType {
    case resourceid.ResourceTypeDatabase:
        result += DatabaseKeyPrefix
    case resourceid.ResourceTypeCollection:
        result += CollectionKeyPrefix
    case resourceid.ResourceTypeDocument:
        result += DocumentKeyPrefix
    case resourceid.ResourceTypeTrigger:
        result += TriggerKeyPrefix
    case resourceid.ResourceTypeStoredProcedure:
        result += StoredProcedureKeyPrefix
    case resourceid.ResourceTypeUserDefinedFunction:
        result += UserDefinedFunctionKeyPrefix
    }

    if databaseId != "" {
        result += databaseId
    }

    if collectionId != "" {
        result += "/colls/" + collectionId
    }

    if resourceId != "" {
        result += "/" + resourceId
    }

    return result
}

func generateDatabaseKey(databaseId string) string {
    return generateKey(resourceid.ResourceTypeDatabase, databaseId, "", "")
}

func generateCollectionKey(databaseId string, collectionId string) string {
    return generateKey(resourceid.ResourceTypeCollection, databaseId, collectionId, "")
}

func generateDocumentKey(databaseId string, collectionId string, documentId string) string {
    return generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, documentId)
}

func generateTriggerKey(databaseId string, collectionId string, triggerId string) string {
    return generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, triggerId)
}

func generateStoredProcedureKey(databaseId string, collectionId string, storedProcedureId string) string {
    return generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, storedProcedureId)
}

func generateUserDefinedFunctionKey(databaseId string, collectionId string, udfId string) string {
    return generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, udfId)
}

func insertKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
    _, err := txn.Get([]byte(key))
    if err == nil {
        return datastore.Conflict
    }

    if err != badger.ErrKeyNotFound {
        logger.ErrorLn("Error while checking if key exists:", err)
        return datastore.Unknown
    }

    buf, err := msgpack.Marshal(value)
    if err != nil {
        logger.ErrorLn("Error while encoding value:", err)
        return datastore.Unknown
    }

    err = txn.Set([]byte(key), buf)
    if err != nil {
        logger.ErrorLn("Error while setting key:", err)
        return datastore.Unknown
    }

    err = txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func getKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
    item, err := txn.Get([]byte(key))
    if err != nil {
        if err == badger.ErrKeyNotFound {
            return datastore.StatusNotFound
        }
        logger.ErrorLn("Error while getting key:", err)
        return datastore.Unknown
    }

    val, err := item.ValueCopy(nil)
    if err != nil {
        logger.ErrorLn("Error while copying value:", err)
        return datastore.Unknown
    }

    if value == nil {
        logger.ErrorLn("getKey called with nil value")
        return datastore.Unknown
    }

    err = msgpack.Unmarshal(val, &value)
    if err != nil {
        logger.ErrorLn("Error while decoding value:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func keyExists(txn *badger.Txn, key string) (bool, error) {
    _, err := txn.Get([]byte(key))
    if err == nil {
        return true, nil
    }

    if err == badger.ErrKeyNotFound {
        return false, nil
    }

    return false, err
}

func listByPrefix[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
    results := make([]T, 0)

    err := db.View(func(txn *badger.Txn) error {
        opts := badger.DefaultIteratorOptions
        opts.Prefix = []byte(prefix)
        it := txn.NewIterator(opts)
        defer it.Close()

        for it.Rewind(); it.Valid(); it.Next() {
            item := it.Item()
            var entry T

            status := getKey(txn, string(item.Key()), &entry)
            if status != datastore.StatusOk {
                logger.ErrorLn("Failed to retrieve entry:", string(item.Key()))
                continue
            }

            results = append(results, entry)
        }
        return nil
    })

    if err != nil {
        logger.ErrorLn("Error while listing entries:", err)
        return nil, datastore.Unknown
    }

    return results, datastore.StatusOk
}

func deleteKeysByPrefix(txn *badger.Txn, prefix string) error {
    opts := badger.DefaultIteratorOptions
    opts.Prefix = []byte(prefix)
    it := txn.NewIterator(opts)
    defer it.Close()

    for it.Rewind(); it.Valid(); it.Next() {
        key := it.Item().KeyCopy(nil)
        if err := txn.Delete(key); err != nil {
            logger.ErrorLn("Failed to delete key:", string(key), "Error:", err)
            return err
        }
    }

    return nil
}
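These helpers flatten the database/collection/resource hierarchy into prefixed string keys, which is what the prefix scans in listByPrefix and deleteKeysByPrefix rely on. A standalone sketch of the resulting key shapes, reimplementing the format locally because generateKey is unexported; the database and collection names are hypothetical:

package main

import "fmt"

// documentKey mirrors the "DOC:<db>/colls/<coll>/<id>" layout produced by generateKey.
func documentKey(db, coll, id string) string {
    return "DOC:" + db + "/colls/" + coll + "/" + id
}

func main() {
    fmt.Println("DB:orders-db")                           // database key
    fmt.Println("COL:orders-db/colls/orders")             // collection key
    fmt.Println(documentKey("orders-db", "orders", "o1")) // document key
    // Deleting the collection removes every key under the "DOC:orders-db/colls/orders" prefix.
}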
internal/datastore/badger_datastore/document_iterator.go (new file, 58 lines)
@@ -0,0 +1,58 @@
package badgerdatastore

import (
    "github.com/dgraph-io/badger/v4"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/vmihailenco/msgpack/v5"
)

type BadgerDocumentIterator struct {
    txn    *badger.Txn
    it     *badger.Iterator
    prefix string
}

func NewBadgerDocumentIterator(txn *badger.Txn, prefix string) *BadgerDocumentIterator {
    opts := badger.DefaultIteratorOptions
    opts.Prefix = []byte(prefix)

    it := txn.NewIterator(opts)
    it.Rewind()

    return &BadgerDocumentIterator{
        txn:    txn,
        it:     it,
        prefix: prefix,
    }
}

func (i *BadgerDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
    if !i.it.Valid() {
        i.it.Close()
        return datastore.Document{}, datastore.IterEOF
    }

    item := i.it.Item()
    val, err := item.ValueCopy(nil)
    if err != nil {
        logger.ErrorLn("Error while copying value:", err)
        return datastore.Document{}, datastore.Unknown
    }

    current := &datastore.Document{}
    err = msgpack.Unmarshal(val, &current)
    if err != nil {
        logger.ErrorLn("Error while decoding value:", err)
        return datastore.Document{}, datastore.Unknown
    }

    i.it.Next()

    return *current, datastore.StatusOk
}

func (i *BadgerDocumentIterator) Close() {
    i.it.Close()
    i.txn.Discard()
}
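Consumers of this iterator are expected to keep calling Next until it reports IterEOF (at which point the Badger iterator is already closed), and to call Close to discard the read transaction if they stop early. A minimal sketch of that consumption loop against a hypothetical iterator with the same Next/Close contract; the status values stand in for the datastore package's constants:

package iteratorsketch

import "fmt"

type status int

const (
    statusOk status = iota
    iterEOF
    unknown
)

type document = map[string]interface{}

// docIterator mirrors the Next/Close contract of the Badger-backed iterator above.
type docIterator interface {
    Next() (document, status)
    Close()
}

// drain pulls documents until the iterator reports end-of-stream or an error.
func drain(it docIterator) ([]document, error) {
    defer it.Close()
    var out []document
    for {
        doc, st := it.Next()
        switch st {
        case statusOk:
            out = append(out, doc)
        case iterEOF:
            return out, nil
        default:
            return nil, fmt.Errorf("iterator failed with status %d", st)
        }
    }
}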
internal/datastore/badger_datastore/documents.go (new file, 127 lines)
@@ -0,0 +1,127 @@
package badgerdatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
)

func (r *BadgerDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
    if err != nil || !dbExists {
        return nil, datastore.StatusNotFound
    }

    collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
    if err != nil || !collExists {
        return nil, datastore.StatusNotFound
    }

    docs, status := listByPrefix[datastore.Document](r.db, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
    if status == datastore.StatusOk {
        return docs, datastore.StatusOk
    }

    return nil, status
}

func (r *BadgerDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(false)

    dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
    if err != nil || !dbExists {
        return nil, datastore.StatusNotFound
    }

    collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
    if err != nil || !collExists {
        return nil, datastore.StatusNotFound
    }

    iter := NewBadgerDocumentIterator(txn, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
    return iter, datastore.StatusOk
}

func (r *BadgerDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
    documentKey := generateDocumentKey(databaseId, collectionId, documentId)

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var document datastore.Document
    status := getKey(txn, documentKey, &document)

    return document, status
}

func (r *BadgerDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
    documentKey := generateDocumentKey(databaseId, collectionId, documentId)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    exists, err := keyExists(txn, documentKey)
    if err != nil {
        return datastore.Unknown
    }
    if !exists {
        return datastore.StatusNotFound
    }

    err = txn.Delete([]byte(documentKey))
    if err != nil {
        logger.ErrorLn("Error while deleting document:", err)
        return datastore.Unknown
    }

    err = txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func (r *BadgerDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    var database datastore.Database
    status := getKey(txn, generateDatabaseKey(databaseId), &database)
    if status != datastore.StatusOk {
        return datastore.Document{}, status
    }

    var collection datastore.Collection
    status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
    if status != datastore.StatusOk {
        return datastore.Document{}, status
    }

    var ok bool
    var documentId string
    if documentId, ok = document["id"].(string); !ok || documentId == "" {
        documentId = fmt.Sprint(uuid.New())
        document["id"] = documentId
    }

    document["_ts"] = time.Now().Unix()
    document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
    document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
    document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])

    status = insertKey(txn, generateDocumentKey(databaseId, collectionId, documentId), document)
    if status != datastore.StatusOk {
        return datastore.Document{}, status
    }

    return document, datastore.StatusOk
}
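Before persisting, CreateDocument stamps the Cosmos-style system properties onto the raw document map: an id when one is missing, plus _ts, _rid, _etag and _self. A small sketch of that stamping step in isolation; the rid and self-link arguments are hypothetical placeholders for the values the internal resourceid helpers would produce:

package main

import (
    "fmt"
    "time"

    "github.com/google/uuid"
)

// stampSystemProperties mimics the fields CreateDocument fills in before insert.
func stampSystemProperties(doc map[string]interface{}, rid, selfLink string) map[string]interface{} {
    if id, ok := doc["id"].(string); !ok || id == "" {
        doc["id"] = uuid.New().String()
    }
    doc["_ts"] = time.Now().Unix()
    doc["_rid"] = rid
    doc["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
    doc["_self"] = selfLink
    return doc
}

func main() {
    doc := stampSystemProperties(
        map[string]interface{}{"name": "example"},
        "hypothetical-rid",
        "dbs/x/colls/y/docs/hypothetical-rid/",
    )
    fmt.Println(doc)
}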
internal/datastore/badger_datastore/partition_key_ranges.go (new file, 53 lines)
@@ -0,0 +1,53 @@
package badgerdatastore

import (
    "fmt"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
)

// I have no idea what this is tbh
func (r *BadgerDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
    databaseRid := databaseId
    collectionRid := collectionId
    var timestamp int64 = 0

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var database datastore.Database
    status := getKey(txn, generateDatabaseKey(databaseId), &database)
    if status != datastore.StatusOk {
        databaseRid = database.ResourceID
    }

    var collection datastore.Collection
    status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
    if status != datastore.StatusOk {
        collectionRid = collection.ResourceID
        timestamp = collection.TimeStamp
    }

    pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
    pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
    etag := fmt.Sprintf("\"%s\"", uuid.New())

    return []datastore.PartitionKeyRange{
        {
            ResourceID:         pkrResourceId,
            ID:                 "0",
            Etag:               etag,
            MinInclusive:       "",
            MaxExclusive:       "FF",
            RidPrefix:          0,
            Self:               pkrSelf,
            ThroughputFraction: 1,
            Status:             "online",
            Parents:            []interface{}{},
            TimeStamp:          timestamp,
            Lsn:                17,
        },
    }, datastore.StatusOk
}
internal/datastore/badger_datastore/stored_procedures.go (new file, 107 lines)
@@ -0,0 +1,107 @@
package badgerdatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
)

func (r *BadgerDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
    if err != nil || !dbExists {
        return nil, datastore.StatusNotFound
    }

    collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
    if err != nil || !collExists {
        return nil, datastore.StatusNotFound
    }

    storedProcedures, status := listByPrefix[datastore.StoredProcedure](r.db, generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""))
    if status == datastore.StatusOk {
        return storedProcedures, datastore.StatusOk
    }

    return nil, status
}

func (r *BadgerDataStore) GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
    storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var storedProcedure datastore.StoredProcedure
    status := getKey(txn, storedProcedureKey, &storedProcedure)

    return storedProcedure, status
}

func (r *BadgerDataStore) DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) datastore.DataStoreStatus {
    storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    exists, err := keyExists(txn, storedProcedureKey)
    if err != nil {
        return datastore.Unknown
    }
    if !exists {
        return datastore.StatusNotFound
    }

    err = txn.Delete([]byte(storedProcedureKey))
    if err != nil {
        logger.ErrorLn("Error while deleting stored procedure:", err)
        return datastore.Unknown
    }

    err = txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func (r *BadgerDataStore) CreateStoredProcedure(databaseId string, collectionId string, storedProcedure datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    if storedProcedure.ID == "" {
        return datastore.StoredProcedure{}, datastore.BadRequest
    }

    var database datastore.Database
    status := getKey(txn, generateDatabaseKey(databaseId), &database)
    if status != datastore.StatusOk {
        return datastore.StoredProcedure{}, status
    }

    var collection datastore.Collection
    status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
    if status != datastore.StatusOk {
        return datastore.StoredProcedure{}, status
    }

    storedProcedure.TimeStamp = time.Now().Unix()
    storedProcedure.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
    storedProcedure.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    storedProcedure.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, storedProcedure.ResourceID)

    status = insertKey(txn, generateStoredProcedureKey(databaseId, collectionId, storedProcedure.ID), storedProcedure)
    if status != datastore.StatusOk {
        return datastore.StoredProcedure{}, status
    }

    return storedProcedure, datastore.StatusOk
}
internal/datastore/badger_datastore/triggers.go (new file, 107 lines)
@@ -0,0 +1,107 @@
package badgerdatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
)

func (r *BadgerDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
    if err != nil || !dbExists {
        return nil, datastore.StatusNotFound
    }

    collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
    if err != nil || !collExists {
        return nil, datastore.StatusNotFound
    }

    triggers, status := listByPrefix[datastore.Trigger](r.db, generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""))
    if status == datastore.StatusOk {
        return triggers, datastore.StatusOk
    }

    return nil, status
}

func (r *BadgerDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
    triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var trigger datastore.Trigger
    status := getKey(txn, triggerKey, &trigger)

    return trigger, status
}

func (r *BadgerDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
    triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    exists, err := keyExists(txn, triggerKey)
    if err != nil {
        return datastore.Unknown
    }
    if !exists {
        return datastore.StatusNotFound
    }

    err = txn.Delete([]byte(triggerKey))
    if err != nil {
        logger.ErrorLn("Error while deleting trigger:", err)
        return datastore.Unknown
    }

    err = txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func (r *BadgerDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    if trigger.ID == "" {
        return datastore.Trigger{}, datastore.BadRequest
    }

    var database datastore.Database
    status := getKey(txn, generateDatabaseKey(databaseId), &database)
    if status != datastore.StatusOk {
        return datastore.Trigger{}, status
    }

    var collection datastore.Collection
    status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
    if status != datastore.StatusOk {
        return datastore.Trigger{}, status
    }

    trigger.TimeStamp = time.Now().Unix()
    trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
    trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)

    status = insertKey(txn, generateTriggerKey(databaseId, collectionId, trigger.ID), trigger)
    if status != datastore.StatusOk {
        return datastore.Trigger{}, status
    }

    return trigger, datastore.StatusOk
}
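The Badger-backed CRUD above leans on small helpers (keyExists, getKey, insertKey, listByPrefix, generate*Key) that are defined elsewhere in this changeset and not shown here. Purely as orientation, a generic prefix scan in the spirit of listByPrefix could be sketched as below; the Badger import path and the assumption that values are stored as JSON are guesses, not taken from this diff.

package badgerdatastore

import (
    "encoding/json"

    badger "github.com/dgraph-io/badger/v4" // assumed import path, not confirmed by this diff
    "github.com/pikami/cosmium/internal/datastore"
)

// listByPrefixSketch is a hypothetical stand-in for the real listByPrefix helper:
// it scans every key under the given prefix and decodes each value into T.
func listByPrefixSketch[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
    results := make([]T, 0)

    err := db.View(func(txn *badger.Txn) error {
        opts := badger.DefaultIteratorOptions
        opts.Prefix = []byte(prefix)

        it := txn.NewIterator(opts)
        defer it.Close()

        for it.Rewind(); it.Valid(); it.Next() {
            // Decode the value of each matching key (assumes JSON-encoded values).
            err := it.Item().Value(func(val []byte) error {
                var entry T
                if err := json.Unmarshal(val, &entry); err != nil {
                    return err
                }
                results = append(results, entry)
                return nil
            })
            if err != nil {
                return err
            }
        }
        return nil
    })
    if err != nil {
        return nil, datastore.Unknown
    }

    return results, datastore.StatusOk
}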
internal/datastore/badger_datastore/user_defined_functions.go (new file, 107 lines)
@@ -0,0 +1,107 @@
package badgerdatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
    "github.com/pikami/cosmium/internal/resourceid"
)

func (r *BadgerDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
    if err != nil || !dbExists {
        return nil, datastore.StatusNotFound
    }

    collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
    if err != nil || !collExists {
        return nil, datastore.StatusNotFound
    }

    udfs, status := listByPrefix[datastore.UserDefinedFunction](r.db, generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""))
    if status == datastore.StatusOk {
        return udfs, datastore.StatusOk
    }

    return nil, status
}

func (r *BadgerDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
    udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)

    txn := r.db.NewTransaction(false)
    defer txn.Discard()

    var udf datastore.UserDefinedFunction
    status := getKey(txn, udfKey, &udf)

    return udf, status
}

func (r *BadgerDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
    udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)

    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    exists, err := keyExists(txn, udfKey)
    if err != nil {
        return datastore.Unknown
    }
    if !exists {
        return datastore.StatusNotFound
    }

    err = txn.Delete([]byte(udfKey))
    if err != nil {
        logger.ErrorLn("Error while deleting user defined function:", err)
        return datastore.Unknown
    }

    err = txn.Commit()
    if err != nil {
        logger.ErrorLn("Error while committing transaction:", err)
        return datastore.Unknown
    }

    return datastore.StatusOk
}

func (r *BadgerDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
    txn := r.db.NewTransaction(true)
    defer txn.Discard()

    if udf.ID == "" {
        return datastore.UserDefinedFunction{}, datastore.BadRequest
    }

    var database datastore.Database
    status := getKey(txn, generateDatabaseKey(databaseId), &database)
    if status != datastore.StatusOk {
        return datastore.UserDefinedFunction{}, status
    }

    var collection datastore.Collection
    status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
    if status != datastore.StatusOk {
        return datastore.UserDefinedFunction{}, status
    }

    udf.TimeStamp = time.Now().Unix()
    udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
    udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)

    status = insertKey(txn, generateUserDefinedFunctionKey(databaseId, collectionId, udf.ID), udf)
    if status != datastore.StatusOk {
        return datastore.UserDefinedFunction{}, status
    }

    return udf, datastore.StatusOk
}
internal/datastore/datastore.go (new file, 44 lines)
@@ -0,0 +1,44 @@
package datastore

type DataStore interface {
    GetAllDatabases() ([]Database, DataStoreStatus)
    GetDatabase(databaseId string) (Database, DataStoreStatus)
    DeleteDatabase(databaseId string) DataStoreStatus
    CreateDatabase(newDatabase Database) (Database, DataStoreStatus)

    GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
    GetCollection(databaseId string, collectionId string) (Collection, DataStoreStatus)
    DeleteCollection(databaseId string, collectionId string) DataStoreStatus
    CreateCollection(databaseId string, newCollection Collection) (Collection, DataStoreStatus)

    GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
    GetDocumentIterator(databaseId string, collectionId string) (DocumentIterator, DataStoreStatus)
    GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
    DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
    CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (Document, DataStoreStatus)

    GetAllTriggers(databaseId string, collectionId string) ([]Trigger, DataStoreStatus)
    GetTrigger(databaseId string, collectionId string, triggerId string) (Trigger, DataStoreStatus)
    DeleteTrigger(databaseId string, collectionId string, triggerId string) DataStoreStatus
    CreateTrigger(databaseId string, collectionId string, trigger Trigger) (Trigger, DataStoreStatus)

    GetAllStoredProcedures(databaseId string, collectionId string) ([]StoredProcedure, DataStoreStatus)
    GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (StoredProcedure, DataStoreStatus)
    DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) DataStoreStatus
    CreateStoredProcedure(databaseId string, collectionId string, storedProcedure StoredProcedure) (StoredProcedure, DataStoreStatus)

    GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]UserDefinedFunction, DataStoreStatus)
    GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (UserDefinedFunction, DataStoreStatus)
    DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) DataStoreStatus
    CreateUserDefinedFunction(databaseId string, collectionId string, udf UserDefinedFunction) (UserDefinedFunction, DataStoreStatus)

    GetPartitionKeyRanges(databaseId string, collectionId string) ([]PartitionKeyRange, DataStoreStatus)

    Close()
    DumpToJson() (string, error)
}

type DocumentIterator interface {
    Next() (Document, DataStoreStatus)
    Close()
}
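Because both backends in this changeset satisfy this interface, calling code can be written once against datastore.DataStore. A minimal illustrative sketch follows; the seedCollection helper is invented for this example and only uses methods declared above.

package example

import (
    "fmt"

    "github.com/pikami/cosmium/internal/datastore"
)

// seedCollection works against any DataStore implementation: it creates a
// database and collection (tolerating Conflict if they already exist) and
// inserts a single document.
func seedCollection(store datastore.DataStore, dbId string, collId string) error {
    if _, status := store.CreateDatabase(datastore.Database{ID: dbId}); status != datastore.StatusOk && status != datastore.Conflict {
        return fmt.Errorf("create database failed with status %d", status)
    }

    if _, status := store.CreateCollection(dbId, datastore.Collection{ID: collId}); status != datastore.StatusOk && status != datastore.Conflict {
        return fmt.Errorf("create collection failed with status %d", status)
    }

    doc := map[string]interface{}{"id": "doc1", "value": 42}
    if _, status := store.CreateDocument(dbId, collId, doc); status != datastore.StatusOk {
        return fmt.Errorf("create document failed with status %d", status)
    }

    return nil
}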
internal/datastore/json_datastore/array_document_iterator.go (new file, 21 lines)
@@ -0,0 +1,21 @@
package jsondatastore

import "github.com/pikami/cosmium/internal/datastore"

type ArrayDocumentIterator struct {
    documents []datastore.Document
    index     int
}

func (i *ArrayDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
    i.index++
    if i.index >= len(i.documents) {
        return datastore.Document{}, datastore.StatusNotFound
    }

    return i.documents[i.index], datastore.StatusOk
}

func (i *ArrayDocumentIterator) Close() {
    i.documents = []datastore.Document{}
}
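A caller drains the iterator by looping until a non-OK status comes back; ArrayDocumentIterator signals exhaustion with StatusNotFound. A small usage sketch, with illustrative names only:

package example

import (
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
)

// drainDocuments walks a DocumentIterator until it reports a non-OK status
// and collects the documents it yields.
func drainDocuments(store datastore.DataStore, dbId string, collId string) []datastore.Document {
    iter, status := store.GetDocumentIterator(dbId, collId)
    if status != datastore.StatusOk {
        logger.ErrorLn("Could not get document iterator, status:", status)
        return nil
    }
    defer iter.Close()

    var docs []datastore.Document
    for {
        doc, status := iter.Next()
        if status != datastore.StatusOk {
            break
        }
        docs = append(docs, doc)
    }

    return docs
}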
internal/datastore/json_datastore/collections.go (new file, 89 lines)
@@ -0,0 +1,89 @@
package jsondatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
    structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
    "golang.org/x/exp/maps"
)

func (r *JsonDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return make([]datastore.Collection, 0), datastore.StatusNotFound
    }

    return maps.Values(r.storeState.Collections[databaseId]), datastore.StatusOk
}

func (r *JsonDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.Collection{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.Collection{}, datastore.StatusNotFound
    }

    return r.storeState.Collections[databaseId][collectionId], datastore.StatusOk
}

func (r *JsonDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StatusNotFound
    }

    delete(r.storeState.Collections[databaseId], collectionId)
    delete(r.storeState.Documents[databaseId], collectionId)
    delete(r.storeState.Triggers[databaseId], collectionId)
    delete(r.storeState.StoredProcedures[databaseId], collectionId)
    delete(r.storeState.UserDefinedFunctions[databaseId], collectionId)

    return datastore.StatusOk
}

func (r *JsonDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    var ok bool
    var database datastore.Database
    if database, ok = r.storeState.Databases[databaseId]; !ok {
        return datastore.Collection{}, datastore.StatusNotFound
    }

    if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
        return datastore.Collection{}, datastore.Conflict
    }

    newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)

    newCollection.TimeStamp = time.Now().Unix()
    newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
    newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)

    r.storeState.Collections[databaseId][newCollection.ID] = newCollection
    r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]datastore.Document)
    r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]datastore.Trigger)
    r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]datastore.StoredProcedure)
    r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]datastore.UserDefinedFunction)

    return newCollection, datastore.StatusOk
}
internal/datastore/json_datastore/databases.go (new file, 70 lines)
@@ -0,0 +1,70 @@
package jsondatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
    "golang.org/x/exp/maps"
)

func (r *JsonDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    return maps.Values(r.storeState.Databases), datastore.StatusOk
}

func (r *JsonDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if database, ok := r.storeState.Databases[id]; ok {
        return database, datastore.StatusOk
    }

    return datastore.Database{}, datastore.StatusNotFound
}

func (r *JsonDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[id]; !ok {
        return datastore.StatusNotFound
    }

    delete(r.storeState.Databases, id)
    delete(r.storeState.Collections, id)
    delete(r.storeState.Documents, id)
    delete(r.storeState.Triggers, id)
    delete(r.storeState.StoredProcedures, id)
    delete(r.storeState.UserDefinedFunctions, id)

    return datastore.StatusOk
}

func (r *JsonDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
        return datastore.Database{}, datastore.Conflict
    }

    newDatabase.TimeStamp = time.Now().Unix()
    newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
    newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)

    r.storeState.Databases[newDatabase.ID] = newDatabase
    r.storeState.Collections[newDatabase.ID] = make(map[string]datastore.Collection)
    r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]datastore.Document)
    r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]datastore.Trigger)
    r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]datastore.StoredProcedure)
    r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]datastore.UserDefinedFunction)

    return newDatabase, datastore.StatusOk
}
internal/datastore/json_datastore/documents.go (new file, 113 lines)
@@ -0,0 +1,113 @@
package jsondatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
    "golang.org/x/exp/maps"
)

func (r *JsonDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return make([]datastore.Document, 0), datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return make([]datastore.Document, 0), datastore.StatusNotFound
    }

    return maps.Values(r.storeState.Documents[databaseId][collectionId]), datastore.StatusOk
}

func (r *JsonDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.Document{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.Document{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
        return datastore.Document{}, datastore.StatusNotFound
    }

    return r.storeState.Documents[databaseId][collectionId][documentId], datastore.StatusOk
}

func (r *JsonDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
        return datastore.StatusNotFound
    }

    delete(r.storeState.Documents[databaseId][collectionId], documentId)

    return datastore.StatusOk
}

func (r *JsonDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    var ok bool
    var documentId string
    var database datastore.Database
    var collection datastore.Collection
    if documentId, ok = document["id"].(string); !ok || documentId == "" {
        documentId = fmt.Sprint(uuid.New())
        document["id"] = documentId
    }

    if database, ok = r.storeState.Databases[databaseId]; !ok {
        return datastore.Document{}, datastore.StatusNotFound
    }

    if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.Document{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
        return datastore.Document{}, datastore.Conflict
    }

    document["_ts"] = time.Now().Unix()
    document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
    document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
    document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])

    r.storeState.Documents[databaseId][collectionId][documentId] = document

    return document, datastore.StatusOk
}

func (r *JsonDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
    documents, status := r.GetAllDocuments(databaseId, collectionId)
    if status != datastore.StatusOk {
        return nil, status
    }

    return &ArrayDocumentIterator{
        documents: documents,
        index:     -1,
    }, datastore.StatusOk
}
internal/datastore/json_datastore/map_datastore.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package jsondatastore

import "github.com/pikami/cosmium/internal/datastore"

type JsonDataStore struct {
    storeState State

    initialDataFilePath string
    persistDataFilePath string
}

type JsonDataStoreOptions struct {
    InitialDataFilePath string
    PersistDataFilePath string
}

func NewJsonDataStore(options JsonDataStoreOptions) *JsonDataStore {
    dataStore := &JsonDataStore{
        storeState: State{
            Databases:            make(map[string]datastore.Database),
            Collections:          make(map[string]map[string]datastore.Collection),
            Documents:            make(map[string]map[string]map[string]datastore.Document),
            Triggers:             make(map[string]map[string]map[string]datastore.Trigger),
            StoredProcedures:     make(map[string]map[string]map[string]datastore.StoredProcedure),
            UserDefinedFunctions: make(map[string]map[string]map[string]datastore.UserDefinedFunction),
        },
        initialDataFilePath: options.InitialDataFilePath,
        persistDataFilePath: options.PersistDataFilePath,
    }

    dataStore.InitializeDataStore()

    return dataStore
}
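A minimal wiring sketch for the constructor; the file path is a placeholder, and both options may be left empty for a purely in-memory store.

package example

import (
    "github.com/pikami/cosmium/internal/datastore"
    jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
)

// newPersistentStore builds a JSON-backed store that reloads and saves its
// state from the given file; PersistDataFilePath is written back on Close().
func newPersistentStore() datastore.DataStore {
    store := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
        PersistDataFilePath: "./cosmium-state.json", // placeholder path
    })

    return store
}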
@@ -1,33 +1,36 @@
-package repositories
+package jsondatastore
 
 import (
     "fmt"
 
     "github.com/google/uuid"
-    repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+    "github.com/pikami/cosmium/internal/datastore"
     "github.com/pikami/cosmium/internal/resourceid"
 )
 
 // I have no idea what this is tbh
-func GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
+func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
+    r.storeState.RLock()
+    defer r.storeState.RUnlock()
+
     databaseRid := databaseId
     collectionRid := collectionId
     var timestamp int64 = 0
 
-    if database, ok := storeState.Databases[databaseId]; !ok {
+    if database, ok := r.storeState.Databases[databaseId]; !ok {
         databaseRid = database.ResourceID
     }
 
-    if collection, ok := storeState.Collections[databaseId][collectionId]; !ok {
+    if collection, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
         collectionRid = collection.ResourceID
         timestamp = collection.TimeStamp
     }
 
-    pkrResourceId := resourceid.NewCombined(databaseRid, collectionRid, resourceid.New())
+    pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
     pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
     etag := fmt.Sprintf("\"%s\"", uuid.New())
 
-    return []repositorymodels.PartitionKeyRange{
+    return []datastore.PartitionKeyRange{
         {
             ResourceID: pkrResourceId,
             ID:         "0",
@@ -42,5 +45,5 @@ func GetPartitionKeyRanges(databaseId string, collectionId string) ([]repository
             TimeStamp: timestamp,
             Lsn:       17,
         },
-    }, repositorymodels.StatusOk
+    }, datastore.StatusOk
 }
internal/datastore/json_datastore/state.go (new file, 236 lines)
@@ -0,0 +1,236 @@
package jsondatastore

import (
    "encoding/json"
    "log"
    "os"
    "reflect"
    "sync"

    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/logger"
)

type State struct {
    sync.RWMutex

    // Map databaseId -> Database
    Databases map[string]datastore.Database `json:"databases"`

    // Map databaseId -> collectionId -> Collection
    Collections map[string]map[string]datastore.Collection `json:"collections"`

    // Map databaseId -> collectionId -> documentId -> Documents
    Documents map[string]map[string]map[string]datastore.Document `json:"documents"`

    // Map databaseId -> collectionId -> triggerId -> Trigger
    Triggers map[string]map[string]map[string]datastore.Trigger `json:"triggers"`

    // Map databaseId -> collectionId -> spId -> StoredProcedure
    StoredProcedures map[string]map[string]map[string]datastore.StoredProcedure `json:"sprocs"`

    // Map databaseId -> collectionId -> udfId -> UserDefinedFunction
    UserDefinedFunctions map[string]map[string]map[string]datastore.UserDefinedFunction `json:"udfs"`
}

func (r *JsonDataStore) InitializeDataStore() {
    if r.initialDataFilePath != "" {
        r.LoadStateFS(r.initialDataFilePath)
        return
    }

    if r.persistDataFilePath != "" {
        stat, err := os.Stat(r.persistDataFilePath)
        if err != nil {
            return
        }

        if stat.IsDir() {
            logger.ErrorLn("Argument '-Persist' must be a path to file, not a directory.")
            os.Exit(1)
        }

        r.LoadStateFS(r.persistDataFilePath)
        return
    }
}

func (r *JsonDataStore) LoadStateFS(filePath string) {
    data, err := os.ReadFile(filePath)
    if err != nil {
        log.Fatalf("Error reading state JSON file: %v", err)
        return
    }

    err = r.LoadStateJSON(string(data))
    if err != nil {
        log.Fatalf("Error unmarshalling state JSON: %v", err)
    }
}

func (r *JsonDataStore) LoadStateJSON(jsonData string) error {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    var state State
    if err := json.Unmarshal([]byte(jsonData), &state); err != nil {
        return err
    }

    r.storeState.Collections = state.Collections
    r.storeState.Databases = state.Databases
    r.storeState.Documents = state.Documents

    r.ensureStoreStateNoNullReferences()

    logger.InfoLn("Loaded state:")
    logger.Infof("Databases: %d\n", getLength(r.storeState.Databases))
    logger.Infof("Collections: %d\n", getLength(r.storeState.Collections))
    logger.Infof("Documents: %d\n", getLength(r.storeState.Documents))
    logger.Infof("Triggers: %d\n", getLength(r.storeState.Triggers))
    logger.Infof("Stored procedures: %d\n", getLength(r.storeState.StoredProcedures))
    logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))

    return nil
}

func (r *JsonDataStore) SaveStateFS(filePath string) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    data, err := json.MarshalIndent(r.storeState, "", "\t")
    if err != nil {
        logger.Errorf("Failed to save state: %v\n", err)
        return
    }

    os.WriteFile(filePath, data, os.ModePerm)

    logger.InfoLn("Saved state:")
    logger.Infof("Databases: %d\n", getLength(r.storeState.Databases))
    logger.Infof("Collections: %d\n", getLength(r.storeState.Collections))
    logger.Infof("Documents: %d\n", getLength(r.storeState.Documents))
    logger.Infof("Triggers: %d\n", getLength(r.storeState.Triggers))
    logger.Infof("Stored procedures: %d\n", getLength(r.storeState.StoredProcedures))
    logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
}

func (r *JsonDataStore) DumpToJson() (string, error) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    data, err := json.MarshalIndent(r.storeState, "", "\t")
    if err != nil {
        logger.Errorf("Failed to serialize state: %v\n", err)
        return "", err
    }

    return string(data), nil
}

func (r *JsonDataStore) Close() {
    if r.persistDataFilePath != "" {
        r.SaveStateFS(r.persistDataFilePath)
    }
}

func getLength(v interface{}) int {
    switch v.(type) {
    case datastore.Database,
        datastore.Collection,
        datastore.Document,
        datastore.Trigger,
        datastore.StoredProcedure,
        datastore.UserDefinedFunction:
        return 1
    }

    rv := reflect.ValueOf(v)
    if rv.Kind() != reflect.Map {
        return -1
    }

    count := 0
    for _, key := range rv.MapKeys() {
        if rv.MapIndex(key).Kind() == reflect.Map {
            count += getLength(rv.MapIndex(key).Interface())
        } else {
            count++
        }
    }

    return count
}

func (r *JsonDataStore) ensureStoreStateNoNullReferences() {
    if r.storeState.Databases == nil {
        r.storeState.Databases = make(map[string]datastore.Database)
    }

    if r.storeState.Collections == nil {
        r.storeState.Collections = make(map[string]map[string]datastore.Collection)
    }

    if r.storeState.Documents == nil {
        r.storeState.Documents = make(map[string]map[string]map[string]datastore.Document)
    }

    if r.storeState.Triggers == nil {
        r.storeState.Triggers = make(map[string]map[string]map[string]datastore.Trigger)
    }

    if r.storeState.StoredProcedures == nil {
        r.storeState.StoredProcedures = make(map[string]map[string]map[string]datastore.StoredProcedure)
    }

    if r.storeState.UserDefinedFunctions == nil {
        r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]datastore.UserDefinedFunction)
    }

    for database := range r.storeState.Databases {
        if r.storeState.Collections[database] == nil {
            r.storeState.Collections[database] = make(map[string]datastore.Collection)
        }

        if r.storeState.Documents[database] == nil {
            r.storeState.Documents[database] = make(map[string]map[string]datastore.Document)
        }

        if r.storeState.Triggers[database] == nil {
            r.storeState.Triggers[database] = make(map[string]map[string]datastore.Trigger)
        }

        if r.storeState.StoredProcedures[database] == nil {
            r.storeState.StoredProcedures[database] = make(map[string]map[string]datastore.StoredProcedure)
        }

        if r.storeState.UserDefinedFunctions[database] == nil {
            r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]datastore.UserDefinedFunction)
        }

        for collection := range r.storeState.Collections[database] {
            if r.storeState.Documents[database][collection] == nil {
                r.storeState.Documents[database][collection] = make(map[string]datastore.Document)
            }

            for document := range r.storeState.Documents[database][collection] {
                if r.storeState.Documents[database][collection][document] == nil {
                    delete(r.storeState.Documents[database][collection], document)
                }
            }

            if r.storeState.Triggers[database][collection] == nil {
                r.storeState.Triggers[database][collection] = make(map[string]datastore.Trigger)
            }

            if r.storeState.StoredProcedures[database][collection] == nil {
                r.storeState.StoredProcedures[database][collection] = make(map[string]datastore.StoredProcedure)
            }

            if r.storeState.UserDefinedFunctions[database][collection] == nil {
                r.storeState.UserDefinedFunctions[database][collection] = make(map[string]datastore.UserDefinedFunction)
            }
        }
    }
}
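Since DumpToJson and LoadStateJSON are symmetric, a state snapshot can be carried from one store instance to another. A rough sketch of that round trip, with error handling trimmed to the essentials:

package example

import (
    jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
    "github.com/pikami/cosmium/internal/logger"
)

// cloneStore copies the full state of one JSON-backed store into a fresh one
// by round-tripping through the JSON snapshot shown above.
func cloneStore(src *jsondatastore.JsonDataStore) *jsondatastore.JsonDataStore {
    snapshot, err := src.DumpToJson()
    if err != nil {
        logger.ErrorLn("Could not serialize state:", err)
        return nil
    }

    dst := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
    if err := dst.LoadStateJSON(snapshot); err != nil {
        logger.ErrorLn("Could not load state:", err)
        return nil
    }

    return dst
}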
internal/datastore/json_datastore/stored_procedures.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package jsondatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
    "golang.org/x/exp/maps"
)

func (r *JsonDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), datastore.StatusOk
}

func (r *JsonDataStore) GetStoredProcedure(databaseId string, collectionId string, spId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.StoredProcedure{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StoredProcedure{}, datastore.StatusNotFound
    }

    if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
        return sp, datastore.StatusOk
    }

    return datastore.StoredProcedure{}, datastore.StatusNotFound
}

func (r *JsonDataStore) DeleteStoredProcedure(databaseId string, collectionId string, spId string) datastore.DataStoreStatus {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
        return datastore.StatusNotFound
    }

    delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)

    return datastore.StatusOk
}

func (r *JsonDataStore) CreateStoredProcedure(databaseId string, collectionId string, sp datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    var ok bool
    var database datastore.Database
    var collection datastore.Collection
    if sp.ID == "" {
        return datastore.StoredProcedure{}, datastore.BadRequest
    }

    if database, ok = r.storeState.Databases[databaseId]; !ok {
        return datastore.StoredProcedure{}, datastore.StatusNotFound
    }

    if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StoredProcedure{}, datastore.StatusNotFound
    }

    if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
        return datastore.StoredProcedure{}, datastore.Conflict
    }

    sp.TimeStamp = time.Now().Unix()
    sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
    sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)

    r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp

    return sp, datastore.StatusOk
}
internal/datastore/json_datastore/triggers.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package jsondatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
    "golang.org/x/exp/maps"
)

func (r *JsonDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    return maps.Values(r.storeState.Triggers[databaseId][collectionId]), datastore.StatusOk
}

func (r *JsonDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.Trigger{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.Trigger{}, datastore.StatusNotFound
    }

    if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
        return trigger, datastore.StatusOk
    }

    return datastore.Trigger{}, datastore.StatusNotFound
}

func (r *JsonDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
        return datastore.StatusNotFound
    }

    delete(r.storeState.Triggers[databaseId][collectionId], triggerId)

    return datastore.StatusOk
}

func (r *JsonDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    var ok bool
    var database datastore.Database
    var collection datastore.Collection
    if trigger.ID == "" {
        return datastore.Trigger{}, datastore.BadRequest
    }

    if database, ok = r.storeState.Databases[databaseId]; !ok {
        return datastore.Trigger{}, datastore.StatusNotFound
    }

    if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.Trigger{}, datastore.StatusNotFound
    }

    if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
        return datastore.Trigger{}, datastore.Conflict
    }

    trigger.TimeStamp = time.Now().Unix()
    trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
    trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)

    r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger

    return trigger, datastore.StatusOk
}
internal/datastore/json_datastore/user_defined_functions.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package jsondatastore

import (
    "fmt"
    "time"

    "github.com/google/uuid"
    "github.com/pikami/cosmium/internal/datastore"
    "github.com/pikami/cosmium/internal/resourceid"
    "golang.org/x/exp/maps"
)

func (r *JsonDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), datastore.StatusOk
}

func (r *JsonDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
    r.storeState.RLock()
    defer r.storeState.RUnlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.UserDefinedFunction{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.UserDefinedFunction{}, datastore.StatusNotFound
    }

    if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
        return udf, datastore.StatusOk
    }

    return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}

func (r *JsonDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    if _, ok := r.storeState.Databases[databaseId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.StatusNotFound
    }

    if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
        return datastore.StatusNotFound
    }

    delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)

    return datastore.StatusOk
}

func (r *JsonDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
    r.storeState.Lock()
    defer r.storeState.Unlock()

    var ok bool
    var database datastore.Database
    var collection datastore.Collection
    if udf.ID == "" {
        return datastore.UserDefinedFunction{}, datastore.BadRequest
    }

    if database, ok = r.storeState.Databases[databaseId]; !ok {
        return datastore.UserDefinedFunction{}, datastore.StatusNotFound
    }

    if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
        return datastore.UserDefinedFunction{}, datastore.StatusNotFound
    }

    if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
        return datastore.UserDefinedFunction{}, datastore.Conflict
    }

    udf.TimeStamp = time.Now().Unix()
    udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
    udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
    udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)

    r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf

    return udf, datastore.StatusOk
}
@@ -1,4 +1,4 @@
-package repositorymodels
+package datastore
 
 type Database struct {
     ID string `json:"id"`
@@ -8,13 +8,31 @@ type Database struct {
     Self string `json:"_self"`
 }
 
-type RepositoryStatus int
+type DataStoreStatus int
 
 const (
     StatusOk       = 1
     StatusNotFound = 2
     Conflict       = 3
     BadRequest     = 4
+    IterEOF        = 5
+    Unknown        = 6
+)
+
+type TriggerOperation string
+
+const (
+    All     TriggerOperation = "All"
+    Create  TriggerOperation = "Create"
+    Delete  TriggerOperation = "Delete"
+    Replace TriggerOperation = "Replace"
+)
+
+type TriggerType string
+
+const (
+    Pre  TriggerType = "Pre"
+    Post TriggerType = "Post"
 )
 
 type Collection struct {
@@ -58,29 +76,29 @@ type UserDefinedFunction struct {
     Body       string `json:"body"`
     ID         string `json:"id"`
     ResourceID string `json:"_rid"`
-    TimeStamp  int    `json:"_ts"`
+    TimeStamp  int64  `json:"_ts"`
     Self       string `json:"_self"`
-    Etag       string `json:"_etag"`
+    ETag       string `json:"_etag"`
 }
 
 type StoredProcedure struct {
     Body       string `json:"body"`
     ID         string `json:"id"`
     ResourceID string `json:"_rid"`
-    TimeStamp  int    `json:"_ts"`
+    TimeStamp  int64  `json:"_ts"`
     Self       string `json:"_self"`
-    Etag       string `json:"_etag"`
+    ETag       string `json:"_etag"`
 }
 
 type Trigger struct {
     Body             string `json:"body"`
     ID               string `json:"id"`
-    TriggerOperation string `json:"triggerOperation"`
-    TriggerType      string `json:"triggerType"`
+    TriggerOperation TriggerOperation `json:"triggerOperation"`
+    TriggerType      TriggerType      `json:"triggerType"`
     ResourceID       string `json:"_rid"`
-    TimeStamp        int    `json:"_ts"`
+    TimeStamp        int64  `json:"_ts"`
     Self             string `json:"_self"`
-    Etag             string `json:"_etag"`
+    ETag             string `json:"_etag"`
 }
 
 type Document map[string]interface{}
@@ -99,14 +117,3 @@ type PartitionKeyRange struct {
     TimeStamp int64 `json:"_ts"`
     Lsn       int   `json:"lsn"`
 }
-
-type State struct {
-    // Map databaseId -> Database
-    Databases map[string]Database `json:"databases"`
-
-    // Map databaseId -> collectionId -> Collection
-    Collections map[string]map[string]Collection `json:"collections"`
-
-    // Map databaseId -> collectionId -> documentId -> Documents
-    Documents map[string]map[string]map[string]Document `json:"documents"`
-}
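With TriggerOperation and TriggerType now typed, trigger definitions can be assembled from the named constants while the struct tags keep the JSON wire shape unchanged. A small illustrative sketch (the id and body are placeholders):

package example

import (
    "encoding/json"
    "fmt"

    "github.com/pikami/cosmium/internal/datastore"
)

func printExampleTrigger() {
    trigger := datastore.Trigger{
        ID:               "example-trigger",
        Body:             "function() { /* placeholder body */ }",
        TriggerOperation: datastore.Create,
        TriggerType:      datastore.Pre,
    }

    // The struct tags keep the serialized form identical to the old string fields.
    data, err := json.MarshalIndent(trigger, "", "  ")
    if err != nil {
        fmt.Println("marshal failed:", err)
        return
    }

    fmt.Println(string(data))
}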
@@ -1,40 +1,140 @@
 package logger

 import (
+  "fmt"
   "log"
   "os"
+  "runtime"
-  "github.com/pikami/cosmium/api/config"
+  "strings"
+  "sync"
 )

-var DebugLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime|log.Lshortfile)
+type LogLevelType int
+
+var (
+  LogLevelDebug  LogLevelType = 0
+  LogLevelInfo   LogLevelType = 1
+  LogLevelError  LogLevelType = 2
+  LogLevelSilent LogLevelType = 10
+)
+
+type LogWriter struct {
+  WriterLevel LogLevelType
+}
+
+var logLevelMutex sync.RWMutex
+var logLevel = LogLevelInfo
+
+var DebugLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
 var InfoLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
-var ErrorLogger = log.New(os.Stderr, "", log.Ldate|log.Ltime|log.Lshortfile)
+var ErrorLogger = log.New(os.Stderr, "", log.Ldate|log.Ltime)

+func DebugLn(v ...any) {
+  if GetLogLevel() <= LogLevelDebug {
+    prefix := getCallerPrefix()
+    DebugLogger.Println(append([]interface{}{prefix}, v...)...)
+  }
+}
+
 func Debug(v ...any) {
-  if config.Config.Debug {
+  if GetLogLevel() <= LogLevelDebug {
-    DebugLogger.Println(v...)
+    prefix := getCallerPrefix()
+    DebugLogger.Println(append([]interface{}{prefix}, v...)...)
   }
 }

 func Debugf(format string, v ...any) {
-  if config.Config.Debug {
+  if GetLogLevel() <= LogLevelDebug {
-    DebugLogger.Printf(format, v...)
+    prefix := getCallerPrefix()
+    DebugLogger.Printf(prefix+format, v...)
+  }
+}
+
+func InfoLn(v ...any) {
+  if GetLogLevel() <= LogLevelInfo {
+    InfoLogger.Println(v...)
   }
 }

 func Info(v ...any) {
-  InfoLogger.Println(v...)
+  if GetLogLevel() <= LogLevelInfo {
+    InfoLogger.Print(v...)
+  }
 }

 func Infof(format string, v ...any) {
-  InfoLogger.Printf(format, v...)
+  if GetLogLevel() <= LogLevelInfo {
+    InfoLogger.Printf(format, v...)
+  }
+}
+
+func ErrorLn(v ...any) {
+  if GetLogLevel() <= LogLevelError {
+    prefix := getCallerPrefix()
+    ErrorLogger.Println(append([]interface{}{prefix}, v...)...)
+  }
 }

 func Error(v ...any) {
-  ErrorLogger.Println(v...)
+  if GetLogLevel() <= LogLevelError {
+    prefix := getCallerPrefix()
+    ErrorLogger.Print(append([]interface{}{prefix}, v...)...)
+  }
 }

 func Errorf(format string, v ...any) {
-  ErrorLogger.Printf(format, v...)
+  if GetLogLevel() <= LogLevelError {
+    prefix := getCallerPrefix()
+    ErrorLogger.Printf(prefix+format, v...)
+  }
+}
+
+func (lw *LogWriter) Write(p []byte) (n int, err error) {
+  switch lw.WriterLevel {
+  case LogLevelDebug:
+    Debug(string(p))
+  case LogLevelInfo:
+    Info(string(p))
+  case LogLevelError:
+    Error(string(p))
+  }
+
+  return len(p), nil
+}
+
+func ErrorWriter() *LogWriter {
+  return &LogWriter{WriterLevel: LogLevelError}
+}
+
+func InfoWriter() *LogWriter {
+  return &LogWriter{WriterLevel: LogLevelInfo}
+}
+
+func DebugWriter() *LogWriter {
+  return &LogWriter{WriterLevel: LogLevelDebug}
+}
+
+func SetLogLevel(level LogLevelType) {
+  logLevelMutex.Lock()
+  defer logLevelMutex.Unlock()
+  logLevel = level
+}
+
+func GetLogLevel() LogLevelType {
+  logLevelMutex.RLock()
+  defer logLevelMutex.RUnlock()
+  return logLevel
+}
+
+func getCallerPrefix() string {
+  _, file, line, ok := runtime.Caller(2)
+  if ok {
+    parts := strings.Split(file, "/")
+    if len(parts) > 0 {
+      file = parts[len(parts)-1]
+    }
+    return fmt.Sprintf("%s:%d - ", file, line)
+  }
+
+  return ""
 }

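A minimal sketch of how the reworked logger is meant to be used, based only on the functions added above. It assumes the package is consumed from inside the repo as github.com/pikami/cosmium/internal/logger; the program and the port number are made up for illustration.

package main

import (
  "log"

  "github.com/pikami/cosmium/internal/logger"
)

func main() {
  // Only messages at or above the configured level are emitted.
  logger.SetLogLevel(logger.LogLevelDebug)

  logger.DebugLn("debug message, printed with a file:line caller prefix")
  logger.Infof("listening on port %d\n", 8081)
  logger.ErrorLn("something went wrong")

  // LogWriter adapts the leveled logger to io.Writer consumers,
  // for example another log.Logger or a framework's default writer.
  thirdParty := log.New(logger.InfoWriter(), "", 0)
  thirdParty.Println("routed through the Info level")
}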
@@ -1,70 +0,0 @@ (file removed)
package repositories

import (
  "fmt"
  "time"

  "github.com/google/uuid"
  repositorymodels "github.com/pikami/cosmium/internal/repository_models"
  "github.com/pikami/cosmium/internal/resourceid"
  structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
  "golang.org/x/exp/maps"
)

func GetAllCollections(databaseId string) ([]repositorymodels.Collection, repositorymodels.RepositoryStatus) {
  if _, ok := storeState.Databases[databaseId]; !ok {
    return make([]repositorymodels.Collection, 0), repositorymodels.StatusNotFound
  }

  return maps.Values(storeState.Collections[databaseId]), repositorymodels.StatusOk
}

func GetCollection(databaseId string, collectionId string) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
  if _, ok := storeState.Databases[databaseId]; !ok {
    return repositorymodels.Collection{}, repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
    return repositorymodels.Collection{}, repositorymodels.StatusNotFound
  }

  return storeState.Collections[databaseId][collectionId], repositorymodels.StatusOk
}

func DeleteCollection(databaseId string, collectionId string) repositorymodels.RepositoryStatus {
  if _, ok := storeState.Databases[databaseId]; !ok {
    return repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
    return repositorymodels.StatusNotFound
  }

  delete(storeState.Collections[databaseId], collectionId)

  return repositorymodels.StatusOk
}

func CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
  var ok bool
  var database repositorymodels.Database
  if database, ok = storeState.Databases[databaseId]; !ok {
    return repositorymodels.Collection{}, repositorymodels.StatusNotFound
  }

  if _, ok = storeState.Collections[databaseId][newCollection.ID]; ok {
    return repositorymodels.Collection{}, repositorymodels.Conflict
  }

  newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)

  newCollection.TimeStamp = time.Now().Unix()
  newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New())
  newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
  newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)

  storeState.Collections[databaseId][newCollection.ID] = newCollection
  storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)

  return newCollection, repositorymodels.StatusOk
}

@@ -1,50 +0,0 @@ (file removed)
package repositories

import (
  "fmt"
  "time"

  "github.com/google/uuid"
  repositorymodels "github.com/pikami/cosmium/internal/repository_models"
  "github.com/pikami/cosmium/internal/resourceid"
  "golang.org/x/exp/maps"
)

func GetAllDatabases() ([]repositorymodels.Database, repositorymodels.RepositoryStatus) {
  return maps.Values(storeState.Databases), repositorymodels.StatusOk
}

func GetDatabase(id string) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
  if database, ok := storeState.Databases[id]; ok {
    return database, repositorymodels.StatusOk
  }

  return repositorymodels.Database{}, repositorymodels.StatusNotFound
}

func DeleteDatabase(id string) repositorymodels.RepositoryStatus {
  if _, ok := storeState.Databases[id]; !ok {
    return repositorymodels.StatusNotFound
  }

  delete(storeState.Databases, id)

  return repositorymodels.StatusOk
}

func CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
  if _, ok := storeState.Databases[newDatabase.ID]; ok {
    return repositorymodels.Database{}, repositorymodels.Conflict
  }

  newDatabase.TimeStamp = time.Now().Unix()
  newDatabase.ResourceID = resourceid.New()
  newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
  newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)

  storeState.Databases[newDatabase.ID] = newDatabase
  storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
  storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)

  return newDatabase, repositorymodels.StatusOk
}

@@ -1,117 +0,0 @@ (file removed)
package repositories

import (
  "fmt"
  "log"
  "time"

  "github.com/google/uuid"
  repositorymodels "github.com/pikami/cosmium/internal/repository_models"
  "github.com/pikami/cosmium/internal/resourceid"
  "github.com/pikami/cosmium/parsers"
  "github.com/pikami/cosmium/parsers/nosql"
  memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
  "golang.org/x/exp/maps"
)

func GetAllDocuments(databaseId string, collectionId string) ([]repositorymodels.Document, repositorymodels.RepositoryStatus) {
  if _, ok := storeState.Databases[databaseId]; !ok {
    return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
    return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
  }

  return maps.Values(storeState.Documents[databaseId][collectionId]), repositorymodels.StatusOk
}

func GetDocument(databaseId string, collectionId string, documentId string) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
  if _, ok := storeState.Databases[databaseId]; !ok {
    return repositorymodels.Document{}, repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
    return repositorymodels.Document{}, repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Documents[databaseId][collectionId][documentId]; !ok {
    return repositorymodels.Document{}, repositorymodels.StatusNotFound
  }

  return storeState.Documents[databaseId][collectionId][documentId], repositorymodels.StatusOk
}

func DeleteDocument(databaseId string, collectionId string, documentId string) repositorymodels.RepositoryStatus {
  if _, ok := storeState.Databases[databaseId]; !ok {
    return repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Collections[databaseId][collectionId]; !ok {
    return repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Documents[databaseId][collectionId][documentId]; !ok {
    return repositorymodels.StatusNotFound
  }

  delete(storeState.Documents[databaseId][collectionId], documentId)

  return repositorymodels.StatusOk
}

func CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
  var ok bool
  var documentId string
  var database repositorymodels.Database
  var collection repositorymodels.Collection
  if documentId, ok = document["id"].(string); !ok || documentId == "" {
    return repositorymodels.Document{}, repositorymodels.BadRequest
  }

  if database, ok = storeState.Databases[databaseId]; !ok {
    return repositorymodels.Document{}, repositorymodels.StatusNotFound
  }

  if collection, ok = storeState.Collections[databaseId][collectionId]; !ok {
    return repositorymodels.Document{}, repositorymodels.StatusNotFound
  }

  if _, ok := storeState.Documents[databaseId][collectionId][documentId]; ok {
    return repositorymodels.Document{}, repositorymodels.Conflict
  }

  document["_ts"] = time.Now().Unix()
  document["_rid"] = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
  document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
  document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])

  storeState.Documents[databaseId][collectionId][documentId] = document

  return document, repositorymodels.StatusOk
}

func ExecuteQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, repositorymodels.RepositoryStatus) {
  parsedQuery, err := nosql.Parse("", []byte(query))
  if err != nil {
    log.Printf("Failed to parse query: %s\nerr: %v", query, err)
    return nil, repositorymodels.BadRequest
  }

  collectionDocuments, status := GetAllDocuments(databaseId, collectionId)
  if status != repositorymodels.StatusOk {
    return nil, status
  }

  covDocs := make([]memoryexecutor.RowType, 0)
  for _, doc := range collectionDocuments {
    covDocs = append(covDocs, map[string]interface{}(doc))
  }

  if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
    typedQuery.Parameters = queryParameters
    return memoryexecutor.Execute(typedQuery, covDocs), repositorymodels.StatusOk
  }

  return nil, repositorymodels.BadRequest
}

@@ -1,146 +0,0 @@ (file removed)
package repositories

import (
  "encoding/json"
  "log"
  "os"
  "reflect"

  "github.com/pikami/cosmium/api/config"
  "github.com/pikami/cosmium/internal/logger"
  repositorymodels "github.com/pikami/cosmium/internal/repository_models"
)

var storedProcedures = []repositorymodels.StoredProcedure{}
var triggers = []repositorymodels.Trigger{}
var userDefinedFunctions = []repositorymodels.UserDefinedFunction{}
var storeState = repositorymodels.State{
  Databases:   make(map[string]repositorymodels.Database),
  Collections: make(map[string]map[string]repositorymodels.Collection),
  Documents:   make(map[string]map[string]map[string]repositorymodels.Document),
}

func InitializeRepository() {
  if config.Config.InitialDataFilePath != "" {
    LoadStateFS(config.Config.InitialDataFilePath)
    return
  }

  if config.Config.PersistDataFilePath != "" {
    stat, err := os.Stat(config.Config.PersistDataFilePath)
    if err != nil {
      return
    }

    if stat.IsDir() {
      logger.Error("Argument '-Persist' must be a path to file, not a directory.")
      os.Exit(1)
    }

    LoadStateFS(config.Config.PersistDataFilePath)
    return
  }
}

func LoadStateFS(filePath string) {
  data, err := os.ReadFile(filePath)
  if err != nil {
    log.Fatalf("Error reading state JSON file: %v", err)
    return
  }

  var state repositorymodels.State
  if err := json.Unmarshal(data, &state); err != nil {
    log.Fatalf("Error unmarshalling state JSON: %v", err)
    return
  }

  logger.Info("Loaded state:")
  logger.Infof("Databases: %d\n", getLength(state.Databases))
  logger.Infof("Collections: %d\n", getLength(state.Collections))
  logger.Infof("Documents: %d\n", getLength(state.Documents))

  storeState = state

  ensureStoreStateNoNullReferences()
}

func SaveStateFS(filePath string) {
  data, err := json.MarshalIndent(storeState, "", "\t")
  if err != nil {
    logger.Errorf("Failed to save state: %v\n", err)
    return
  }

  os.WriteFile(filePath, data, os.ModePerm)

  logger.Info("Saved state:")
  logger.Infof("Databases: %d\n", getLength(storeState.Databases))
  logger.Infof("Collections: %d\n", getLength(storeState.Collections))
  logger.Infof("Documents: %d\n", getLength(storeState.Documents))
}

func GetState() repositorymodels.State {
  return storeState
}

func getLength(v interface{}) int {
  switch v.(type) {
  case repositorymodels.Database,
    repositorymodels.Collection,
    repositorymodels.Document:
    return 1
  }

  rv := reflect.ValueOf(v)
  if rv.Kind() != reflect.Map {
    return -1
  }

  count := 0
  for _, key := range rv.MapKeys() {
    if rv.MapIndex(key).Kind() == reflect.Map {
      count += getLength(rv.MapIndex(key).Interface())
    } else {
      count++
    }
  }

  return count
}

func ensureStoreStateNoNullReferences() {
  if storeState.Databases == nil {
    storeState.Databases = make(map[string]repositorymodels.Database)
  }

  if storeState.Collections == nil {
    storeState.Collections = make(map[string]map[string]repositorymodels.Collection)
  }

  if storeState.Documents == nil {
    storeState.Documents = make(map[string]map[string]map[string]repositorymodels.Document)
  }

  for database := range storeState.Databases {
    if storeState.Collections[database] == nil {
      storeState.Collections[database] = make(map[string]repositorymodels.Collection)
    }

    if storeState.Documents[database] == nil {
      storeState.Documents[database] = make(map[string]map[string]repositorymodels.Document)
    }

    for collection := range storeState.Collections[database] {
      if storeState.Documents[database][collection] == nil {
        storeState.Documents[database][collection] = make(map[string]repositorymodels.Document)
      }

      for document := range storeState.Documents[database][collection] {
        if storeState.Documents[database][collection][document] == nil {
          delete(storeState.Documents[database][collection], document)
        }
      }
    }
  }
}

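The ensureStoreStateNoNullReferences guard in the removed file exists because unmarshalling JSON into nested maps can leave inner maps nil, and writing into a nil map panics. A tiny self-contained sketch of that pitfall (the map types here are simplified stand-ins, not the repository models):

package main

import "encoding/json"

func main() {
  // `{"db1": null}` leaves the inner map nil after unmarshalling.
  var collections map[string]map[string]string
  _ = json.Unmarshal([]byte(`{"db1": null}`), &collections)

  // Without this check, the assignment below would panic on a nil map,
  // which is exactly what ensureStoreStateNoNullReferences protected against.
  if collections["db1"] == nil {
    collections["db1"] = make(map[string]string)
  }
  collections["db1"]["coll1"] = "ok" // safe now
}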
@@ -1,7 +0,0 @@ (file removed)
package repositories

import repositorymodels "github.com/pikami/cosmium/internal/repository_models"

func GetAllStoredProcedures(databaseId string, collectionId string) ([]repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
  return storedProcedures, repositorymodels.StatusOk
}

@@ -1,7 +0,0 @@ (file removed)
package repositories

import repositorymodels "github.com/pikami/cosmium/internal/repository_models"

func GetAllTriggers(databaseId string, collectionId string) ([]repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
  return triggers, repositorymodels.StatusOk
}

@@ -1,7 +0,0 @@ (file removed)
package repositories

import repositorymodels "github.com/pikami/cosmium/internal/repository_models"

func GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
  return userDefinedFunctions, repositorymodels.StatusOk
}

@@ -2,26 +2,77 @@ package resourceid

 import (
   "encoding/base64"
+  "math/rand"
+  "strings"

   "github.com/google/uuid"
 )

-func New() string {
-  id := uuid.New().ID()
-  idBytes := uintToBytes(id)
-
-  return base64.StdEncoding.EncodeToString(idBytes)
+type ResourceType int
+
+const (
+  ResourceTypeDatabase ResourceType = iota
+  ResourceTypeCollection
+  ResourceTypeDocument
+  ResourceTypeStoredProcedure
+  ResourceTypeTrigger
+  ResourceTypeUserDefinedFunction
+  ResourceTypeConflict
+  ResourceTypePartitionKeyRange
+  ResourceTypeSchema
+)
+
+func New(resourceType ResourceType) string {
+  var idBytes []byte
+  switch resourceType {
+  case ResourceTypeDatabase:
+    idBytes = randomBytes(4)
+  case ResourceTypeCollection:
+    idBytes = randomBytes(4)
+    // first byte should be bigger than 0x80 for collection ids
+    // clients classify this id as "user" otherwise
+    if (idBytes[0] & 0x80) <= 0 {
+      idBytes[0] = byte(rand.Intn(0x80) + 0x80)
+    }
+  case ResourceTypeDocument:
+    idBytes = randomBytes(8)
+    idBytes[7] = byte(rand.Intn(0x10)) // Upper 4 bits = 0
+  case ResourceTypeStoredProcedure:
+    idBytes = randomBytes(8)
+    idBytes[7] = byte(rand.Intn(0x10)) | 0x08 // Upper 4 bits = 0x08
+  case ResourceTypeTrigger:
+    idBytes = randomBytes(8)
+    idBytes[7] = byte(rand.Intn(0x10)) | 0x07 // Upper 4 bits = 0x07
+  case ResourceTypeUserDefinedFunction:
+    idBytes = randomBytes(8)
+    idBytes[7] = byte(rand.Intn(0x10)) | 0x06 // Upper 4 bits = 0x06
+  case ResourceTypeConflict:
+    idBytes = randomBytes(8)
+    idBytes[7] = byte(rand.Intn(0x10)) | 0x04 // Upper 4 bits = 0x04
+  case ResourceTypePartitionKeyRange:
+    // we don't do partitions yet, so just use a fixed id
+    idBytes = []byte{0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x50}
+  case ResourceTypeSchema:
+    idBytes = randomBytes(8)
+    idBytes[7] = byte(rand.Intn(0x10)) | 0x09 // Upper 4 bits = 0x09
+  default:
+    idBytes = randomBytes(4)
+  }
+
+  encoded := base64.StdEncoding.EncodeToString(idBytes)
+  return strings.ReplaceAll(encoded, "/", "-")
 }

 func NewCombined(ids ...string) string {
   combinedIdBytes := make([]byte, 0)

   for _, id := range ids {
-    idBytes, _ := base64.StdEncoding.DecodeString(id)
+    idBytes, _ := base64.StdEncoding.DecodeString(strings.ReplaceAll(id, "-", "/"))
     combinedIdBytes = append(combinedIdBytes, idBytes...)
   }

-  return base64.StdEncoding.EncodeToString(combinedIdBytes)
+  encoded := base64.StdEncoding.EncodeToString(combinedIdBytes)
+  return strings.ReplaceAll(encoded, "/", "-")
 }

 func uintToBytes(id uint32) []byte {
@@ -32,3 +83,13 @@ func uintToBytes(id uint32) []byte {

   return buf
 }
+
+func randomBytes(count int) []byte {
+  buf := make([]byte, count)
+  for i := 0; i < count; i += 4 {
+    id := uuid.New().ID()
+    idBytes := uintToBytes(id)
+    copy(buf[i:], idBytes)
+  }
+  return buf
+}

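A small usage sketch of the new resource-id helpers, assuming they are called from inside the repo; how callers actually combine them is only inferred from the removed repository code above.

package main

import (
  "fmt"

  "github.com/pikami/cosmium/internal/resourceid"
)

func main() {
  dbRid := resourceid.New(resourceid.ResourceTypeDatabase)     // 4 random bytes, base64 encoded
  collRid := resourceid.New(resourceid.ResourceTypeCollection) // high bit of the first byte forced on
  docRid := resourceid.NewCombined(dbRid, collRid,
    resourceid.New(resourceid.ResourceTypeDocument)) // concatenation of the decoded parts

  // "/" is swapped for "-" in the encoded ids, so they stay safe inside URL paths.
  fmt.Println(dbRid, collRid, docRid)
}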
@@ -1,21 +1,19 @@
 package structhidrators

-import (
-  repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-)
+import "github.com/pikami/cosmium/internal/datastore"

-var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
-  IndexingPolicy: repositorymodels.CollectionIndexingPolicy{
+var defaultCollection datastore.Collection = datastore.Collection{
+  IndexingPolicy: datastore.CollectionIndexingPolicy{
     IndexingMode: "consistent",
     Automatic:    true,
-    IncludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
+    IncludedPaths: []datastore.CollectionIndexingPolicyPath{
       {Path: "/*"},
     },
-    ExcludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
+    ExcludedPaths: []datastore.CollectionIndexingPolicyPath{
       {Path: "/\"_etag\"/?"},
     },
   },
-  PartitionKey: repositorymodels.CollectionPartitionKey{
+  PartitionKey: datastore.CollectionPartitionKey{
     Paths:   []string{"/_partitionKey"},
     Kind:    "Hash",
     Version: 2,

@@ -3,11 +3,11 @@ package structhidrators
 import (
   "reflect"

-  repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+  "github.com/pikami/cosmium/internal/datastore"
 )

 func Hidrate(input interface{}) interface{} {
-  if reflect.TypeOf(input) == reflect.TypeOf(repositorymodels.Collection{}) {
+  if reflect.TypeOf(input) == reflect.TypeOf(datastore.Collection{}) {
     return hidrate(input, defaultCollection)
   }
   return input

@@ -9,7 +9,7 @@ import (
 func GetDefaultTlsConfig() *tls.Config {
   cert, err := tls.X509KeyPair([]byte(certificate), []byte(certificateKey))
   if err != nil {
-    logger.Error("Failed to parse certificate and key:", err)
+    logger.ErrorLn("Failed to parse certificate and key:", err)
     return &tls.Config{}
   }

main.go (file removed, 33 lines)
@@ -1,33 +0,0 @@
package main

import (
  "os"
  "os/signal"
  "syscall"

  "github.com/pikami/cosmium/api"
  "github.com/pikami/cosmium/api/config"
  "github.com/pikami/cosmium/internal/repositories"
)

func main() {
  config.ParseFlags()

  repositories.InitializeRepository()

  go api.StartAPI()

  waitForExit()
}

func waitForExit() {
  sigs := make(chan os.Signal, 1)
  signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)

  // Block until a exit signal is received
  <-sigs

  if config.Config.PersistDataFilePath != "" {
    repositories.SaveStateFS(config.Config.PersistDataFilePath)
  }
}

@@ -3,7 +3,9 @@ package parsers
 type SelectStmt struct {
   SelectItems []SelectItem
   Table       Table
+  JoinItems   []JoinItem
   Filters     interface{}
+  Exists      bool
   Distinct    bool
   Count       int
   Offset      int
@@ -13,7 +15,14 @@ type SelectStmt struct {
 }

 type Table struct {
   Value string
+  SelectItem SelectItem
+  IsInSelect bool
+}
+
+type JoinItem struct {
+  Table      Table
+  SelectItem SelectItem
 }

 type SelectItemType int
@@ -24,6 +33,9 @@ const (
   SelectItemTypeArray
   SelectItemTypeConstant
   SelectItemTypeFunctionCall
+  SelectItemTypeSubQuery
+  SelectItemTypeExpression
+  SelectItemTypeBinaryExpression
 )

 type SelectItem struct {
@@ -32,6 +44,7 @@ type SelectItem struct {
   SelectItems []SelectItem
   Type        SelectItemType
   Value       interface{}
+  Invert      bool
   IsTopLevel  bool
 }

@@ -53,6 +66,12 @@ type ComparisonExpression struct {
   Operation string
 }

+type BinaryExpression struct {
+  Left      interface{}
+  Right     interface{}
+  Operation string
+}
+
 type ConstantType int

 const (
@@ -114,11 +133,53 @@ const (
   FunctionCallIsPrimitive FunctionCallType = "IsPrimitive"
   FunctionCallIsString    FunctionCallType = "IsString"

   FunctionCallArrayConcat      FunctionCallType = "ArrayConcat"
-  FunctionCallArrayLength      FunctionCallType = "ArrayLength"
-  FunctionCallArraySlice       FunctionCallType = "ArraySlice"
-  FunctionCallSetIntersect     FunctionCallType = "SetIntersect"
-  FunctionCallSetUnion         FunctionCallType = "SetUnion"
+  FunctionCallArrayContains    FunctionCallType = "ArrayContains"
+  FunctionCallArrayContainsAny FunctionCallType = "ArrayContainsAny"
+  FunctionCallArrayContainsAll FunctionCallType = "ArrayContainsAll"
+  FunctionCallArrayLength      FunctionCallType = "ArrayLength"
+  FunctionCallArraySlice       FunctionCallType = "ArraySlice"
+  FunctionCallSetIntersect     FunctionCallType = "SetIntersect"
+  FunctionCallSetUnion         FunctionCallType = "SetUnion"
+
+  FunctionCallIif FunctionCallType = "Iif"
+
+  FunctionCallMathAbs              FunctionCallType = "MathAbs"
+  FunctionCallMathAcos             FunctionCallType = "MathAcos"
+  FunctionCallMathAsin             FunctionCallType = "MathAsin"
+  FunctionCallMathAtan             FunctionCallType = "MathAtan"
+  FunctionCallMathAtn2             FunctionCallType = "MathAtn2"
+  FunctionCallMathCeiling          FunctionCallType = "MathCeiling"
+  FunctionCallMathCos              FunctionCallType = "MathCos"
+  FunctionCallMathCot              FunctionCallType = "MathCot"
+  FunctionCallMathDegrees          FunctionCallType = "MathDegrees"
+  FunctionCallMathExp              FunctionCallType = "MathExp"
+  FunctionCallMathFloor            FunctionCallType = "MathFloor"
+  FunctionCallMathIntAdd           FunctionCallType = "MathIntAdd"
+  FunctionCallMathIntBitAnd        FunctionCallType = "MathIntBitAnd"
+  FunctionCallMathIntBitLeftShift  FunctionCallType = "MathIntBitLeftShift"
+  FunctionCallMathIntBitNot        FunctionCallType = "MathIntBitNot"
+  FunctionCallMathIntBitOr         FunctionCallType = "MathIntBitOr"
+  FunctionCallMathIntBitRightShift FunctionCallType = "MathIntBitRightShift"
+  FunctionCallMathIntBitXor        FunctionCallType = "MathIntBitXor"
+  FunctionCallMathIntDiv           FunctionCallType = "MathIntDiv"
+  FunctionCallMathIntMod           FunctionCallType = "MathIntMod"
+  FunctionCallMathIntMul           FunctionCallType = "MathIntMul"
+  FunctionCallMathIntSub           FunctionCallType = "MathIntSub"
+  FunctionCallMathLog              FunctionCallType = "MathLog"
+  FunctionCallMathLog10            FunctionCallType = "MathLog10"
+  FunctionCallMathNumberBin        FunctionCallType = "MathNumberBin"
+  FunctionCallMathPi               FunctionCallType = "MathPi"
+  FunctionCallMathPower            FunctionCallType = "MathPower"
+  FunctionCallMathRadians          FunctionCallType = "MathRadians"
+  FunctionCallMathRand             FunctionCallType = "MathRand"
+  FunctionCallMathRound            FunctionCallType = "MathRound"
+  FunctionCallMathSign             FunctionCallType = "MathSign"
+  FunctionCallMathSin              FunctionCallType = "MathSin"
+  FunctionCallMathSqrt             FunctionCallType = "MathSqrt"
+  FunctionCallMathSquare           FunctionCallType = "MathSquare"
+  FunctionCallMathTan              FunctionCallType = "MathTan"
+  FunctionCallMathTrunc            FunctionCallType = "MathTrunc"

   FunctionCallAggregateAvg   FunctionCallType = "AggregateAvg"
   FunctionCallAggregateCount FunctionCallType = "AggregateCount"

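To make the new AST shapes concrete: under the types above, a query like `SELECT c.a + c.b FROM c` would roughly produce the tree sketched below. This is hand-built for illustration from the struct definitions and the tests further down, not taken from the parser itself; the Path field of SelectItem is defined earlier in the file and is assumed here from its use in the tests.

package main

import "github.com/pikami/cosmium/parsers"

func exampleStmt() parsers.SelectStmt {
  pathA := parsers.SelectItem{Path: []string{"c", "a"}, Type: parsers.SelectItemTypeField}
  pathB := parsers.SelectItem{Path: []string{"c", "b"}, Type: parsers.SelectItemTypeField}

  return parsers.SelectStmt{
    SelectItems: []parsers.SelectItem{
      {
        // c.a + c.b becomes a binary-expression select item.
        Type: parsers.SelectItemTypeBinaryExpression,
        Value: parsers.BinaryExpression{
          Operation: "+",
          Left:      pathA,
          Right:     pathB,
        },
      },
    },
    // FROM c — the table is now described by a SelectItem rather than a bare name.
    Table: parsers.Table{SelectItem: parsers.SelectItem{Path: []string{"c"}, Type: parsers.SelectItemTypeField}},
  }
}

func main() { _ = exampleStmt() }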
@@ -4,6 +4,7 @@ import (
   "testing"

   "github.com/pikami/cosmium/parsers"
+  testutils "github.com/pikami/cosmium/test_utils"
 )

 func Test_Parse_AggregateFunctions(t *testing.T) {
@@ -27,7 +28,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -51,7 +52,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -75,7 +76,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -99,7 +100,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -123,7 +124,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })

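The updated tests above, and the new test files below, lean on a few test_utils helpers. Their implementation is not part of this diff, but from the call sites and from the literal forms they replace, they presumably look something like this sketch (package name and signatures are assumptions):

package testutils

import "github.com/pikami/cosmium/parsers"

// SelectItem_Path builds a field reference such as c.a1 — equivalent to the
// parsers.SelectItem{Path: []string{"c", "a1"}, Type: parsers.SelectItemTypeField}
// literals spelled out in the older tests.
func SelectItem_Path(path ...string) parsers.SelectItem {
  return parsers.SelectItem{Path: path, Type: parsers.SelectItemTypeField}
}

// SelectItem_Constant_Int wraps an integer literal the same way the old tests
// wrapped parsers.Constant{Type: parsers.ConstantTypeInteger, Value: n}.
func SelectItem_Constant_Int(n int) parsers.SelectItem {
  return parsers.SelectItem{
    Type:  parsers.SelectItemTypeConstant,
    Value: parsers.Constant{Type: parsers.ConstantTypeInteger, Value: n},
  }
}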
parsers/nosql/arithmetics_test.go (new file, 366 lines)
@@ -0,0 +1,366 @@
package nosql_test

import (
  "testing"

  "github.com/pikami/cosmium/parsers"
  testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Parse_Arithmetics(t *testing.T) {
  t.Run("Should parse multiplication before addition", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.a + c.b * c.c FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "+",
              Left:      testutils.SelectItem_Path("c", "a"),
              Right: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "*",
                  Left:      testutils.SelectItem_Path("c", "b"),
                  Right:     testutils.SelectItem_Path("c", "c"),
                },
              },
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should parse division before subtraction", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.x - c.y / c.z FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "-",
              Left:      testutils.SelectItem_Path("c", "x"),
              Right: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "/",
                  Left:      testutils.SelectItem_Path("c", "y"),
                  Right:     testutils.SelectItem_Path("c", "z"),
                },
              },
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle complex mixed operations", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.a + c.b * c.c - c.d / c.e FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "-",
              Left: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "+",
                  Left:      testutils.SelectItem_Path("c", "a"),
                  Right: parsers.SelectItem{
                    Type: parsers.SelectItemTypeBinaryExpression,
                    Value: parsers.BinaryExpression{
                      Operation: "*",
                      Left:      testutils.SelectItem_Path("c", "b"),
                      Right:     testutils.SelectItem_Path("c", "c"),
                    },
                  },
                },
              },
              Right: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "/",
                  Left:      testutils.SelectItem_Path("c", "d"),
                  Right:     testutils.SelectItem_Path("c", "e"),
                },
              },
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should respect parentheses overriding precedence", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT (c.a + c.b) * c.c FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "*",
              Left: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "+",
                  Left:      testutils.SelectItem_Path("c", "a"),
                  Right:     testutils.SelectItem_Path("c", "b"),
                },
              },
              Right: testutils.SelectItem_Path("c", "c"),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle nested parentheses", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT ((c.a + c.b) * c.c) - c.d FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "-",
              Left: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "*",
                  Left: parsers.SelectItem{
                    Type: parsers.SelectItemTypeBinaryExpression,
                    Value: parsers.BinaryExpression{
                      Operation: "+",
                      Left:      testutils.SelectItem_Path("c", "a"),
                      Right:     testutils.SelectItem_Path("c", "b"),
                    },
                  },
                  Right: testutils.SelectItem_Path("c", "c"),
                },
              },
              Right: testutils.SelectItem_Path("c", "d"),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should be left associative for same precedence operators", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.a - c.b - c.c FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "-",
              Left: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "-",
                  Left:      testutils.SelectItem_Path("c", "a"),
                  Right:     testutils.SelectItem_Path("c", "b"),
                },
              },
              Right: testutils.SelectItem_Path("c", "c"),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should be left associative with multiplication and division", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.a * c.b / c.c FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "/",
              Left: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "*",
                  Left:      testutils.SelectItem_Path("c", "a"),
                  Right:     testutils.SelectItem_Path("c", "b"),
                },
              },
              Right: testutils.SelectItem_Path("c", "c"),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle math with constants", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT 10 + 20 * 5 FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "+",
              Left:      testutils.SelectItem_Constant_Int(10),
              Right: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "*",
                  Left:      testutils.SelectItem_Constant_Int(20),
                  Right:     testutils.SelectItem_Constant_Int(5),
                },
              },
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle math with floating point numbers", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.price * 1.08 FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "*",
              Left:      testutils.SelectItem_Path("c", "price"),
              Right:     testutils.SelectItem_Constant_Float(1.08),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle parentheses around single value", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT (c.value) FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          testutils.SelectItem_Path("c", "value"),
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle function calls in math expressions", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT LENGTH(c.name) * 2 + 10 FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "+",
              Left: parsers.SelectItem{
                Type: parsers.SelectItemTypeBinaryExpression,
                Value: parsers.BinaryExpression{
                  Operation: "*",
                  Left: parsers.SelectItem{
                    Type: parsers.SelectItemTypeFunctionCall,
                    Value: parsers.FunctionCall{
                      Type:      parsers.FunctionCallLength,
                      Arguments: []interface{}{testutils.SelectItem_Path("c", "name")},
                    },
                  },
                  Right: testutils.SelectItem_Constant_Int(2),
                },
              },
              Right: testutils.SelectItem_Constant_Int(10),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle multiple select items with math", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.a + c.b, c.x * c.y FROM c`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "+",
              Left:      testutils.SelectItem_Path("c", "a"),
              Right:     testutils.SelectItem_Path("c", "b"),
            },
          },
          {
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "*",
              Left:      testutils.SelectItem_Path("c", "x"),
              Right:     testutils.SelectItem_Path("c", "y"),
            },
          },
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
      },
    )
  })

  t.Run("Should handle math in WHERE clause", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.id FROM c WHERE c.price * 1.08 > 100`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          testutils.SelectItem_Path("c", "id"),
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
        Filters: parsers.ComparisonExpression{
          Operation: ">",
          Left: parsers.SelectItem{
            Type: parsers.SelectItemTypeBinaryExpression,
            Value: parsers.BinaryExpression{
              Operation: "*",
              Left:      testutils.SelectItem_Path("c", "price"),
              Right:     testutils.SelectItem_Constant_Float(1.08),
            },
          },
          Right: testutils.SelectItem_Constant_Int(100),
        },
      },
    )
  })
}

@@ -4,6 +4,7 @@ import (
   "testing"

   "github.com/pikami/cosmium/parsers"
+  testutils "github.com/pikami/cosmium/test_utils"
 )

 func Test_Parse_ArrayFunctions(t *testing.T) {
@@ -31,7 +32,120 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+    },
+  )
+})
+
+t.Run("Should parse function ARRAY_CONTAINS()", func(t *testing.T) {
+  testQueryParse(
+    t,
+    `SELECT ARRAY_CONTAINS(c.a1, "value") FROM c`,
+    parsers.SelectStmt{
+      SelectItems: []parsers.SelectItem{
+        {
+          Type: parsers.SelectItemTypeFunctionCall,
+          Value: parsers.FunctionCall{
+            Type: parsers.FunctionCallArrayContains,
+            Arguments: []interface{}{
+              parsers.SelectItem{
+                Path: []string{"c", "a1"},
+                Type: parsers.SelectItemTypeField,
+              },
+              testutils.SelectItem_Constant_String("value"),
+              nil,
+            },
+          },
+        },
+      },
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+    },
+  )
+})
+
+t.Run("Should parse function ARRAY_CONTAINS() with partial match", func(t *testing.T) {
+  testQueryParse(
+    t,
+    `SELECT ARRAY_CONTAINS(["a", "b"], "value", true) FROM c`,
+    parsers.SelectStmt{
+      SelectItems: []parsers.SelectItem{
+        {
+          Type: parsers.SelectItemTypeFunctionCall,
+          Value: parsers.FunctionCall{
+            Type: parsers.FunctionCallArrayContains,
+            Arguments: []interface{}{
+              parsers.SelectItem{
+                Type: parsers.SelectItemTypeArray,
+                SelectItems: []parsers.SelectItem{
+                  testutils.SelectItem_Constant_String("a"),
+                  testutils.SelectItem_Constant_String("b"),
+                },
+              },
+              testutils.SelectItem_Constant_String("value"),
+              testutils.SelectItem_Constant_Bool(true),
+            },
+          },
+        },
+      },
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+    },
+  )
+})
+
+t.Run("Should parse function ARRAY_CONTAINS_ANY()", func(t *testing.T) {
+  testQueryParse(
+    t,
+    `SELECT ARRAY_CONTAINS_ANY(["a", "b"], "value", true) FROM c`,
+    parsers.SelectStmt{
+      SelectItems: []parsers.SelectItem{
+        {
+          Type: parsers.SelectItemTypeFunctionCall,
+          Value: parsers.FunctionCall{
+            Type: parsers.FunctionCallArrayContainsAny,
+            Arguments: []interface{}{
+              parsers.SelectItem{
+                Type: parsers.SelectItemTypeArray,
+                SelectItems: []parsers.SelectItem{
+                  testutils.SelectItem_Constant_String("a"),
+                  testutils.SelectItem_Constant_String("b"),
+                },
+              },
+              testutils.SelectItem_Constant_String("value"),
+              testutils.SelectItem_Constant_Bool(true),
+            },
+          },
+        },
+      },
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+    },
+  )
+})
+
+t.Run("Should parse function ARRAY_CONTAINS_ALL()", func(t *testing.T) {
+  testQueryParse(
+    t,
+    `SELECT ARRAY_CONTAINS_ALL(["a", "b"], "value", true) FROM c`,
+    parsers.SelectStmt{
+      SelectItems: []parsers.SelectItem{
+        {
+          Type: parsers.SelectItemTypeFunctionCall,
+          Value: parsers.FunctionCall{
+            Type: parsers.FunctionCallArrayContainsAll,
+            Arguments: []interface{}{
+              parsers.SelectItem{
+                Type: parsers.SelectItemTypeArray,
+                SelectItems: []parsers.SelectItem{
+                  testutils.SelectItem_Constant_String("a"),
+                  testutils.SelectItem_Constant_String("b"),
+                },
+              },
+              testutils.SelectItem_Constant_String("value"),
+              testutils.SelectItem_Constant_Bool(true),
+            },
+          },
+        },
+      },
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -55,7 +169,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -75,25 +189,13 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
               Path: []string{"c", "array"},
               Type: parsers.SelectItemTypeField,
             },
-            parsers.SelectItem{
-              Type: parsers.SelectItemTypeConstant,
-              Value: parsers.Constant{
-                Type:  parsers.ConstantTypeInteger,
-                Value: 0,
-              },
-            },
-            parsers.SelectItem{
-              Type: parsers.SelectItemTypeConstant,
-              Value: parsers.Constant{
-                Type:  parsers.ConstantTypeInteger,
-                Value: 2,
-              },
-            },
+            testutils.SelectItem_Constant_Int(0),
+            testutils.SelectItem_Constant_Int(2),
           },
         },
       },
     },
-    Table: parsers.Table{Value: "c"},
+    Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
   },
 )
 })
@@ -121,7 +223,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })
@@ -149,7 +251,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
           },
         },
       },
-      Table: parsers.Table{Value: "c"},
+      Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
     },
   )
 })

parsers/nosql/join_test.go (new file, 58 lines)
@@ -0,0 +1,58 @@
package nosql_test

import (
  "testing"

  "github.com/pikami/cosmium/parsers"
  testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Parse_Join(t *testing.T) {

  t.Run("Should parse simple JOIN", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT c.id, c["pk"] FROM c JOIN cc IN c["tags"]`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {Path: []string{"c", "id"}},
          {Path: []string{"c", "pk"}},
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
        JoinItems: []parsers.JoinItem{
          {
            Table: parsers.Table{
              Value: "cc",
            },
            SelectItem: parsers.SelectItem{
              Path: []string{"c", "tags"},
            },
          },
        },
      },
    )
  })

  t.Run("Should parse JOIN VALUE", func(t *testing.T) {
    testQueryParse(
      t,
      `SELECT VALUE cc FROM c JOIN cc IN c["tags"]`,
      parsers.SelectStmt{
        SelectItems: []parsers.SelectItem{
          {Path: []string{"cc"}, IsTopLevel: true},
        },
        Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
        JoinItems: []parsers.JoinItem{
          {
            Table: parsers.Table{
              Value: "cc",
            },
            SelectItem: parsers.SelectItem{
              Path: []string{"c", "tags"},
            },
          },
        },
      },
    )
  })
}

parsers/nosql/math_functions_test.go (new file, 651 lines)
@@ -0,0 +1,651 @@
package nosql_test

import (
	"testing"

	"github.com/pikami/cosmium/parsers"
	testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_MathFunctions(t *testing.T) {
	t.Run("Should parse function ABS(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT ABS(c.value) FROM c`,
			parsers.FunctionCallMathAbs,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function ACOS(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT ACOS(c.value) FROM c`,
			parsers.FunctionCallMathAcos,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function ASIN(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT ASIN(c.value) FROM c`,
			parsers.FunctionCallMathAsin,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function ATAN(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT ATAN(c.value) FROM c`,
			parsers.FunctionCallMathAtan,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function CEILING(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT CEILING(c.value) FROM c`,
			parsers.FunctionCallMathCeiling,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function COS(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT COS(c.value) FROM c`,
			parsers.FunctionCallMathCos,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function COT(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT COT(c.value) FROM c`,
			parsers.FunctionCallMathCot,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function DEGREES(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT DEGREES(c.value) FROM c`,
			parsers.FunctionCallMathDegrees,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function EXP(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT EXP(c.value) FROM c`,
			parsers.FunctionCallMathExp,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function FLOOR(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT FLOOR(c.value) FROM c`,
			parsers.FunctionCallMathFloor,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntBitNot(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntBitNot(c.value) FROM c`,
			parsers.FunctionCallMathIntBitNot,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function LOG10(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT LOG10(c.value) FROM c`,
			parsers.FunctionCallMathLog10,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function RADIANS(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT RADIANS(c.value) FROM c`,
			parsers.FunctionCallMathRadians,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function ROUND(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT ROUND(c.value) FROM c`,
			parsers.FunctionCallMathRound,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function SIGN(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT SIGN(c.value) FROM c`,
			parsers.FunctionCallMathSign,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function SIN(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT SIN(c.value) FROM c`,
			parsers.FunctionCallMathSin,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function SQRT(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT SQRT(c.value) FROM c`,
			parsers.FunctionCallMathSqrt,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function SQUARE(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT SQUARE(c.value) FROM c`,
			parsers.FunctionCallMathSquare,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function TAN(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT TAN(c.value) FROM c`,
			parsers.FunctionCallMathTan,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function TRUNC(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT TRUNC(c.value) FROM c`,
			parsers.FunctionCallMathTrunc,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function ATN2(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT ATN2(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathAtn2,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntAdd(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntAdd(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntAdd,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntBitAnd(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntBitAnd(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntBitAnd,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntBitLeftShift(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntBitLeftShift(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntBitLeftShift,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntBitOr(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntBitOr(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntBitOr,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntBitRightShift(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntBitRightShift(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntBitRightShift,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntBitXor(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntBitXor(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntBitXor,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntDiv(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntDiv(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntDiv,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntMod(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntMod(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntMod,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntMul(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntMul(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntMul,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function IntSub(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT IntSub(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathIntSub,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function POWER(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT POWER(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathPower,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function LOG(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT LOG(c.value) FROM c`,
			parsers.FunctionCallMathLog,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function LOG(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT LOG(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathLog,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function NumberBin(ex)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT NumberBin(c.value) FROM c`,
			parsers.FunctionCallMathNumberBin,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function NumberBin(ex1, ex2)", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT NumberBin(c.value, c.secondValue) FROM c`,
			parsers.FunctionCallMathNumberBin,
			[]interface{}{
				parsers.SelectItem{
					Path: []string{"c", "value"},
					Type: parsers.SelectItemTypeField,
				},
				parsers.SelectItem{
					Path: []string{"c", "secondValue"},
					Type: parsers.SelectItemTypeField,
				},
			},
			"c",
		)
	})

	t.Run("Should parse function PI()", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT PI() FROM c`,
			parsers.FunctionCallMathPi,
			[]interface{}{},
			"c",
		)
	})

	t.Run("Should parse function RAND()", func(t *testing.T) {
		testMathFunctionParse(
			t,
			`SELECT RAND() FROM c`,
			parsers.FunctionCallMathRand,
			[]interface{}{},
			"c",
		)
	})
}

func testMathFunctionParse(
	t *testing.T,
	query string,
	expectedFunctionType parsers.FunctionCallType,
	expectedArguments []interface{},
	expectedTable string,
) {
	testQueryParse(
		t,
		query,
		parsers.SelectStmt{
			SelectItems: []parsers.SelectItem{
				{
					Type: parsers.SelectItemTypeFunctionCall,
					Value: parsers.FunctionCall{
						Type:      expectedFunctionType,
						Arguments: expectedArguments,
					},
				},
			},
			Table: parsers.Table{SelectItem: testutils.SelectItem_Path(expectedTable)},
		},
	)
}
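Every case in the new math-function file funnels through the testMathFunctionParse helper at the bottom, so each t.Run differs only in the query text, the expected parsers.FunctionCallType, and the argument list. Running `go test ./parsers/nosql -run Test_Execute_MathFunctions` from the repository root should exercise just this group, assuming the standard Go toolchain layout of the repository.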
@@ -7,6 +7,7 @@ import (
 
 	"github.com/pikami/cosmium/parsers"
 	"github.com/pikami/cosmium/parsers/nosql"
+	testutils "github.com/pikami/cosmium/test_utils"
 )
 
 // For Parser Debugging

@@ -48,7 +49,7 @@ func Test_Parse(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			OrderExpressions: []parsers.OrderExpression{
 				{
 					SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},

@@ -72,7 +73,7 @@ func Test_Parse(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			GroupBy: []parsers.SelectItem{
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},

@@ -92,7 +93,7 @@ func Test_Parse(t *testing.T) {
 					Type: parsers.SelectItemTypeField,
 				},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			Filters: parsers.SelectItem{
 				Type: parsers.SelectItemTypeFunctionCall,
 				Value: parsers.FunctionCall{

@@ -102,24 +103,87 @@ func Test_Parse(t *testing.T) {
 							Path: []string{"c", "id"},
 							Type: parsers.SelectItemTypeField,
 						},
-						parsers.SelectItem{
-							Type: parsers.SelectItemTypeConstant,
-							Value: parsers.Constant{
-								Type:  parsers.ConstantTypeString,
-								Value: "123",
-							},
-						},
-						parsers.SelectItem{
-							Type: parsers.SelectItemTypeConstant,
-							Value: parsers.Constant{
-								Type:  parsers.ConstantTypeString,
-								Value: "456",
-							},
-						},
+						testutils.SelectItem_Constant_String("123"),
+						testutils.SelectItem_Constant_String("456"),
 					},
 				},
 			},
 		},
 		)
 	})
 
+	t.Run("Should parse IN function with function call", func(t *testing.T) {
+		testQueryParse(
+			t,
+			`Select c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`,
+			parsers.SelectStmt{
+				SelectItems: []parsers.SelectItem{
+					testutils.SelectItem_Path("c", "id"),
+				},
+				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+				Filters: parsers.SelectItem{
+					Type: parsers.SelectItemTypeFunctionCall,
+					Value: parsers.FunctionCall{
+						Type: parsers.FunctionCallIn,
+						Arguments: []interface{}{
+							parsers.SelectItem{
+								Type: parsers.SelectItemTypeFunctionCall,
+								Value: parsers.FunctionCall{
+									Type: parsers.FunctionCallToString,
+									Arguments: []interface{}{
+										testutils.SelectItem_Path("c", "id"),
+									},
+								},
+							},
+							testutils.SelectItem_Constant_String("123"),
+							testutils.SelectItem_Constant_String("456"),
+						},
+					},
+				},
+			},
+		)
+	})
+
+	t.Run("Should parse IN selector", func(t *testing.T) {
+		testQueryParse(
+			t,
+			`SELECT c.id FROM c IN c.tags`,
+			parsers.SelectStmt{
+				SelectItems: []parsers.SelectItem{
+					{
+						Path: []string{"c", "id"},
+						Type: parsers.SelectItemTypeField,
+					},
+				},
+				Table: parsers.Table{
+					Value:      "c",
+					SelectItem: testutils.SelectItem_Path("c", "tags"),
+					IsInSelect: true,
+				},
+			},
+		)
+	})
+
+	t.Run("Should parse IIF function", func(t *testing.T) {
+		testQueryParse(
+			t,
+			`SELECT IIF(true, c.pk, c.id) FROM c`,
+			parsers.SelectStmt{
+				SelectItems: []parsers.SelectItem{
+					{
+						Type: parsers.SelectItemTypeFunctionCall,
+						Value: parsers.FunctionCall{
+							Type: parsers.FunctionCallIif,
+							Arguments: []interface{}{
+								testutils.SelectItem_Constant_Bool(true),
+								testutils.SelectItem_Path("c", "pk"),
+								testutils.SelectItem_Path("c", "id"),
+							},
+						},
+					},
+				},
+				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			},
+		)
+	})
 }
(File diff suppressed because it is too large.)
@@ -4,112 +4,137 @@ package nosql
 import "github.com/pikami/cosmium/parsers"
 
 func makeSelectStmt(
-	columns, table,
+	columns, fromClause, joinItems,
 	whereClause interface{}, distinctClause interface{},
 	count interface{}, groupByClause interface{}, orderList interface{},
 	offsetClause interface{},
 ) (parsers.SelectStmt, error) {
 	selectStmt := parsers.SelectStmt{
 		SelectItems: columns.([]parsers.SelectItem),
-		Table:       table.(parsers.Table),
 	}
 
+	if fromTable, ok := fromClause.(parsers.Table); ok {
+		selectStmt.Table = fromTable
+	}
+
+	if joinItemsArray, ok := joinItems.([]interface{}); ok && len(joinItemsArray) > 0 {
+		selectStmt.JoinItems = make([]parsers.JoinItem, len(joinItemsArray))
+		for i, joinItem := range joinItemsArray {
+			selectStmt.JoinItems[i] = joinItem.(parsers.JoinItem)
+		}
+	}
+
 	switch v := whereClause.(type) {
 	case parsers.ComparisonExpression, parsers.LogicalExpression, parsers.Constant, parsers.SelectItem:
 		selectStmt.Filters = v
 	}
 
 	if distinctClause != nil {
 		selectStmt.Distinct = true
 	}
 
 	if n, ok := count.(int); ok {
 		selectStmt.Count = n
 	}
 
 	if offsetArr, ok := offsetClause.([]interface{}); ok && len(offsetArr) == 2 {
 		if n, ok := offsetArr[0].(int); ok {
 			selectStmt.Offset = n
 		}
 
 		if n, ok := offsetArr[1].(int); ok {
 			selectStmt.Count = n
 		}
 	}
 
 	if orderExpressions, ok := orderList.([]parsers.OrderExpression); ok {
 		selectStmt.OrderExpressions = orderExpressions
 	}
 
 	if groupByClause != nil {
 		selectStmt.GroupBy = groupByClause.([]parsers.SelectItem)
 	}
 
 	return selectStmt, nil
 }
 
+func makeJoin(table interface{}, column interface{}) (parsers.JoinItem, error) {
+	joinItem := parsers.JoinItem{}
+
+	if selectItem, isSelectItem := column.(parsers.SelectItem); isSelectItem {
+		joinItem.SelectItem = selectItem
+		joinItem.Table.Value = selectItem.Alias
+	}
+
+	if tableTyped, isTable := table.(parsers.Table); isTable {
+		joinItem.Table = tableTyped
+	}
+
+	return joinItem, nil
+}
+
 func makeSelectItem(name interface{}, path interface{}, selectItemType parsers.SelectItemType) (parsers.SelectItem, error) {
 	ps := path.([]interface{})
 
 	paths := make([]string, 1)
 	paths[0] = name.(string)
 	for _, p := range ps {
 		paths = append(paths, p.(string))
 	}
 
 	return parsers.SelectItem{Path: paths, Type: selectItemType}, nil
 }
 
 func makeColumnList(column interface{}, other_columns interface{}) ([]parsers.SelectItem, error) {
 	collsAsArray := other_columns.([]interface{})
 	columnList := make([]parsers.SelectItem, len(collsAsArray)+1)
 	columnList[0] = column.(parsers.SelectItem)
 
 	for i, v := range collsAsArray {
 		if col, ok := v.(parsers.SelectItem); ok {
 			columnList[i+1] = col
 		}
 	}
 
 	return columnList, nil
 }
 
 func makeSelectArray(columns interface{}) (parsers.SelectItem, error) {
 	return parsers.SelectItem{
 		SelectItems: columns.([]parsers.SelectItem),
 		Type:        parsers.SelectItemTypeArray,
 	}, nil
 }
 
 func makeSelectObject(field interface{}, other_fields interface{}) (parsers.SelectItem, error) {
 	fieldsAsArray := other_fields.([]interface{})
 	fieldsList := make([]parsers.SelectItem, len(fieldsAsArray)+1)
 	fieldsList[0] = field.(parsers.SelectItem)
 
 	for i, v := range fieldsAsArray {
 		if col, ok := v.(parsers.SelectItem); ok {
 			fieldsList[i+1] = col
 		}
 	}
 
 	return parsers.SelectItem{
 		SelectItems: fieldsList,
 		Type:        parsers.SelectItemTypeObject,
 	}, nil
 }
 
 func makeOrderByClause(ex1 interface{}, others interface{}) ([]parsers.OrderExpression, error) {
 	othersArray := others.([]interface{})
 	orderList := make([]parsers.OrderExpression, len(othersArray)+1)
 	orderList[0] = ex1.(parsers.OrderExpression)
 
 	for i, v := range othersArray {
 		if col, ok := v.(parsers.OrderExpression); ok {
 			orderList[i+1] = col
 		}
 	}
 
 	return orderList, nil
 }
 
 func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExpression, error) {

@@ -119,8 +144,8 @@ func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExp
 	}
 
 	if orderValue, ok := order.(parsers.OrderDirection); ok {
 		value.Direction = orderValue
 	}
 
 	return value, nil
 }
@@ -144,13 +169,39 @@ func joinStrings(array []interface{}) string {
 
 func combineExpressions(ex1 interface{}, exs interface{}, operation parsers.LogicalExpressionType) (interface{}, error) {
 	if exs == nil || len(exs.([]interface{})) < 1 {
 		return ex1, nil
 	}
 
 	return parsers.LogicalExpression{
 		Expressions: append([]interface{}{ex1}, exs.([]interface{})...),
 		Operation:   operation,
 	}, nil
 }
+
+func makeMathExpression(left interface{}, operations interface{}) (interface{}, error) {
+	if operations == nil || len(operations.([]interface{})) == 0 {
+		return left, nil
+	}
+
+	result := left.(parsers.SelectItem)
+	ops := operations.([]interface{})
+
+	for _, op := range ops {
+		opData := op.([]interface{})
+		operation := opData[0].(string)
+		right := opData[1].(parsers.SelectItem)
+
+		result = parsers.SelectItem{
+			Type: parsers.SelectItemTypeBinaryExpression,
+			Value: parsers.BinaryExpression{
+				Left:      result,
+				Right:     right,
+				Operation: operation,
+			},
+		}
+	}
+
+	return result, nil
+}
 
 }

@@ -161,13 +212,15 @@ Input <- selectStmt:SelectStmt {
 
 SelectStmt <- Select ws
     distinctClause:DistinctClause? ws
-    topClause:TopClause? ws columns:Selection ws
-    From ws table:TableName ws
+    topClause:TopClause? ws
+    columns:Selection ws
+    fromClause:FromClause? ws
+    joinClauses:(ws join:JoinClause { return join, nil })* ws
     whereClause:(ws Where ws condition:Condition { return condition, nil })?
    	groupByClause:(ws GroupBy ws columns:ColumnList { return columns, nil })?
-    orderByClause:OrderByClause?
-    offsetClause:OffsetClause? {
-    return makeSelectStmt(columns, table, whereClause,
+    orderByClause:(ws order:OrderByClause { return order, nil })?
+    offsetClause:(ws offset:OffsetClause { return offset, nil })? {
+    return makeSelectStmt(columns, fromClause, joinClauses, whereClause,
         distinctClause, topClause, groupByClause, orderByClause, offsetClause)
 }
@@ -177,7 +230,60 @@ TopClause <- Top ws count:Integer {
     return count, nil
 }
 
-OffsetClause <- "OFFSET"i ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
+FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItemWithAlias { return column, nil }) {
+    tableTyped := table.(parsers.Table)
+
+    if selectItem != nil {
+        tableTyped.SelectItem = selectItem.(parsers.SelectItem)
+        tableTyped.IsInSelect = true
+    }
+
+    return tableTyped, nil
+} / From ws column:SelectItemWithAlias {
+    tableSelectItem := column.(parsers.SelectItem)
+    table := parsers.Table{
+        Value:      tableSelectItem.Alias,
+        SelectItem: tableSelectItem,
+    }
+    return table, nil
+} / From ws subQuery:SubQuerySelectItem {
+    subQueryTyped := subQuery.(parsers.SelectItem)
+    table := parsers.Table{
+        Value:      subQueryTyped.Alias,
+        SelectItem: subQueryTyped,
+    }
+    return table, nil
+}
+
+SubQuery <- exists:(exists:Exists ws { return exists, nil })? "(" ws selectStmt:SelectStmt ws ")" {
+    if selectStatement, isGoodValue := selectStmt.(parsers.SelectStmt); isGoodValue {
+        selectStatement.Exists = exists != nil
+        return selectStatement, nil
+    }
+
+    return selectStmt, nil
+}
+
+SubQuerySelectItem <- subQuery:SubQuery asClause:(ws alias:AsClause { return alias, nil })? {
+    selectItem := parsers.SelectItem{
+        Type:  parsers.SelectItemTypeSubQuery,
+        Value: subQuery,
+    }
+
+    if tableName, isString := asClause.(string); isString {
+        selectItem.Alias = tableName
+    }
+
+    return selectItem, nil
+}
+
+JoinClause <- Join ws table:TableName ws In ws column:SelectItemWithAlias {
+    return makeJoin(table, column)
+} / Join ws subQuery:SubQuerySelectItem {
+    return makeJoin(nil, subQuery)
+}
+
+OffsetClause <- Offset ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
     return []interface{}{offset.(parsers.Constant).Value, limit.(parsers.Constant).Value}, nil
 }
@@ -185,17 +291,40 @@ Selection <- SelectValueSpec / ColumnList / SelectAsterisk
 
 SelectAsterisk <- "*" {
     selectItem, _ := makeSelectItem("c", make([]interface{}, 0), parsers.SelectItemTypeField)
     selectItem.IsTopLevel = true
    	return makeColumnList(selectItem, make([]interface{}, 0))
 }
 
-ColumnList <- column:SelectItem other_columns:(ws "," ws coll:SelectItem {return coll, nil })* {
+ColumnList <- column:ExpressionOrSelectItem other_columns:(ws "," ws coll:ExpressionOrSelectItem {return coll, nil })* {
     return makeColumnList(column, other_columns)
 }
 
-SelectValueSpec <- "VALUE"i ws column:SelectItem {
+ExpressionOrSelectItem <- expression:OrExpression asClause:AsClause? {
+    switch typedValue := expression.(type) {
+    case parsers.ComparisonExpression, parsers.LogicalExpression:
+        selectItem := parsers.SelectItem{
+            Type:  parsers.SelectItemTypeExpression,
+            Value: typedValue,
+        }
+
+        if aliasValue, ok := asClause.(string); ok {
+            selectItem.Alias = aliasValue
+        }
+
+        return selectItem, nil
+    case parsers.SelectItem:
+        if aliasValue, ok := asClause.(string); ok {
+            typedValue.Alias = aliasValue
+        }
+        return typedValue, nil
+    default:
+        return typedValue, nil
+    }
+} / item:SelectItemWithAlias { return item, nil }
+
+SelectValueSpec <- "VALUE"i ws column:SelectItemWithAlias {
     selectItem := column.(parsers.SelectItem)
     selectItem.IsTopLevel = true
     return makeColumnList(selectItem, make([]interface{}, 0))
 }

@@ -209,19 +338,32 @@ SelectArray <- "[" ws columns:ColumnList ws "]" {
 
 SelectObject <- "{" ws field:SelectObjectField ws other_fields:(ws "," ws coll:SelectObjectField {return coll, nil })* ws "}" {
     return makeSelectObject(field, other_fields)
+} / "{" ws "}" {
+    return parsers.SelectItem{
+        SelectItems: []parsers.SelectItem{},
+        Type:        parsers.SelectItemTypeObject,
+    }, nil
 }
 
 SelectObjectField <- name:(Identifier / "\"" key:Identifier "\"" { return key, nil }) ws ":" ws selectItem:SelectItem {
     item := selectItem.(parsers.SelectItem)
     item.Alias = name.(string)
     return item, nil
 }
 
 SelectProperty <- name:Identifier path:(DotFieldAccess / ArrayFieldAccess)* {
     return makeSelectItem(name, path, parsers.SelectItemTypeField)
 }
 
-SelectItem <- selectItem:(Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) asClause:AsClause? {
+SelectItemWithAlias <- selectItem:SelectItem asClause:AsClause? {
+    item := selectItem.(parsers.SelectItem)
+    if aliasValue, ok := asClause.(string); ok {
+        item.Alias = aliasValue
+    }
+    return item, nil
+}
+
+SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) {
     var itemResult parsers.SelectItem
     switch typedValue := selectItem.(type) {
     case parsers.SelectItem:

@@ -238,22 +380,22 @@ SelectItem <- selectItem:(Literal / FunctionCall / SelectArray / SelectObject /
         }
     }
 
-    if aliasValue, ok := asClause.(string); ok {
-        itemResult.Alias = aliasValue
-    }
-
     return itemResult, nil
 }
 
-AsClause <- ws As ws alias:Identifier { return alias, nil }
+AsClause <- (ws As)? ws !ExcludedKeywords alias:Identifier {
+    return alias, nil
+}
+
+ExcludedKeywords <- Select / Top / As / From / In / Join / Exists / Where / And / Or / Not / GroupBy / OrderBy / Offset
 
 DotFieldAccess <- "." id:Identifier {
     return id, nil
 }
 
-ArrayFieldAccess <- "[\"" id:Identifier "\"]" {
-    return id, nil
-}
+ArrayFieldAccess <- "[\"" id:Identifier "\"]" { return id, nil }
+    / "[" id:Integer "]" { return strconv.Itoa(id.(int)), nil }
+    / "[" id:ParameterConstant "]" { return id.(parsers.Constant).Value.(string), nil }
 
 Identifier <- [a-zA-Z_][a-zA-Z0-9_]* {
     return string(c.text), nil
@@ -271,11 +413,27 @@ AndExpression <- ex1:ComparisonExpression ex2:(ws And ws ex:ComparisonExpression
     return combineExpressions(ex1, ex2, parsers.LogicalExpressionTypeAnd)
 }
 
-ComparisonExpression <- "(" ws ex:OrExpression ws ")" { return ex, nil }
-    / left:SelectItem ws op:ComparisonOperator ws right:SelectItem {
+ComparisonExpression <- left:AddSubExpression ws op:ComparisonOperator ws right:AddSubExpression {
     return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
+} / ex:AddSubExpression { return ex, nil }
+
+AddSubExpression <- left:MulDivExpression operations:(ws op:AddOrSubtractOperation ws right:MulDivExpression { return []interface{}{op, right}, nil })* {
+    return makeMathExpression(left, operations)
+}
+
+MulDivExpression <- left:SelectItemWithParentheses operations:(ws op:MultiplyOrDivideOperation ws right:SelectItemWithParentheses { return []interface{}{op, right}, nil })* {
+    return makeMathExpression(left, operations)
+}
+
+SelectItemWithParentheses <- "(" ws ex:OrExpression ws ")" { return ex, nil }
+    / inv:(Not ws)? ex:SelectItem {
+    if inv != nil {
+        ex1 := ex.(parsers.SelectItem)
+        ex1.Invert = true
+        return ex1, nil
+    }
+    return ex, nil
 } / ex:BooleanLiteral { return ex, nil }
-    / ex:SelectItem { return ex, nil }
 
 OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* {
     return makeOrderByClause(ex1, others)

@@ -287,10 +445,10 @@ OrderExpression <- field:SelectProperty ws order:OrderDirection? {
 
 OrderDirection <- ("ASC"i / "DESC"i) {
     if strings.EqualFold(string(c.text), "DESC") {
         return parsers.OrderDirectionDesc, nil
     }
 
     return parsers.OrderDirectionAsc, nil
 }
 
 Select <- "SELECT"i
@@ -301,20 +459,34 @@ As <- "AS"i
 
 From <- "FROM"i
 
+In <- "IN"i
+
+Join <- "JOIN"i
+
+Exists <- "EXISTS"i
+
 Where <- "WHERE"i
 
 And <- "AND"i
 
-Or <- "OR"i
+Or <- "OR"i wss
+
+Not <- "NOT"i
 
 GroupBy <- "GROUP"i ws "BY"i
 
 OrderBy <- "ORDER"i ws "BY"i
 
-ComparisonOperator <- ("=" / "!=" / "<" / "<=" / ">" / ">=") {
+Offset <- "OFFSET"i
+
+ComparisonOperator <- ("<=" / ">=" / "=" / "!=" / "<" / ">") {
     return string(c.text), nil
 }
 
+AddOrSubtractOperation <- ("+" / "-") { return string(c.text), nil }
+
+MultiplyOrDivideOperation <- ("*" / "/") { return string(c.text), nil }
+
 Literal <- FloatLiteral / IntegerLiteral / StringLiteral / BooleanLiteral / ParameterConstant / NullConstant
 
 ParameterConstant <- "@" Identifier {

@@ -342,8 +514,10 @@ BooleanLiteral <- ("true"i / "false"i) {
 FunctionCall <- StringFunctions
     / TypeCheckingFunctions
     / ArrayFunctions
+    / ConditionalFunctions
     / InFunction
     / AggregateFunctions
+    / MathFunctions
 
 StringFunctions <- StringEqualsExpression
     / ToStringExpression
@@ -380,11 +554,53 @@ AggregateFunctions <- AvgAggregateExpression
     / SumAggregateExpression
 
 ArrayFunctions <- ArrayConcatExpression
+    / ArrayContainsExpression
+    / ArrayContainsAnyExpression
+    / ArrayContainsAllExpression
     / ArrayLengthExpression
     / ArraySliceExpression
     / SetIntersectExpression
     / SetUnionExpression
 
+ConditionalFunctions <- IifExpression
+
+MathFunctions <- MathAbsExpression
+    / MathAcosExpression
+    / MathAsinExpression
+    / MathAtanExpression
+    / MathCeilingExpression
+    / MathCosExpression
+    / MathCotExpression
+    / MathDegreesExpression
+    / MathExpExpression
+    / MathFloorExpression
+    / MathIntBitNotExpression
+    / MathLog10Expression
+    / MathRadiansExpression
+    / MathRoundExpression
+    / MathSignExpression
+    / MathSinExpression
+    / MathSqrtExpression
+    / MathSquareExpression
+    / MathTanExpression
+    / MathTruncExpression
+    / MathAtn2Expression
+    / MathIntAddExpression
+    / MathIntBitAndExpression
+    / MathIntBitLeftShiftExpression
+    / MathIntBitOrExpression
+    / MathIntBitRightShiftExpression
+    / MathIntBitXorExpression
+    / MathIntDivExpression
+    / MathIntModExpression
+    / MathIntMulExpression
+    / MathIntSubExpression
+    / MathPowerExpression
+    / MathLogExpression
+    / MathNumberBinExpression
+    / MathPiExpression
+    / MathRandExpression
+
 UpperExpression <- "UPPER"i ws "(" ex:SelectItem ")" {
     return createFunctionCall(parsers.FunctionCallUpper, []interface{}{ex})
 }
@ -512,6 +728,18 @@ ArrayConcatExpression <- "ARRAY_CONCAT"i ws "(" ws arrays:SelectItem others:(ws
|
|||||||
return createFunctionCall(parsers.FunctionCallArrayConcat, append([]interface{}{arrays}, others.([]interface{})...))
|
return createFunctionCall(parsers.FunctionCallArrayConcat, append([]interface{}{arrays}, others.([]interface{})...))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ArrayContainsExpression <- "ARRAY_CONTAINS"i ws "(" ws array:SelectItem ws "," ws item:SelectItem partialMatch:(ws "," ws ex:SelectItem { return ex, nil })? ws ")" {
|
||||||
|
return createFunctionCall(parsers.FunctionCallArrayContains, []interface{}{array, item, partialMatch})
|
||||||
|
}
|
||||||
|
|
||||||
|
ArrayContainsAnyExpression <- "ARRAY_CONTAINS_ANY"i ws "(" ws array:SelectItem items:(ws "," ws ex:SelectItem { return ex, nil })+ ws ")" {
|
||||||
|
return createFunctionCall(parsers.FunctionCallArrayContainsAny, append([]interface{}{array}, items.([]interface{})...))
|
||||||
|
}
|
||||||
|
|
||||||
|
ArrayContainsAllExpression <- "ARRAY_CONTAINS_ALL"i ws "(" ws array:SelectItem items:(ws "," ws ex:SelectItem { return ex, nil })+ ws ")" {
|
||||||
|
return createFunctionCall(parsers.FunctionCallArrayContainsAll, append([]interface{}{array}, items.([]interface{})...))
|
||||||
|
}
|
||||||
|
|
||||||
ArrayLengthExpression <- "ARRAY_LENGTH"i ws "(" ws array:SelectItem ws ")" {
|
ArrayLengthExpression <- "ARRAY_LENGTH"i ws "(" ws array:SelectItem ws ")" {
|
||||||
return createFunctionCall(parsers.FunctionCallArrayLength, []interface{}{array})
|
return createFunctionCall(parsers.FunctionCallArrayLength, []interface{}{array})
|
||||||
}
|
}
|
||||||
@ -528,7 +756,56 @@ SetUnionExpression <- "SetUnion"i ws "(" ws set1:SelectItem ws "," ws set2:Selec
|
|||||||
return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2})
|
return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2})
|
||||||
}
|
}
|
||||||
|
|
||||||
InFunction <- ex1:SelectProperty ws "IN"i ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
|
IifExpression <- "IIF"i ws "(" ws condition:SelectItem ws "," ws trueValue:SelectItem ws "," ws falseValue:SelectItem ws ")" {
|
||||||
|
return createFunctionCall(parsers.FunctionCallIif, []interface{}{condition, trueValue, falseValue})
|
||||||
|
}
|
||||||
|
|
||||||
|
MathAbsExpression <- "ABS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAbs, []interface{}{ex}) }
|
||||||
|
MathAcosExpression <- "ACOS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAcos, []interface{}{ex}) }
|
||||||
|
MathAsinExpression <- "ASIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAsin, []interface{}{ex}) }

MathAtanExpression <- "ATAN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAtan, []interface{}{ex}) }

MathCeilingExpression <- "CEILING"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathCeiling, []interface{}{ex}) }

MathCosExpression <- "COS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathCos, []interface{}{ex}) }

MathCotExpression <- "COT"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathCot, []interface{}{ex}) }

MathDegreesExpression <- "DEGREES"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathDegrees, []interface{}{ex}) }

MathExpExpression <- "EXP"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathExp, []interface{}{ex}) }

MathFloorExpression <- "FLOOR"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathFloor, []interface{}{ex}) }

MathIntBitNotExpression <- "IntBitNot"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitNot, []interface{}{ex}) }

MathLog10Expression <- "LOG10"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathLog10, []interface{}{ex}) }

MathRadiansExpression <- "RADIANS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathRadians, []interface{}{ex}) }

MathRoundExpression <- "ROUND"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathRound, []interface{}{ex}) }

MathSignExpression <- "SIGN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSign, []interface{}{ex}) }

MathSinExpression <- "SIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSin, []interface{}{ex}) }

MathSqrtExpression <- "SQRT"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSqrt, []interface{}{ex}) }

MathSquareExpression <- "SQUARE"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathSquare, []interface{}{ex}) }

MathTanExpression <- "TAN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathTan, []interface{}{ex}) }

MathTruncExpression <- "TRUNC"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathTrunc, []interface{}{ex}) }

MathAtn2Expression <- "ATN2"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAtn2, []interface{}{set1, set2}) }

MathIntAddExpression <- "IntAdd"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntAdd, []interface{}{set1, set2}) }

MathIntBitAndExpression <- "IntBitAnd"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitAnd, []interface{}{set1, set2}) }

MathIntBitLeftShiftExpression <- "IntBitLeftShift"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitLeftShift, []interface{}{set1, set2}) }

MathIntBitOrExpression <- "IntBitOr"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitOr, []interface{}{set1, set2}) }

MathIntBitRightShiftExpression <- "IntBitRightShift"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitRightShift, []interface{}{set1, set2}) }

MathIntBitXorExpression <- "IntBitXor"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntBitXor, []interface{}{set1, set2}) }

MathIntDivExpression <- "IntDiv"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntDiv, []interface{}{set1, set2}) }

MathIntModExpression <- "IntMod"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntMod, []interface{}{set1, set2}) }

MathIntMulExpression <- "IntMul"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntMul, []interface{}{set1, set2}) }

MathIntSubExpression <- "IntSub"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathIntSub, []interface{}{set1, set2}) }

MathPowerExpression <- "POWER"i ws "(" ws set1:SelectItem ws "," ws set2:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathPower, []interface{}{set1, set2}) }

MathLogExpression <- "LOG"i ws "(" ws ex1:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
    return createFunctionCall(parsers.FunctionCallMathLog, append([]interface{}{ex1}, others.([]interface{})...))
}

MathNumberBinExpression <- "NumberBin"i ws "(" ws ex1:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
    return createFunctionCall(parsers.FunctionCallMathNumberBin, append([]interface{}{ex1}, others.([]interface{})...))
}

MathPiExpression <- "PI"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathPi, []interface{}{}) }

MathRandExpression <- "RAND"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathRand, []interface{}{}) }

InFunction <- ex1:SelectProperty ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
    return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
} / "(" ws ex1:SelectItem ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" ws ")" {
    return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
}
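For orientation only (not part of the diff): the grammar rules above should let the parser accept math function calls and IN lists in query text. The snippet below is an illustrative sketch; the query strings are assumptions based solely on the rule names shown, not project code.

package main

import "fmt"

func main() {
	// Hypothetical queries exercising the rules above (ROUND, FLOOR, SQRT, POWER, ATN2, LOG, NumberBin, PI, RAND, IN).
	queries := []string{
		`SELECT ROUND(c.price), FLOOR(c.price), SQRT(c.area) FROM c`,
		`SELECT POWER(c.base, 2), ATN2(c.y, c.x), LOG(c.value, 10) FROM c`,
		`SELECT NumberBin(c.value, 5), PI(), RAND() FROM c`,
		`SELECT c.id FROM c WHERE c.state IN ("NY", "WA", "CA")`,
	}
	for _, q := range queries {
		fmt.Println(q)
	}
}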
@@ -575,4 +852,6 @@ non_escape_character <- !(escape_character) char:.

ws <- [ \t\n\r]*

+wss <- [ \t\n\r]+

EOF <- !.
@@ -4,6 +4,7 @@ import (
"testing"

"github.com/pikami/cosmium/parsers"
+testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Parse_Select(t *testing.T) {
@@ -17,7 +18,21 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

+t.Run("Should parse SELECT with query parameters as accessor", func(t *testing.T) {
+testQueryParse(
+t,
+`SELECT c.id, c[@param] FROM c`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{
+{Path: []string{"c", "id"}},
+{Path: []string{"c", "@param"}},
+},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -30,7 +45,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Distinct: true,
},
)
@@ -44,7 +59,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 1,
},
)
@@ -58,7 +73,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 5,
Offset: 3,
},
@@ -73,7 +88,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}, IsTopLevel: true},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -86,7 +101,20 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: true},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

+t.Run("Should parse SELECT c", func(t *testing.T) {
+testQueryParse(
+t,
+`SELECT c FROM c`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{
+{Path: []string{"c"}, IsTopLevel: false},
+},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -106,7 +134,27 @@ func Test_Parse_Select(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

+t.Run("Should parse SELECT with alias", func(t *testing.T) {
+testQueryParse(
+t,
+`SELECT
+c.id AS aliasWithAs,
+c.pk aliasWithoutAs
+FROM root c`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{
+{Alias: "aliasWithAs", Path: []string{"c", "id"}},
+{Alias: "aliasWithoutAs", Path: []string{"c", "pk"}},
+},
+Table: parsers.Table{
+Value: "c",
+SelectItem: parsers.SelectItem{Alias: "c", Path: []string{"root"}},
+},
},
)
})
@@ -126,7 +174,93 @@ func Test_Parse_Select(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

+t.Run("Should parse SELECT empty object", func(t *testing.T) {
+testQueryParse(
+t,
+`SELECT {} AS obj FROM c`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{
+{
+Alias: "obj",
+Type: parsers.SelectItemTypeObject,
+SelectItems: []parsers.SelectItem{},
+},
+},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+},
+)
+})

+t.Run("Should parse comparison expressions in SELECT", func(t *testing.T) {
+testQueryParse(
+t,
+`SELECT c["id"] = "123", c["pk"] > 456 FROM c`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{
+{
+Type: parsers.SelectItemTypeExpression,
+Value: parsers.ComparisonExpression{
+Operation: "=",
+Left: testutils.SelectItem_Path("c", "id"),
+Right: testutils.SelectItem_Constant_String("123"),
+},
+},
+{
+Type: parsers.SelectItemTypeExpression,
+Value: parsers.ComparisonExpression{
+Operation: ">",
+Left: testutils.SelectItem_Path("c", "pk"),
+Right: testutils.SelectItem_Constant_Int(456),
+},
+},
+},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+},
+)
+})

+t.Run("Should parse logical expressions in SELECT", func(t *testing.T) {
+testQueryParse(
+t,
+`SELECT c["id"] = "123" OR c["pk"] > 456, c["isCool"] AND c["hasRizz"] AS isRizzler FROM c`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{
+{
+Type: parsers.SelectItemTypeExpression,
+Value: parsers.LogicalExpression{
+Operation: parsers.LogicalExpressionTypeOr,
+Expressions: []interface{}{
+parsers.ComparisonExpression{
+Operation: "=",
+Left: testutils.SelectItem_Path("c", "id"),
+Right: testutils.SelectItem_Constant_String("123"),
+},
+parsers.ComparisonExpression{
+Operation: ">",
+Left: testutils.SelectItem_Path("c", "pk"),
+Right: testutils.SelectItem_Constant_Int(456),
+},
+},
+},
+},
+{
+Type: parsers.SelectItemTypeExpression,
+Alias: "isRizzler",
+Value: parsers.LogicalExpression{
+Operation: parsers.LogicalExpressionTypeAnd,
+Expressions: []interface{}{
+testutils.SelectItem_Path("c", "isCool"),
+testutils.SelectItem_Path("c", "hasRizz"),
+},
+},
+},
+},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
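Recap (illustration only, not project code): the new parser test cases above cover the following SELECT shapes; the queries are copied from the tests and the snippet simply prints them.

package main

import "fmt"

func main() {
	// Queries taken from the new parser tests above.
	newlySupported := []string{
		`SELECT c.id, c[@param] FROM c`,                  // query parameter used as an accessor
		`SELECT c FROM c`,                                // bare document reference
		`SELECT {} AS obj FROM c`,                        // empty object literal with alias
		`SELECT c["id"] = "123", c["pk"] > 456 FROM c`,   // comparison expressions as projections
		`SELECT c["isCool"] AND c["hasRizz"] AS isRizzler FROM c`, // logical expressions as projections
	}
	for _, q := range newlySupported {
		fmt.Println(q)
	}
}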
@@ -4,6 +4,7 @@ import (
"testing"

"github.com/pikami/cosmium/parsers"
+testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_StringFunctions(t *testing.T) {
@@ -23,25 +24,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_Bool(true),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "123",
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeBoolean,
-Value: true,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -61,19 +50,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "123",
-},
-},
nil,
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -93,13 +76,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "123",
-},
-},
parsers.SelectItem{
Path: []string{"c", "pk"},
Type: parsers.SelectItemTypeField,
@@ -108,7 +85,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -128,25 +105,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_Bool(true),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "123",
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeBoolean,
-Value: true,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -166,25 +131,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_Bool(true),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "123",
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeBoolean,
-Value: true,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -204,25 +157,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_Bool(true),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "123",
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeBoolean,
-Value: true,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -242,25 +183,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("2"),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_Int(1),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "2",
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeInteger,
-Value: 1,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -284,7 +213,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -308,7 +237,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -332,7 +261,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -352,18 +281,12 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_Int(5),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeInteger,
-Value: 5,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -387,7 +310,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -411,7 +334,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -431,25 +354,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_String("old"),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_String("new"),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "old",
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeString,
-Value: "new",
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -469,18 +380,12 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_Int(3),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeInteger,
-Value: 3,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -504,7 +409,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -524,18 +429,12 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_Int(3),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeInteger,
-Value: 3,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -559,7 +458,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -579,25 +478,13 @@ func Test_Execute_StringFunctions(t *testing.T) {
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
-parsers.SelectItem{
+testutils.SelectItem_Constant_Int(1),
-Type: parsers.SelectItemTypeConstant,
+testutils.SelectItem_Constant_Int(5),
-Value: parsers.Constant{
-Type: parsers.ConstantTypeInteger,
-Value: 1,
-},
-},
-parsers.SelectItem{
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{
-Type: parsers.ConstantTypeInteger,
-Value: 5,
-},
-},
},
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -621,7 +508,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

parsers/nosql/subquery_test.go (new file, 122 lines)
@@ -0,0 +1,122 @@
package nosql_test

import (
"testing"

"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Parse_SubQuery(t *testing.T) {

t.Run("Should parse FROM subquery", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.id FROM (SELECT VALUE cc["info"] FROM cc) AS c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
Table: parsers.Table{
Value: "c",
SelectItem: parsers.SelectItem{
Alias: "c",
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("cc")},
SelectItems: []parsers.SelectItem{
{Path: []string{"cc", "info"}, IsTopLevel: true},
},
},
},
},
},
)
})

t.Run("Should parse JOIN subquery", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.id, cc.name FROM c JOIN (SELECT tag.name FROM tag IN c.tags) AS cc`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{
Value: "cc",
},
SelectItem: parsers.SelectItem{
Alias: "cc",
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("tag", "name"),
},
Table: parsers.Table{
Value: "tag",
SelectItem: testutils.SelectItem_Path("c", "tags"),
IsInSelect: true,
},
},
},
},
},
},
)
})

t.Run("Should parse JOIN EXISTS subquery", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.id
FROM c
JOIN (
SELECT VALUE EXISTS(SELECT tag.name FROM tag IN c.tags)
) AS hasTags
WHERE hasTags`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{
SelectItem: testutils.SelectItem_Path("c"),
},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{Value: "hasTags"},
SelectItem: parsers.SelectItem{
Alias: "hasTags",
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
IsTopLevel: true,
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("tag", "name"),
},
Table: parsers.Table{
Value: "tag",
SelectItem: testutils.SelectItem_Path("c", "tags"),
IsInSelect: true,
},
Exists: true,
},
},
},
},
},
},
},
Filters: parsers.SelectItem{
Path: []string{"hasTags"},
},
},
)
})
}
@@ -4,6 +4,7 @@ import (
"testing"

"github.com/pikami/cosmium/parsers"
+testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_TypeCheckingFunctions(t *testing.T) {
@@ -27,7 +28,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -63,7 +64,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -99,7 +100,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -135,7 +136,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -171,7 +172,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -207,7 +208,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -243,7 +244,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -279,7 +280,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -315,7 +316,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -351,7 +352,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -4,6 +4,7 @@ import (
"testing"

"github.com/pikami/cosmium/parsers"
+testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Parse_Were(t *testing.T) {
@@ -18,14 +19,11 @@ func Test_Parse_Were(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.ComparisonExpression{
Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Bool(true),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeBoolean, Value: true},
-},
},
},
)
@@ -44,25 +42,19 @@ func Test_Parse_Were(t *testing.T) {
{Path: []string{"c", "_rid"}},
{Path: []string{"c", "_ts"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeOr,
Expressions: []interface{}{
parsers.ComparisonExpression{
Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "id"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_String("12345"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "12345"},
-},
},
parsers.ComparisonExpression{
Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "pk"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Int(123),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeInteger, Value: 123},
-},
},
},
},
@@ -75,22 +67,19 @@ func Test_Parse_Were(t *testing.T) {
t,
`select c.id
FROM c
-WHERE c.isCool=true AND (c.id = "123" OR c.id = "456")`,
+WHERE c.isCool=true AND (c.id = "123" OR c.id <= "456")`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeAnd,
Expressions: []interface{}{
parsers.ComparisonExpression{
Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Bool(true),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeBoolean, Value: true},
-},
},
parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeOr,
@@ -98,18 +87,12 @@ func Test_Parse_Were(t *testing.T) {
parsers.ComparisonExpression{
Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "id"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_String("123"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "123"},
-},
},
parsers.ComparisonExpression{
-Operation: "=",
+Operation: "<=",
Left: parsers.SelectItem{Path: []string{"c", "id"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_String("456"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "456"},
-},
},
},
},
@@ -131,47 +114,32 @@ func Test_Parse_Were(t *testing.T) {
AND c.param=@param_id1`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{
Expressions: []interface{}{
parsers.ComparisonExpression{
Left: parsers.SelectItem{Path: []string{"c", "boolean"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Bool(true),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeBoolean, Value: true},
-},
Operation: "=",
},
parsers.ComparisonExpression{
Left: parsers.SelectItem{Path: []string{"c", "integer"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Int(1),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeInteger, Value: 1},
-},
Operation: "=",
},
parsers.ComparisonExpression{
Left: parsers.SelectItem{Path: []string{"c", "float"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Float(6.9),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeFloat, Value: 6.9},
-},
Operation: "=",
},
parsers.ComparisonExpression{
Left: parsers.SelectItem{Path: []string{"c", "string"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_String("hello"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeString, Value: "hello"},
-},
Operation: "=",
},
parsers.ComparisonExpression{
Left: parsers.SelectItem{Path: []string{"c", "param"}},
-Right: parsers.SelectItem{
+Right: testutils.SelectItem_Constant_Parameter("@param_id1"),
-Type: parsers.SelectItemTypeConstant,
-Value: parsers.Constant{Type: parsers.ConstantTypeParameterConstant, Value: "@param_id1"},
-},
Operation: "=",
},
},
@@ -180,4 +148,21 @@ func Test_Parse_Were(t *testing.T) {
},
)
})

+t.Run("Should correctly parse NOT conditions", func(t *testing.T) {
+testQueryParse(
+t,
+`select c.id
+FROM c
+WHERE NOT c.boolean`,
+parsers.SelectStmt{
+SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+Filters: parsers.SelectItem{
+Path: []string{"c", "boolean"},
+Invert: true,
+},
+},
+)
+})
}
@@ -6,21 +6,19 @@ import (
"github.com/pikami/cosmium/parsers"
)

-func (c memoryExecutorContext) aggregate_Avg(arguments []interface{}, row RowType) interface{} {
+func (r rowContext) aggregate_Avg(arguments []interface{}) interface{} {
selectExpression := arguments[0].(parsers.SelectItem)
sum := 0.0
count := 0

-if array, isArray := row.([]RowType); isArray {
-for _, item := range array {
-value := c.getFieldValue(selectExpression, item)
+for _, item := range r.grouppedRows {
+value := item.resolveSelectItem(selectExpression)
if numericValue, ok := value.(float64); ok {
sum += numericValue
count++
} else if numericValue, ok := value.(int); ok {
sum += float64(numericValue)
count++
}
}
-}

@@ -31,41 +29,37 @@ func (c memoryExecutorContext) aggregate_Avg(arguments []interface{}, row RowTyp
}
}

-func (c memoryExecutorContext) aggregate_Count(arguments []interface{}, row RowType) interface{} {
+func (r rowContext) aggregate_Count(arguments []interface{}) interface{} {
selectExpression := arguments[0].(parsers.SelectItem)
count := 0

-if array, isArray := row.([]RowType); isArray {
-for _, item := range array {
-value := c.getFieldValue(selectExpression, item)
+for _, item := range r.grouppedRows {
+value := item.resolveSelectItem(selectExpression)
if value != nil {
count++
}
}
-}

return count
}

-func (c memoryExecutorContext) aggregate_Max(arguments []interface{}, row RowType) interface{} {
+func (r rowContext) aggregate_Max(arguments []interface{}) interface{} {
selectExpression := arguments[0].(parsers.SelectItem)
max := 0.0
count := 0

-if array, isArray := row.([]RowType); isArray {
-for _, item := range array {
-value := c.getFieldValue(selectExpression, item)
+for _, item := range r.grouppedRows {
+value := item.resolveSelectItem(selectExpression)
if numericValue, ok := value.(float64); ok {
if numericValue > max {
max = numericValue
}
count++
} else if numericValue, ok := value.(int); ok {
if float64(numericValue) > max {
max = float64(numericValue)
}
count++
}
}
-}

@@ -76,25 +70,23 @@ func (c memoryExecutorContext) aggregate_Max(arguments []interface{}, row RowTyp
}
}

-func (c memoryExecutorContext) aggregate_Min(arguments []interface{}, row RowType) interface{} {
+func (r rowContext) aggregate_Min(arguments []interface{}) interface{} {
selectExpression := arguments[0].(parsers.SelectItem)
min := math.MaxFloat64
count := 0

-if array, isArray := row.([]RowType); isArray {
-for _, item := range array {
-value := c.getFieldValue(selectExpression, item)
+for _, item := range r.grouppedRows {
+value := item.resolveSelectItem(selectExpression)
if numericValue, ok := value.(float64); ok {
if numericValue < min {
min = numericValue
}
count++
} else if numericValue, ok := value.(int); ok {
if float64(numericValue) < min {
min = float64(numericValue)
}
count++
}
}
-}

@@ -105,21 +97,19 @@ func (c memoryExecutorContext) aggregate_Min(arguments []interface{}, row RowTyp
}
}

-func (c memoryExecutorContext) aggregate_Sum(arguments []interface{}, row RowType) interface{} {
+func (r rowContext) aggregate_Sum(arguments []interface{}) interface{} {
selectExpression := arguments[0].(parsers.SelectItem)
sum := 0.0
count := 0

-if array, isArray := row.([]RowType); isArray {
-for _, item := range array {
-value := c.getFieldValue(selectExpression, item)
+for _, item := range r.grouppedRows {
+value := item.resolveSelectItem(selectExpression)
if numericValue, ok := value.(float64); ok {
sum += numericValue
count++
} else if numericValue, ok := value.(int); ok {
sum += float64(numericValue)
count++
}
}
-}
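For readers skimming the refactor above: every aggregate now walks the grouped rows of a rowContext and accepts both int and float64 values. Below is a minimal, standalone sketch of that shared pattern over plain values, modelled on aggregate_Avg; it does not use the project's rowContext, grouppedRows or resolveSelectItem, and the final division/guard in the real function sits outside this diff.

package main

import "fmt"

// average sums numeric entries (int or float64), counts them, and divides,
// skipping values of any other type -- the same tolerant accumulation the
// aggregate functions above perform per grouped row.
func average(values []interface{}) interface{} {
	sum := 0.0
	count := 0
	for _, value := range values {
		if f, ok := value.(float64); ok {
			sum += f
			count++
		} else if i, ok := value.(int); ok {
			sum += float64(i)
			count++
		}
	}
	if count == 0 {
		return nil
	}
	return sum / float64(count)
}

func main() {
	fmt.Println(average([]interface{}{1, 2.5, "skipped", nil, 3})) // ~2.1667
}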
@@ -5,6 +5,7 @@ import (

"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
+testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_AggregateFunctions(t *testing.T) {
@@ -38,7 +39,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -67,7 +68,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -99,7 +100,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -132,7 +133,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -165,7 +166,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -198,7 +199,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{

query_executors/memory_executor/arithmetics_test.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package memoryexecutor_test

import (
"testing"

"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_Arithmetics(t *testing.T) {
mockData := []memoryexecutor.RowType{
map[string]interface{}{"id": 1, "a": 420},
map[string]interface{}{"id": 2, "a": 6.9},
map[string]interface{}{"id": 3},
}

t.Run("Should execute simple arithmetics", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
{
Type: parsers.SelectItemTypeBinaryExpression,
Alias: "result",
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Constant_Float(2.0),
Right: testutils.SelectItem_Constant_Int(3),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": 1, "result": 426.0},
map[string]interface{}{"id": 2, "result": 12.9},
map[string]interface{}{"id": 3, "result": nil},
},
)
})

t.Run("Should execute arithmetics in WHERE clause", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
{
Alias: "result",
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Constant_Int(2),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.ComparisonExpression{
Operation: ">",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Constant_Int(2),
},
},
Right: testutils.SelectItem_Constant_Int(500),
},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": 1, "result": 840.0},
},
)
})
}
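Note on the test above: for the row without "a" (id 3) the expected result is nil, so binary expressions evidently propagate a missing operand rather than failing. A standalone sketch of that behaviour is shown below; it is an illustration under the assumption that operands arrive as interface{} values, not the executor's actual implementation.

package main

import "fmt"

// addNumbers returns nil when either operand is missing or non-numeric,
// matching the nil "result" the arithmetics test above expects.
func addNumbers(left, right interface{}) interface{} {
	l, lok := toFloat(left)
	r, rok := toFloat(right)
	if !lok || !rok {
		return nil
	}
	return l + r
}

func toFloat(v interface{}) (float64, bool) {
	switch n := v.(type) {
	case int:
		return float64(n), true
	case float64:
		return n, true
	default:
		return 0, false
	}
}

func main() {
	fmt.Println(addNumbers(420, 2.0*3)) // 426
	fmt.Println(addNumbers(nil, 6.0))   // <nil>
}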
@ -7,17 +7,97 @@ import (
	"github.com/pikami/cosmium/parsers"
)

func (c memoryExecutorContext) array_Concat(arguments []interface{}, row RowType) []interface{} {
func (r rowContext) array_Concat(arguments []interface{}) []interface{} {
	var result []interface{}
	for _, arg := range arguments {
		array := c.parseArray(arg, row)
		array := r.parseArray(arg)
		result = append(result, array...)
	}
	return result
}

func (c memoryExecutorContext) array_Length(arguments []interface{}, row RowType) int {
func (r rowContext) array_Contains(arguments []interface{}) bool {
	array := c.parseArray(arguments[0], row)
	array := r.parseArray(arguments[0])
	if array == nil {
		return false
	}

	exprToSearch := r.resolveSelectItem(arguments[1].(parsers.SelectItem))

	partialSearch := false
	if len(arguments) > 2 {
		boolExpr := r.resolveSelectItem(arguments[2].(parsers.SelectItem))
		if boolValue, ok := boolExpr.(bool); ok {
			partialSearch = boolValue
		} else {
			logger.ErrorLn("array_Contains - got parameters of wrong type")
			return false
		}
	}

	for _, item := range array {
		if partialSearch {
			if r.partialMatch(item, exprToSearch) {
				return true
			}
		} else {
			if reflect.DeepEqual(item, exprToSearch) {
				return true
			}
		}
	}

	return false
}

func (r rowContext) array_Contains_Any(arguments []interface{}) bool {
	array := r.parseArray(arguments[0])
	if array == nil {
		return false
	}

	valueSelectItems := arguments[1:]

	for _, valueSelectItem := range valueSelectItems {
		value := r.resolveSelectItem(valueSelectItem.(parsers.SelectItem))
		for _, item := range array {
			if reflect.DeepEqual(item, value) {
				return true
			}
		}
	}

	return false
}

func (r rowContext) array_Contains_All(arguments []interface{}) bool {
	array := r.parseArray(arguments[0])
	if array == nil {
		return false
	}

	valueSelectItems := arguments[1:]

	for _, valueSelectItem := range valueSelectItems {
		value := r.resolveSelectItem(valueSelectItem.(parsers.SelectItem))

		found := false
		for _, item := range array {
			if reflect.DeepEqual(item, value) {
				found = true
				break
			}
		}
		if !found {
			return false
		}
	}

	return true
}

func (r rowContext) array_Length(arguments []interface{}) int {
	array := r.parseArray(arguments[0])
	if array == nil {
		return 0
	}
@ -25,24 +105,24 @@ func (c memoryExecutorContext) array_Length(arguments []interface{}, row RowType
	return len(array)
}

func (c memoryExecutorContext) array_Slice(arguments []interface{}, row RowType) []interface{} {
func (r rowContext) array_Slice(arguments []interface{}) []interface{} {
	var ok bool
	var start int
	var length int
	array := c.parseArray(arguments[0], row)
	array := r.parseArray(arguments[0])
	startEx := c.getFieldValue(arguments[1].(parsers.SelectItem), row)
	startEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))

	if arguments[2] != nil {
		lengthEx := c.getFieldValue(arguments[2].(parsers.SelectItem), row)
		lengthEx := r.resolveSelectItem(arguments[2].(parsers.SelectItem))

		if length, ok = lengthEx.(int); !ok {
			logger.Error("array_Slice - got length parameters of wrong type")
			logger.ErrorLn("array_Slice - got length parameters of wrong type")
			return []interface{}{}
		}
	}

	if start, ok = startEx.(int); !ok {
		logger.Error("array_Slice - got start parameters of wrong type")
		logger.ErrorLn("array_Slice - got start parameters of wrong type")
		return []interface{}{}
	}

@ -65,9 +145,9 @@ func (c memoryExecutorContext) array_Slice(arguments []interface{}, row RowType)
	return array[start:end]
}

func (c memoryExecutorContext) set_Intersect(arguments []interface{}, row RowType) []interface{} {
func (r rowContext) set_Intersect(arguments []interface{}) []interface{} {
	set1 := c.parseArray(arguments[0], row)
	set1 := r.parseArray(arguments[0])
	set2 := c.parseArray(arguments[1], row)
	set2 := r.parseArray(arguments[1])

	intersection := make(map[interface{}]struct{})
	if set1 == nil || set2 == nil {
@ -88,9 +168,9 @@ func (c memoryExecutorContext) set_Intersect(arguments []interface{}, row RowTyp
	return result
}

func (c memoryExecutorContext) set_Union(arguments []interface{}, row RowType) []interface{} {
func (r rowContext) set_Union(arguments []interface{}) []interface{} {
	set1 := c.parseArray(arguments[0], row)
	set1 := r.parseArray(arguments[0])
	set2 := c.parseArray(arguments[1], row)
	set2 := r.parseArray(arguments[1])

	var result []interface{}
	union := make(map[interface{}]struct{})
@ -111,13 +191,17 @@ func (c memoryExecutorContext) set_Union(arguments []interface{}, row RowType) [
	return result
}

func (c memoryExecutorContext) parseArray(argument interface{}, row RowType) []interface{} {
func (r rowContext) parseArray(argument interface{}) []interface{} {
	exItem := argument.(parsers.SelectItem)
	ex := c.getFieldValue(exItem, row)
	ex := r.resolveSelectItem(exItem)

	arrValue := reflect.ValueOf(ex)
	if arrValue.Kind() == reflect.Invalid {
		return nil
	}

	if arrValue.Kind() != reflect.Slice {
		logger.Error("parseArray got parameters of wrong type")
		logger.ErrorLn("parseArray got parameters of wrong type")
		return nil
	}

@ -129,3 +213,21 @@ func (c memoryExecutorContext) parseArray(argument interface{}, row RowType) []i

	return result
}

func (r rowContext) partialMatch(item interface{}, exprToSearch interface{}) bool {
	itemValue := reflect.ValueOf(item)
	exprValue := reflect.ValueOf(exprToSearch)

	if itemValue.Kind() != reflect.Map || exprValue.Kind() != reflect.Map {
		logger.ErrorLn("partialMatch got parameters of wrong type")
		return false
	}

	for _, key := range exprValue.MapKeys() {
		if !reflect.DeepEqual(itemValue.MapIndex(key).Interface(), exprValue.MapIndex(key).Interface()) {
			return false
		}
	}

	return true
}
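The new partialMatch helper above is what backs the optional boolean third argument of ARRAY_CONTAINS: an array element matches when every key of the search object is deeply equal to the corresponding key of the element. A self-contained sketch of the same idea over plain maps (standalone illustration, not Cosmium's API; it also tolerates missing keys instead of relying on reflection):

	package main

	import (
		"fmt"
		"reflect"
	)

	// partialMatch reports whether every key in expr exists in item with a
	// deeply-equal value, mirroring the map-only helper added in the diff above.
	func partialMatch(item, expr map[string]interface{}) bool {
		for key, want := range expr {
			got, ok := item[key]
			if !ok || !reflect.DeepEqual(got, want) {
				return false
			}
		}
		return true
	}

	func main() {
		item := map[string]interface{}{"category": "shirts", "color": "blue"}
		fmt.Println(partialMatch(item, map[string]interface{}{"category": "shirts"})) // true
		fmt.Println(partialMatch(item, map[string]interface{}{"category": "shorts"})) // false
	}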
@ -5,6 +5,7 @@ import (

	"github.com/pikami/cosmium/parsers"
	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
	testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_ArrayFunctions(t *testing.T) {
@ -41,7 +42,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
					},
				},
			},
			Table: parsers.Table{Value: "c"},
			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
		},
		mockData,
		[]memoryexecutor.RowType{
@ -52,6 +53,300 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
		)
	})

	t.Run("Should execute function ARRAY_CONTAINS()", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				Parameters: map[string]interface{}{
					"@categories":                []interface{}{"coats", "jackets", "sweatshirts"},
					"@objectArray":               []interface{}{map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}}},
					"@fullMatchObject":           map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}},
					"@partialMatchObject":        map[string]interface{}{"category": "shirts"},
					"@missingPartialMatchObject": map[string]interface{}{"category": "shorts", "color": "blue"},
					"@nestedPartialMatchObject":  map[string]interface{}{"nestedObject": map[string]interface{}{"size": "M"}},
				},
				SelectItems: []parsers.SelectItem{
					{
						Alias: "ContainsItem",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@categories"),
								testutils.SelectItem_Constant_String("coats"),
							},
						},
					},
					{
						Alias: "MissingItem",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@categories"),
								testutils.SelectItem_Constant_String("hoodies"),
							},
						},
					},
					{
						Alias: "ContainsFullMatchObject",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@objectArray"),
								testutils.SelectItem_Constant_Parameter("@fullMatchObject"),
							},
						},
					},
					{
						Alias: "MissingFullMatchObject",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@objectArray"),
								testutils.SelectItem_Constant_Parameter("@partialMatchObject"),
							},
						},
					},
					{
						Alias: "ContainsPartialMatchObject",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@objectArray"),
								testutils.SelectItem_Constant_Parameter("@partialMatchObject"),
								testutils.SelectItem_Constant_Bool(true),
							},
						},
					},
					{
						Alias: "MissingPartialMatchObject",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@objectArray"),
								testutils.SelectItem_Constant_Parameter("@missingPartialMatchObject"),
								testutils.SelectItem_Constant_Bool(true),
							},
						},
					},
					{
						Alias: "ContainsNestedPartialMatchObject",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContains,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@objectArray"),
								testutils.SelectItem_Constant_Parameter("@nestedPartialMatchObject"),
								testutils.SelectItem_Constant_Bool(true),
							},
						},
					},
				},
			},
			[]memoryexecutor.RowType{map[string]interface{}{"id": "123"}},
			[]memoryexecutor.RowType{
				map[string]interface{}{
					"ContainsItem":                     true,
					"MissingItem":                      false,
					"ContainsFullMatchObject":          true,
					"MissingFullMatchObject":           false,
					"ContainsPartialMatchObject":       true,
					"MissingPartialMatchObject":        false,
					"ContainsNestedPartialMatchObject": true,
				},
			},
		)
	})

	t.Run("Should execute function ARRAY_CONTAINS_ANY()", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				Parameters: map[string]interface{}{
					"@mixedArray": []interface{}{1, true, "3", []int{1, 2, 3}},
					"@numbers":    []interface{}{1, 2, 3, 4},
					"@emptyArray": []interface{}{},
					"@arr123":     []interface{}{1, 2, 3},
				},
				SelectItems: []parsers.SelectItem{
					{
						Alias: "matchesEntireArray",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAny,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@mixedArray"),
								testutils.SelectItem_Constant_Int(1),
								testutils.SelectItem_Constant_Bool(true),
								testutils.SelectItem_Constant_String("3"),
								testutils.SelectItem_Constant_Parameter("@arr123"),
							},
						},
					},
					{
						Alias: "matchesSomeValues",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAny,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@numbers"),
								testutils.SelectItem_Constant_Int(2),
								testutils.SelectItem_Constant_Int(3),
								testutils.SelectItem_Constant_Int(4),
								testutils.SelectItem_Constant_Int(5),
							},
						},
					},
					{
						Alias: "matchSingleValue",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAny,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@numbers"),
								testutils.SelectItem_Constant_Int(1),
							},
						},
					},
					{
						Alias: "noMatches",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAny,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@numbers"),
								testutils.SelectItem_Constant_Int(5),
								testutils.SelectItem_Constant_Int(6),
								testutils.SelectItem_Constant_Int(7),
								testutils.SelectItem_Constant_Int(8),
							},
						},
					},
					{
						Alias: "emptyArray",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAny,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@emptyArray"),
								testutils.SelectItem_Constant_Int(1),
								testutils.SelectItem_Constant_Int(2),
								testutils.SelectItem_Constant_Int(3),
							},
						},
					},
				},
			},
			[]memoryexecutor.RowType{map[string]interface{}{"id": "123"}},
			[]memoryexecutor.RowType{
				map[string]interface{}{
					"matchesEntireArray": true,
					"matchesSomeValues":  true,
					"matchSingleValue":   true,
					"noMatches":          false,
					"emptyArray":         false,
				},
			},
		)
	})

	t.Run("Should execute function ARRAY_CONTAINS_ALL()", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				Parameters: map[string]interface{}{
					"@mixedArray": []interface{}{1, true, "3", []interface{}{1, 2, 3}},
					"@numbers":    []interface{}{1, 2, 3, 4},
					"@emptyArray": []interface{}{},
					"@arr123":     []interface{}{1, 2, 3},
				},
				SelectItems: []parsers.SelectItem{
					{
						Alias: "matchesEntireArray",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAll,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@mixedArray"),
								testutils.SelectItem_Constant_Int(1),
								testutils.SelectItem_Constant_Bool(true),
								testutils.SelectItem_Constant_String("3"),
								testutils.SelectItem_Constant_Parameter("@arr123"),
							},
						},
					},
					{
						Alias: "matchesSomeValues",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAll,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@numbers"),
								testutils.SelectItem_Constant_Int(2),
								testutils.SelectItem_Constant_Int(3),
								testutils.SelectItem_Constant_Int(4),
								testutils.SelectItem_Constant_Int(5),
							},
						},
					},
					{
						Alias: "matchSingleValue",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAll,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@numbers"),
								testutils.SelectItem_Constant_Int(1),
							},
						},
					},
					{
						Alias: "noMatches",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAll,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@numbers"),
								testutils.SelectItem_Constant_Int(5),
								testutils.SelectItem_Constant_Int(6),
								testutils.SelectItem_Constant_Int(7),
								testutils.SelectItem_Constant_Int(8),
							},
						},
					},
					{
						Alias: "emptyArray",
						Type:  parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallArrayContainsAll,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Parameter("@emptyArray"),
								testutils.SelectItem_Constant_Int(1),
								testutils.SelectItem_Constant_Int(2),
								testutils.SelectItem_Constant_Int(3),
							},
						},
					},
				},
			},
			[]memoryexecutor.RowType{map[string]interface{}{"id": "123"}},
			[]memoryexecutor.RowType{
				map[string]interface{}{
					"matchesEntireArray": true,
					"matchesSomeValues":  false,
					"matchSingleValue":   true,
					"noMatches":          false,
					"emptyArray":         false,
				},
			},
		)
	})

	t.Run("Should execute function ARRAY_LENGTH()", func(t *testing.T) {
		testQueryExecute(
			t,
@ -75,7 +370,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
					},
				},
			},
			Table: parsers.Table{Value: "c"},
			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
		},
		mockData,
		[]memoryexecutor.RowType{
@ -105,25 +400,13 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
						Path: []string{"c", "arr2"},
						Type: parsers.SelectItemTypeField,
					},
					parsers.SelectItem{
						Type: parsers.SelectItemTypeConstant,
						Value: parsers.Constant{
							Type:  parsers.ConstantTypeInteger,
							Value: 1,
						},
					},
					parsers.SelectItem{
						Type: parsers.SelectItemTypeConstant,
						Value: parsers.Constant{
							Type:  parsers.ConstantTypeInteger,
							Value: 2,
						},
					},
					testutils.SelectItem_Constant_Int(1),
					testutils.SelectItem_Constant_Int(2),
				},
			},
		},
	},
			Table: parsers.Table{Value: "c"},
			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
		},
		mockData,
		[]memoryexecutor.RowType{
@ -161,7 +444,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
					},
				},
			},
			Table: parsers.Table{Value: "c"},
			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
		},
		mockData,
		[]memoryexecutor.RowType{
@ -199,7 +482,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
					},
				},
			},
			Table: parsers.Table{Value: "c"},
			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
		},
		mockData,
		[]memoryexecutor.RowType{
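The ANY/ALL tests above differ only in the quantifier: ARRAY_CONTAINS_ANY is true if at least one of the trailing values occurs in the array, ARRAY_CONTAINS_ALL only if every one does, and both come out false for an empty array. A small standalone illustration with the same @numbers data the tests use (containsAny and containsAll are hypothetical helpers mirroring the executor functions shown earlier in this diff):

	package main

	import (
		"fmt"
		"reflect"
	)

	// containsAny: true when any value is deeply equal to some array element.
	func containsAny(arr []interface{}, values ...interface{}) bool {
		for _, v := range values {
			for _, item := range arr {
				if reflect.DeepEqual(item, v) {
					return true
				}
			}
		}
		return false
	}

	// containsAll: true only when every value is found in the array.
	func containsAll(arr []interface{}, values ...interface{}) bool {
		for _, v := range values {
			found := false
			for _, item := range arr {
				if reflect.DeepEqual(item, v) {
					found = true
					break
				}
			}
			if !found {
				return false
			}
		}
		return true
	}

	func main() {
		numbers := []interface{}{1, 2, 3, 4}
		fmt.Println(containsAny(numbers, 2, 3, 4, 5)) // true  - at least one value present
		fmt.Println(containsAll(numbers, 2, 3, 4, 5)) // false - 5 is missing
		fmt.Println(containsAny([]interface{}{}, 1))  // false - empty array never matches
	}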
27	query_executors/memory_executor/array_iterator.go	Normal file
@ -0,0 +1,27 @@
package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

type rowArrayIterator struct {
	documents []rowContext
	index     int
}

func NewRowArrayIterator(documents []rowContext) *rowArrayIterator {
	return &rowArrayIterator{
		documents: documents,
		index:     -1,
	}
}

func (i *rowArrayIterator) Next() (rowContext, datastore.DataStoreStatus) {
	i.index++
	if i.index >= len(i.documents) {
		return rowContext{}, datastore.IterEOF
	}

	row := i.documents[i.index]
	i.documents[i.index] = rowContext{} // Help GC reclaim memory

	return row, datastore.StatusOk
}
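The iterator above is a plain cursor over an in-memory slice; zeroing each consumed slot lets the garbage collector reclaim rows while a long-running query is still draining the iterator. A generic standalone sketch of the same pattern (the status type here is a stand-in for datastore.DataStoreStatus, and sliceIterator is illustrative, not Cosmium's API):

	package main

	import "fmt"

	type status int

	const (
		statusOK status = iota
		statusEOF
	)

	// sliceIterator mirrors rowArrayIterator: Next advances a cursor, reports
	// EOF past the end, and clears consumed slots so they can be collected early.
	type sliceIterator[T any] struct {
		items []T
		index int
	}

	func newSliceIterator[T any](items []T) *sliceIterator[T] {
		return &sliceIterator[T]{items: items, index: -1}
	}

	func (i *sliceIterator[T]) Next() (T, status) {
		i.index++
		if i.index >= len(i.items) {
			var zero T
			return zero, statusEOF
		}
		item := i.items[i.index]
		var zero T
		i.items[i.index] = zero // help GC reclaim memory, as in the original
		return item, statusOK
	}

	func main() {
		it := newSliceIterator([]string{"a", "b"})
		for {
			v, s := it.Next()
			if s == statusEOF {
				break
			}
			fmt.Println(v)
		}
	}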
459	query_executors/memory_executor/common.go	Normal file
@ -0,0 +1,459 @@
package memoryexecutor

import (
	"fmt"
	"reflect"
	"strconv"
	"strings"

	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/internal/logger"
	"github.com/pikami/cosmium/parsers"
)

type RowType interface{}
type rowContext struct {
	tables       map[string]RowType
	parameters   map[string]interface{}
	grouppedRows []rowContext
}

type rowIterator interface {
	Next() (rowContext, datastore.DataStoreStatus)
}

type rowTypeIterator interface {
	Next() (RowType, datastore.DataStoreStatus)
}

func resolveDestinationColumnName(selectItem parsers.SelectItem, itemIndex int, queryParameters map[string]interface{}) string {
	if selectItem.Alias != "" {
		return selectItem.Alias
	}

	destinationName := fmt.Sprintf("$%d", itemIndex+1)
	if len(selectItem.Path) > 0 {
		destinationName = selectItem.Path[len(selectItem.Path)-1]
	}

	if destinationName[0] == '@' {
		destinationName = queryParameters[destinationName].(string)
	}

	return destinationName
}

func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
	if selectItem.Type == parsers.SelectItemTypeArray {
		return r.selectItem_SelectItemTypeArray(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeObject {
		return r.selectItem_SelectItemTypeObject(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeConstant {
		return r.selectItem_SelectItemTypeConstant(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeSubQuery {
		return r.selectItem_SelectItemTypeSubQuery(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeFunctionCall {
		if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
			return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
		}

		logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
		return nil
	}

	if selectItem.Type == parsers.SelectItemTypeExpression {
		if typedExpression, ok := selectItem.Value.(parsers.ComparisonExpression); ok {
			return r.filters_ComparisonExpression(typedExpression)
		}

		if typedExpression, ok := selectItem.Value.(parsers.LogicalExpression); ok {
			return r.filters_LogicalExpression(typedExpression)
		}

		logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.ComparisonExpression)")
		return nil
	}

	if selectItem.Type == parsers.SelectItemTypeBinaryExpression {
		if typedSelectItem, ok := selectItem.Value.(parsers.BinaryExpression); ok {
			return r.selectItem_SelectItemTypeBinaryExpression(typedSelectItem)
		}

		logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.BinaryExpression)")
		return nil
	}

	return r.selectItem_SelectItemTypeField(selectItem)
}

func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
	arrayValue := make([]interface{}, 0)
	for _, subSelectItem := range selectItem.SelectItems {
		arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
	}
	return arrayValue
}

func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
	objectValue := make(map[string]interface{})
	for _, subSelectItem := range selectItem.SelectItems {
		objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
	}
	return objectValue
}

func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
	var typedValue parsers.Constant
	var ok bool
	if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
		// TODO: Handle error
		logger.ErrorLn("parsers.Constant has incorrect Value type")
	}

	if typedValue.Type == parsers.ConstantTypeParameterConstant &&
		r.parameters != nil {
		if key, ok := typedValue.Value.(string); ok {
			return r.parameters[key]
		}
	}

	return typedValue.Value
}

func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
	subQuery := selectItem.Value.(parsers.SelectStmt)
	subQueryResult := executeQuery(
		subQuery,
		NewRowArrayIterator([]rowContext{r}),
	)

	if subQuery.Exists {
		_, status := subQueryResult.Next()
		return status == datastore.StatusOk
	}

	allDocuments := make([]RowType, 0)
	for {
		row, status := subQueryResult.Next()
		if status != datastore.StatusOk {
			break
		}
		allDocuments = append(allDocuments, row)
	}

	return allDocuments
}

func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
	switch functionCall.Type {
	case parsers.FunctionCallStringEquals:
		return r.strings_StringEquals(functionCall.Arguments)
	case parsers.FunctionCallContains:
		return r.strings_Contains(functionCall.Arguments)
	case parsers.FunctionCallEndsWith:
		return r.strings_EndsWith(functionCall.Arguments)
	case parsers.FunctionCallStartsWith:
		return r.strings_StartsWith(functionCall.Arguments)
	case parsers.FunctionCallConcat:
		return r.strings_Concat(functionCall.Arguments)
	case parsers.FunctionCallIndexOf:
		return r.strings_IndexOf(functionCall.Arguments)
	case parsers.FunctionCallToString:
		return r.strings_ToString(functionCall.Arguments)
	case parsers.FunctionCallUpper:
		return r.strings_Upper(functionCall.Arguments)
	case parsers.FunctionCallLower:
		return r.strings_Lower(functionCall.Arguments)
	case parsers.FunctionCallLeft:
		return r.strings_Left(functionCall.Arguments)
	case parsers.FunctionCallLength:
		return r.strings_Length(functionCall.Arguments)
	case parsers.FunctionCallLTrim:
		return r.strings_LTrim(functionCall.Arguments)
	case parsers.FunctionCallReplace:
		return r.strings_Replace(functionCall.Arguments)
	case parsers.FunctionCallReplicate:
		return r.strings_Replicate(functionCall.Arguments)
	case parsers.FunctionCallReverse:
		return r.strings_Reverse(functionCall.Arguments)
	case parsers.FunctionCallRight:
		return r.strings_Right(functionCall.Arguments)
	case parsers.FunctionCallRTrim:
		return r.strings_RTrim(functionCall.Arguments)
	case parsers.FunctionCallSubstring:
		return r.strings_Substring(functionCall.Arguments)
	case parsers.FunctionCallTrim:
		return r.strings_Trim(functionCall.Arguments)

	case parsers.FunctionCallIsDefined:
		return r.typeChecking_IsDefined(functionCall.Arguments)
	case parsers.FunctionCallIsArray:
		return r.typeChecking_IsArray(functionCall.Arguments)
	case parsers.FunctionCallIsBool:
		return r.typeChecking_IsBool(functionCall.Arguments)
	case parsers.FunctionCallIsFiniteNumber:
		return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
	case parsers.FunctionCallIsInteger:
		return r.typeChecking_IsInteger(functionCall.Arguments)
	case parsers.FunctionCallIsNull:
		return r.typeChecking_IsNull(functionCall.Arguments)
	case parsers.FunctionCallIsNumber:
		return r.typeChecking_IsNumber(functionCall.Arguments)
	case parsers.FunctionCallIsObject:
		return r.typeChecking_IsObject(functionCall.Arguments)
	case parsers.FunctionCallIsPrimitive:
		return r.typeChecking_IsPrimitive(functionCall.Arguments)
	case parsers.FunctionCallIsString:
		return r.typeChecking_IsString(functionCall.Arguments)

	case parsers.FunctionCallArrayConcat:
		return r.array_Concat(functionCall.Arguments)
	case parsers.FunctionCallArrayContains:
		return r.array_Contains(functionCall.Arguments)
	case parsers.FunctionCallArrayContainsAny:
		return r.array_Contains_Any(functionCall.Arguments)
	case parsers.FunctionCallArrayContainsAll:
		return r.array_Contains_All(functionCall.Arguments)
	case parsers.FunctionCallArrayLength:
		return r.array_Length(functionCall.Arguments)
	case parsers.FunctionCallArraySlice:
		return r.array_Slice(functionCall.Arguments)
	case parsers.FunctionCallSetIntersect:
		return r.set_Intersect(functionCall.Arguments)
	case parsers.FunctionCallSetUnion:
		return r.set_Union(functionCall.Arguments)

	case parsers.FunctionCallIif:
		return r.misc_Iif(functionCall.Arguments)

	case parsers.FunctionCallMathAbs:
		return r.math_Abs(functionCall.Arguments)
	case parsers.FunctionCallMathAcos:
		return r.math_Acos(functionCall.Arguments)
	case parsers.FunctionCallMathAsin:
		return r.math_Asin(functionCall.Arguments)
	case parsers.FunctionCallMathAtan:
		return r.math_Atan(functionCall.Arguments)
	case parsers.FunctionCallMathCeiling:
		return r.math_Ceiling(functionCall.Arguments)
	case parsers.FunctionCallMathCos:
		return r.math_Cos(functionCall.Arguments)
	case parsers.FunctionCallMathCot:
		return r.math_Cot(functionCall.Arguments)
	case parsers.FunctionCallMathDegrees:
		return r.math_Degrees(functionCall.Arguments)
	case parsers.FunctionCallMathExp:
		return r.math_Exp(functionCall.Arguments)
	case parsers.FunctionCallMathFloor:
		return r.math_Floor(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitNot:
		return r.math_IntBitNot(functionCall.Arguments)
	case parsers.FunctionCallMathLog10:
		return r.math_Log10(functionCall.Arguments)
	case parsers.FunctionCallMathRadians:
		return r.math_Radians(functionCall.Arguments)
	case parsers.FunctionCallMathRound:
		return r.math_Round(functionCall.Arguments)
	case parsers.FunctionCallMathSign:
		return r.math_Sign(functionCall.Arguments)
	case parsers.FunctionCallMathSin:
		return r.math_Sin(functionCall.Arguments)
	case parsers.FunctionCallMathSqrt:
		return r.math_Sqrt(functionCall.Arguments)
	case parsers.FunctionCallMathSquare:
		return r.math_Square(functionCall.Arguments)
	case parsers.FunctionCallMathTan:
		return r.math_Tan(functionCall.Arguments)
	case parsers.FunctionCallMathTrunc:
		return r.math_Trunc(functionCall.Arguments)
	case parsers.FunctionCallMathAtn2:
		return r.math_Atn2(functionCall.Arguments)
	case parsers.FunctionCallMathIntAdd:
		return r.math_IntAdd(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitAnd:
		return r.math_IntBitAnd(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitLeftShift:
		return r.math_IntBitLeftShift(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitOr:
		return r.math_IntBitOr(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitRightShift:
		return r.math_IntBitRightShift(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitXor:
		return r.math_IntBitXor(functionCall.Arguments)
	case parsers.FunctionCallMathIntDiv:
		return r.math_IntDiv(functionCall.Arguments)
	case parsers.FunctionCallMathIntMod:
		return r.math_IntMod(functionCall.Arguments)
	case parsers.FunctionCallMathIntMul:
		return r.math_IntMul(functionCall.Arguments)
	case parsers.FunctionCallMathIntSub:
		return r.math_IntSub(functionCall.Arguments)
	case parsers.FunctionCallMathPower:
		return r.math_Power(functionCall.Arguments)
	case parsers.FunctionCallMathLog:
		return r.math_Log(functionCall.Arguments)
	case parsers.FunctionCallMathNumberBin:
		return r.math_NumberBin(functionCall.Arguments)
	case parsers.FunctionCallMathPi:
		return r.math_Pi()
	case parsers.FunctionCallMathRand:
		return r.math_Rand()

	case parsers.FunctionCallAggregateAvg:
		return r.aggregate_Avg(functionCall.Arguments)
	case parsers.FunctionCallAggregateCount:
		return r.aggregate_Count(functionCall.Arguments)
	case parsers.FunctionCallAggregateMax:
		return r.aggregate_Max(functionCall.Arguments)
	case parsers.FunctionCallAggregateMin:
		return r.aggregate_Min(functionCall.Arguments)
	case parsers.FunctionCallAggregateSum:
		return r.aggregate_Sum(functionCall.Arguments)

	case parsers.FunctionCallIn:
		return r.misc_In(functionCall.Arguments)
	}

	logger.Errorf("Unknown function call type: %v", functionCall.Type)
	return nil
}

func (r rowContext) selectItem_SelectItemTypeBinaryExpression(binaryExpression parsers.BinaryExpression) interface{} {
	if binaryExpression.Left == nil || binaryExpression.Right == nil {
		logger.Debug("parsers.BinaryExpression has nil Left or Right value")
		return nil
	}

	leftValue := r.resolveSelectItem(binaryExpression.Left.(parsers.SelectItem))
	rightValue := r.resolveSelectItem(binaryExpression.Right.(parsers.SelectItem))

	if leftValue == nil || rightValue == nil {
		return nil
	}

	leftNumber, leftIsNumber := numToFloat64(leftValue)
	rightNumber, rightIsNumber := numToFloat64(rightValue)

	if !leftIsNumber || !rightIsNumber {
		logger.Debug("Binary expression operands are not numbers, returning nil")
		return nil
	}

	switch binaryExpression.Operation {
	case "+":
		return leftNumber + rightNumber
	case "-":
		return leftNumber - rightNumber
	case "*":
		return leftNumber * rightNumber
	case "/":
		if rightNumber == 0 {
			logger.Debug("Division by zero in binary expression")
			return nil
		}
		return leftNumber / rightNumber
	default:
		return nil
	}
}

func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
	value := r.tables[selectItem.Path[0]]

	if len(selectItem.Path) > 1 {
		for _, pathSegment := range selectItem.Path[1:] {
			if pathSegment[0] == '@' {
				pathSegment = r.parameters[pathSegment].(string)
			}

			switch nestedValue := value.(type) {
			case map[string]interface{}:
				value = nestedValue[pathSegment]
			case map[string]RowType:
				value = nestedValue[pathSegment]
			case datastore.Document:
				value = nestedValue[pathSegment]
			case map[string]datastore.Document:
				value = nestedValue[pathSegment]
			case []int, []string, []interface{}:
				slice := reflect.ValueOf(nestedValue)
				if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
					value = slice.Index(arrayIndex).Interface()
				} else {
					return nil
				}
			default:
				return nil
			}
		}
	}

	return value
}

func compareValues(val1, val2 interface{}) int {
	// Handle nil values
	if val1 == nil && val2 == nil {
		return 0
	} else if val1 == nil {
		return -1
	} else if val2 == nil {
		return 1
	}

	// Handle number values
	val1Number, val1IsNumber := numToFloat64(val1)
	val2Number, val2IsNumber := numToFloat64(val2)
	if val1IsNumber && val2IsNumber {
		if val1Number < val2Number {
			return -1
		} else if val1Number > val2Number {
			return 1
		}
		return 0
	}

	// Handle different types
	if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
		return 1
	}

	switch val1 := val1.(type) {
	case string:
		val2 := val2.(string)
		return strings.Compare(val1, val2)
	case bool:
		val2 := val2.(bool)
		if val1 == val2 {
			return 0
		} else if val1 {
			return 1
		} else {
			return -1
		}
	// TODO: Add more types
	default:
		if reflect.DeepEqual(val1, val2) {
			return 0
		}
		return 1
	}
}

func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
	targetMap := make(map[string]T)

	for k, v := range originalMap {
		targetMap[k] = v
	}

	return targetMap
}

Some files were not shown because too many files have changed in this diff.
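Returning to common.go listed above: compareValues gives the executor a single three-way ordering it can apply wherever values need comparing, with nil sorting before everything, numbers compared numerically regardless of int/float representation, strings and bools compared naturally, and mismatched or unknown types falling back to "greater" unless deeply equal. A hedged standalone sketch of how such a comparator could drive sorting; cmp and toFloat are simplified stand-ins, not Cosmium's API:

	package main

	import (
		"fmt"
		"sort"
	)

	// cmp is a simplified stand-in for compareValues: nil first, then numbers,
	// then strings; anything else is treated as greater.
	func cmp(a, b interface{}) int {
		if a == nil && b == nil {
			return 0
		} else if a == nil {
			return -1
		} else if b == nil {
			return 1
		}
		if af, aok := toFloat(a); aok {
			if bf, bok := toFloat(b); bok {
				switch {
				case af < bf:
					return -1
				case af > bf:
					return 1
				}
				return 0
			}
		}
		if as, aok := a.(string); aok {
			if bs, bok := b.(string); bok {
				switch {
				case as < bs:
					return -1
				case as > bs:
					return 1
				}
				return 0
			}
		}
		return 1
	}

	func toFloat(v interface{}) (float64, bool) {
		switch n := v.(type) {
		case int:
			return float64(n), true
		case float64:
			return n, true
		}
		return 0, false
	}

	func main() {
		values := []interface{}{"b", 3, nil, 1.5, "a"}
		sort.Slice(values, func(i, j int) bool { return cmp(values[i], values[j]) < 0 })
		fmt.Println(values) // [<nil> 1.5 3 a b]
	}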