Compare commits

No commits in common. "master" and "v0.1.14" have entirely different histories.

118 changed files with 4397 additions and 8691 deletions

@@ -12,20 +12,19 @@ jobs:
uses: actions/checkout@v3
- name: Cross-Compile with xgo
-uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
+uses: crazy-max/ghaction-xgo@v3.1.0
with:
xgo_version: latest
-go_version: 1.24.0
+go_version: 1.22.0
dest: dist
pkg: sharedlibrary
prefix: cosmium
targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
v: true
buildmode: c-shared
-buildvcs: true
- name: Upload artifact
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@v3
with:
name: shared-libraries
path: dist/*

@@ -17,34 +17,18 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
with:
-go-version: 1.24.0
+go-version: 1.22.0
-- name: Cross-Compile with xgo
-uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
-with:
-xgo_version: latest
-go_version: 1.24.0
-dest: sharedlibrary_dist
-pkg: sharedlibrary
-prefix: cosmium
-targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
-v: true
-buildmode: c-shared
-buildvcs: true
- name: Docker Login
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Run GoReleaser
-uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5
+uses: goreleaser/goreleaser-action@v5
with:
distribution: goreleaser
version: ${{ env.GITHUB_REF_NAME }}

.gitignore

@@ -1,5 +1,4 @@
dist/
-sharedlibrary_dist/
ignored/
explorer_www/
main

@@ -26,23 +26,6 @@ brews:
commit_author:
name: pikami
email: git@pikami.org
-skip_upload: auto
-archives:
-- id: bundle
-format: tar.gz
-format_overrides:
-- goos: windows
-format: zip
-- id: shared-libraries
-meta: true
-format: "tar.gz"
-wrap_in_directory: true
-name_template: "{{ .ProjectName }}_{{ .Version }}_shared-libraries"
-files:
-- LICENSE
-- README.md
-- sharedlibrary_dist/**
dockers:
- id: docker-linux-amd64
@@ -50,6 +33,7 @@ dockers:
goarch: amd64
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
+- "ghcr.io/pikami/{{ .ProjectName }}:latest-amd64"
dockerfile: Dockerfile
use: buildx
build_flag_templates:
@@ -67,7 +51,9 @@ dockers:
goarch: arm64
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
+- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
+- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64v8"
dockerfile: Dockerfile
use: buildx
build_flag_templates:
@@ -85,7 +71,7 @@ dockers:
goarch: amd64
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
-dockerfile: Explorer.Dockerfile
+dockerfile: Dockerfile
use: buildx
build_flag_templates:
- "--platform=linux/amd64"
@@ -103,7 +89,7 @@ dockers:
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
-dockerfile: Explorer.Dockerfile
+dockerfile: Dockerfile
use: buildx
build_flag_templates:
- "--platform=linux/arm64"
@@ -118,25 +104,16 @@ dockers:
docker_manifests:
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:latest'
-skip_push: auto
image_templates:
-- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
+- "ghcr.io/pikami/{{ .ProjectName }}:latest-amd64"
-- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
+- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64"
-- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
+- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64v8"
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}'
-skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:explorer'
-skip_push: auto
-image_templates:
-- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
-- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
-- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
-- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer'
-skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"

@@ -9,7 +9,7 @@ SERVER_LOCATION=./cmd/server
SHARED_LIB_LOCATION=./sharedlibrary
SHARED_LIB_OPT=-buildmode=c-shared
XGO_TARGETS=linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
-GOVERSION=1.24.0
+GOVERSION=1.22.0
DIST_DIR=dist
@@ -51,10 +51,6 @@ build-sharedlib-linux-amd64:
@echo "Building shared library for Linux x64..."
@GOOS=linux GOARCH=amd64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so $(SHARED_LIB_LOCATION)
-build-sharedlib-darwin-arm64:
-@echo "Building shared library for macOS ARM..."
-@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64.so $(SHARED_LIB_LOCATION)
build-sharedlib-tests: build-sharedlib-linux-amd64
@echo "Building shared library tests..."
@$(SHARED_LIB_TEST_CC) $(SHARED_LIB_TEST_CFLAGS) -o $(SHARED_LIB_TEST_TARGET) $(SHARED_LIB_TEST_SOURCES)

@@ -86,7 +86,6 @@ To disable SSL and run Cosmium on HTTP instead, you can use the `-DisableTls` fl
- **-Persist**: Saves data to the given path on application exit (When `-InitialData` argument is not supplied, it will try to load data from path supplied in `-Persist`)
- **-Port**: Listen port (default 8081)
- **-LogLevel**: Sets the logging level (one of: debug, info, error, silent) (default info)
-- **-DataStore**: Allows selecting [storage backend](#data-storage-backends) (default "json")
These arguments allow you to configure various aspects of Cosmium's behavior according to your requirements.
@@ -100,18 +99,6 @@ All mentioned arguments can also be set using environment variables:
- **COSMIUM_PORT** for `-Port`
- **COSMIUM_LOGLEVEL** for `-LogLevel`
-### Data Storage Backends
-Cosmium supports multiple storage backends for saving, loading, and managing data at runtime.
-| Backend | Storage Location | Write Behavior | Memory Usage | Supports Initial JSON Load |
-|----------|--------------------------|--------------------------|----------------------|----------------------------|
-| `json` (default) | JSON file on disk 📄 | On application exit ⏳ | 🛑 More than Badger | ✅ Yes |
-| `badger` | BadgerDB database on disk ⚡ | Immediately on write 🚀 | ✅ Less than JSON | ❌ No |
-The `badger` backend is generally recommended as it uses less memory and writes data to disk immediately. However, if you need to load initial data from a JSON file, use the `json` backend.
# License
This project is [MIT licensed](./LICENSE).

@@ -1,24 +0,0 @@
-package apimodels
-const (
-BatchOperationTypeCreate = "Create"
-BatchOperationTypeDelete = "Delete"
-BatchOperationTypeReplace = "Replace"
-BatchOperationTypeUpsert = "Upsert"
-BatchOperationTypeRead = "Read"
-BatchOperationTypePatch = "Patch"
-)
-type BatchOperation struct {
-OperationType string `json:"operationType"`
-Id string `json:"id"`
-ResourceBody map[string]interface{} `json:"resourceBody"`
-}
-type BatchOperationResult struct {
-StatusCode int `json:"statusCode"`
-RequestCharge float64 `json:"requestCharge"`
-ResourceBody map[string]interface{} `json:"resourceBody"`
-Etag string `json:"etag"`
-Message string `json:"message"`
-}
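These batch types exist only on master; `handleBatchRequest` (further down in this diff) binds the request body into a `[]apimodels.BatchOperation` when the `x-ms-cosmos-is-batch-request` header is set. A minimal sketch of what such a payload looks like when built from the struct above — the document id and field values are invented for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the BatchOperation struct shown above.
type BatchOperation struct {
	OperationType string                 `json:"operationType"`
	Id            string                 `json:"id"`
	ResourceBody  map[string]interface{} `json:"resourceBody"`
}

func main() {
	// A create, a read, and a delete against the same (hypothetical) document id.
	ops := []BatchOperation{
		{OperationType: "Create", ResourceBody: map[string]interface{}{"id": "doc1", "value": 42}},
		{OperationType: "Read", Id: "doc1"},
		{OperationType: "Delete", Id: "doc1"},
	}
	body, _ := json.MarshalIndent(ops, "", "  ")
	// This JSON array is the shape the batch endpoint binds into []apimodels.BatchOperation.
	fmt.Println(string(body))
}
```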

@@ -3,28 +3,25 @@ package api
import (
"github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/config"
-"github.com/pikami/cosmium/internal/datastore"
+"github.com/pikami/cosmium/internal/repositories"
)
type ApiServer struct {
stopServer chan interface{}
-onServerShutdown chan interface{}
isActive bool
router *gin.Engine
-config *config.ServerConfig
+config config.ServerConfig
}
-func NewApiServer(dataStore datastore.DataStore, config *config.ServerConfig) *ApiServer {
+func NewApiServer(dataRepository *repositories.DataRepository, config config.ServerConfig) *ApiServer {
stopChan := make(chan interface{})
-onServerShutdownChan := make(chan interface{})
apiServer := &ApiServer{
stopServer: stopChan,
-onServerShutdown: onServerShutdownChan,
config: config,
}
-apiServer.CreateRouter(dataStore)
+apiServer.CreateRouter(dataRepository)
return apiServer
}
@@ -35,5 +32,4 @@ func (s *ApiServer) GetRouter() *gin.Engine {
func (s *ApiServer) Stop() {
s.stopServer <- true
-<-s.onServerShutdown
}

@@ -15,11 +15,6 @@ const (
ExplorerBaseUrlLocation = "/_explorer"
)
-const (
-DataStoreJson = "json"
-DataStoreBadger = "badger"
-)
func ParseFlags() ServerConfig {
host := flag.String("Host", "localhost", "Hostname")
port := flag.Int("Port", 8081, "Listen port")
@@ -33,8 +28,6 @@ func ParseFlags() ServerConfig {
persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
logLevel := NewEnumValue("info", []string{"debug", "info", "error", "silent"})
flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))
-dataStore := NewEnumValue("json", []string{DataStoreJson, DataStoreBadger})
-flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %s", dataStore.AllowedValuesList()))
flag.Parse()
setFlagsFromEnvironment()
@@ -51,7 +44,6 @@ func ParseFlags() ServerConfig {
config.DisableTls = *disableTls
config.AccountKey = *accountKey
config.LogLevel = logLevel.value
-config.DataStore = dataStore.value
config.PopulateCalculatedFields()
@@ -66,38 +58,15 @@ func (c *ServerConfig) PopulateCalculatedFields() {
switch c.LogLevel {
case "debug":
-logger.SetLogLevel(logger.LogLevelDebug)
+logger.LogLevel = logger.LogLevelDebug
case "info":
-logger.SetLogLevel(logger.LogLevelInfo)
+logger.LogLevel = logger.LogLevelInfo
case "error":
-logger.SetLogLevel(logger.LogLevelError)
+logger.LogLevel = logger.LogLevelError
case "silent":
-logger.SetLogLevel(logger.LogLevelSilent)
+logger.LogLevel = logger.LogLevelSilent
default:
-logger.SetLogLevel(logger.LogLevelInfo)
+logger.LogLevel = logger.LogLevelInfo
-}
-fileInfo, err := os.Stat(c.PersistDataFilePath)
-if c.PersistDataFilePath != "" && !os.IsNotExist(err) {
-if err != nil {
-logger.ErrorLn("Failed to get file info for persist path:", err)
-os.Exit(1)
-}
-if c.DataStore == DataStoreJson && fileInfo.IsDir() {
-logger.ErrorLn("--Persist cannot be a directory when using json data store")
-os.Exit(1)
-}
-if c.DataStore == DataStoreBadger && !fileInfo.IsDir() {
-logger.ErrorLn("--Persist must be a directory when using Badger data store")
-os.Exit(1)
-}
-}
-if c.DataStore == DataStoreBadger && c.InitialDataFilePath != "" {
-logger.ErrorLn("InitialData option is currently not supported with Badger data store")
-os.Exit(1)
}
}

@@ -17,6 +17,4 @@ type ServerConfig struct {
DisableTls bool `json:"disableTls"`
LogLevel string `json:"logLevel"`
ExplorerBaseUrlLocation string `json:"explorerBaseUrlLocation"`
-DataStore string `json:"dataStore"`
}

@@ -5,16 +5,15 @@ import (
"net/http"
"github.com/gin-gonic/gin"
-"github.com/pikami/cosmium/internal/constants"
+repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllCollections(c *gin.Context) {
databaseId := c.Param("databaseId")
-collections, status := h.dataStore.GetAllCollections(databaseId)
+collections, status := h.repository.GetAllCollections(databaseId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
-database, _ := h.dataStore.GetDatabase(databaseId)
+database, _ := h.repository.GetDatabase(databaseId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections)))
c.IndentedJSON(http.StatusOK, gin.H{
@@ -25,48 +24,48 @@ func (h *Handlers) GetAllCollections(c *gin.Context) {
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) GetCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
id := c.Param("collId")
-collection, status := h.dataStore.GetCollection(databaseId, id)
+collection, status := h.repository.GetCollection(databaseId, id)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, collection)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) DeleteCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
id := c.Param("collId")
-status := h.dataStore.DeleteCollection(databaseId, id)
+status := h.repository.DeleteCollection(databaseId, id)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Status(http.StatusNoContent)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) CreateCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
-var newCollection datastore.Collection
+var newCollection repositorymodels.Collection
if err := c.BindJSON(&newCollection); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -74,20 +73,20 @@ func (h *Handlers) CreateCollection(c *gin.Context) {
}
if newCollection.ID == "" {
-c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
+c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
return
}
-createdCollection, status := h.dataStore.CreateCollection(databaseId, newCollection)
+createdCollection, status := h.repository.CreateCollection(databaseId, newCollection)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdCollection)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}

@@ -7,11 +7,11 @@ import (
)
func (h *Handlers) CosmiumExport(c *gin.Context) {
-dataStoreState, err := h.dataStore.DumpToJson()
+repositoryState, err := h.repository.GetState()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
-c.Data(http.StatusOK, "application/json", []byte(dataStoreState))
+c.Data(http.StatusOK, "application/json", []byte(repositoryState))
}

@@ -5,13 +5,12 @@ import (
"net/http"
"github.com/gin-gonic/gin"
-"github.com/pikami/cosmium/internal/constants"
+repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllDatabases(c *gin.Context) {
-databases, status := h.dataStore.GetAllDatabases()
+databases, status := h.repository.GetAllDatabases()
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases)))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": "",
@@ -21,45 +20,45 @@ func (h *Handlers) GetAllDatabases(c *gin.Context) {
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) GetDatabase(c *gin.Context) {
id := c.Param("databaseId")
-database, status := h.dataStore.GetDatabase(id)
+database, status := h.repository.GetDatabase(id)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, database)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) DeleteDatabase(c *gin.Context) {
id := c.Param("databaseId")
-status := h.dataStore.DeleteDatabase(id)
+status := h.repository.DeleteDatabase(id)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Status(http.StatusNoContent)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) CreateDatabase(c *gin.Context) {
-var newDatabase datastore.Database
+var newDatabase repositorymodels.Database
if err := c.BindJSON(&newDatabase); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -67,20 +66,20 @@ func (h *Handlers) CreateDatabase(c *gin.Context) {
}
if newDatabase.ID == "" {
-c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
+c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
return
}
-createdDatabase, status := h.dataStore.CreateDatabase(newDatabase)
+createdDatabase, status := h.repository.CreateDatabase(newDatabase)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDatabase)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}

@@ -8,23 +8,18 @@ import (
jsonpatch "github.com/cosmiumdev/json-patch/v5"
"github.com/gin-gonic/gin"
-apimodels "github.com/pikami/cosmium/api/api_models"
"github.com/pikami/cosmium/internal/constants"
-"github.com/pikami/cosmium/internal/converters"
-"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
-"github.com/pikami/cosmium/parsers"
+repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-"github.com/pikami/cosmium/parsers/nosql"
-memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)
func (h *Handlers) GetAllDocuments(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
-documents, status := h.dataStore.GetAllDocuments(databaseId, collectionId)
+documents, status := h.repository.GetAllDocuments(databaseId, collectionId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
-collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
+collection, _ := h.repository.GetCollection(databaseId, collectionId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(documents)))
c.IndentedJSON(http.StatusOK, gin.H{
@@ -35,7 +30,7 @@ func (h *Handlers) GetAllDocuments(c *gin.Context) {
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) GetDocument(c *gin.Context) {
@@ -43,18 +38,18 @@ func (h *Handlers) GetDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")
-document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
+document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, document)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) DeleteDocument(c *gin.Context) {
@@ -62,21 +57,21 @@ func (h *Handlers) DeleteDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")
-status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Status(http.StatusNoContent)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
-// TODO: Maybe move "replace" logic to data store
+// TODO: Maybe move "replace" logic to repository
func (h *Handlers) ReplaceDocument(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
@@ -88,24 +83,24 @@ func (h *Handlers) ReplaceDocument(c *gin.Context) {
return
}
-status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
+createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) PatchDocument(c *gin.Context) {
@@ -113,9 +108,9 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")
-document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
+document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -164,37 +159,30 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
return
}
-status = h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+status = h.repository.DeleteDocument(databaseId, collectionId, documentId)
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, modifiedDocument)
+createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, modifiedDocument)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) DocumentsPost(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
-// Handle batch requests
-isBatchRequest, _ := strconv.ParseBool(c.GetHeader("x-ms-cosmos-is-batch-request"))
-if isBatchRequest {
-h.handleBatchRequest(c)
-return
-}
var requestBody map[string]interface{}
if err := c.BindJSON(&requestBody); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -203,32 +191,55 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
query := requestBody["query"]
if query != nil {
-h.handleDocumentQuery(c, requestBody)
+if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
+c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
+return
+}
+var queryParameters map[string]interface{}
+if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
+queryParameters = parametersToMap(paramsArray)
+}
+docs, status := h.repository.ExecuteQueryDocuments(databaseId, collectionId, query.(string), queryParameters)
+if status != repositorymodels.StatusOk {
+// TODO: Currently we return everything if the query fails
+h.GetAllDocuments(c)
+return
+}
+collection, _ := h.repository.GetCollection(databaseId, collectionId)
+c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
+c.IndentedJSON(http.StatusOK, gin.H{
+"_rid": collection.ResourceID,
+"Documents": docs,
+"_count": len(docs),
+})
return
}
if requestBody["id"] == "" {
-c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
+c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
return
}
isUpsert, _ := strconv.ParseBool(c.GetHeader("x-ms-documentdb-is-upsert"))
if isUpsert {
-h.dataStore.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
+h.repository.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
}
-createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
+createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func parametersToMap(pairs []interface{}) map[string]interface{} {
@@ -242,155 +253,3 @@ func parametersToMap(pairs []interface{}) map[string]interface{} {
return result
}
-func (h *Handlers) handleDocumentQuery(c *gin.Context, requestBody map[string]interface{}) {
-databaseId := c.Param("databaseId")
-collectionId := c.Param("collId")
-if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
-c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
-return
-}
-var queryParameters map[string]interface{}
-if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
-queryParameters = parametersToMap(paramsArray)
-}
-queryText := requestBody["query"].(string)
-docs, status := h.executeQueryDocuments(databaseId, collectionId, queryText, queryParameters)
-if status != datastore.StatusOk {
-// TODO: Currently we return everything if the query fails
-h.GetAllDocuments(c)
-return
-}
-collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
-c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
-c.IndentedJSON(http.StatusOK, gin.H{
-"_rid": collection.ResourceID,
-"Documents": docs,
-"_count": len(docs),
-})
-}
-func (h *Handlers) handleBatchRequest(c *gin.Context) {
-databaseId := c.Param("databaseId")
-collectionId := c.Param("collId")
-batchOperations := make([]apimodels.BatchOperation, 0)
-if err := c.BindJSON(&batchOperations); err != nil {
-c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
-return
-}
-batchOperationResults := make([]apimodels.BatchOperationResult, len(batchOperations))
-for idx, operation := range batchOperations {
-switch operation.OperationType {
-case apimodels.BatchOperationTypeCreate:
-createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
-responseCode := dataStoreStatusToResponseCode(status)
-if status == datastore.StatusOk {
-responseCode = http.StatusCreated
-}
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: responseCode,
-ResourceBody: createdDocument,
-}
-case apimodels.BatchOperationTypeDelete:
-status := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
-responseCode := dataStoreStatusToResponseCode(status)
-if status == datastore.StatusOk {
-responseCode = http.StatusNoContent
-}
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: responseCode,
-}
-case apimodels.BatchOperationTypeReplace:
-deleteStatus := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
-if deleteStatus == datastore.StatusNotFound {
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: http.StatusNotFound,
-}
-continue
-}
-createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
-responseCode := dataStoreStatusToResponseCode(createStatus)
-if createStatus == datastore.StatusOk {
-responseCode = http.StatusCreated
-}
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: responseCode,
-ResourceBody: createdDocument,
-}
-case apimodels.BatchOperationTypeUpsert:
-documentId := operation.ResourceBody["id"].(string)
-h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
-createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
-responseCode := dataStoreStatusToResponseCode(createStatus)
-if createStatus == datastore.StatusOk {
-responseCode = http.StatusCreated
-}
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: responseCode,
-ResourceBody: createdDocument,
-}
-case apimodels.BatchOperationTypeRead:
-document, status := h.dataStore.GetDocument(databaseId, collectionId, operation.Id)
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: dataStoreStatusToResponseCode(status),
-ResourceBody: document,
-}
-case apimodels.BatchOperationTypePatch:
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: http.StatusNotImplemented,
-Message: "Patch operation is not implemented",
-}
-default:
-batchOperationResults[idx] = apimodels.BatchOperationResult{
-StatusCode: http.StatusBadRequest,
-Message: "Unknown operation type",
-}
-}
-}
-c.JSON(http.StatusOK, batchOperationResults)
-}
-func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
-switch status {
-case datastore.StatusOk:
-return http.StatusOK
-case datastore.StatusNotFound:
-return http.StatusNotFound
-case datastore.Conflict:
-return http.StatusConflict
-case datastore.BadRequest:
-return http.StatusBadRequest
-default:
-return http.StatusInternalServerError
-}
-}
-func (h *Handlers) executeQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, datastore.DataStoreStatus) {
-parsedQuery, err := nosql.Parse("", []byte(query))
-if err != nil {
-logger.Errorf("Failed to parse query: %s\nerr: %v", query, err)
-return nil, datastore.BadRequest
-}
-allDocumentsIterator, status := h.dataStore.GetDocumentIterator(databaseId, collectionId)
-if status != datastore.StatusOk {
-return nil, status
-}
-defer allDocumentsIterator.Close()
-rowsIterator := converters.NewDocumentToRowTypeIterator(allDocumentsIterator)
-if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
-typedQuery.Parameters = queryParameters
-return memoryexecutor.ExecuteQuery(typedQuery, rowsIterator), datastore.StatusOk
-}
-return nil, datastore.BadRequest
-}

@@ -2,17 +2,17 @@ package handlers
import (
"github.com/pikami/cosmium/api/config"
-"github.com/pikami/cosmium/internal/datastore"
+"github.com/pikami/cosmium/internal/repositories"
)
type Handlers struct {
-dataStore datastore.DataStore
+repository *repositories.DataRepository
-config *config.ServerConfig
+config config.ServerConfig
}
-func NewHandlers(dataStore datastore.DataStore, config *config.ServerConfig) *Handlers {
+func NewHandlers(dataRepository *repositories.DataRepository, config config.ServerConfig) *Handlers {
return &Handlers{
-dataStore: dataStore,
+repository: dataRepository,
config: config,
}
}

@@ -10,7 +10,7 @@ import (
"github.com/pikami/cosmium/internal/logger"
)
-func Authentication(config *config.ServerConfig) gin.HandlerFunc {
+func Authentication(config config.ServerConfig) gin.HandlerFunc {
return func(c *gin.Context) {
requestUrl := c.Request.URL.String()
if config.DisableAuth ||
@@ -75,7 +75,8 @@ func requestToResourceId(c *gin.Context) string {
isFeed := c.Request.Header.Get("A-Im") == "Incremental Feed"
if resourceType == "pkranges" && isFeed {
-resourceId = collId
+// CosmosSDK replaces '/' with '-' in resource id requests
+resourceId = strings.Replace(collId, "-", "/", -1)
}
return resourceId

@@ -7,7 +7,7 @@ import (
"github.com/pikami/cosmium/api/config"
)
-func StripTrailingSlashes(r *gin.Engine, config *config.ServerConfig) gin.HandlerFunc {
+func StripTrailingSlashes(r *gin.Engine, config config.ServerConfig) gin.HandlerFunc {
return func(c *gin.Context) {
path := c.Request.URL.Path
if len(path) > 1 && path[len(path)-1] == '/' && !strings.Contains(path, config.ExplorerBaseUrlLocation) {

@@ -5,9 +5,7 @@ import (
"net/http"
"github.com/gin-gonic/gin"
-"github.com/pikami/cosmium/internal/constants"
+repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-"github.com/pikami/cosmium/internal/datastore"
-"github.com/pikami/cosmium/internal/resourceid"
)
func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
@@ -19,8 +17,8 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
return
}
-partitionKeyRanges, status := h.dataStore.GetPartitionKeyRanges(databaseId, collectionId)
+partitionKeyRanges, status := h.repository.GetPartitionKeyRanges(databaseId, collectionId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Header("etag", "\"420\"")
c.Header("lsn", "420")
c.Header("x-ms-cosmos-llsn", "420")
@@ -28,24 +26,23 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
collectionRid := collectionId
-collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
+collection, _ := h.repository.GetCollection(databaseId, collectionId)
if collection.ResourceID != "" {
collectionRid = collection.ResourceID
}
-rid := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
c.IndentedJSON(http.StatusOK, gin.H{
-"_rid": rid,
+"_rid": collectionRid,
"_count": len(partitionKeyRanges),
"PartitionKeyRanges": partitionKeyRanges,
})
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}

@@ -28,8 +28,6 @@ func (h *Handlers) GetServerInfo(c *gin.Context) {
},
},
"enableMultipleWriteLocations": false,
-"continuousBackupEnabled": false,
-"enableNRegionSynchronousCommit": false,
"userReplicationPolicy": map[string]interface{}{
"asyncReplication": false,
"minReplicaSetSize": 1,

@@ -5,23 +5,22 @@ import (
"net/http"
"github.com/gin-gonic/gin"
-"github.com/pikami/cosmium/internal/constants"
+repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllStoredProcedures(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
-sps, status := h.dataStore.GetAllStoredProcedures(databaseId, collectionId)
+sps, status := h.repository.GetAllStoredProcedures(databaseId, collectionId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) GetStoredProcedure(c *gin.Context) {
@@ -29,19 +28,19 @@ func (h *Handlers) GetStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")
-sp, status := h.dataStore.GetStoredProcedure(databaseId, collectionId, spId)
+sp, status := h.repository.GetStoredProcedure(databaseId, collectionId, spId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, sp)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
@@ -49,18 +48,18 @@ func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")
-status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
+status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId)
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.Status(http.StatusNoContent)
return
}
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
@@ -68,52 +67,52 @@ func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")
-var sp datastore.StoredProcedure
+var sp repositorymodels.StoredProcedure
if err := c.BindJSON(&sp); err != nil {
-c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
-status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
+status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId)
-if status == datastore.StatusNotFound {
+if status == repositorymodels.StatusNotFound {
-c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
+c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
-createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
+createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, createdSP)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
func (h *Handlers) CreateStoredProcedure(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
-var sp datastore.StoredProcedure
+var sp repositorymodels.StoredProcedure
if err := c.BindJSON(&sp); err != nil {
-c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
+c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
-createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
+createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp)
-if status == datastore.Conflict {
+if status == repositorymodels.Conflict {
-c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
+c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
-if status == datastore.StatusOk {
+if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdSP)
return
}
-c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
+c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}

@ -5,23 +5,22 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/pikami/cosmium/internal/constants" repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllTriggers(c *gin.Context) { func (h *Handlers) GetAllTriggers(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggers, status := h.dataStore.GetAllTriggers(databaseId, collectionId) triggers, status := h.repository.GetAllTriggers(databaseId, collectionId)
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)}) c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) GetTrigger(c *gin.Context) { func (h *Handlers) GetTrigger(c *gin.Context) {
@ -29,19 +28,19 @@ func (h *Handlers) GetTrigger(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggerId := c.Param("triggerId") triggerId := c.Param("triggerId")
trigger, status := h.dataStore.GetTrigger(databaseId, collectionId, triggerId) trigger, status := h.repository.GetTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, trigger) c.IndentedJSON(http.StatusOK, trigger)
return return
} }
if status == datastore.StatusNotFound { if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) DeleteTrigger(c *gin.Context) { func (h *Handlers) DeleteTrigger(c *gin.Context) {
@ -49,18 +48,18 @@ func (h *Handlers) DeleteTrigger(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggerId := c.Param("triggerId") triggerId := c.Param("triggerId")
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId) status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == datastore.StatusNotFound { if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) ReplaceTrigger(c *gin.Context) { func (h *Handlers) ReplaceTrigger(c *gin.Context) {
@ -68,52 +67,52 @@ func (h *Handlers) ReplaceTrigger(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggerId := c.Param("triggerId") triggerId := c.Param("triggerId")
var trigger datastore.Trigger var trigger repositorymodels.Trigger
if err := c.BindJSON(&trigger); err != nil { if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId) status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusNotFound { if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger) createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger)
if status == datastore.Conflict { if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, createdTrigger) c.IndentedJSON(http.StatusOK, createdTrigger)
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) CreateTrigger(c *gin.Context) { func (h *Handlers) CreateTrigger(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
var trigger datastore.Trigger var trigger repositorymodels.Trigger
if err := c.BindJSON(&trigger); err != nil { if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger) createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger)
if status == datastore.Conflict { if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdTrigger) c.IndentedJSON(http.StatusCreated, createdTrigger)
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
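For orientation, the trigger handlers above (and the UDF handlers that follow) all repeat the same status-to-HTTP mapping. Below is a minimal sketch of how that mapping could be factored out, using only the status values and response constants visible on the master side of this diff; the helper itself and the `datastore.DataStoreStatus` type name are assumptions, not code from either branch:

// Hypothetical helper (not part of either branch): maps a data store status
// to the HTTP responses used throughout these handlers.
func respondForStatus(c *gin.Context, status datastore.DataStoreStatus, okCode int, body interface{}) {
	switch status {
	case datastore.StatusOk:
		if body != nil {
			c.IndentedJSON(okCode, body)
			return
		}
		c.Status(okCode)
	case datastore.StatusNotFound:
		c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
	case datastore.Conflict:
		c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
	default:
		c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
	}
}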

View File

@ -5,23 +5,22 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/pikami/cosmium/internal/constants" repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) { func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfs, status := h.dataStore.GetAllUserDefinedFunctions(databaseId, collectionId) udfs, status := h.repository.GetAllUserDefinedFunctions(databaseId, collectionId)
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)}) c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) GetUserDefinedFunction(c *gin.Context) { func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
@ -29,19 +28,19 @@ func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfId := c.Param("udfId") udfId := c.Param("udfId")
udf, status := h.dataStore.GetUserDefinedFunction(databaseId, collectionId, udfId) udf, status := h.repository.GetUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, udf) c.IndentedJSON(http.StatusOK, udf)
return return
} }
if status == datastore.StatusNotFound { if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) { func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
@ -49,18 +48,18 @@ func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfId := c.Param("udfId") udfId := c.Param("udfId")
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId) status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == datastore.StatusNotFound { if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) { func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
@ -68,52 +67,52 @@ func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfId := c.Param("udfId") udfId := c.Param("udfId")
var udf datastore.UserDefinedFunction var udf repositorymodels.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil { if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId) status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusNotFound { if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf) createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == datastore.Conflict { if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusOK, createdUdf) c.IndentedJSON(http.StatusOK, createdUdf)
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }
func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) { func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
var udf datastore.UserDefinedFunction var udf repositorymodels.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil { if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf) createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == datastore.Conflict { if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == datastore.StatusOk { if status == repositorymodels.StatusOk {
c.IndentedJSON(http.StatusCreated, createdUdf) c.IndentedJSON(http.StatusCreated, createdUdf)
return return
} }
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse) c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
} }

View File

@ -4,30 +4,24 @@ import (
"context" "context"
"fmt" "fmt"
"net/http" "net/http"
"sync"
"time"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/handlers" "github.com/pikami/cosmium/api/handlers"
"github.com/pikami/cosmium/api/handlers/middleware" "github.com/pikami/cosmium/api/handlers/middleware"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger" "github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/repositories"
tlsprovider "github.com/pikami/cosmium/internal/tls_provider" tlsprovider "github.com/pikami/cosmium/internal/tls_provider"
) )
var ginMux sync.Mutex func (s *ApiServer) CreateRouter(repository *repositories.DataRepository) {
routeHandlers := handlers.NewHandlers(repository, s.config)
func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
routeHandlers := handlers.NewHandlers(dataStore, s.config)
ginMux.Lock()
gin.DefaultWriter = logger.InfoWriter() gin.DefaultWriter = logger.InfoWriter()
gin.DefaultErrorWriter = logger.ErrorWriter() gin.DefaultErrorWriter = logger.ErrorWriter()
if s.config.LogLevel != "debug" { if s.config.LogLevel != "debug" {
gin.SetMode(gin.ReleaseMode) gin.SetMode(gin.ReleaseMode)
} }
ginMux.Unlock()
router := gin.Default(func(e *gin.Engine) { router := gin.Default(func(e *gin.Engine) {
e.RedirectTrailingSlash = false e.RedirectTrailingSlash = false
@ -87,7 +81,7 @@ func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
s.router = router s.router = router
} }
func (s *ApiServer) Start() error { func (s *ApiServer) Start() {
listenAddress := fmt.Sprintf(":%d", s.config.Port) listenAddress := fmt.Sprintf(":%d", s.config.Port)
s.isActive = true s.isActive = true
@ -96,8 +90,6 @@ func (s *ApiServer) Start() error {
Handler: s.router.Handler(), Handler: s.router.Handler(),
} }
errChan := make(chan error, 1)
go func() { go func() {
<-s.stopServer <-s.stopServer
logger.InfoLn("Shutting down server...") logger.InfoLn("Shutting down server...")
@ -105,40 +97,35 @@ func (s *ApiServer) Start() error {
if err != nil { if err != nil {
logger.ErrorLn("Failed to shutdown server:", err) logger.ErrorLn("Failed to shutdown server:", err)
} }
s.onServerShutdown <- true
}() }()
go func() { go func() {
var err error
if s.config.DisableTls { if s.config.DisableTls {
logger.Infof("Listening and serving HTTP on %s\n", server.Addr) logger.Infof("Listening and serving HTTP on %s\n", server.Addr)
err = server.ListenAndServe() err := server.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTP server:", err)
}
s.isActive = false
} else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" { } else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" {
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr) logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
err = server.ListenAndServeTLS( err := server.ListenAndServeTLS(
s.config.TLS_CertificatePath, s.config.TLS_CertificatePath,
s.config.TLS_CertificateKey) s.config.TLS_CertificateKey)
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTPS server:", err)
}
s.isActive = false
} else { } else {
tlsConfig := tlsprovider.GetDefaultTlsConfig() tlsConfig := tlsprovider.GetDefaultTlsConfig()
server.TLSConfig = tlsConfig server.TLSConfig = tlsConfig
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr) logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
err = server.ListenAndServeTLS("", "") err := server.ListenAndServeTLS("", "")
}
if err != nil && err != http.ErrServerClosed { if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start server:", err) logger.ErrorLn("Failed to start HTTPS server:", err)
errChan <- err
} else {
errChan <- nil
} }
s.isActive = false s.isActive = false
}
}() }()
select {
case err := <-errChan:
return err
case <-time.After(50 * time.Millisecond):
return nil
}
} }
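The select at the end of Start on the master side makes the startup contract explicit: the caller gets an error only when the listener fails almost immediately (for example, the port is already bound); otherwise Start returns nil after 50 ms while the server keeps serving in the background. A minimal caller sketch under that contract, with placeholder configuration values:

// Sketch only: Start returns an error only if the listener fails within ~50ms.
dataStore := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
server := api.NewApiServer(dataStore, &config.ServerConfig{Port: 8081, DisableTls: true})
if err := server.Start(); err != nil {
	// Listener never came up, e.g. address already in use.
	logger.ErrorLn("server did not start:", err)
	return
}
defer server.Stop()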

View File

@ -2,11 +2,13 @@ package tests_test
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"testing" "testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -17,9 +19,9 @@ func Test_Authentication(t *testing.T) {
defer ts.Server.Close() defer ts.Server.Close()
t.Run("Should get 200 when correct account key is used", func(t *testing.T) { t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
ts.DataStore.DeleteDatabase(testDatabaseName) ts.Repository.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
formatConnectionString(ts.URL, config.DefaultAccountKey), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{}, &azcosmos.ClientOptions{},
) )
assert.Nil(t, err) assert.Nil(t, err)
@ -33,9 +35,9 @@ func Test_Authentication(t *testing.T) {
}) })
t.Run("Should get 401 when wrong account key is used", func(t *testing.T) { t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
ts.DataStore.DeleteDatabase(testDatabaseName) ts.Repository.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
formatConnectionString(ts.URL, "AAAA"), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
&azcosmos.ClientOptions{}, &azcosmos.ClientOptions{},
) )
assert.Nil(t, err) assert.Nil(t, err)
@ -45,7 +47,12 @@ func Test_Authentication(t *testing.T) {
azcosmos.DatabaseProperties{ID: testDatabaseName}, azcosmos.DatabaseProperties{ID: testDatabaseName},
&azcosmos.CreateDatabaseOptions{}) &azcosmos.CreateDatabaseOptions{})
assert.Contains(t, err.Error(), "401 Unauthorized") var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, respErr.StatusCode, http.StatusUnauthorized)
} else {
panic(err)
}
}) })
t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) { t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) {
@ -61,7 +68,7 @@ func Test_Authentication(t *testing.T) {
} }
func Test_Authentication_Disabled(t *testing.T) { func Test_Authentication_Disabled(t *testing.T) {
ts := runTestServerCustomConfig(&config.ServerConfig{ ts := runTestServerCustomConfig(config.ServerConfig{
AccountKey: config.DefaultAccountKey, AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing", ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation, ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
@ -70,9 +77,9 @@ func Test_Authentication_Disabled(t *testing.T) {
defer ts.Server.Close() defer ts.Server.Close()
t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) { t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
ts.DataStore.DeleteDatabase(testDatabaseName) ts.Repository.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
formatConnectionString(ts.URL, "AAAA"), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
&azcosmos.ClientOptions{}, &azcosmos.ClientOptions{},
) )
assert.Nil(t, err) assert.Nil(t, err)
@ -85,7 +92,3 @@ func Test_Authentication_Disabled(t *testing.T) {
assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName) assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
}) })
} }
func formatConnectionString(endpoint, key string) string {
return fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", endpoint, key)
}
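For reference, the azcore.ResponseError pattern used in the test above, isolated as a minimal sketch (client setup and database name are placeholders): unwrapping the SDK error exposes the HTTP status directly instead of matching on the error text.

// Sketch: unwrap an azcosmos error into *azcore.ResponseError to inspect the status code.
_, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{ID: "some-db"}, nil)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
	assert.Equal(t, http.StatusUnauthorized, respErr.StatusCode)
} else if err != nil {
	t.Fatalf("unexpected error type: %v", err)
}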

View File

@ -3,29 +3,32 @@ package tests_test
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"net/http" "net/http"
"testing" "testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/internal/datastore" "github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func Test_Collections(t *testing.T) { func Test_Collections(t *testing.T) {
presets := []testPreset{PresetJsonStore, PresetBadgerStore} ts := runTestServer()
defer ts.Server.Close()
setUp := func(ts *TestServer, client *azcosmos.Client) *azcosmos.DatabaseClient { client, err := azcosmos.NewClientFromConnectionString(
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName}) fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
databaseClient, err := client.NewDatabase(testDatabaseName) databaseClient, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err) assert.Nil(t, err)
return databaseClient t.Run("Collection Create", func(t *testing.T) {
}
runTestsWithPresets(t, "Collection Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Should create collection", func(t *testing.T) { t.Run("Should create collection", func(t *testing.T) {
createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{ createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
ID: testCollectionName, ID: testCollectionName,
@ -36,7 +39,7 @@ func Test_Collections(t *testing.T) {
}) })
t.Run("Should return conflict when collection exists", func(t *testing.T) { t.Run("Should return conflict when collection exists", func(t *testing.T) {
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{ ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ID: testCollectionName, ID: testCollectionName,
}) })
@ -54,11 +57,9 @@ func Test_Collections(t *testing.T) {
}) })
}) })
runTestsWithPresets(t, "Collection Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) { t.Run("Collection Read", func(t *testing.T) {
databaseClient := setUp(ts, client)
t.Run("Should read collection", func(t *testing.T) { t.Run("Should read collection", func(t *testing.T) {
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{ ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ID: testCollectionName, ID: testCollectionName,
}) })
@ -72,7 +73,7 @@ func Test_Collections(t *testing.T) {
}) })
t.Run("Should return not found when collection does not exist", func(t *testing.T) { t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName) ts.Repository.DeleteCollection(testDatabaseName, testCollectionName)
collectionResponse, err := databaseClient.NewContainer(testCollectionName) collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err) assert.Nil(t, err)
@ -89,11 +90,9 @@ func Test_Collections(t *testing.T) {
}) })
}) })
runTestsWithPresets(t, "Collection Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) { t.Run("Collection Delete", func(t *testing.T) {
databaseClient := setUp(ts, client)
t.Run("Should delete collection", func(t *testing.T) { t.Run("Should delete collection", func(t *testing.T) {
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{ ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ID: testCollectionName, ID: testCollectionName,
}) })
@ -106,7 +105,7 @@ func Test_Collections(t *testing.T) {
}) })
t.Run("Should return not found when collection does not exist", func(t *testing.T) { t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName) ts.Repository.DeleteCollection(testDatabaseName, testCollectionName)
collectionResponse, err := databaseClient.NewContainer(testCollectionName) collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err) assert.Nil(t, err)

View File

@ -1,62 +1,39 @@
package tests_test package tests_test
import ( import (
"fmt"
"net/http/httptest" "net/http/httptest"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api" "github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/datastore" "github.com/pikami/cosmium/internal/repositories"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/stretchr/testify/assert"
) )
type TestServer struct { type TestServer struct {
Server *httptest.Server Server *httptest.Server
DataStore datastore.DataStore Repository *repositories.DataRepository
URL string URL string
} }
func getDefaultTestServerConfig() *config.ServerConfig { func runTestServerCustomConfig(config config.ServerConfig) *TestServer {
return &config.ServerConfig{ repository := repositories.NewDataRepository(repositories.RepositoryOptions{})
AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
DataStore: "json",
}
}
func runTestServerCustomConfig(configuration *config.ServerConfig) *TestServer { api := api.NewApiServer(repository, config)
var dataStore datastore.DataStore
switch configuration.DataStore {
case config.DataStoreBadger:
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{})
default:
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
}
api := api.NewApiServer(dataStore, configuration)
server := httptest.NewServer(api.GetRouter()) server := httptest.NewServer(api.GetRouter())
configuration.DatabaseEndpoint = server.URL
return &TestServer{ return &TestServer{
Server: server, Server: server,
DataStore: dataStore, Repository: repository,
URL: server.URL, URL: server.URL,
} }
} }
func runTestServer() *TestServer { func runTestServer() *TestServer {
config := getDefaultTestServerConfig() config := config.ServerConfig{
AccountKey: config.DefaultAccountKey,
config.LogLevel = "debug" ExplorerPath: "/tmp/nothing",
logger.SetLogLevel(logger.LogLevelDebug) ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
}
return runTestServerCustomConfig(config) return runTestServerCustomConfig(config)
} }
@ -66,47 +43,3 @@ const (
testDatabaseName = "test-db" testDatabaseName = "test-db"
testCollectionName = "test-coll" testCollectionName = "test-coll"
) )
type testFunc func(t *testing.T, ts *TestServer, cosmosClient *azcosmos.Client)
type testPreset string
const (
PresetJsonStore testPreset = "JsonDS"
PresetBadgerStore testPreset = "BadgerDS"
)
func runTestsWithPreset(t *testing.T, name string, testPreset testPreset, f testFunc) {
serverConfig := getDefaultTestServerConfig()
serverConfig.LogLevel = "debug"
logger.SetLogLevel(logger.LogLevelDebug)
switch testPreset {
case PresetBadgerStore:
serverConfig.DataStore = config.DataStoreBadger
case PresetJsonStore:
serverConfig.DataStore = config.DataStoreJson
}
ts := runTestServerCustomConfig(serverConfig)
defer ts.Server.Close()
defer ts.DataStore.Close()
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
testName := fmt.Sprintf("%s_%s", testPreset, name)
t.Run(testName, func(t *testing.T) {
f(t, ts, client)
})
}
func runTestsWithPresets(t *testing.T, name string, testPresets []testPreset, f testFunc) {
for _, testPreset := range testPresets {
runTestsWithPreset(t, name, testPreset, f)
}
}
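A short usage sketch of the preset helpers defined above (the test name "Example" is illustrative): the test body runs once per data store backend, and each run gets its own server, data store, and azcosmos client.

func Test_Example(t *testing.T) {
	presets := []testPreset{PresetJsonStore, PresetBadgerStore}
	runTestsWithPresets(t, "Example", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
		// Runs as "JsonDS_Example" and "BadgerDS_Example"; ts.DataStore and
		// client both target the server instance created for this preset.
		ts.DataStore.CreateDatabase(datastore.Database{ID: "example-db"})
	})
}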

View File

@ -3,21 +3,30 @@ package tests_test
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"net/http" "net/http"
"testing" "testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/internal/datastore" "github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func Test_Databases(t *testing.T) { func Test_Databases(t *testing.T) {
presets := []testPreset{PresetJsonStore, PresetBadgerStore} ts := runTestServer()
defer ts.Server.Close()
runTestsWithPresets(t, "Database Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) { client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
t.Run("Database Create", func(t *testing.T) {
t.Run("Should create database", func(t *testing.T) { t.Run("Should create database", func(t *testing.T) {
ts.DataStore.DeleteDatabase(testDatabaseName) ts.Repository.DeleteDatabase(testDatabaseName)
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{ createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
ID: testDatabaseName, ID: testDatabaseName,
@ -28,7 +37,7 @@ func Test_Databases(t *testing.T) {
}) })
t.Run("Should return conflict when database exists", func(t *testing.T) { t.Run("Should return conflict when database exists", func(t *testing.T) {
ts.DataStore.CreateDatabase(datastore.Database{ ts.Repository.CreateDatabase(repositorymodels.Database{
ID: testDatabaseName, ID: testDatabaseName,
}) })
@ -46,9 +55,9 @@ func Test_Databases(t *testing.T) {
}) })
}) })
runTestsWithPresets(t, "Database Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) { t.Run("Database Read", func(t *testing.T) {
t.Run("Should read database", func(t *testing.T) { t.Run("Should read database", func(t *testing.T) {
ts.DataStore.CreateDatabase(datastore.Database{ ts.Repository.CreateDatabase(repositorymodels.Database{
ID: testDatabaseName, ID: testDatabaseName,
}) })
@ -62,7 +71,7 @@ func Test_Databases(t *testing.T) {
}) })
t.Run("Should return not found when database does not exist", func(t *testing.T) { t.Run("Should return not found when database does not exist", func(t *testing.T) {
ts.DataStore.DeleteDatabase(testDatabaseName) ts.Repository.DeleteDatabase(testDatabaseName)
databaseResponse, err := client.NewDatabase(testDatabaseName) databaseResponse, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err) assert.Nil(t, err)
@ -79,9 +88,9 @@ func Test_Databases(t *testing.T) {
}) })
}) })
runTestsWithPresets(t, "Database Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) { t.Run("Database Delete", func(t *testing.T) {
t.Run("Should delete database", func(t *testing.T) { t.Run("Should delete database", func(t *testing.T) {
ts.DataStore.CreateDatabase(datastore.Database{ ts.Repository.CreateDatabase(repositorymodels.Database{
ID: testDatabaseName, ID: testDatabaseName,
}) })
@ -94,7 +103,7 @@ func Test_Databases(t *testing.T) {
}) })
t.Run("Should return not found when database does not exist", func(t *testing.T) { t.Run("Should return not found when database does not exist", func(t *testing.T) {
ts.DataStore.DeleteDatabase(testDatabaseName) ts.Repository.DeleteDatabase(testDatabaseName)
databaseResponse, err := client.NewDatabase(testDatabaseName) databaseResponse, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err) assert.Nil(t, err)

View File

@ -14,7 +14,7 @@ import (
"github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/datastore" repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -53,9 +53,11 @@ func testCosmosQuery(t *testing.T,
} }
} }
func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerClient { func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClient) {
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName}) ts := runTestServer()
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ID: testCollectionName, ID: testCollectionName,
PartitionKey: struct { PartitionKey: struct {
Paths []string "json:\"paths\"" Paths []string "json:\"paths\""
@ -65,8 +67,8 @@ func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerCli
Paths: []string{"/pk"}, Paths: []string{"/pk"},
}, },
}) })
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}) ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}}) ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
@ -77,14 +79,12 @@ func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerCli
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName) collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
assert.Nil(t, err) assert.Nil(t, err)
return collectionClient return ts, collectionClient
} }
func Test_Documents(t *testing.T) { func Test_Documents(t *testing.T) {
presets := []testPreset{PresetJsonStore, PresetBadgerStore} ts, collectionClient := documents_InitializeDb(t)
defer ts.Server.Close()
runTestsWithPresets(t, "Test_Documents", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
collectionClient := documents_InitializeDb(t, ts)
t.Run("Should query document", func(t *testing.T) { t.Run("Should query document", func(t *testing.T) {
testCosmosQuery(t, collectionClient, testCosmosQuery(t, collectionClient,
@ -227,10 +227,11 @@ func Test_Documents(t *testing.T) {
} }
} }
}) })
}) }
runTestsWithPresets(t, "Test_Documents_Patch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) { func Test_Documents_Patch(t *testing.T) {
collectionClient := documents_InitializeDb(t, ts) ts, collectionClient := documents_InitializeDb(t)
defer ts.Server.Close()
t.Run("Should PATCH document", func(t *testing.T) { t.Run("Should PATCH document", func(t *testing.T) {
context := context.TODO() context := context.TODO()
@ -376,140 +377,5 @@ func Test_Documents(t *testing.T) {
assert.NotNil(t, r) assert.NotNil(t, r)
assert.Nil(t, err2) assert.Nil(t, err2)
}) })
})
runTestsWithPresets(t, "Test_Documents_TransactionalBatch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
collectionClient := documents_InitializeDb(t, ts)
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "678901",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.CreateItem(bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
createdDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], createdDoc["id"])
})
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
batch.DeleteItem("12345", nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
_, status := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, "12345")
assert.Equal(t, datastore.StatusNotFound, int(status))
})
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "67890",
"pk": "666",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.ReplaceItem("67890", bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], updatedDoc["id"])
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
})
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "678901",
"pk": "666",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.UpsertItem(bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], updatedDoc["id"])
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
})
t.Run("Should execute READ transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
batch.ReadItem("67890", nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, "67890", itemResponseBody["id"])
})
})
} }

View File

@ -14,8 +14,7 @@ import (
// Request document with trailing slash like python cosmosdb client does. // Request document with trailing slash like python cosmosdb client does.
func Test_Documents_Read_Trailing_Slash(t *testing.T) { func Test_Documents_Read_Trailing_Slash(t *testing.T) {
ts := runTestServer() ts, _ := documents_InitializeDb(t)
documents_InitializeDb(t, ts)
defer ts.Server.Close() defer ts.Server.Close()
t.Run("Read doc with client that appends slash to path", func(t *testing.T) { t.Run("Read doc with client that appends slash to path", func(t *testing.T) {

View File

@ -7,40 +7,24 @@ import (
"github.com/pikami/cosmium/api" "github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/datastore" "github.com/pikami/cosmium/internal/repositories"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
"github.com/pikami/cosmium/internal/logger"
) )
func main() { func main() {
configuration := config.ParseFlags() configuration := config.ParseFlags()
var dataStore datastore.DataStore repository := repositories.NewDataRepository(repositories.RepositoryOptions{
switch configuration.DataStore {
case config.DataStoreBadger:
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
PersistDataFilePath: configuration.PersistDataFilePath,
})
logger.InfoLn("Using Badger data store")
default:
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
InitialDataFilePath: configuration.InitialDataFilePath, InitialDataFilePath: configuration.InitialDataFilePath,
PersistDataFilePath: configuration.PersistDataFilePath, PersistDataFilePath: configuration.PersistDataFilePath,
}) })
logger.InfoLn("Using in-memory data store")
server := api.NewApiServer(repository, configuration)
server.Start()
waitForExit(server, repository, configuration)
} }
server := api.NewApiServer(dataStore, &configuration) func waitForExit(server *api.ApiServer, repository *repositories.DataRepository, config config.ServerConfig) {
err := server.Start()
if err != nil {
panic(err)
}
waitForExit(server, dataStore)
}
func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
sigs := make(chan os.Signal, 1) sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
@ -50,5 +34,7 @@ func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
// Stop the server // Stop the server
server.Stop() server.Stop()
dataStore.Close() if config.PersistDataFilePath != "" {
repository.SaveStateFS(config.PersistDataFilePath)
}
} }

View File

@ -79,7 +79,7 @@ Cosmium strives to support the core features of Cosmos DB, including:
| Function | Implemented | | Function | Implemented |
| -------- | ----------- | | -------- | ----------- |
| IIF | Yes | | IIF | No |
### Date and time Functions ### Date and time Functions
@ -204,19 +204,6 @@ Cosmium strives to support the core features of Cosmos DB, including:
| IS_PRIMITIVE | Yes | | IS_PRIMITIVE | Yes |
| IS_STRING | Yes | | IS_STRING | Yes |
### Transactional batch operations
Note: There's actually no transaction here. Think of this as a 'bulk operation' that can partially succeed.
| Operation | Implemented |
| --------- | ----------- |
| Create | Yes |
| Delete | Yes |
| Replace | Yes |
| Upsert | Yes |
| Read | Yes |
| Patch | No |
## Known Differences ## Known Differences
While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of: While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of:

54
go.mod
View File

@ -1,60 +1,46 @@
module github.com/pikami/cosmium module github.com/pikami/cosmium
go 1.24.0 go 1.22.0
require ( require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0 github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6
github.com/cosmiumdev/json-patch/v5 v5.9.11 github.com/cosmiumdev/json-patch/v5 v5.9.3
github.com/dgraph-io/badger/v4 v4.7.0
github.com/gin-gonic/gin v1.10.0 github.com/gin-gonic/gin v1.10.0
github.com/google/uuid v1.6.0 github.com/google/uuid v1.6.0
github.com/stretchr/testify v1.10.0 github.com/stretchr/testify v1.10.0
github.com/vmihailenco/msgpack/v5 v5.4.1 golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
) )
require ( require (
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/bytedance/sonic v1.13.2 // indirect github.com/bytedance/sonic v1.12.7 // indirect
github.com/bytedance/sonic/loader v0.2.4 // indirect github.com/bytedance/sonic/loader v0.2.3 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect github.com/cloudwego/base64x v0.1.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect github.com/gin-contrib/sse v1.0.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.26.0 // indirect github.com/go-playground/validator/v10 v10.24.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect github.com/goccy/go-json v0.10.4 // indirect
github.com/google/flatbuffers v25.2.10+incompatible // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/cpuid/v2 v2.2.9 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/leodido/go-urn v1.4.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pkg/errors v0.9.1 // indirect github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect github.com/ugorji/go/codec v1.2.12 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect golang.org/x/arch v0.13.0 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect golang.org/x/crypto v0.32.0 // indirect
go.opentelemetry.io/otel v1.35.0 // indirect golang.org/x/net v0.34.0 // indirect
go.opentelemetry.io/otel/metric v1.35.0 // indirect golang.org/x/sys v0.29.0 // indirect
go.opentelemetry.io/otel/trace v1.35.0 // indirect golang.org/x/text v0.21.0 // indirect
golang.org/x/arch v0.17.0 // indirect google.golang.org/protobuf v1.36.4 // indirect
golang.org/x/crypto v0.38.0 // indirect
golang.org/x/net v0.40.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.25.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
) )

146
go.sum
View File

@ -1,97 +1,57 @@
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0 h1:1nGuui+4POelzDwI7RG56yfQJHCnKvwfMoU7VsEp+Zg=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0/go.mod h1:99EvauvlcJ1U06amZiksfYz/3aFGyIhWGHVyiZXtBAI=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g= github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6 h1:oBqQLSI1pZwGOdXJAoJJSzmff9tlfD4KroVfjQQmd0g=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI= github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6/go.mod h1:Beh5cHIXJ0oWEDWk9lNFtuklCojLLQ5hl+LqSNTTs0I=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0 h1:RGcdpSElvcXCwxydI0xzOBu1Gvp88OoiTGfbtO/z1m0=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0/go.mod h1:YwUyrNUtcZcibA99JcfCP6UUp95VVQKO2MJfBzgJDwA=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0 h1:TSaH6Lj0m8bDr4vX1+LC1KLQTnLzZb3tOxrx/PLqw+c=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0/go.mod h1:Krtog/7tz27z75TwM5cIS8bxEH4dcBUezcq+kGVeZEo=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ= github.com/bytedance/sonic v1.12.7 h1:CQU8pxOy9HToxhndH0Kx/S1qU/CuS9GnKYrGioDcU1Q=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/bytedance/sonic v1.12.7/go.mod h1:tnbal4mxOMju17EGfknm2XyYcpyCnIROYOEYuemj13I=
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
github.com/bytedance/sonic v1.13.1 h1:Jyd5CIvdFnkOWuKXr+wm4Nyk2h0yAFsr8ucJgEasO3g=
github.com/bytedance/sonic v1.13.1/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
github.com/bytedance/sonic v1.13.2 h1:8/H1FempDZqC4VqjptGo14QQlJx8VdZJegxs6wwfqpQ=
github.com/bytedance/sonic v1.13.2/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY= github.com/bytedance/sonic/loader v0.2.3 h1:yctD0Q3v2NOGfSWPLPvG2ggA2kV6TS6s4wioyEqssH0=
github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= github.com/bytedance/sonic/loader v0.2.3/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4= github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/cosmiumdev/json-patch/v5 v5.9.11 h1:WD2Wqaz/vO987z2FFdqgkj15HgYZ/Y5TpqE3I4T/iOQ= github.com/cosmiumdev/json-patch/v5 v5.9.3 h1:l+Og3+5edqV2NHDo58sz72eS733lbXVYP61seYK43Do=
github.com/cosmiumdev/json-patch/v5 v5.9.11/go.mod h1:YPZmckmv4ZY+oxKIOjgq3sIudHVB6VEMcicCS9LtVLM= github.com/cosmiumdev/json-patch/v5 v5.9.3/go.mod h1:WzSTCdia0WrlZtjnL19P4RiwWtfdyArm/E7stgEeP5g=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgraph-io/badger/v4 v4.6.0 h1:acOwfOOZ4p1dPRnYzvkVm7rUk2Y21TgPVepCy5dJdFQ=
github.com/dgraph-io/badger/v4 v4.6.0/go.mod h1:KSJ5VTuZNC3Sd+YhvVjk2nYua9UZnnTr/SkXvdtiPgI=
github.com/dgraph-io/badger/v4 v4.7.0 h1:Q+J8HApYAY7UMpL8d9owqiB+odzEc0zn/aqOD9jhc6Y=
github.com/dgraph-io/badger/v4 v4.7.0/go.mod h1:He7TzG3YBy3j4f5baj5B7Zl2XyfNe5bl4Udl0aPemVA=
github.com/dgraph-io/ristretto/v2 v2.1.0 h1:59LjpOJLNDULHh8MC4UaegN52lC4JnO2dITsie/Pa8I=
github.com/dgraph-io/ristretto/v2 v2.1.0/go.mod h1:uejeqfYXpUomfse0+lO+13ATz4TypQYLJZzBSAemuB4=
github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM=
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E= github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0= github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8= github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg=
github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k= github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@ -110,16 +70,14 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@ -136,47 +94,21 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= golang.org/x/arch v0.13.0 h1:KCkqVVV1kGg0X87TFysjCJ8MxtZEIU4Ja/yXGeoECdA=
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= golang.org/x/arch v0.13.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
golang.org/x/arch v0.15.0 h1:QtOrQd0bTUnhNVNndMpLHNWrDmYzZ2KDqSrEymqInZw=
golang.org/x/arch v0.15.0/go.mod h1:JmwW7aLIoRUKgaTzhkiEFxvcEiQGyOg9BMonBJUS7EE=
golang.org/x/arch v0.17.0 h1:4O3dfLzd+lQewptAHqjewQZQDyEdejz3VwgeYwkZneU=
golang.org/x/arch v0.17.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=

View File

@ -30,8 +30,3 @@ var QueryPlanResponse = gin.H{
}, },
}, },
} }
var UnknownErrorResponse = gin.H{"message": "Unknown error"}
var NotFoundResponse = gin.H{"message": "NotFound"}
var ConflictResponse = gin.H{"message": "Conflict"}
var BadRequestResponse = gin.H{"message": "BadRequest"}
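These shared response bodies are meant to be returned straight from request handlers. A minimal sketch of that mapping, not taken from the diff (the handler name, route parameters, and the store variable are illustrative; it assumes gin, net/http, and the internal datastore package are imported):

// Illustrative handler: map datastore statuses onto the shared response bodies.
func handleGetDocument(c *gin.Context) {
	document, status := store.GetDocument(c.Param("databaseId"), c.Param("collId"), c.Param("docId"))
	switch status {
	case datastore.StatusOk:
		c.IndentedJSON(http.StatusOK, document)
	case datastore.StatusNotFound:
		c.IndentedJSON(http.StatusNotFound, NotFoundResponse)
	case datastore.Conflict:
		c.IndentedJSON(http.StatusConflict, ConflictResponse)
	default:
		c.IndentedJSON(http.StatusInternalServerError, UnknownErrorResponse)
	}
}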

View File

@ -1,20 +0,0 @@
package converters
import (
"github.com/pikami/cosmium/internal/datastore"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)
type DocumentToRowTypeIterator struct {
documents datastore.DocumentIterator
}
func NewDocumentToRowTypeIterator(documents datastore.DocumentIterator) *DocumentToRowTypeIterator {
return &DocumentToRowTypeIterator{
documents: documents,
}
}
func (di *DocumentToRowTypeIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
return di.documents.Next()
}
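This adapter lets the datastore's document iterators satisfy the row iterator shape expected by the memory query executor. A minimal usage sketch, not part of the diff (database and collection IDs are illustrative):

// Illustrative only: stream documents from any DataStore toward the query executor.
func rowsForQuery(dataStore datastore.DataStore) {
	docIter, status := dataStore.GetDocumentIterator("db1", "coll1")
	if status != datastore.StatusOk {
		return
	}
	defer docIter.Close()

	rowIter := NewDocumentToRowTypeIterator(docIter)
	for {
		row, rowStatus := rowIter.Next()
		if rowStatus != datastore.StatusOk {
			break
		}
		_ = row // rows would be handed to the memory executor here
	}
}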

View File

@ -1,66 +0,0 @@
package badgerdatastore
import (
"time"
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/logger"
)
type BadgerDataStore struct {
db *badger.DB
gcTicker *time.Ticker
}
type BadgerDataStoreOptions struct {
PersistDataFilePath string
}
func NewBadgerDataStore(options BadgerDataStoreOptions) *BadgerDataStore {
badgerOpts := badger.DefaultOptions(options.PersistDataFilePath)
badgerOpts = badgerOpts.WithLogger(newBadgerLogger())
if options.PersistDataFilePath == "" {
badgerOpts = badgerOpts.WithInMemory(true)
}
db, err := badger.Open(badgerOpts)
if err != nil {
panic(err)
}
gcTicker := time.NewTicker(5 * time.Minute)
ds := &BadgerDataStore{
db: db,
gcTicker: gcTicker,
}
go ds.runGarbageCollector()
return ds
}
func (r *BadgerDataStore) Close() {
if r.gcTicker != nil {
r.gcTicker.Stop()
r.gcTicker = nil
}
r.db.Close()
r.db = nil
}
func (r *BadgerDataStore) DumpToJson() (string, error) {
logger.ErrorLn("Badger datastore does not support state export currently.")
return "{}", nil
}
func (r *BadgerDataStore) runGarbageCollector() {
for range r.gcTicker.C {
again:
err := r.db.RunValueLogGC(0.7)
if err == nil {
goto again
}
}
}
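Construction and shutdown are the only lifecycle calls the store exposes. A minimal sketch, assuming an ephemeral in-memory instance (not part of the diff):

// Illustrative only: an in-memory Badger-backed store, e.g. for tests.
ds := badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
	PersistDataFilePath: "", // an empty path switches Badger to in-memory mode, as above
})
defer ds.Close() // stops the GC ticker and closes the underlying Badger DB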

View File

@ -1,28 +0,0 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/logger"
)
type badgerLogger struct{}
func newBadgerLogger() badger.Logger {
return &badgerLogger{}
}
func (l *badgerLogger) Errorf(format string, v ...interface{}) {
logger.Errorf(format, v...)
}
func (l *badgerLogger) Warningf(format string, v ...interface{}) {
logger.Infof(format, v...)
}
func (l *badgerLogger) Infof(format string, v ...interface{}) {
logger.Infof(format, v...)
}
func (l *badgerLogger) Debugf(format string, v ...interface{}) {
logger.Debugf(format, v...)
}

View File

@ -1,103 +0,0 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
)
func (r *BadgerDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
exists, err := keyExists(r.db.NewTransaction(false), generateDatabaseKey(databaseId))
if err != nil {
logger.ErrorLn("Error while checking if database exists:", err)
return nil, datastore.Unknown
}
if !exists {
return nil, datastore.StatusNotFound
}
colls, status := listByPrefix[datastore.Collection](r.db, generateKey(resourceid.ResourceTypeCollection, databaseId, "", ""))
if status == datastore.StatusOk {
return colls, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
collectionKey := generateCollectionKey(databaseId, collectionId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var collection datastore.Collection
status := getKey(txn, collectionKey, &collection)
return collection, status
}
func (r *BadgerDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
collectionKey := generateCollectionKey(databaseId, collectionId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
prefixes := []string{
generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""),
collectionKey,
}
for _, prefix := range prefixes {
if err := deleteKeysByPrefix(txn, prefix); err != nil {
return datastore.Unknown
}
}
err := txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
collectionKey := generateCollectionKey(databaseId, newCollection.ID)
txn := r.db.NewTransaction(true)
defer txn.Discard()
collectionExists, err := keyExists(txn, collectionKey)
if err != nil || collectionExists {
return datastore.Collection{}, datastore.Conflict
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Collection{}, status
}
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
status = insertKey(txn, collectionKey, newCollection)
if status != datastore.StatusOk {
return datastore.Collection{}, status
}
return newCollection, datastore.StatusOk
}

View File

@ -1,80 +0,0 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
dbs, status := listByPrefix[datastore.Database](r.db, DatabaseKeyPrefix)
if status == datastore.StatusOk {
return dbs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
databaseKey := generateDatabaseKey(id)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, databaseKey, &database)
return database, status
}
func (r *BadgerDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
databaseKey := generateDatabaseKey(id)
txn := r.db.NewTransaction(true)
defer txn.Discard()
prefixes := []string{
generateKey(resourceid.ResourceTypeCollection, id, "", ""),
generateKey(resourceid.ResourceTypeDocument, id, "", ""),
generateKey(resourceid.ResourceTypeTrigger, id, "", ""),
generateKey(resourceid.ResourceTypeStoredProcedure, id, "", ""),
generateKey(resourceid.ResourceTypeUserDefinedFunction, id, "", ""),
databaseKey,
}
for _, prefix := range prefixes {
if err := deleteKeysByPrefix(txn, prefix); err != nil {
return datastore.Unknown
}
}
err := txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
databaseKey := generateDatabaseKey(newDatabase.ID)
txn := r.db.NewTransaction(true)
defer txn.Discard()
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
status := insertKey(txn, databaseKey, newDatabase)
if status != datastore.StatusOk {
return datastore.Database{}, status
}
return newDatabase, datastore.StatusOk
}

View File

@ -1,204 +0,0 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
"github.com/vmihailenco/msgpack/v5"
)
const (
DatabaseKeyPrefix = "DB:"
CollectionKeyPrefix = "COL:"
DocumentKeyPrefix = "DOC:"
TriggerKeyPrefix = "TRG:"
StoredProcedureKeyPrefix = "SP:"
UserDefinedFunctionKeyPrefix = "UDF:"
)
func generateKey(
resourceType resourceid.ResourceType,
databaseId string,
collectionId string,
resourceId string,
) string {
result := ""
switch resourceType {
case resourceid.ResourceTypeDatabase:
result += DatabaseKeyPrefix
case resourceid.ResourceTypeCollection:
result += CollectionKeyPrefix
case resourceid.ResourceTypeDocument:
result += DocumentKeyPrefix
case resourceid.ResourceTypeTrigger:
result += TriggerKeyPrefix
case resourceid.ResourceTypeStoredProcedure:
result += StoredProcedureKeyPrefix
case resourceid.ResourceTypeUserDefinedFunction:
result += UserDefinedFunctionKeyPrefix
}
if databaseId != "" {
result += databaseId
}
if collectionId != "" {
result += "/colls/" + collectionId
}
if resourceId != "" {
result += "/" + resourceId
}
return result
}
func generateDatabaseKey(databaseId string) string {
return generateKey(resourceid.ResourceTypeDatabase, databaseId, "", "")
}
func generateCollectionKey(databaseId string, collectionId string) string {
return generateKey(resourceid.ResourceTypeCollection, databaseId, collectionId, "")
}
func generateDocumentKey(databaseId string, collectionId string, documentId string) string {
return generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, documentId)
}
func generateTriggerKey(databaseId string, collectionId string, triggerId string) string {
return generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, triggerId)
}
func generateStoredProcedureKey(databaseId string, collectionId string, storedProcedureId string) string {
return generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, storedProcedureId)
}
func generateUserDefinedFunctionKey(databaseId string, collectionId string, udfId string) string {
return generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, udfId)
}
func insertKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
_, err := txn.Get([]byte(key))
if err == nil {
return datastore.Conflict
}
if err != badger.ErrKeyNotFound {
logger.ErrorLn("Error while checking if key exists:", err)
return datastore.Unknown
}
buf, err := msgpack.Marshal(value)
if err != nil {
logger.ErrorLn("Error while encoding value:", err)
return datastore.Unknown
}
err = txn.Set([]byte(key), buf)
if err != nil {
logger.ErrorLn("Error while setting key:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func getKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
item, err := txn.Get([]byte(key))
if err != nil {
if err == badger.ErrKeyNotFound {
return datastore.StatusNotFound
}
logger.ErrorLn("Error while getting key:", err)
return datastore.Unknown
}
val, err := item.ValueCopy(nil)
if err != nil {
logger.ErrorLn("Error while copying value:", err)
return datastore.Unknown
}
if value == nil {
logger.ErrorLn("getKey called with nil value")
return datastore.Unknown
}
err = msgpack.Unmarshal(val, &value)
if err != nil {
logger.ErrorLn("Error while decoding value:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func keyExists(txn *badger.Txn, key string) (bool, error) {
_, err := txn.Get([]byte(key))
if err == nil {
return true, nil
}
if err == badger.ErrKeyNotFound {
return false, nil
}
return false, err
}
func listByPrefix[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
results := make([]T, 0)
err := db.View(func(txn *badger.Txn) error {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
item := it.Item()
var entry T
status := getKey(txn, string(item.Key()), &entry)
if status != datastore.StatusOk {
logger.ErrorLn("Failed to retrieve entry:", string(item.Key()))
continue
}
results = append(results, entry)
}
return nil
})
if err != nil {
logger.ErrorLn("Error while listing entries:", err)
return nil, datastore.Unknown
}
return results, datastore.StatusOk
}
func deleteKeysByPrefix(txn *badger.Txn, prefix string) error {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
key := it.Item().KeyCopy(nil)
if err := txn.Delete(key); err != nil {
logger.ErrorLn("Failed to delete key:", string(key), "Error:", err)
return err
}
}
return nil
}
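For reference, the helpers above produce flat, prefix-scannable keys; the following values follow directly from generateKey (IDs are illustrative):

// generateDatabaseKey("db1")                  -> "DB:db1"
// generateCollectionKey("db1", "coll1")       -> "COL:db1/colls/coll1"
// generateDocumentKey("db1", "coll1", "doc1") -> "DOC:db1/colls/coll1/doc1"
// generateKey(resourceid.ResourceTypeDocument, "db1", "coll1", "")
//                                             -> "DOC:db1/colls/coll1", the scan prefix for a collection's documents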

View File

@ -1,58 +0,0 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/vmihailenco/msgpack/v5"
)
type BadgerDocumentIterator struct {
txn *badger.Txn
it *badger.Iterator
prefix string
}
func NewBadgerDocumentIterator(txn *badger.Txn, prefix string) *BadgerDocumentIterator {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
it.Rewind()
return &BadgerDocumentIterator{
txn: txn,
it: it,
prefix: prefix,
}
}
func (i *BadgerDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
if !i.it.Valid() {
i.it.Close()
return datastore.Document{}, datastore.IterEOF
}
item := i.it.Item()
val, err := item.ValueCopy(nil)
if err != nil {
logger.ErrorLn("Error while copying value:", err)
return datastore.Document{}, datastore.Unknown
}
current := &datastore.Document{}
err = msgpack.Unmarshal(val, &current)
if err != nil {
logger.ErrorLn("Error while decoding value:", err)
return datastore.Document{}, datastore.Unknown
}
i.it.Next()
return *current, datastore.StatusOk
}
func (i *BadgerDocumentIterator) Close() {
i.it.Close()
i.txn.Discard()
}
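Callers are expected to drain the iterator until IterEOF and then close it, which also discards the read transaction. A minimal sketch, not from the diff (txn and prefix come from the caller):

// Illustrative only: drain a Badger-backed document iterator.
iter := NewBadgerDocumentIterator(txn, prefix)
defer iter.Close()
for {
	document, status := iter.Next()
	if status == datastore.IterEOF {
		break
	}
	if status != datastore.StatusOk {
		break // decode error; Next already logged it
	}
	_ = document
}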

View File

@ -1,127 +0,0 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
docs, status := listByPrefix[datastore.Document](r.db, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return docs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
iter := NewBadgerDocumentIterator(txn, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
return iter, datastore.StatusOk
}
func (r *BadgerDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var document datastore.Document
status := getKey(txn, documentKey, &document)
return document, status
}
func (r *BadgerDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, documentKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(documentKey))
if err != nil {
logger.ErrorLn("Error while deleting document:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
var ok bool
var documentId string
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
status = insertKey(txn, generateDocumentKey(databaseId, collectionId, documentId), document)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
return document, datastore.StatusOk
}
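After a successful CreateDocument call, the stored map carries the Cosmos-style system properties stamped above. A small illustration, not from the diff (ds and the field values are illustrative):

// Illustrative only: the returned document gains the emulator's system fields.
doc, _ := ds.CreateDocument("db1", "coll1", map[string]interface{}{"id": "doc1", "value": 42})
// doc now also holds "_ts" (unix seconds), "_rid", "_etag" (a quoted UUID),
// and "_self" in the form "dbs/<dbRid>/colls/<collRid>/docs/<docRid>/"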

View File

@ -1,53 +0,0 @@
package badgerdatastore
import (
"fmt"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
)
// GetPartitionKeyRanges returns a single synthetic partition key range covering the full key space ("" to "FF"); Cosmos DB SDK clients enumerate these ranges when routing queries, and one range is enough for the emulator.
func (r *BadgerDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
databaseRid := databaseId
collectionRid := collectionId
var timestamp int64 = 0
txn := r.db.NewTransaction(false)
defer txn.Discard()
// Prefer the stored resource IDs when the database/collection exist; otherwise fall back to the raw IDs.
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status == datastore.StatusOk {
databaseRid = database.ResourceID
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status == datastore.StatusOk {
collectionRid = collection.ResourceID
timestamp = collection.TimeStamp
}
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
etag := fmt.Sprintf("\"%s\"", uuid.New())
return []datastore.PartitionKeyRange{
{
ResourceID: pkrResourceId,
ID: "0",
Etag: etag,
MinInclusive: "",
MaxExclusive: "FF",
RidPrefix: 0,
Self: pkrSelf,
ThroughputFraction: 1,
Status: "online",
Parents: []interface{}{},
TimeStamp: timestamp,
Lsn: 17,
},
}, datastore.StatusOk
}

View File

@ -1,107 +0,0 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
storedProcedures, status := listByPrefix[datastore.StoredProcedure](r.db, generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return storedProcedures, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var storedProcedure datastore.StoredProcedure
status := getKey(txn, storedProcedureKey, &storedProcedure)
return storedProcedure, status
}
func (r *BadgerDataStore) DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) datastore.DataStoreStatus {
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, storedProcedureKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(storedProcedureKey))
if err != nil {
logger.ErrorLn("Error while deleting stored procedure:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateStoredProcedure(databaseId string, collectionId string, storedProcedure datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if storedProcedure.ID == "" {
return datastore.StoredProcedure{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
storedProcedure.TimeStamp = time.Now().Unix()
storedProcedure.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
storedProcedure.ETag = fmt.Sprintf("\"%s\"", uuid.New())
storedProcedure.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, storedProcedure.ResourceID)
status = insertKey(txn, generateStoredProcedureKey(databaseId, collectionId, storedProcedure.ID), storedProcedure)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
return storedProcedure, datastore.StatusOk
}

View File

@ -1,107 +0,0 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
triggers, status := listByPrefix[datastore.Trigger](r.db, generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return triggers, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var trigger datastore.Trigger
status := getKey(txn, triggerKey, &trigger)
return trigger, status
}
func (r *BadgerDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, triggerKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(triggerKey))
if err != nil {
logger.ErrorLn("Error while deleting trigger:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if trigger.ID == "" {
return datastore.Trigger{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
status = insertKey(txn, generateTriggerKey(databaseId, collectionId, trigger.ID), trigger)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
return trigger, datastore.StatusOk
}

View File

@ -1,107 +0,0 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
udfs, status := listByPrefix[datastore.UserDefinedFunction](r.db, generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return udfs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var udf datastore.UserDefinedFunction
status := getKey(txn, udfKey, &udf)
return udf, status
}
func (r *BadgerDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, udfKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(udfKey))
if err != nil {
logger.ErrorLn("Error while deleting user defined function:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if udf.ID == "" {
return datastore.UserDefinedFunction{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
status = insertKey(txn, generateUserDefinedFunctionKey(databaseId, collectionId, udf.ID), udf)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
return udf, datastore.StatusOk
}

View File

@ -1,44 +0,0 @@
package datastore
type DataStore interface {
GetAllDatabases() ([]Database, DataStoreStatus)
GetDatabase(databaseId string) (Database, DataStoreStatus)
DeleteDatabase(databaseId string) DataStoreStatus
CreateDatabase(newDatabase Database) (Database, DataStoreStatus)
GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
GetCollection(databaseId string, collectionId string) (Collection, DataStoreStatus)
DeleteCollection(databaseId string, collectionId string) DataStoreStatus
CreateCollection(databaseId string, newCollection Collection) (Collection, DataStoreStatus)
GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
GetDocumentIterator(databaseId string, collectionId string) (DocumentIterator, DataStoreStatus)
GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (Document, DataStoreStatus)
GetAllTriggers(databaseId string, collectionId string) ([]Trigger, DataStoreStatus)
GetTrigger(databaseId string, collectionId string, triggerId string) (Trigger, DataStoreStatus)
DeleteTrigger(databaseId string, collectionId string, triggerId string) DataStoreStatus
CreateTrigger(databaseId string, collectionId string, trigger Trigger) (Trigger, DataStoreStatus)
GetAllStoredProcedures(databaseId string, collectionId string) ([]StoredProcedure, DataStoreStatus)
GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (StoredProcedure, DataStoreStatus)
DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) DataStoreStatus
CreateStoredProcedure(databaseId string, collectionId string, storedProcedure StoredProcedure) (StoredProcedure, DataStoreStatus)
GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]UserDefinedFunction, DataStoreStatus)
GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (UserDefinedFunction, DataStoreStatus)
DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) DataStoreStatus
CreateUserDefinedFunction(databaseId string, collectionId string, udf UserDefinedFunction) (UserDefinedFunction, DataStoreStatus)
GetPartitionKeyRanges(databaseId string, collectionId string) ([]PartitionKeyRange, DataStoreStatus)
Close()
DumpToJson() (string, error)
}
type DocumentIterator interface {
Next() (Document, DataStoreStatus)
Close()
}
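Because both the Badger and JSON backends implement this interface, callers can stay backend-agnostic. A minimal sketch of consuming documents through the interface, not part of the diff:

// Illustrative only: count a collection's documents via any DataStore implementation.
func countDocuments(ds datastore.DataStore, databaseId string, collectionId string) (int, datastore.DataStoreStatus) {
	iter, status := ds.GetDocumentIterator(databaseId, collectionId)
	if status != datastore.StatusOk {
		return 0, status
	}
	defer iter.Close()

	count := 0
	for {
		_, docStatus := iter.Next()
		if docStatus != datastore.StatusOk {
			break // IterEOF (or an error) ends the scan
		}
		count++
	}
	return count, datastore.StatusOk
}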

View File

@ -1,21 +0,0 @@
package jsondatastore
import "github.com/pikami/cosmium/internal/datastore"
type ArrayDocumentIterator struct {
documents []datastore.Document
index int
}
func (i *ArrayDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return datastore.Document{}, datastore.StatusNotFound
}
return i.documents[i.index], datastore.StatusOk
}
func (i *ArrayDocumentIterator) Close() {
i.documents = []datastore.Document{}
}

View File

@ -1,89 +0,0 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]datastore.Collection, 0), datastore.StatusNotFound
}
return maps.Values(r.storeState.Collections[databaseId]), datastore.StatusOk
}
func (r *JsonDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
return r.storeState.Collections[databaseId][collectionId], datastore.StatusOk
}
func (r *JsonDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Collections[databaseId], collectionId)
delete(r.storeState.Documents[databaseId], collectionId)
delete(r.storeState.Triggers[databaseId], collectionId)
delete(r.storeState.StoredProcedures[databaseId], collectionId)
delete(r.storeState.UserDefinedFunctions[databaseId], collectionId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
return datastore.Collection{}, datastore.Conflict
}
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]datastore.Document)
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]datastore.Trigger)
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]datastore.StoredProcedure)
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]datastore.UserDefinedFunction)
return newCollection, datastore.StatusOk
}

View File

@ -1,70 +0,0 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Databases), datastore.StatusOk
}
func (r *JsonDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if database, ok := r.storeState.Databases[id]; ok {
return database, datastore.StatusOk
}
return datastore.Database{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[id]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Databases, id)
delete(r.storeState.Collections, id)
delete(r.storeState.Documents, id)
delete(r.storeState.Triggers, id)
delete(r.storeState.StoredProcedures, id)
delete(r.storeState.UserDefinedFunctions, id)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
return datastore.Database{}, datastore.Conflict
}
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
r.storeState.Databases[newDatabase.ID] = newDatabase
r.storeState.Collections[newDatabase.ID] = make(map[string]datastore.Collection)
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]datastore.Document)
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]datastore.Trigger)
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]datastore.StoredProcedure)
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]datastore.UserDefinedFunction)
return newDatabase, datastore.StatusOk
}

View File

@ -1,113 +0,0 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]datastore.Document, 0), datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return make([]datastore.Document, 0), datastore.StatusNotFound
}
return maps.Values(r.storeState.Documents[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
return r.storeState.Documents[databaseId][collectionId][documentId], datastore.StatusOk
}
func (r *JsonDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Documents[databaseId][collectionId], documentId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var documentId string
var database datastore.Database
var collection datastore.Collection
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
return datastore.Document{}, datastore.Conflict
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
r.storeState.Documents[databaseId][collectionId][documentId] = document
return document, datastore.StatusOk
}
func (r *JsonDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
documents, status := r.GetAllDocuments(databaseId, collectionId)
if status != datastore.StatusOk {
return nil, status
}
return &ArrayDocumentIterator{
documents: documents,
index: -1,
}, datastore.StatusOk
}

View File

@ -1,34 +0,0 @@
package jsondatastore
import "github.com/pikami/cosmium/internal/datastore"
type JsonDataStore struct {
storeState State
initialDataFilePath string
persistDataFilePath string
}
type JsonDataStoreOptions struct {
InitialDataFilePath string
PersistDataFilePath string
}
func NewJsonDataStore(options JsonDataStoreOptions) *JsonDataStore {
dataStore := &JsonDataStore{
storeState: State{
Databases: make(map[string]datastore.Database),
Collections: make(map[string]map[string]datastore.Collection),
Documents: make(map[string]map[string]map[string]datastore.Document),
Triggers: make(map[string]map[string]map[string]datastore.Trigger),
StoredProcedures: make(map[string]map[string]map[string]datastore.StoredProcedure),
UserDefinedFunctions: make(map[string]map[string]map[string]datastore.UserDefinedFunction),
},
initialDataFilePath: options.InitialDataFilePath,
persistDataFilePath: options.PersistDataFilePath,
}
dataStore.InitializeDataStore()
return dataStore
}
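A minimal sketch of constructing the JSON-backed store and seeding it, not part of the diff (IDs are illustrative; empty options keep the store purely in memory):

// Illustrative only: an ephemeral JSON store with one database and collection.
ds := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
db, _ := ds.CreateDatabase(datastore.Database{ID: "db1"})
_, _ = ds.CreateCollection(db.ID, datastore.Collection{ID: "coll1"})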

View File

@ -1,91 +0,0 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetStoredProcedure(databaseId string, collectionId string, spId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
return sp, datastore.StatusOk
}
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteStoredProcedure(databaseId string, collectionId string, spId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateStoredProcedure(databaseId string, collectionId string, sp datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if sp.ID == "" {
return datastore.StoredProcedure{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
return datastore.StoredProcedure{}, datastore.Conflict
}
sp.TimeStamp = time.Now().Unix()
sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
return sp, datastore.StatusOk
}

View File

@ -1,91 +0,0 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
return trigger, datastore.StatusOk
}
return datastore.Trigger{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if trigger.ID == "" {
return datastore.Trigger{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
return datastore.Trigger{}, datastore.Conflict
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
return trigger, datastore.StatusOk
}

View File

@ -1,91 +0,0 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
return udf, datastore.StatusOk
}
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if udf.ID == "" {
return datastore.UserDefinedFunction{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
return datastore.UserDefinedFunction{}, datastore.Conflict
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
return udf, datastore.StatusOk
}

View File

@ -6,7 +6,6 @@ import (
 	"os"
 	"runtime"
 	"strings"
-	"sync"
 )
 type LogLevelType int
@ -22,68 +21,67 @@ type LogWriter struct {
 	WriterLevel LogLevelType
 }
-var logLevelMutex sync.RWMutex
-var logLevel = LogLevelInfo
+var LogLevel = LogLevelInfo
 var DebugLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
 var InfoLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
 var ErrorLogger = log.New(os.Stderr, "", log.Ldate|log.Ltime)
 func DebugLn(v ...any) {
-	if GetLogLevel() <= LogLevelDebug {
+	if LogLevel <= LogLevelDebug {
 		prefix := getCallerPrefix()
 		DebugLogger.Println(append([]interface{}{prefix}, v...)...)
 	}
 }
 func Debug(v ...any) {
-	if GetLogLevel() <= LogLevelDebug {
+	if LogLevel <= LogLevelDebug {
 		prefix := getCallerPrefix()
 		DebugLogger.Println(append([]interface{}{prefix}, v...)...)
 	}
 }
 func Debugf(format string, v ...any) {
-	if GetLogLevel() <= LogLevelDebug {
+	if LogLevel <= LogLevelDebug {
 		prefix := getCallerPrefix()
 		DebugLogger.Printf(prefix+format, v...)
 	}
 }
 func InfoLn(v ...any) {
-	if GetLogLevel() <= LogLevelInfo {
+	if LogLevel <= LogLevelInfo {
 		InfoLogger.Println(v...)
 	}
 }
 func Info(v ...any) {
-	if GetLogLevel() <= LogLevelInfo {
+	if LogLevel <= LogLevelInfo {
 		InfoLogger.Print(v...)
 	}
 }
 func Infof(format string, v ...any) {
-	if GetLogLevel() <= LogLevelInfo {
+	if LogLevel <= LogLevelInfo {
 		InfoLogger.Printf(format, v...)
 	}
 }
 func ErrorLn(v ...any) {
-	if GetLogLevel() <= LogLevelError {
+	if LogLevel <= LogLevelError {
 		prefix := getCallerPrefix()
 		ErrorLogger.Println(append([]interface{}{prefix}, v...)...)
 	}
 }
 func Error(v ...any) {
-	if GetLogLevel() <= LogLevelError {
+	if LogLevel <= LogLevelError {
 		prefix := getCallerPrefix()
 		ErrorLogger.Print(append([]interface{}{prefix}, v...)...)
 	}
 }
 func Errorf(format string, v ...any) {
-	if GetLogLevel() <= LogLevelError {
+	if LogLevel <= LogLevelError {
 		prefix := getCallerPrefix()
 		ErrorLogger.Printf(prefix+format, v...)
 	}
@ -114,25 +112,11 @@ func DebugWriter() *LogWriter {
 	return &LogWriter{WriterLevel: LogLevelDebug}
 }
-func SetLogLevel(level LogLevelType) {
-	logLevelMutex.Lock()
-	defer logLevelMutex.Unlock()
-	logLevel = level
-}
-func GetLogLevel() LogLevelType {
-	logLevelMutex.RLock()
-	defer logLevelMutex.RUnlock()
-	return logLevel
-}
 func getCallerPrefix() string {
	_, file, line, ok := runtime.Caller(2)
 	if ok {
 		parts := strings.Split(file, "/")
-		if len(parts) > 0 {
-			file = parts[len(parts)-1]
-		}
+		file = parts[len(parts)-1]
 		return fmt.Sprintf("%s:%d - ", file, line)
 	}
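The left (master) side of this file replaces the exported LogLevel variable with a mutex-guarded level behind SetLogLevel/GetLogLevel. A minimal usage sketch, assuming the logger import path shown elsewhere in this diff and the level constants the package already references:

package main

import "github.com/pikami/cosmium/internal/logger"

func main() {
	// Raise verbosity through the accessor instead of assigning the old logger.LogLevel variable.
	logger.SetLogLevel(logger.LogLevelDebug)
	logger.Debugf("current log level: %d", logger.GetLogLevel())
}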

View File

@ -0,0 +1,85 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllCollections(databaseId string) ([]repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]repositorymodels.Collection, 0), repositorymodels.StatusNotFound
}
return maps.Values(r.storeState.Collections[databaseId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetCollection(databaseId string, collectionId string) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
return r.storeState.Collections[databaseId][collectionId], repositorymodels.StatusOk
}
func (r *DataRepository) DeleteCollection(databaseId string, collectionId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Collections[databaseId], collectionId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
return repositorymodels.Collection{}, repositorymodels.Conflict
}
newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New())
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]repositorymodels.Trigger)
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]repositorymodels.StoredProcedure)
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]repositorymodels.UserDefinedFunction)
return newCollection, repositorymodels.StatusOk
}

View File

@ -0,0 +1,65 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllDatabases() ([]repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Databases), repositorymodels.StatusOk
}
func (r *DataRepository) GetDatabase(id string) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if database, ok := r.storeState.Databases[id]; ok {
return database, repositorymodels.StatusOk
}
return repositorymodels.Database{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteDatabase(id string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[id]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Databases, id)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
return repositorymodels.Database{}, repositorymodels.Conflict
}
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New()
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
r.storeState.Databases[newDatabase.ID] = newDatabase
r.storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]repositorymodels.Trigger)
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]repositorymodels.StoredProcedure)
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
return newDatabase, repositorymodels.StatusOk
}

View File

@ -0,0 +1,130 @@
package repositories
import (
"fmt"
"log"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"github.com/pikami/cosmium/parsers"
"github.com/pikami/cosmium/parsers/nosql"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllDocuments(databaseId string, collectionId string) ([]repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
}
return maps.Values(r.storeState.Documents[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetDocument(databaseId string, collectionId string, documentId string) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
return r.storeState.Documents[databaseId][collectionId][documentId], repositorymodels.StatusOk
}
func (r *DataRepository) DeleteDocument(databaseId string, collectionId string, documentId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Documents[databaseId][collectionId], documentId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var documentId string
var database repositorymodels.Database
var collection repositorymodels.Collection
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
return repositorymodels.Document{}, repositorymodels.Conflict
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
r.storeState.Documents[databaseId][collectionId][documentId] = document
return document, repositorymodels.StatusOk
}
func (r *DataRepository) ExecuteQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, repositorymodels.RepositoryStatus) {
parsedQuery, err := nosql.Parse("", []byte(query))
if err != nil {
log.Printf("Failed to parse query: %s\nerr: %v", query, err)
return nil, repositorymodels.BadRequest
}
collectionDocuments, status := r.GetAllDocuments(databaseId, collectionId)
if status != repositorymodels.StatusOk {
return nil, status
}
covDocs := make([]memoryexecutor.RowType, 0)
for _, doc := range collectionDocuments {
covDocs = append(covDocs, map[string]interface{}(doc))
}
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
typedQuery.Parameters = queryParameters
return memoryexecutor.ExecuteQuery(typedQuery, covDocs), repositorymodels.StatusOk
}
return nil, repositorymodels.BadRequest
}
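ExecuteQueryDocuments parses the NoSQL query string, converts the collection's documents into executor rows, and runs the parsed SELECT in memory. A sketch of the same flow against an arbitrary document slice; the query text and the @minPrice parameter name are illustrative only, while the imports and call signatures are the ones shown in the file above.

package main

import (
	"log"

	"github.com/pikami/cosmium/parsers"
	"github.com/pikami/cosmium/parsers/nosql"
	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)

// queryDocs mirrors the parse-then-execute flow used by ExecuteQueryDocuments.
func queryDocs(docs []memoryexecutor.RowType) []memoryexecutor.RowType {
	parsedQuery, err := nosql.Parse("", []byte(`SELECT c.id FROM c WHERE c.price > @minPrice`))
	if err != nil {
		log.Printf("failed to parse query: %v", err)
		return nil
	}
	typedQuery, ok := parsedQuery.(parsers.SelectStmt)
	if !ok {
		return nil
	}
	typedQuery.Parameters = map[string]interface{}{"@minPrice": 10}
	return memoryexecutor.ExecuteQuery(typedQuery, docs)
}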

View File

@ -1,15 +1,15 @@
-package jsondatastore
+package repositories
 import (
 	"fmt"
 	"github.com/google/uuid"
-	"github.com/pikami/cosmium/internal/datastore"
+	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
 	"github.com/pikami/cosmium/internal/resourceid"
 )
 // I have no idea what this is tbh
-func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
+func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
 	r.storeState.RLock()
 	defer r.storeState.RUnlock()
@ -26,11 +26,11 @@ func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId st
 		timestamp = collection.TimeStamp
 	}
-	pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
+	pkrResourceId := resourceid.NewCombined(databaseRid, collectionRid, resourceid.New())
 	pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
 	etag := fmt.Sprintf("\"%s\"", uuid.New())
-	return []datastore.PartitionKeyRange{
+	return []repositorymodels.PartitionKeyRange{
 		{
 			ResourceID: pkrResourceId,
 			ID: "0",
@ -45,5 +45,5 @@ func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId st
 			TimeStamp: timestamp,
 			Lsn: 17,
 		},
-	}, datastore.StatusOk
+	}, repositorymodels.StatusOk
 }

View File

@ -0,0 +1,34 @@
package repositories
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
type DataRepository struct {
storeState repositorymodels.State
initialDataFilePath string
persistDataFilePath string
}
type RepositoryOptions struct {
InitialDataFilePath string
PersistDataFilePath string
}
func NewDataRepository(options RepositoryOptions) *DataRepository {
repository := &DataRepository{
storeState: repositorymodels.State{
Databases: make(map[string]repositorymodels.Database),
Collections: make(map[string]map[string]repositorymodels.Collection),
Documents: make(map[string]map[string]map[string]repositorymodels.Document),
Triggers: make(map[string]map[string]map[string]repositorymodels.Trigger),
StoredProcedures: make(map[string]map[string]map[string]repositorymodels.StoredProcedure),
UserDefinedFunctions: make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction),
},
initialDataFilePath: options.InitialDataFilePath,
persistDataFilePath: options.PersistDataFilePath,
}
repository.InitializeRepository()
return repository
}

View File

@ -1,39 +1,16 @@
-package jsondatastore
+package repositories
 import (
 	"encoding/json"
 	"log"
 	"os"
 	"reflect"
-	"sync"
-	"github.com/pikami/cosmium/internal/datastore"
 	"github.com/pikami/cosmium/internal/logger"
+	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
 )
-type State struct {
-	sync.RWMutex
-	// Map databaseId -> Database
-	Databases map[string]datastore.Database `json:"databases"`
-	// Map databaseId -> collectionId -> Collection
-	Collections map[string]map[string]datastore.Collection `json:"collections"`
-	// Map databaseId -> collectionId -> documentId -> Documents
-	Documents map[string]map[string]map[string]datastore.Document `json:"documents"`
-	// Map databaseId -> collectionId -> triggerId -> Trigger
-	Triggers map[string]map[string]map[string]datastore.Trigger `json:"triggers"`
-	// Map databaseId -> collectionId -> spId -> StoredProcedure
-	StoredProcedures map[string]map[string]map[string]datastore.StoredProcedure `json:"sprocs"`
-	// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
-	UserDefinedFunctions map[string]map[string]map[string]datastore.UserDefinedFunction `json:"udfs"`
-}
-func (r *JsonDataStore) InitializeDataStore() {
+func (r *DataRepository) InitializeRepository() {
 	if r.initialDataFilePath != "" {
 		r.LoadStateFS(r.initialDataFilePath)
 		return
@ -55,7 +32,7 @@ func (r *JsonDataStore) InitializeDataStore() {
 	}
 }
-func (r *JsonDataStore) LoadStateFS(filePath string) {
+func (r *DataRepository) LoadStateFS(filePath string) {
 	data, err := os.ReadFile(filePath)
 	if err != nil {
 		log.Fatalf("Error reading state JSON file: %v", err)
@ -68,11 +45,11 @@ func (r *JsonDataStore) LoadStateFS(filePath string) {
 	}
 }
-func (r *JsonDataStore) LoadStateJSON(jsonData string) error {
-	r.storeState.Lock()
-	defer r.storeState.Unlock()
-	var state State
+func (r *DataRepository) LoadStateJSON(jsonData string) error {
+	r.storeState.RLock()
+	defer r.storeState.RUnlock()
+	var state repositorymodels.State
 	if err := json.Unmarshal([]byte(jsonData), &state); err != nil {
 		return err
 	}
@ -94,7 +71,7 @@ func (r *JsonDataStore) LoadStateJSON(jsonData string) error {
 	return nil
 }
-func (r *JsonDataStore) SaveStateFS(filePath string) {
+func (r *DataRepository) SaveStateFS(filePath string) {
 	r.storeState.RLock()
 	defer r.storeState.RUnlock()
@ -115,7 +92,7 @@ func (r *JsonDataStore) SaveStateFS(filePath string) {
 	logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
 }
-func (r *JsonDataStore) DumpToJson() (string, error) {
+func (r *DataRepository) GetState() (string, error) {
 	r.storeState.RLock()
 	defer r.storeState.RUnlock()
@ -126,23 +103,16 @@ func (r *JsonDataStore) DumpToJson() (string, error) {
 	}
 	return string(data), nil
 }
-func (r *JsonDataStore) Close() {
-	if r.persistDataFilePath != "" {
-		r.SaveStateFS(r.persistDataFilePath)
-	}
-}
 func getLength(v interface{}) int {
 	switch v.(type) {
-	case datastore.Database,
-		datastore.Collection,
-		datastore.Document,
-		datastore.Trigger,
-		datastore.StoredProcedure,
-		datastore.UserDefinedFunction:
+	case repositorymodels.Database,
+		repositorymodels.Collection,
+		repositorymodels.Document,
+		repositorymodels.Trigger,
+		repositorymodels.StoredProcedure,
+		repositorymodels.UserDefinedFunction:
 		return 1
 	}
@ -163,55 +133,55 @@ func getLength(v interface{}) int {
 	return count
 }
-func (r *JsonDataStore) ensureStoreStateNoNullReferences() {
+func (r *DataRepository) ensureStoreStateNoNullReferences() {
 	if r.storeState.Databases == nil {
-		r.storeState.Databases = make(map[string]datastore.Database)
+		r.storeState.Databases = make(map[string]repositorymodels.Database)
 	}
 	if r.storeState.Collections == nil {
-		r.storeState.Collections = make(map[string]map[string]datastore.Collection)
+		r.storeState.Collections = make(map[string]map[string]repositorymodels.Collection)
 	}
 	if r.storeState.Documents == nil {
-		r.storeState.Documents = make(map[string]map[string]map[string]datastore.Document)
+		r.storeState.Documents = make(map[string]map[string]map[string]repositorymodels.Document)
 	}
 	if r.storeState.Triggers == nil {
-		r.storeState.Triggers = make(map[string]map[string]map[string]datastore.Trigger)
+		r.storeState.Triggers = make(map[string]map[string]map[string]repositorymodels.Trigger)
 	}
 	if r.storeState.StoredProcedures == nil {
-		r.storeState.StoredProcedures = make(map[string]map[string]map[string]datastore.StoredProcedure)
+		r.storeState.StoredProcedures = make(map[string]map[string]map[string]repositorymodels.StoredProcedure)
 	}
 	if r.storeState.UserDefinedFunctions == nil {
-		r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]datastore.UserDefinedFunction)
+		r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction)
 	}
 	for database := range r.storeState.Databases {
 		if r.storeState.Collections[database] == nil {
-			r.storeState.Collections[database] = make(map[string]datastore.Collection)
+			r.storeState.Collections[database] = make(map[string]repositorymodels.Collection)
 		}
 		if r.storeState.Documents[database] == nil {
-			r.storeState.Documents[database] = make(map[string]map[string]datastore.Document)
+			r.storeState.Documents[database] = make(map[string]map[string]repositorymodels.Document)
 		}
 		if r.storeState.Triggers[database] == nil {
-			r.storeState.Triggers[database] = make(map[string]map[string]datastore.Trigger)
+			r.storeState.Triggers[database] = make(map[string]map[string]repositorymodels.Trigger)
 		}
 		if r.storeState.StoredProcedures[database] == nil {
-			r.storeState.StoredProcedures[database] = make(map[string]map[string]datastore.StoredProcedure)
+			r.storeState.StoredProcedures[database] = make(map[string]map[string]repositorymodels.StoredProcedure)
 		}
 		if r.storeState.UserDefinedFunctions[database] == nil {
-			r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]datastore.UserDefinedFunction)
+			r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
 		}
 		for collection := range r.storeState.Collections[database] {
 			if r.storeState.Documents[database][collection] == nil {
-				r.storeState.Documents[database][collection] = make(map[string]datastore.Document)
+				r.storeState.Documents[database][collection] = make(map[string]repositorymodels.Document)
 			}
 			for document := range r.storeState.Documents[database][collection] {
@ -221,15 +191,15 @@ func (r *JsonDataStore) ensureStoreStateNoNullReferences() {
 			}
 			if r.storeState.Triggers[database][collection] == nil {
-				r.storeState.Triggers[database][collection] = make(map[string]datastore.Trigger)
+				r.storeState.Triggers[database][collection] = make(map[string]repositorymodels.Trigger)
 			}
 			if r.storeState.StoredProcedures[database][collection] == nil {
-				r.storeState.StoredProcedures[database][collection] = make(map[string]datastore.StoredProcedure)
+				r.storeState.StoredProcedures[database][collection] = make(map[string]repositorymodels.StoredProcedure)
 			}
 			if r.storeState.UserDefinedFunctions[database][collection] == nil {
-				r.storeState.UserDefinedFunctions[database][collection] = make(map[string]datastore.UserDefinedFunction)
+				r.storeState.UserDefinedFunctions[database][collection] = make(map[string]repositorymodels.UserDefinedFunction)
 			}
 		}
 	}
 }

View File

@ -0,0 +1,91 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllStoredProcedures(databaseId string, collectionId string) ([]repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetStoredProcedure(databaseId string, collectionId string, spId string) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
return sp, repositorymodels.StatusOk
}
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteStoredProcedure(databaseId string, collectionId string, spId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateStoredProcedure(databaseId string, collectionId string, sp repositorymodels.StoredProcedure) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if sp.ID == "" {
return repositorymodels.StoredProcedure{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
return repositorymodels.StoredProcedure{}, repositorymodels.Conflict
}
sp.TimeStamp = time.Now().Unix()
sp.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
return sp, repositorymodels.StatusOk
}

View File

@ -0,0 +1,91 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllTriggers(databaseId string, collectionId string) ([]repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetTrigger(databaseId string, collectionId string, triggerId string) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
return trigger, repositorymodels.StatusOk
}
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteTrigger(databaseId string, collectionId string, triggerId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateTrigger(databaseId string, collectionId string, trigger repositorymodels.Trigger) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if trigger.ID == "" {
return repositorymodels.Trigger{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
return repositorymodels.Trigger{}, repositorymodels.Conflict
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
return trigger, repositorymodels.StatusOk
}

View File

@ -0,0 +1,91 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
return udf, repositorymodels.StatusOk
}
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateUserDefinedFunction(databaseId string, collectionId string, udf repositorymodels.UserDefinedFunction) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if udf.ID == "" {
return repositorymodels.UserDefinedFunction{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.Conflict
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
return udf, repositorymodels.StatusOk
}

View File

@ -1,4 +1,6 @@
-package datastore
+package repositorymodels
+import "sync"
 type Database struct {
 	ID string `json:"id"`
@ -8,15 +10,13 @@ type Database struct {
 	Self string `json:"_self"`
 }
-type DataStoreStatus int
+type RepositoryStatus int
 const (
 	StatusOk = 1
 	StatusNotFound = 2
 	Conflict = 3
 	BadRequest = 4
-	IterEOF = 5
-	Unknown = 6
 )
 type TriggerOperation string
@ -117,3 +117,25 @@ type PartitionKeyRange struct {
 	TimeStamp int64 `json:"_ts"`
 	Lsn int `json:"lsn"`
 }
+type State struct {
+	sync.RWMutex
+	// Map databaseId -> Database
+	Databases map[string]Database `json:"databases"`
+	// Map databaseId -> collectionId -> Collection
+	Collections map[string]map[string]Collection `json:"collections"`
+	// Map databaseId -> collectionId -> documentId -> Documents
+	Documents map[string]map[string]map[string]Document `json:"documents"`
+	// Map databaseId -> collectionId -> triggerId -> Trigger
+	Triggers map[string]map[string]map[string]Trigger `json:"triggers"`
+	// Map databaseId -> collectionId -> spId -> StoredProcedure
+	StoredProcedures map[string]map[string]map[string]StoredProcedure `json:"sprocs"`
+	// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
+	UserDefinedFunctions map[string]map[string]map[string]UserDefinedFunction `json:"udfs"`
+}

View File

@ -3,76 +3,32 @@ package resourceid
 import (
 	"encoding/base64"
 	"math/rand"
-	"strings"
 	"github.com/google/uuid"
 )
-type ResourceType int
-const (
-	ResourceTypeDatabase ResourceType = iota
-	ResourceTypeCollection
-	ResourceTypeDocument
-	ResourceTypeStoredProcedure
-	ResourceTypeTrigger
-	ResourceTypeUserDefinedFunction
-	ResourceTypeConflict
-	ResourceTypePartitionKeyRange
-	ResourceTypeSchema
-)
-func New(resourceType ResourceType) string {
-	var idBytes []byte
-	switch resourceType {
-	case ResourceTypeDatabase:
-		idBytes = randomBytes(4)
-	case ResourceTypeCollection:
-		idBytes = randomBytes(4)
+func New() string {
+	id := uuid.New().ID()
+	idBytes := uintToBytes(id)
 	// first byte should be bigger than 0x80 for collection ids
 	// clients classify this id as "user" otherwise
 	if (idBytes[0] & 0x80) <= 0 {
 		idBytes[0] = byte(rand.Intn(0x80) + 0x80)
 	}
-	case ResourceTypeDocument:
-		idBytes = randomBytes(8)
-		idBytes[7] = byte(rand.Intn(0x10)) // Upper 4 bits = 0
-	case ResourceTypeStoredProcedure:
-		idBytes = randomBytes(8)
-		idBytes[7] = byte(rand.Intn(0x10)) | 0x08 // Upper 4 bits = 0x08
-	case ResourceTypeTrigger:
-		idBytes = randomBytes(8)
-		idBytes[7] = byte(rand.Intn(0x10)) | 0x07 // Upper 4 bits = 0x07
-	case ResourceTypeUserDefinedFunction:
-		idBytes = randomBytes(8)
-		idBytes[7] = byte(rand.Intn(0x10)) | 0x06 // Upper 4 bits = 0x06
-	case ResourceTypeConflict:
-		idBytes = randomBytes(8)
-		idBytes[7] = byte(rand.Intn(0x10)) | 0x04 // Upper 4 bits = 0x04
-	case ResourceTypePartitionKeyRange:
-		// we don't do partitions yet, so just use a fixed id
-		idBytes = []byte{0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x50}
-	case ResourceTypeSchema:
-		idBytes = randomBytes(8)
-		idBytes[7] = byte(rand.Intn(0x10)) | 0x09 // Upper 4 bits = 0x09
-	default:
-		idBytes = randomBytes(4)
-	}
-	encoded := base64.StdEncoding.EncodeToString(idBytes)
-	return strings.ReplaceAll(encoded, "/", "-")
+	return base64.StdEncoding.EncodeToString(idBytes)
 }
 func NewCombined(ids ...string) string {
 	combinedIdBytes := make([]byte, 0)
 	for _, id := range ids {
-		idBytes, _ := base64.StdEncoding.DecodeString(strings.ReplaceAll(id, "-", "/"))
+		idBytes, _ := base64.StdEncoding.DecodeString(id)
 		combinedIdBytes = append(combinedIdBytes, idBytes...)
 	}
-	encoded := base64.StdEncoding.EncodeToString(combinedIdBytes)
-	return strings.ReplaceAll(encoded, "/", "-")
+	return base64.StdEncoding.EncodeToString(combinedIdBytes)
 }
 func uintToBytes(id uint32) []byte {
@ -83,13 +39,3 @@ func uintToBytes(id uint32) []byte {
 	return buf
 }
-func randomBytes(count int) []byte {
-	buf := make([]byte, count)
-	for i := 0; i < count; i += 4 {
-		id := uuid.New().ID()
-		idBytes := uintToBytes(id)
-		copy(buf[i:], idBytes)
-	}
-	return buf
-}
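On the master side (left), resource IDs are typed: New takes a ResourceType and tags the trailing byte, and NewCombined appends a child ID onto its parent's ID to build the Cosmos-style _rid chain. A small sketch of how those two calls compose, following the pattern used in the stored procedure and trigger code earlier in this diff; the exact chaining shown here is illustrative only.

package main

import (
	"fmt"

	"github.com/pikami/cosmium/internal/resourceid"
)

func main() {
	// Database ID, then a collection ID scoped under it, then a stored procedure ID under that.
	dbRid := resourceid.New(resourceid.ResourceTypeDatabase)
	collRid := resourceid.NewCombined(dbRid, resourceid.New(resourceid.ResourceTypeCollection))
	spRid := resourceid.NewCombined(collRid, resourceid.New(resourceid.ResourceTypeStoredProcedure))
	fmt.Println(dbRid, collRid, spRid)
}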

View File

@ -1,19 +1,21 @@
 package structhidrators
-import "github.com/pikami/cosmium/internal/datastore"
+import (
+	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+)
-var defaultCollection datastore.Collection = datastore.Collection{
-	IndexingPolicy: datastore.CollectionIndexingPolicy{
+var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
+	IndexingPolicy: repositorymodels.CollectionIndexingPolicy{
 		IndexingMode: "consistent",
 		Automatic: true,
-		IncludedPaths: []datastore.CollectionIndexingPolicyPath{
+		IncludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
 			{Path: "/*"},
 		},
-		ExcludedPaths: []datastore.CollectionIndexingPolicyPath{
+		ExcludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
 			{Path: "/\"_etag\"/?"},
 		},
 	},
-	PartitionKey: datastore.CollectionPartitionKey{
+	PartitionKey: repositorymodels.CollectionPartitionKey{
 		Paths: []string{"/_partitionKey"},
 		Kind: "Hash",
 		Version: 2,

View File

@ -3,11 +3,11 @@ package structhidrators
 import (
 	"reflect"
-	"github.com/pikami/cosmium/internal/datastore"
+	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
 )
 func Hidrate(input interface{}) interface{} {
-	if reflect.TypeOf(input) == reflect.TypeOf(datastore.Collection{}) {
+	if reflect.TypeOf(input) == reflect.TypeOf(repositorymodels.Collection{}) {
 		return hidrate(input, defaultCollection)
 	}
 	return input

View File

@ -17,7 +17,6 @@ type SelectStmt struct {
 type Table struct {
 	Value string
 	SelectItem SelectItem
-	IsInSelect bool
 }
 type JoinItem struct {
@ -34,8 +33,6 @@ const (
 	SelectItemTypeConstant
 	SelectItemTypeFunctionCall
 	SelectItemTypeSubQuery
-	SelectItemTypeExpression
-	SelectItemTypeBinaryExpression
 )
 type SelectItem struct {
@ -44,7 +41,6 @@ type SelectItem struct {
 	SelectItems []SelectItem
 	Type SelectItemType
 	Value interface{}
-	Invert bool
 	IsTopLevel bool
 }
@ -66,12 +62,6 @@ type ComparisonExpression struct {
 	Operation string
 }
-type BinaryExpression struct {
-	Left interface{}
-	Right interface{}
-	Operation string
-}
 type ConstantType int
 const (
@ -142,8 +132,6 @@ const (
 	FunctionCallSetIntersect FunctionCallType = "SetIntersect"
 	FunctionCallSetUnion FunctionCallType = "SetUnion"
-	FunctionCallIif FunctionCallType = "Iif"
 	FunctionCallMathAbs FunctionCallType = "MathAbs"
 	FunctionCallMathAcos FunctionCallType = "MathAcos"
 	FunctionCallMathAsin FunctionCallType = "MathAsin"

View File

@ -4,7 +4,6 @@ import (
 	"testing"
 	"github.com/pikami/cosmium/parsers"
-	testutils "github.com/pikami/cosmium/test_utils"
 )
 func Test_Parse_AggregateFunctions(t *testing.T) {
@ -28,7 +27,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			Table: parsers.Table{Value: "c"},
 		},
 	)
 })
@ -52,7 +51,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			Table: parsers.Table{Value: "c"},
 		},
 	)
 })
@ -76,7 +75,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			Table: parsers.Table{Value: "c"},
 		},
 	)
 })
@ -100,7 +99,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			Table: parsers.Table{Value: "c"},
 		},
 	)
 })
@ -124,7 +123,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			Table: parsers.Table{Value: "c"},
 		},
 	)
 })

View File

@ -1,366 +0,0 @@
package nosql_test
import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Parse_Arithmetics(t *testing.T) {
t.Run("Should parse multiplication before addition", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.a + c.b * c.c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "b"),
Right: testutils.SelectItem_Path("c", "c"),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse division before subtraction", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.x - c.y / c.z FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "-",
Left: testutils.SelectItem_Path("c", "x"),
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "/",
Left: testutils.SelectItem_Path("c", "y"),
Right: testutils.SelectItem_Path("c", "z"),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle complex mixed operations", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.a + c.b * c.c - c.d / c.e FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "-",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "b"),
Right: testutils.SelectItem_Path("c", "c"),
},
},
},
},
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "/",
Left: testutils.SelectItem_Path("c", "d"),
Right: testutils.SelectItem_Path("c", "e"),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should respect parentheses overriding precedence", func(t *testing.T) {
testQueryParse(
t,
`SELECT (c.a + c.b) * c.c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Path("c", "b"),
},
},
Right: testutils.SelectItem_Path("c", "c"),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle nested parentheses", func(t *testing.T) {
testQueryParse(
t,
`SELECT ((c.a + c.b) * c.c) - c.d FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "-",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Path("c", "b"),
},
},
Right: testutils.SelectItem_Path("c", "c"),
},
},
Right: testutils.SelectItem_Path("c", "d"),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should be left associative for same precedence operators", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.a - c.b - c.c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "-",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "-",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Path("c", "b"),
},
},
Right: testutils.SelectItem_Path("c", "c"),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should be left associative with multiplication and division", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.a * c.b / c.c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "/",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Path("c", "b"),
},
},
Right: testutils.SelectItem_Path("c", "c"),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle math with constants", func(t *testing.T) {
testQueryParse(
t,
`SELECT 10 + 20 * 5 FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Constant_Int(10),
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Constant_Int(20),
Right: testutils.SelectItem_Constant_Int(5),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle math with floating point numbers", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.price * 1.08 FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "price"),
Right: testutils.SelectItem_Constant_Float(1.08),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle parentheses around single value", func(t *testing.T) {
testQueryParse(
t,
`SELECT (c.value) FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "value"),
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle function calls in math expressions", func(t *testing.T) {
testQueryParse(
t,
`SELECT LENGTH(c.name) * 2 + 10 FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallLength,
Arguments: []interface{}{testutils.SelectItem_Path("c", "name")},
},
},
Right: testutils.SelectItem_Constant_Int(2),
},
},
Right: testutils.SelectItem_Constant_Int(10),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle multiple select items with math", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.a + c.b, c.x * c.y FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Path("c", "b"),
},
},
{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "x"),
Right: testutils.SelectItem_Path("c", "y"),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should handle math in WHERE clause", func(t *testing.T) {
testQueryParse(
t,
`SELECT c.id FROM c WHERE c.price * 1.08 > 100`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.ComparisonExpression{
Operation: ">",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "price"),
Right: testutils.SelectItem_Constant_Float(1.08),
},
},
Right: testutils.SelectItem_Constant_Int(100),
},
},
)
})
}
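The cases above only assert the parsed AST shape. As a rough sketch of how a parsers.BinaryExpression tree of that shape can be folded into a number once its operands are resolved, assuming plain float64 operands (the project's executor resolves paths, constants and function calls and handles more cases than this):

// Toy fold over the Operation/Left/Right shape used in the tests above.
// Assumes both operands are already resolved to float64; not the project's executor.
func applyOperation(op string, left, right float64) (float64, bool) {
    switch op {
    case "+":
        return left + right, true
    case "-":
        return left - right, true
    case "*":
        return left * right, true
    case "/":
        if right == 0 {
            return 0, false // avoid division by zero in this sketch
        }
        return left / right, true
    }
    return 0, false // unknown operator
}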

View File

@ -32,7 +32,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -58,7 +58,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -87,7 +87,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -116,7 +116,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -145,7 +145,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -169,7 +169,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -195,7 +195,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -223,7 +223,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -251,7 +251,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })

View File

@ -4,7 +4,6 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_Join(t *testing.T) { func Test_Parse_Join(t *testing.T) {
@ -18,7 +17,7 @@ func Test_Parse_Join(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{
@ -41,7 +40,7 @@ func Test_Parse_Join(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"cc"}, IsTopLevel: true}, {Path: []string{"cc"}, IsTopLevel: true},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{

View File

@ -4,7 +4,6 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_MathFunctions(t *testing.T) { func Test_Execute_MathFunctions(t *testing.T) {
@ -645,7 +644,7 @@ func testMathFunctionParse(
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path(expectedTable)}, Table: parsers.Table{Value: expectedTable},
}, },
) )
} }

View File

@ -49,7 +49,7 @@ func Test_Parse(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
OrderExpressions: []parsers.OrderExpression{ OrderExpressions: []parsers.OrderExpression{
{ {
SelectItem: parsers.SelectItem{Path: []string{"c", "id"}}, SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
@ -73,7 +73,7 @@ func Test_Parse(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
@ -93,7 +93,7 @@ func Test_Parse(t *testing.T) {
Type: parsers.SelectItemTypeField, Type: parsers.SelectItemTypeField,
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -112,38 +112,6 @@ func Test_Parse(t *testing.T) {
) )
}) })
t.Run("Should parse IN function with function call", func(t *testing.T) {
testQueryParse(
t,
`Select c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallIn,
Arguments: []interface{}{
parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallToString,
Arguments: []interface{}{
testutils.SelectItem_Path("c", "id"),
},
},
},
testutils.SelectItem_Constant_String("123"),
testutils.SelectItem_Constant_String("456"),
},
},
},
},
)
})
t.Run("Should parse IN selector", func(t *testing.T) { t.Run("Should parse IN selector", func(t *testing.T) {
testQueryParse( testQueryParse(
t, t,
@ -157,32 +125,10 @@ func Test_Parse(t *testing.T) {
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "c", Value: "c",
SelectItem: testutils.SelectItem_Path("c", "tags"), SelectItem: parsers.SelectItem{
IsInSelect: true, Path: []string{"c", "tags"},
}, },
}, },
)
})
t.Run("Should parse IIF function", func(t *testing.T) {
testQueryParse(
t,
`SELECT IIF(true, c.pk, c.id) FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallIif,
Arguments: []interface{}{
testutils.SelectItem_Constant_Bool(true),
testutils.SelectItem_Path("c", "pk"),
testutils.SelectItem_Path("c", "id"),
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })

File diff suppressed because it is too large

View File

@ -178,32 +178,6 @@ func combineExpressions(ex1 interface{}, exs interface{}, operation parsers.Logi
}, nil }, nil
} }
func makeMathExpression(left interface{}, operations interface{}) (interface{}, error) {
if operations == nil || len(operations.([]interface{})) == 0 {
return left, nil
}
result := left.(parsers.SelectItem)
ops := operations.([]interface{})
for _, op := range ops {
opData := op.([]interface{})
operation := opData[0].(string)
right := opData[1].(parsers.SelectItem)
result = parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Left: result,
Right: right,
Operation: operation,
},
}
}
return result, nil
}
} }
Input <- selectStmt:SelectStmt { Input <- selectStmt:SelectStmt {
@ -230,22 +204,14 @@ TopClause <- Top ws count:Integer {
return count, nil return count, nil
} }
FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItemWithAlias { return column, nil }) { FromClause <- From ws table:TableName selectItem:(ws "IN"i ws column:SelectItem { return column, nil })? {
tableTyped := table.(parsers.Table) tableTyped := table.(parsers.Table)
if selectItem != nil { if selectItem != nil {
tableTyped.SelectItem = selectItem.(parsers.SelectItem) tableTyped.SelectItem = selectItem.(parsers.SelectItem)
tableTyped.IsInSelect = true
} }
return tableTyped, nil return tableTyped, nil
} / From ws column:SelectItemWithAlias {
tableSelectItem := column.(parsers.SelectItem)
table := parsers.Table{
Value: tableSelectItem.Alias,
SelectItem: tableSelectItem,
}
return table, nil
} / From ws subQuery:SubQuerySelectItem { } / From ws subQuery:SubQuerySelectItem {
subQueryTyped := subQuery.(parsers.SelectItem) subQueryTyped := subQuery.(parsers.SelectItem)
table := parsers.Table{ table := parsers.Table{
@ -277,13 +243,13 @@ SubQuerySelectItem <- subQuery:SubQuery asClause:(ws alias:AsClause { return ali
return selectItem, nil return selectItem, nil
} }
JoinClause <- Join ws table:TableName ws In ws column:SelectItemWithAlias { JoinClause <- Join ws table:TableName ws "IN"i ws column:SelectItem {
return makeJoin(table, column) return makeJoin(table, column)
} / Join ws subQuery:SubQuerySelectItem { } / Join ws subQuery:SubQuerySelectItem {
return makeJoin(nil, subQuery) return makeJoin(nil, subQuery)
} }
OffsetClause <- Offset ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral { OffsetClause <- "OFFSET"i ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
return []interface{}{offset.(parsers.Constant).Value, limit.(parsers.Constant).Value}, nil return []interface{}{offset.(parsers.Constant).Value, limit.(parsers.Constant).Value}, nil
} }
@ -295,34 +261,11 @@ SelectAsterisk <- "*" {
return makeColumnList(selectItem, make([]interface{}, 0)) return makeColumnList(selectItem, make([]interface{}, 0))
} }
ColumnList <- column:ExpressionOrSelectItem other_columns:(ws "," ws coll:ExpressionOrSelectItem {return coll, nil })* { ColumnList <- column:SelectItem other_columns:(ws "," ws coll:SelectItem {return coll, nil })* {
return makeColumnList(column, other_columns) return makeColumnList(column, other_columns)
} }
ExpressionOrSelectItem <- expression:OrExpression asClause:AsClause? { SelectValueSpec <- "VALUE"i ws column:SelectItem {
switch typedValue := expression.(type) {
case parsers.ComparisonExpression, parsers.LogicalExpression:
selectItem := parsers.SelectItem{
Type: parsers.SelectItemTypeExpression,
Value: typedValue,
}
if aliasValue, ok := asClause.(string); ok {
selectItem.Alias = aliasValue
}
return selectItem, nil
case parsers.SelectItem:
if aliasValue, ok := asClause.(string); ok {
typedValue.Alias = aliasValue
}
return typedValue, nil
default:
return typedValue, nil
}
} / item:SelectItemWithAlias { return item, nil }
SelectValueSpec <- "VALUE"i ws column:SelectItemWithAlias {
selectItem := column.(parsers.SelectItem) selectItem := column.(parsers.SelectItem)
selectItem.IsTopLevel = true selectItem.IsTopLevel = true
return makeColumnList(selectItem, make([]interface{}, 0)) return makeColumnList(selectItem, make([]interface{}, 0))
@ -338,11 +281,6 @@ SelectArray <- "[" ws columns:ColumnList ws "]" {
SelectObject <- "{" ws field:SelectObjectField ws other_fields:(ws "," ws coll:SelectObjectField {return coll, nil })* ws "}" { SelectObject <- "{" ws field:SelectObjectField ws other_fields:(ws "," ws coll:SelectObjectField {return coll, nil })* ws "}" {
return makeSelectObject(field, other_fields) return makeSelectObject(field, other_fields)
} / "{" ws "}" {
return parsers.SelectItem{
SelectItems: []parsers.SelectItem{},
Type: parsers.SelectItemTypeObject,
}, nil
} }
SelectObjectField <- name:(Identifier / "\"" key:Identifier "\"" { return key, nil }) ws ":" ws selectItem:SelectItem { SelectObjectField <- name:(Identifier / "\"" key:Identifier "\"" { return key, nil }) ws ":" ws selectItem:SelectItem {
@ -355,15 +293,7 @@ SelectProperty <- name:Identifier path:(DotFieldAccess / ArrayFieldAccess)* {
return makeSelectItem(name, path, parsers.SelectItemTypeField) return makeSelectItem(name, path, parsers.SelectItemTypeField)
} }
SelectItemWithAlias <- selectItem:SelectItem asClause:AsClause? { SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) asClause:AsClause? {
item := selectItem.(parsers.SelectItem)
if aliasValue, ok := asClause.(string); ok {
item.Alias = aliasValue
}
return item, nil
}
SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) {
var itemResult parsers.SelectItem var itemResult parsers.SelectItem
switch typedValue := selectItem.(type) { switch typedValue := selectItem.(type) {
case parsers.SelectItem: case parsers.SelectItem:
@ -380,14 +310,14 @@ SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectAr
} }
} }
if aliasValue, ok := asClause.(string); ok {
itemResult.Alias = aliasValue
}
return itemResult, nil return itemResult, nil
} }
AsClause <- (ws As)? ws !ExcludedKeywords alias:Identifier { AsClause <- ws As ws alias:Identifier { return alias, nil }
return alias, nil
}
ExcludedKeywords <- Select / Top / As / From / In / Join / Exists / Where / And / Or / Not / GroupBy / OrderBy / Offset
DotFieldAccess <- "." id:Identifier { DotFieldAccess <- "." id:Identifier {
return id, nil return id, nil
@ -413,27 +343,11 @@ AndExpression <- ex1:ComparisonExpression ex2:(ws And ws ex:ComparisonExpression
return combineExpressions(ex1, ex2, parsers.LogicalExpressionTypeAnd) return combineExpressions(ex1, ex2, parsers.LogicalExpressionTypeAnd)
} }
ComparisonExpression <- left:AddSubExpression ws op:ComparisonOperator ws right:AddSubExpression { ComparisonExpression <- "(" ws ex:OrExpression ws ")" { return ex, nil }
/ left:SelectItem ws op:ComparisonOperator ws right:SelectItem {
return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
} / ex:AddSubExpression { return ex, nil }
AddSubExpression <- left:MulDivExpression operations:(ws op:AddOrSubtractOperation ws right:MulDivExpression { return []interface{}{op, right}, nil })* {
return makeMathExpression(left, operations)
}
MulDivExpression <- left:SelectItemWithParentheses operations:(ws op:MultiplyOrDivideOperation ws right:SelectItemWithParentheses { return []interface{}{op, right}, nil })* {
return makeMathExpression(left, operations)
}
SelectItemWithParentheses <- "(" ws ex:OrExpression ws ")" { return ex, nil }
/ inv:(Not ws)? ex:SelectItem {
if inv != nil {
ex1 := ex.(parsers.SelectItem)
ex1.Invert = true
return ex1, nil
}
return ex, nil
} / ex:BooleanLiteral { return ex, nil } } / ex:BooleanLiteral { return ex, nil }
/ ex:SelectItem { return ex, nil }
OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* { OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* {
return makeOrderByClause(ex1, others) return makeOrderByClause(ex1, others)
@ -459,8 +373,6 @@ As <- "AS"i
From <- "FROM"i From <- "FROM"i
In <- "IN"i
Join <- "JOIN"i Join <- "JOIN"i
Exists <- "EXISTS"i Exists <- "EXISTS"i
@ -471,22 +383,14 @@ And <- "AND"i
Or <- "OR"i wss Or <- "OR"i wss
Not <- "NOT"i
GroupBy <- "GROUP"i ws "BY"i GroupBy <- "GROUP"i ws "BY"i
OrderBy <- "ORDER"i ws "BY"i OrderBy <- "ORDER"i ws "BY"i
Offset <- "OFFSET"i ComparisonOperator <- ("=" / "!=" / "<" / "<=" / ">" / ">=") {
ComparisonOperator <- ("<=" / ">=" / "=" / "!=" / "<" / ">") {
return string(c.text), nil return string(c.text), nil
} }
AddOrSubtractOperation <- ("+" / "-") { return string(c.text), nil }
MultiplyOrDivideOperation <- ("*" / "/") { return string(c.text), nil }
Literal <- FloatLiteral / IntegerLiteral / StringLiteral / BooleanLiteral / ParameterConstant / NullConstant Literal <- FloatLiteral / IntegerLiteral / StringLiteral / BooleanLiteral / ParameterConstant / NullConstant
ParameterConstant <- "@" Identifier { ParameterConstant <- "@" Identifier {
@ -514,7 +418,6 @@ BooleanLiteral <- ("true"i / "false"i) {
FunctionCall <- StringFunctions FunctionCall <- StringFunctions
/ TypeCheckingFunctions / TypeCheckingFunctions
/ ArrayFunctions / ArrayFunctions
/ ConditionalFunctions
/ InFunction / InFunction
/ AggregateFunctions / AggregateFunctions
/ MathFunctions / MathFunctions
@ -562,8 +465,6 @@ ArrayFunctions <- ArrayConcatExpression
/ SetIntersectExpression / SetIntersectExpression
/ SetUnionExpression / SetUnionExpression
ConditionalFunctions <- IifExpression
MathFunctions <- MathAbsExpression MathFunctions <- MathAbsExpression
/ MathAcosExpression / MathAcosExpression
/ MathAsinExpression / MathAsinExpression
@ -756,10 +657,6 @@ SetUnionExpression <- "SetUnion"i ws "(" ws set1:SelectItem ws "," ws set2:Selec
return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2}) return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2})
} }
IifExpression <- "IIF"i ws "(" ws condition:SelectItem ws "," ws trueValue:SelectItem ws "," ws falseValue:SelectItem ws ")" {
return createFunctionCall(parsers.FunctionCallIif, []interface{}{condition, trueValue, falseValue})
}
MathAbsExpression <- "ABS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAbs, []interface{}{ex}) } MathAbsExpression <- "ABS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAbs, []interface{}{ex}) }
MathAcosExpression <- "ACOS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAcos, []interface{}{ex}) } MathAcosExpression <- "ACOS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAcos, []interface{}{ex}) }
MathAsinExpression <- "ASIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAsin, []interface{}{ex}) } MathAsinExpression <- "ASIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAsin, []interface{}{ex}) }
@ -803,9 +700,7 @@ MathNumberBinExpression <- "NumberBin"i ws "(" ws ex1:SelectItem others:(ws ","
MathPiExpression <- "PI"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathPi, []interface{}{}) } MathPiExpression <- "PI"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathPi, []interface{}{}) }
MathRandExpression <- "RAND"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathRand, []interface{}{}) } MathRandExpression <- "RAND"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathRand, []interface{}{}) }
InFunction <- ex1:SelectProperty ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" { InFunction <- ex1:SelectProperty ws "IN"i ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
} / "(" ws ex1:SelectItem ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" ws ")" {
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...)) return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
} }
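Taken together with the parser tests elsewhere in this comparison, the grammar changes above add arithmetic operators, IIF, IN over a function call, NOT in WHERE, aliases without AS, and empty object literals. The queries below are copied from those tests purely as a summary of the new forms the master-side grammar accepts:

// Query forms exercised by the parser tests in this comparison; gathered here only to
// summarize what the updated grammar covers.
var exampleQueries = []string{
    `SELECT c.a + c.b * c.c FROM c`,
    `SELECT IIF(true, c.pk, c.id) FROM c`,
    `Select c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`,
    `select c.id FROM c WHERE NOT c.boolean`,
    `SELECT c.id AS aliasWithAs, c.pk aliasWithoutAs FROM root c`,
    `SELECT {} AS obj FROM c`,
}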

View File

@ -4,7 +4,6 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_Select(t *testing.T) { func Test_Parse_Select(t *testing.T) {
@ -18,7 +17,7 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -32,7 +31,7 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "@param"}}, {Path: []string{"c", "@param"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -45,7 +44,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Distinct: true, Distinct: true,
}, },
) )
@ -59,7 +58,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Count: 1, Count: 1,
}, },
) )
@ -73,7 +72,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Count: 5, Count: 5,
Offset: 3, Offset: 3,
}, },
@ -88,7 +87,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}, IsTopLevel: true}, {Path: []string{"c", "id"}, IsTopLevel: true},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -101,20 +100,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: true}, {Path: []string{"c"}, IsTopLevel: true},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
},
)
})
t.Run("Should parse SELECT c", func(t *testing.T) {
testQueryParse(
t,
`SELECT c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: false},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@ -134,27 +120,7 @@ func Test_Parse_Select(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
},
)
})
t.Run("Should parse SELECT with alias", func(t *testing.T) {
testQueryParse(
t,
`SELECT
c.id AS aliasWithAs,
c.pk aliasWithoutAs
FROM root c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Alias: "aliasWithAs", Path: []string{"c", "id"}},
{Alias: "aliasWithoutAs", Path: []string{"c", "pk"}},
},
Table: parsers.Table{
Value: "c",
SelectItem: parsers.SelectItem{Alias: "c", Path: []string{"root"}},
},
}, },
) )
}) })
@ -174,93 +140,7 @@ func Test_Parse_Select(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
},
)
})
t.Run("Should parse SELECT empty object", func(t *testing.T) {
testQueryParse(
t,
`SELECT {} AS obj FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Alias: "obj",
Type: parsers.SelectItemTypeObject,
SelectItems: []parsers.SelectItem{},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse comparison expressions in SELECT", func(t *testing.T) {
testQueryParse(
t,
`SELECT c["id"] = "123", c["pk"] > 456 FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeExpression,
Value: parsers.ComparisonExpression{
Operation: "=",
Left: testutils.SelectItem_Path("c", "id"),
Right: testutils.SelectItem_Constant_String("123"),
},
},
{
Type: parsers.SelectItemTypeExpression,
Value: parsers.ComparisonExpression{
Operation: ">",
Left: testutils.SelectItem_Path("c", "pk"),
Right: testutils.SelectItem_Constant_Int(456),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse logical expressions in SELECT", func(t *testing.T) {
testQueryParse(
t,
`SELECT c["id"] = "123" OR c["pk"] > 456, c["isCool"] AND c["hasRizz"] AS isRizzler FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Type: parsers.SelectItemTypeExpression,
Value: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeOr,
Expressions: []interface{}{
parsers.ComparisonExpression{
Operation: "=",
Left: testutils.SelectItem_Path("c", "id"),
Right: testutils.SelectItem_Constant_String("123"),
},
parsers.ComparisonExpression{
Operation: ">",
Left: testutils.SelectItem_Path("c", "pk"),
Right: testutils.SelectItem_Constant_Int(456),
},
},
},
},
{
Type: parsers.SelectItemTypeExpression,
Alias: "isRizzler",
Value: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeAnd,
Expressions: []interface{}{
testutils.SelectItem_Path("c", "isCool"),
testutils.SelectItem_Path("c", "hasRizz"),
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })

View File

@ -30,7 +30,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -56,7 +56,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -85,7 +85,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -111,7 +111,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -137,7 +137,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -163,7 +163,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -189,7 +189,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -213,7 +213,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -237,7 +237,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -261,7 +261,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -286,7 +286,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -310,7 +310,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -334,7 +334,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -360,7 +360,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -385,7 +385,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -409,7 +409,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -434,7 +434,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -458,7 +458,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -484,7 +484,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })
@ -508,7 +508,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
) )
}) })

View File

@ -4,7 +4,6 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_SubQuery(t *testing.T) { func Test_Parse_SubQuery(t *testing.T) {
@ -23,7 +22,7 @@ func Test_Parse_SubQuery(t *testing.T) {
Alias: "c", Alias: "c",
Type: parsers.SelectItemTypeSubQuery, Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{ Value: parsers.SelectStmt{
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("cc")}, Table: parsers.Table{Value: "cc"},
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"cc", "info"}, IsTopLevel: true}, {Path: []string{"cc", "info"}, IsTopLevel: true},
}, },
@ -43,7 +42,9 @@ func Test_Parse_SubQuery(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}}, {Path: []string{"cc", "name"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{
Value: "c",
},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{
@ -54,12 +55,13 @@ func Test_Parse_SubQuery(t *testing.T) {
Type: parsers.SelectItemTypeSubQuery, Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{ Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("tag", "name"), {Path: []string{"tag", "name"}},
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "tag", Value: "tag",
SelectItem: testutils.SelectItem_Path("c", "tags"), SelectItem: parsers.SelectItem{
IsInSelect: true, Path: []string{"c", "tags"},
},
}, },
}, },
}, },
@ -80,10 +82,10 @@ func Test_Parse_SubQuery(t *testing.T) {
WHERE hasTags`, WHERE hasTags`,
parsers.SelectStmt{ parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"), {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{ Table: parsers.Table{
SelectItem: testutils.SelectItem_Path("c"), Value: "c",
}, },
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
@ -98,12 +100,13 @@ func Test_Parse_SubQuery(t *testing.T) {
Type: parsers.SelectItemTypeSubQuery, Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{ Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("tag", "name"), {Path: []string{"tag", "name"}},
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "tag", Value: "tag",
SelectItem: testutils.SelectItem_Path("c", "tags"), SelectItem: parsers.SelectItem{
IsInSelect: true, Path: []string{"c", "tags"},
},
}, },
Exists: true, Exists: true,
}, },

View File

@ -4,7 +4,6 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_TypeCheckingFunctions(t *testing.T) { func Test_Execute_TypeCheckingFunctions(t *testing.T) {
@ -28,7 +27,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -64,7 +63,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -100,7 +99,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -136,7 +135,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -172,7 +171,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -208,7 +207,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -244,7 +243,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -280,7 +279,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -316,7 +315,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@ -352,7 +351,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{

View File

@ -19,7 +19,7 @@ func Test_Parse_Were(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.ComparisonExpression{ Filters: parsers.ComparisonExpression{
Operation: "=", Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "isCool"}}, Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
@ -42,7 +42,7 @@ func Test_Parse_Were(t *testing.T) {
{Path: []string{"c", "_rid"}}, {Path: []string{"c", "_rid"}},
{Path: []string{"c", "_ts"}}, {Path: []string{"c", "_ts"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.LogicalExpression{ Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeOr, Operation: parsers.LogicalExpressionTypeOr,
Expressions: []interface{}{ Expressions: []interface{}{
@ -67,12 +67,12 @@ func Test_Parse_Were(t *testing.T) {
t, t,
`select c.id `select c.id
FROM c FROM c
WHERE c.isCool=true AND (c.id = "123" OR c.id <= "456")`, WHERE c.isCool=true AND (c.id = "123" OR c.id = "456")`,
parsers.SelectStmt{ parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.LogicalExpression{ Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeAnd, Operation: parsers.LogicalExpressionTypeAnd,
Expressions: []interface{}{ Expressions: []interface{}{
@ -90,7 +90,7 @@ func Test_Parse_Were(t *testing.T) {
Right: testutils.SelectItem_Constant_String("123"), Right: testutils.SelectItem_Constant_String("123"),
}, },
parsers.ComparisonExpression{ parsers.ComparisonExpression{
Operation: "<=", Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "id"}}, Left: parsers.SelectItem{Path: []string{"c", "id"}},
Right: testutils.SelectItem_Constant_String("456"), Right: testutils.SelectItem_Constant_String("456"),
}, },
@ -114,7 +114,7 @@ func Test_Parse_Were(t *testing.T) {
AND c.param=@param_id1`, AND c.param=@param_id1`,
parsers.SelectStmt{ parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}}, SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
Filters: parsers.LogicalExpression{ Filters: parsers.LogicalExpression{
Expressions: []interface{}{ Expressions: []interface{}{
parsers.ComparisonExpression{ parsers.ComparisonExpression{
@ -148,21 +148,4 @@ func Test_Parse_Were(t *testing.T) {
}, },
) )
}) })
t.Run("Should correctly parse NOT conditions", func(t *testing.T) {
testQueryParse(
t,
`select c.id
FROM c
WHERE NOT c.boolean`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Path: []string{"c", "boolean"},
Invert: true,
},
},
)
})
} }

View File

@ -5,7 +5,6 @@ import (
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor" memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_AggregateFunctions(t *testing.T) { func Test_Execute_AggregateFunctions(t *testing.T) {
@ -39,7 +38,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -68,7 +67,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -100,7 +99,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -133,7 +132,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -166,7 +165,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -199,7 +198,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{

View File

@ -1,91 +0,0 @@
package memoryexecutor_test
import (
"testing"
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_Arithmetics(t *testing.T) {
mockData := []memoryexecutor.RowType{
map[string]interface{}{"id": 1, "a": 420},
map[string]interface{}{"id": 2, "a": 6.9},
map[string]interface{}{"id": 3},
}
t.Run("Should execute simple arithmetics", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
{
Type: parsers.SelectItemTypeBinaryExpression,
Alias: "result",
Value: parsers.BinaryExpression{
Operation: "+",
Left: testutils.SelectItem_Path("c", "a"),
Right: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Constant_Float(2.0),
Right: testutils.SelectItem_Constant_Int(3),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": 1, "result": 426.0},
map[string]interface{}{"id": 2, "result": 12.9},
map[string]interface{}{"id": 3, "result": nil},
},
)
})
t.Run("Should execute arithmetics in WHERE clause", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
{
Alias: "result",
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Constant_Int(2),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.ComparisonExpression{
Operation: ">",
Left: parsers.SelectItem{
Type: parsers.SelectItemTypeBinaryExpression,
Value: parsers.BinaryExpression{
Operation: "*",
Left: testutils.SelectItem_Path("c", "a"),
Right: testutils.SelectItem_Constant_Int(2),
},
},
Right: testutils.SelectItem_Constant_Int(500),
},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": 1, "result": 840.0},
},
)
})
}

View File

@ -196,10 +196,6 @@ func (r rowContext) parseArray(argument interface{}) []interface{} {
ex := r.resolveSelectItem(exItem) ex := r.resolveSelectItem(exItem)
arrValue := reflect.ValueOf(ex) arrValue := reflect.ValueOf(ex)
if arrValue.Kind() == reflect.Invalid {
return nil
}
if arrValue.Kind() != reflect.Slice { if arrValue.Kind() != reflect.Slice {
logger.ErrorLn("parseArray got parameters of wrong type") logger.ErrorLn("parseArray got parameters of wrong type")
return nil return nil
@ -224,7 +220,7 @@ func (r rowContext) partialMatch(item interface{}, exprToSearch interface{}) boo
} }
for _, key := range exprValue.MapKeys() { for _, key := range exprValue.MapKeys() {
if !reflect.DeepEqual(itemValue.MapIndex(key).Interface(), exprValue.MapIndex(key).Interface()) { if itemValue.MapIndex(key).Interface() != exprValue.MapIndex(key).Interface() {
return false return false
} }
} }
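The switch from != to reflect.DeepEqual in partialMatch is what makes the new nested-object ARRAY_CONTAINS case work: comparing interface values whose dynamic type is a map does not recurse, and it panics at runtime. A minimal standalone illustration (not project code):

package main

import (
    "fmt"
    "reflect"
)

func main() {
    a := interface{}(map[string]interface{}{"size": "M"})
    b := interface{}(map[string]interface{}{"size": "M"})

    // fmt.Println(a != b) // would panic: comparing uncomparable type map[string]interface {}
    fmt.Println(reflect.DeepEqual(a, b)) // true: DeepEqual recurses into nested maps
}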

View File

@ -42,7 +42,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -59,11 +59,10 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
parsers.SelectStmt{ parsers.SelectStmt{
Parameters: map[string]interface{}{ Parameters: map[string]interface{}{
"@categories": []interface{}{"coats", "jackets", "sweatshirts"}, "@categories": []interface{}{"coats", "jackets", "sweatshirts"},
"@objectArray": []interface{}{map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}}}, "@objectArray": []interface{}{map[string]interface{}{"category": "shirts", "color": "blue"}},
"@fullMatchObject": map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}}, "@fullMatchObject": map[string]interface{}{"category": "shirts", "color": "blue"},
"@partialMatchObject": map[string]interface{}{"category": "shirts"}, "@partialMatchObject": map[string]interface{}{"category": "shirts"},
"@missingPartialMatchObject": map[string]interface{}{"category": "shorts", "color": "blue"}, "@missingPartialMatchObject": map[string]interface{}{"category": "shorts", "color": "blue"},
"@nestedPartialMatchObject": map[string]interface{}{"nestedObject": map[string]interface{}{"size": "M"}},
}, },
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{ {
@ -134,18 +133,6 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
{
Alias: "ContainsNestedPartialMatchObject",
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallArrayContains,
Arguments: []interface{}{
testutils.SelectItem_Constant_Parameter("@objectArray"),
testutils.SelectItem_Constant_Parameter("@nestedPartialMatchObject"),
testutils.SelectItem_Constant_Bool(true),
},
},
},
}, },
}, },
[]memoryexecutor.RowType{map[string]interface{}{"id": "123"}}, []memoryexecutor.RowType{map[string]interface{}{"id": "123"}},
@ -157,7 +144,6 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
"MissingFullMatchObject": false, "MissingFullMatchObject": false,
"ContainsPartialMatchObject": true, "ContainsPartialMatchObject": true,
"MissingPartialMatchObject": false, "MissingPartialMatchObject": false,
"ContainsNestedPartialMatchObject": true,
}, },
}, },
) )
@ -370,7 +356,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -406,7 +392,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -444,7 +430,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@ -482,7 +468,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{

View File

@ -1,27 +0,0 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type rowArrayIterator struct {
documents []rowContext
index int
}
func NewRowArrayIterator(documents []rowContext) *rowArrayIterator {
return &rowArrayIterator{
documents: documents,
index: -1,
}
}
func (i *rowArrayIterator) Next() (rowContext, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return rowContext{}, datastore.IterEOF
}
row := i.documents[i.index]
i.documents[i.index] = rowContext{} // Help GC reclaim memory
return row, datastore.StatusOk
}

View File

@ -1,459 +0,0 @@
package memoryexecutor
import (
"fmt"
"reflect"
"strconv"
"strings"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers"
)
type RowType interface{}
type rowContext struct {
tables map[string]RowType
parameters map[string]interface{}
grouppedRows []rowContext
}
type rowIterator interface {
Next() (rowContext, datastore.DataStoreStatus)
}
type rowTypeIterator interface {
Next() (RowType, datastore.DataStoreStatus)
}
func resolveDestinationColumnName(selectItem parsers.SelectItem, itemIndex int, queryParameters map[string]interface{}) string {
if selectItem.Alias != "" {
return selectItem.Alias
}
destinationName := fmt.Sprintf("$%d", itemIndex+1)
if len(selectItem.Path) > 0 {
destinationName = selectItem.Path[len(selectItem.Path)-1]
}
if destinationName[0] == '@' {
destinationName = queryParameters[destinationName].(string)
}
return destinationName
}
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
if selectItem.Type == parsers.SelectItemTypeArray {
return r.selectItem_SelectItemTypeArray(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeObject {
return r.selectItem_SelectItemTypeObject(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeConstant {
return r.selectItem_SelectItemTypeConstant(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeSubQuery {
return r.selectItem_SelectItemTypeSubQuery(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
return nil
}
if selectItem.Type == parsers.SelectItemTypeExpression {
if typedExpression, ok := selectItem.Value.(parsers.ComparisonExpression); ok {
return r.filters_ComparisonExpression(typedExpression)
}
if typedExpression, ok := selectItem.Value.(parsers.LogicalExpression); ok {
return r.filters_LogicalExpression(typedExpression)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.ComparisonExpression)")
return nil
}
if selectItem.Type == parsers.SelectItemTypeBinaryExpression {
if typedSelectItem, ok := selectItem.Value.(parsers.BinaryExpression); ok {
return r.selectItem_SelectItemTypeBinaryExpression(typedSelectItem)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.BinaryExpression)")
return nil
}
return r.selectItem_SelectItemTypeField(selectItem)
}
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
arrayValue := make([]interface{}, 0)
for _, subSelectItem := range selectItem.SelectItems {
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
}
return arrayValue
}
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
objectValue := make(map[string]interface{})
for _, subSelectItem := range selectItem.SelectItems {
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
}
return objectValue
}
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
var typedValue parsers.Constant
var ok bool
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
// TODO: Handle error
logger.ErrorLn("parsers.Constant has incorrect Value type")
}
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
r.parameters != nil {
if key, ok := typedValue.Value.(string); ok {
return r.parameters[key]
}
}
return typedValue.Value
}
func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
subQuery := selectItem.Value.(parsers.SelectStmt)
subQueryResult := executeQuery(
subQuery,
NewRowArrayIterator([]rowContext{r}),
)
if subQuery.Exists {
_, status := subQueryResult.Next()
return status == datastore.StatusOk
}
allDocuments := make([]RowType, 0)
for {
row, status := subQueryResult.Next()
if status != datastore.StatusOk {
break
}
allDocuments = append(allDocuments, row)
}
return allDocuments
}
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
switch functionCall.Type {
case parsers.FunctionCallStringEquals:
return r.strings_StringEquals(functionCall.Arguments)
case parsers.FunctionCallContains:
return r.strings_Contains(functionCall.Arguments)
case parsers.FunctionCallEndsWith:
return r.strings_EndsWith(functionCall.Arguments)
case parsers.FunctionCallStartsWith:
return r.strings_StartsWith(functionCall.Arguments)
case parsers.FunctionCallConcat:
return r.strings_Concat(functionCall.Arguments)
case parsers.FunctionCallIndexOf:
return r.strings_IndexOf(functionCall.Arguments)
case parsers.FunctionCallToString:
return r.strings_ToString(functionCall.Arguments)
case parsers.FunctionCallUpper:
return r.strings_Upper(functionCall.Arguments)
case parsers.FunctionCallLower:
return r.strings_Lower(functionCall.Arguments)
case parsers.FunctionCallLeft:
return r.strings_Left(functionCall.Arguments)
case parsers.FunctionCallLength:
return r.strings_Length(functionCall.Arguments)
case parsers.FunctionCallLTrim:
return r.strings_LTrim(functionCall.Arguments)
case parsers.FunctionCallReplace:
return r.strings_Replace(functionCall.Arguments)
case parsers.FunctionCallReplicate:
return r.strings_Replicate(functionCall.Arguments)
case parsers.FunctionCallReverse:
return r.strings_Reverse(functionCall.Arguments)
case parsers.FunctionCallRight:
return r.strings_Right(functionCall.Arguments)
case parsers.FunctionCallRTrim:
return r.strings_RTrim(functionCall.Arguments)
case parsers.FunctionCallSubstring:
return r.strings_Substring(functionCall.Arguments)
case parsers.FunctionCallTrim:
return r.strings_Trim(functionCall.Arguments)
case parsers.FunctionCallIsDefined:
return r.typeChecking_IsDefined(functionCall.Arguments)
case parsers.FunctionCallIsArray:
return r.typeChecking_IsArray(functionCall.Arguments)
case parsers.FunctionCallIsBool:
return r.typeChecking_IsBool(functionCall.Arguments)
case parsers.FunctionCallIsFiniteNumber:
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
case parsers.FunctionCallIsInteger:
return r.typeChecking_IsInteger(functionCall.Arguments)
case parsers.FunctionCallIsNull:
return r.typeChecking_IsNull(functionCall.Arguments)
case parsers.FunctionCallIsNumber:
return r.typeChecking_IsNumber(functionCall.Arguments)
case parsers.FunctionCallIsObject:
return r.typeChecking_IsObject(functionCall.Arguments)
case parsers.FunctionCallIsPrimitive:
return r.typeChecking_IsPrimitive(functionCall.Arguments)
case parsers.FunctionCallIsString:
return r.typeChecking_IsString(functionCall.Arguments)
case parsers.FunctionCallArrayConcat:
return r.array_Concat(functionCall.Arguments)
case parsers.FunctionCallArrayContains:
return r.array_Contains(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAny:
return r.array_Contains_Any(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAll:
return r.array_Contains_All(functionCall.Arguments)
case parsers.FunctionCallArrayLength:
return r.array_Length(functionCall.Arguments)
case parsers.FunctionCallArraySlice:
return r.array_Slice(functionCall.Arguments)
case parsers.FunctionCallSetIntersect:
return r.set_Intersect(functionCall.Arguments)
case parsers.FunctionCallSetUnion:
return r.set_Union(functionCall.Arguments)
case parsers.FunctionCallIif:
return r.misc_Iif(functionCall.Arguments)
case parsers.FunctionCallMathAbs:
return r.math_Abs(functionCall.Arguments)
case parsers.FunctionCallMathAcos:
return r.math_Acos(functionCall.Arguments)
case parsers.FunctionCallMathAsin:
return r.math_Asin(functionCall.Arguments)
case parsers.FunctionCallMathAtan:
return r.math_Atan(functionCall.Arguments)
case parsers.FunctionCallMathCeiling:
return r.math_Ceiling(functionCall.Arguments)
case parsers.FunctionCallMathCos:
return r.math_Cos(functionCall.Arguments)
case parsers.FunctionCallMathCot:
return r.math_Cot(functionCall.Arguments)
case parsers.FunctionCallMathDegrees:
return r.math_Degrees(functionCall.Arguments)
case parsers.FunctionCallMathExp:
return r.math_Exp(functionCall.Arguments)
case parsers.FunctionCallMathFloor:
return r.math_Floor(functionCall.Arguments)
case parsers.FunctionCallMathIntBitNot:
return r.math_IntBitNot(functionCall.Arguments)
case parsers.FunctionCallMathLog10:
return r.math_Log10(functionCall.Arguments)
case parsers.FunctionCallMathRadians:
return r.math_Radians(functionCall.Arguments)
case parsers.FunctionCallMathRound:
return r.math_Round(functionCall.Arguments)
case parsers.FunctionCallMathSign:
return r.math_Sign(functionCall.Arguments)
case parsers.FunctionCallMathSin:
return r.math_Sin(functionCall.Arguments)
case parsers.FunctionCallMathSqrt:
return r.math_Sqrt(functionCall.Arguments)
case parsers.FunctionCallMathSquare:
return r.math_Square(functionCall.Arguments)
case parsers.FunctionCallMathTan:
return r.math_Tan(functionCall.Arguments)
case parsers.FunctionCallMathTrunc:
return r.math_Trunc(functionCall.Arguments)
case parsers.FunctionCallMathAtn2:
return r.math_Atn2(functionCall.Arguments)
case parsers.FunctionCallMathIntAdd:
return r.math_IntAdd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitAnd:
return r.math_IntBitAnd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitLeftShift:
return r.math_IntBitLeftShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitOr:
return r.math_IntBitOr(functionCall.Arguments)
case parsers.FunctionCallMathIntBitRightShift:
return r.math_IntBitRightShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitXor:
return r.math_IntBitXor(functionCall.Arguments)
case parsers.FunctionCallMathIntDiv:
return r.math_IntDiv(functionCall.Arguments)
case parsers.FunctionCallMathIntMod:
return r.math_IntMod(functionCall.Arguments)
case parsers.FunctionCallMathIntMul:
return r.math_IntMul(functionCall.Arguments)
case parsers.FunctionCallMathIntSub:
return r.math_IntSub(functionCall.Arguments)
case parsers.FunctionCallMathPower:
return r.math_Power(functionCall.Arguments)
case parsers.FunctionCallMathLog:
return r.math_Log(functionCall.Arguments)
case parsers.FunctionCallMathNumberBin:
return r.math_NumberBin(functionCall.Arguments)
case parsers.FunctionCallMathPi:
return r.math_Pi()
case parsers.FunctionCallMathRand:
return r.math_Rand()
case parsers.FunctionCallAggregateAvg:
return r.aggregate_Avg(functionCall.Arguments)
case parsers.FunctionCallAggregateCount:
return r.aggregate_Count(functionCall.Arguments)
case parsers.FunctionCallAggregateMax:
return r.aggregate_Max(functionCall.Arguments)
case parsers.FunctionCallAggregateMin:
return r.aggregate_Min(functionCall.Arguments)
case parsers.FunctionCallAggregateSum:
return r.aggregate_Sum(functionCall.Arguments)
case parsers.FunctionCallIn:
return r.misc_In(functionCall.Arguments)
}
logger.Errorf("Unknown function call type: %v", functionCall.Type)
return nil
}
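// selectItem_SelectItemTypeBinaryExpression resolves both operands and applies the
// arithmetic operation (+, -, *, /). Non-numeric operands and division by zero
// return nil.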
func (r rowContext) selectItem_SelectItemTypeBinaryExpression(binaryExpression parsers.BinaryExpression) interface{} {
if binaryExpression.Left == nil || binaryExpression.Right == nil {
logger.Debug("parsers.BinaryExpression has nil Left or Right value")
return nil
}
leftValue := r.resolveSelectItem(binaryExpression.Left.(parsers.SelectItem))
rightValue := r.resolveSelectItem(binaryExpression.Right.(parsers.SelectItem))
if leftValue == nil || rightValue == nil {
return nil
}
leftNumber, leftIsNumber := numToFloat64(leftValue)
rightNumber, rightIsNumber := numToFloat64(rightValue)
if !leftIsNumber || !rightIsNumber {
logger.Debug("Binary expression operands are not numbers, returning nil")
return nil
}
switch binaryExpression.Operation {
case "+":
return leftNumber + rightNumber
case "-":
return leftNumber - rightNumber
case "*":
return leftNumber * rightNumber
case "/":
if rightNumber == 0 {
logger.Debug("Division by zero in binary expression")
return nil
}
return leftNumber / rightNumber
default:
return nil
}
}
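// selectItem_SelectItemTypeField resolves a field path: the first segment selects a
// table, the remaining segments are walked through nested maps, documents and slices.
// Segments starting with '@' are substituted from the query parameters; unresolvable
// paths return nil.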
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
value := r.tables[selectItem.Path[0]]
if len(selectItem.Path) > 1 {
for _, pathSegment := range selectItem.Path[1:] {
if pathSegment[0] == '@' {
pathSegment = r.parameters[pathSegment].(string)
}
switch nestedValue := value.(type) {
case map[string]interface{}:
value = nestedValue[pathSegment]
case map[string]RowType:
value = nestedValue[pathSegment]
case datastore.Document:
value = nestedValue[pathSegment]
case map[string]datastore.Document:
value = nestedValue[pathSegment]
case []int, []string, []interface{}:
slice := reflect.ValueOf(nestedValue)
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
value = slice.Index(arrayIndex).Interface()
} else {
return nil
}
default:
return nil
}
}
}
return value
}
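// compareValues orders two values: nil sorts before everything else, numbers are
// compared as float64, strings and booleans use their natural order, mismatched
// types compare as 1, and any other type falls back to reflect.DeepEqual.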
func compareValues(val1, val2 interface{}) int {
// Handle nil values
if val1 == nil && val2 == nil {
return 0
} else if val1 == nil {
return -1
} else if val2 == nil {
return 1
}
// Handle number values
val1Number, val1IsNumber := numToFloat64(val1)
val2Number, val2IsNumber := numToFloat64(val2)
if val1IsNumber && val2IsNumber {
if val1Number < val2Number {
return -1
} else if val1Number > val2Number {
return 1
}
return 0
}
// Handle different types
if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
return 1
}
switch val1 := val1.(type) {
case string:
val2 := val2.(string)
return strings.Compare(val1, val2)
case bool:
val2 := val2.(bool)
if val1 == val2 {
return 0
} else if val1 {
return 1
} else {
return -1
}
// TODO: Add more types
default:
if reflect.DeepEqual(val1, val2) {
return 0
}
return 1
}
}
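// copyMap returns a shallow copy of the given table map.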
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
targetMap := make(map[string]T)
for k, v := range originalMap {
targetMap[k] = v
}
return targetMap
}

View File

@ -1,92 +0,0 @@
package memoryexecutor_test
import (
"testing"
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_Expressions(t *testing.T) {
mockData := []memoryexecutor.RowType{
map[string]interface{}{"id": "123", "age": 10, "isCool": true},
map[string]interface{}{"id": "456", "age": 20, "isCool": false},
map[string]interface{}{"id": "789", "age": 30, "isCool": true},
}
t.Run("Should execute comparison expressions in SELECT", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
{
Alias: "isAdult",
Type: parsers.SelectItemTypeExpression,
Value: parsers.ComparisonExpression{
Operation: ">=",
Left: testutils.SelectItem_Path("c", "age"),
Right: testutils.SelectItem_Constant_Int(18),
},
},
{
Alias: "isNotCool",
Type: parsers.SelectItemTypeExpression,
Value: parsers.ComparisonExpression{
Operation: "!=",
Left: testutils.SelectItem_Path("c", "isCool"),
Right: testutils.SelectItem_Constant_Bool(true),
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": "123", "isAdult": false, "isNotCool": false},
map[string]interface{}{"id": "456", "isAdult": true, "isNotCool": true},
map[string]interface{}{"id": "789", "isAdult": true, "isNotCool": false},
},
)
})
t.Run("Should execute logical expressions in SELECT", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{
Path: []string{"c", "id"},
Type: parsers.SelectItemTypeField,
},
{
Alias: "isCoolAndAdult",
Type: parsers.SelectItemTypeExpression,
Value: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeAnd,
Expressions: []interface{}{
testutils.SelectItem_Path("c", "isCool"),
parsers.ComparisonExpression{
Operation: ">=",
Left: testutils.SelectItem_Path("c", "age"),
Right: testutils.SelectItem_Constant_Int(18),
},
},
},
},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": "123", "isCoolAndAdult": false},
map[string]interface{}{"id": "456", "isCoolAndAdult": false},
map[string]interface{}{"id": "789", "isCoolAndAdult": true},
},
)
})
}

View File

@ -1,36 +0,0 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type distinctIterator struct {
documents rowTypeIterator
seenDocs []RowType
}
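// Next returns the next row that has not been seen before, using compareValues to
// detect duplicates among previously returned rows.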
func (di *distinctIterator) Next() (RowType, datastore.DataStoreStatus) {
if di.documents == nil {
return rowContext{}, datastore.IterEOF
}
for {
row, status := di.documents.Next()
if status != datastore.StatusOk {
di.documents = nil
return rowContext{}, status
}
if !di.seen(row) {
di.seenDocs = append(di.seenDocs, row)
return row, status
}
}
}
func (di *distinctIterator) seen(row RowType) bool {
for _, seenRow := range di.seenDocs {
if compareValues(seenRow, row) == 0 {
return true
}
}
return false
}

View File

@ -1,143 +0,0 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers"
)
type filterIterator struct {
documents rowIterator
filters interface{}
}
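// Next advances the underlying iterator until a row satisfies the WHERE filters or
// the input is exhausted.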
func (fi *filterIterator) Next() (rowContext, datastore.DataStoreStatus) {
if fi.documents == nil {
return rowContext{}, datastore.IterEOF
}
for {
row, status := fi.documents.Next()
if status != datastore.StatusOk {
fi.documents = nil
return rowContext{}, status
}
if fi.evaluateFilters(row) {
return row, status
}
}
}
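// evaluateFilters reports whether the row matches the parsed filter expression
// (comparison, logical, boolean constant or select item); a nil filter matches
// every row.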
func (fi *filterIterator) evaluateFilters(row rowContext) bool {
if fi.filters == nil {
return true
}
switch typedFilters := fi.filters.(type) {
case parsers.ComparisonExpression:
return row.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return row.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := row.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
if typedFilters.Invert {
return !value
}
return value
}
}
return false
}
func (r rowContext) applyFilters(filters interface{}) bool {
if filters == nil {
return true
}
switch typedFilters := filters.(type) {
case parsers.ComparisonExpression:
return r.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return r.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := r.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
if typedFilters.Invert {
return !value
}
return value
}
}
return false
}
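// filters_ComparisonExpression resolves both sides of the comparison and applies the
// operator (=, !=, <, >, <=, >=) to the result of compareValues.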
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
if !leftExpressionOk || !rightExpressionOk {
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
return false
}
leftValue := r.resolveSelectItem(leftExpression)
rightValue := r.resolveSelectItem(rightExpression)
cmp := compareValues(leftValue, rightValue)
switch expression.Operation {
case "=":
return cmp == 0
case "!=":
return cmp != 0
case "<":
return cmp < 0
case ">":
return cmp > 0
case "<=":
return cmp <= 0
case ">=":
return cmp >= 0
}
return false
}
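// filters_LogicalExpression evaluates AND/OR expression chains with short-circuiting.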
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
var result bool
for i, subExpression := range expression.Expressions {
expressionResult := r.applyFilters(subExpression)
if i == 0 {
result = expressionResult
}
switch expression.Operation {
case parsers.LogicalExpressionTypeAnd:
result = result && expressionResult
if !result {
return false
}
case parsers.LogicalExpressionTypeOr:
result = result || expressionResult
if result {
return true
}
}
}
return result
}

View File

@ -1,73 +0,0 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type fromIterator struct {
documents rowIterator
table parsers.Table
buffer []rowContext
bufferIndex int
}
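// Next resolves the FROM clause for the next input row. Buffered rows are drained
// first; when the FROM item is an IN expression or sub-query each element becomes
// its own row context, otherwise the resolved value is stored under the destination
// table name.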
func (fi *fromIterator) Next() (rowContext, datastore.DataStoreStatus) {
if fi.documents == nil {
return rowContext{}, datastore.IterEOF
}
// Return from buffer if available
if fi.bufferIndex < len(fi.buffer) {
result := fi.buffer[fi.bufferIndex]
fi.buffer[fi.bufferIndex] = rowContext{}
fi.bufferIndex++
return result, datastore.StatusOk
}
// Resolve next row from documents
row, status := fi.documents.Next()
if status != datastore.StatusOk {
fi.documents = nil
return row, status
}
if fi.table.SelectItem.Path != nil || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
destinationTableName := fi.table.SelectItem.Alias
if destinationTableName == "" {
destinationTableName = fi.table.Value
}
if destinationTableName == "" {
destinationTableName = resolveDestinationColumnName(fi.table.SelectItem, 0, row.parameters)
}
if fi.table.IsInSelect || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := row.parseArray(fi.table.SelectItem)
rowContexts := make([]rowContext, len(selectValue))
for i, newRowData := range selectValue {
rowContexts[i].parameters = row.parameters
rowContexts[i].tables = copyMap(row.tables)
rowContexts[i].tables[destinationTableName] = newRowData
}
fi.buffer = rowContexts
fi.bufferIndex = 0
return fi.Next()
}
if len(fi.table.SelectItem.Path) > 0 {
sourceTableName := fi.table.SelectItem.Path[0]
sourceTableData := row.tables[sourceTableName]
if sourceTableData == nil {
// When the source table is not found, assume it refers to the root document
row.tables[sourceTableName] = row.tables["$root"]
}
}
newRowData := row.resolveSelectItem(fi.table.SelectItem)
row.tables[destinationTableName] = newRowData
return row, status
}
return row, status
}

View File

@ -1,69 +0,0 @@
package memoryexecutor
import (
"fmt"
"strings"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type groupByIterator struct {
documents rowIterator
groupBy []parsers.SelectItem
groupedRows []rowContext
}
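// Next consumes all input rows on the first call, groups them by the GROUP BY key
// (preserving first-seen key order), then emits one combined row context per group.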
func (gi *groupByIterator) Next() (rowContext, datastore.DataStoreStatus) {
if gi.groupedRows != nil {
if len(gi.groupedRows) == 0 {
return rowContext{}, datastore.IterEOF
}
row := gi.groupedRows[0]
gi.groupedRows = gi.groupedRows[1:]
return row, datastore.StatusOk
}
documents := make([]rowContext, 0)
for {
row, status := gi.documents.Next()
if status != datastore.StatusOk {
break
}
documents = append(documents, row)
}
gi.documents = nil
groupedRows := make(map[string][]rowContext)
groupedKeys := make([]string, 0)
for _, row := range documents {
key := row.generateGroupByKey(gi.groupBy)
if _, ok := groupedRows[key]; !ok {
groupedKeys = append(groupedKeys, key)
}
groupedRows[key] = append(groupedRows[key], row)
}
gi.groupedRows = make([]rowContext, 0)
for _, key := range groupedKeys {
gi.groupedRows = append(gi.groupedRows, rowContext{
tables: groupedRows[key][0].tables,
parameters: groupedRows[key][0].parameters,
grouppedRows: groupedRows[key],
})
}
return gi.Next()
}
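// generateGroupByKey builds the grouping key by concatenating the resolved GROUP BY
// values, separated by ':'.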
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
var keyBuilder strings.Builder
for _, selectItem := range groupBy {
value := r.resolveSelectItem(selectItem)
keyBuilder.WriteString(fmt.Sprintf("%v", value))
keyBuilder.WriteString(":")
}
return keyBuilder.String()
}

View File

@ -1,62 +0,0 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type joinIterator struct {
documents rowIterator
query parsers.SelectStmt
buffer []rowContext
}
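// Next expands the JOIN items for the next input row: every joined element produces a
// new row context with the joined table added, and the resulting combinations are
// buffered and returned one at a time.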
func (ji *joinIterator) Next() (rowContext, datastore.DataStoreStatus) {
if ji.documents == nil {
return rowContext{}, datastore.IterEOF
}
if len(ji.buffer) > 0 {
row := ji.buffer[0]
ji.buffer = ji.buffer[1:]
return row, datastore.StatusOk
}
doc, status := ji.documents.Next()
if status != datastore.StatusOk {
ji.documents = nil
return rowContext{}, status
}
ji.buffer = []rowContext{doc}
for _, joinItem := range ji.query.JoinItems {
nextDocuments := make([]rowContext, 0)
for _, row := range ji.buffer {
joinedItems := row.resolveJoinItemSelect(joinItem.SelectItem)
for _, joinedItem := range joinedItems {
tablesCopy := copyMap(row.tables)
tablesCopy[joinItem.Table.Value] = joinedItem
nextDocuments = append(nextDocuments, rowContext{
parameters: row.parameters,
tables: tablesCopy,
})
}
}
ji.buffer = nextDocuments
}
return ji.Next()
}
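// resolveJoinItemSelect resolves the joined select item (a path or sub-query) into the
// rows to join against; any other select item yields an empty slice.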
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := r.parseArray(selectItem)
documents := make([]RowType, len(selectValue))
for i, newRowData := range selectValue {
documents[i] = newRowData
}
return documents
}
return []RowType{}
}

View File

@ -5,7 +5,6 @@ import (
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor" memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_Joins(t *testing.T) { func Test_Execute_Joins(t *testing.T) {
@ -34,7 +33,7 @@ func Test_Execute_Joins(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}}, {Path: []string{"cc", "name"}},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{
@ -63,7 +62,7 @@ func Test_Execute_Joins(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"cc"}, IsTopLevel: true}, {Path: []string{"cc"}, IsTopLevel: true},
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{

View File

@ -1,19 +0,0 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type limitIterator struct {
documents rowTypeIterator
limit int
count int
}
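// Next returns rows from the underlying iterator until the configured limit is
// reached, then reports IterEOF.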
func (li *limitIterator) Next() (RowType, datastore.DataStoreStatus) {
if li.count >= li.limit {
li.documents = nil
return rowContext{}, datastore.IterEOF
}
li.count++
return li.documents.Next()
}

View File

@ -605,30 +605,10 @@ func numToInt(ex interface{}) (int, bool) {
func numToFloat64(num interface{}) (float64, bool) { func numToFloat64(num interface{}) (float64, bool) {
switch val := num.(type) { switch val := num.(type) {
case int:
return float64(val), true
case int8:
return float64(val), true
case int16:
return float64(val), true
case int32:
return float64(val), true
case int64:
return float64(val), true
case uint:
return float64(val), true
case uint8:
return float64(val), true
case uint16:
return float64(val), true
case uint32:
return float64(val), true
case uint64:
return float64(val), true
case float32:
return float64(val), true
case float64: case float64:
return val, true return val, true
case int:
return float64(val), true
default: default:
return 0, false return 0, false
} }

View File

@ -6,7 +6,6 @@ import (
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor" memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_MathFunctions(t *testing.T) { func Test_Execute_MathFunctions(t *testing.T) {
@ -262,7 +261,7 @@ func testMathFunctionExecute(
}, },
}, },
}, },
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")}, Table: parsers.Table{Value: "c"},
}, },
data, data,
expectedData, expectedData,

View File

@ -1,92 +1,702 @@
package memoryexecutor package memoryexecutor
import ( import (
"github.com/pikami/cosmium/internal/datastore" "fmt"
"reflect"
"sort"
"strconv"
"strings"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
"golang.org/x/exp/slices"
) )
func ExecuteQuery(query parsers.SelectStmt, documents rowTypeIterator) []RowType { type RowType interface{}
resultIter := executeQuery(query, &rowTypeToRowContextIterator{documents: documents, query: query}) type rowContext struct {
result := make([]RowType, 0) tables map[string]RowType
for { parameters map[string]interface{}
row, status := resultIter.Next() grouppedRows []rowContext
if status != datastore.StatusOk {
break
} }
result = append(result, row) func ExecuteQuery(query parsers.SelectStmt, documents []RowType) []RowType {
currentDocuments := make([]rowContext, 0)
for _, doc := range documents {
currentDocuments = append(currentDocuments, resolveFrom(query, doc)...)
}
// Handle JOINS
nextDocuments := make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
rowContexts := currentDocument.handleJoin(query)
nextDocuments = append(nextDocuments, rowContexts...)
}
currentDocuments = nextDocuments
// Apply filters
nextDocuments = make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
if currentDocument.applyFilters(query.Filters) {
nextDocuments = append(nextDocuments, currentDocument)
}
}
currentDocuments = nextDocuments
// Apply order
if len(query.OrderExpressions) > 0 {
applyOrder(currentDocuments, query.OrderExpressions)
}
// Apply group by
if len(query.GroupBy) > 0 {
currentDocuments = applyGroupBy(currentDocuments, query.GroupBy)
}
// Apply select
projectedDocuments := applyProjection(currentDocuments, query.SelectItems, query.GroupBy)
// Apply distinct
if query.Distinct {
projectedDocuments = deduplicate(projectedDocuments)
}
// Apply result limit
if query.Count > 0 && len(projectedDocuments) > query.Count {
projectedDocuments = projectedDocuments[:query.Count]
}
return projectedDocuments
}
func resolveFrom(query parsers.SelectStmt, doc RowType) []rowContext {
initialRow, gotParentContext := doc.(rowContext)
if !gotParentContext {
var initialTableName string
if query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
initialTableName = query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value
}
if initialTableName == "" {
initialTableName = query.Table.Value
}
initialRow = rowContext{
parameters: query.Parameters,
tables: map[string]RowType{
initialTableName: doc,
},
}
}
if query.Table.SelectItem.Path != nil || query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
destinationTableName := query.Table.SelectItem.Alias
if destinationTableName == "" {
destinationTableName = query.Table.Value
}
selectValue := initialRow.parseArray(query.Table.SelectItem)
rowContexts := make([]rowContext, len(selectValue))
for i, newRowData := range selectValue {
rowContexts[i].parameters = initialRow.parameters
rowContexts[i].tables = copyMap(initialRow.tables)
rowContexts[i].tables[destinationTableName] = newRowData
}
return rowContexts
}
return []rowContext{initialRow}
}
func (r rowContext) handleJoin(query parsers.SelectStmt) []rowContext {
currentDocuments := []rowContext{r}
for _, joinItem := range query.JoinItems {
nextDocuments := make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
joinedItems := currentDocument.resolveJoinItemSelect(joinItem.SelectItem)
for _, joinedItem := range joinedItems {
tablesCopy := copyMap(currentDocument.tables)
tablesCopy[joinItem.Table.Value] = joinedItem
nextDocuments = append(nextDocuments, rowContext{
parameters: currentDocument.parameters,
tables: tablesCopy,
})
}
}
currentDocuments = nextDocuments
}
return currentDocuments
}
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := r.parseArray(selectItem)
documents := make([]RowType, len(selectValue))
for i, newRowData := range selectValue {
documents[i] = newRowData
}
return documents
}
return []RowType{}
}
func (r rowContext) applyFilters(filters interface{}) bool {
if filters == nil {
return true
}
switch typedFilters := filters.(type) {
case parsers.ComparisonExpression:
return r.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return r.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := r.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
return value
}
}
return false
}
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
if !leftExpressionOk || !rightExpressionOk {
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
return false
}
leftValue := r.resolveSelectItem(leftExpression)
rightValue := r.resolveSelectItem(rightExpression)
cmp := compareValues(leftValue, rightValue)
switch expression.Operation {
case "=":
return cmp == 0
case "!=":
return cmp != 0
case "<":
return cmp < 0
case ">":
return cmp > 0
case "<=":
return cmp <= 0
case ">=":
return cmp >= 0
}
return false
}
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
var result bool
for i, subExpression := range expression.Expressions {
expressionResult := r.applyFilters(subExpression)
if i == 0 {
result = expressionResult
}
switch expression.Operation {
case parsers.LogicalExpressionTypeAnd:
result = result && expressionResult
if !result {
return false
}
case parsers.LogicalExpressionTypeOr:
result = result || expressionResult
if result {
return true
}
}
} }
return result return result
} }
func executeQuery(query parsers.SelectStmt, documents rowIterator) rowTypeIterator { func applyOrder(documents []rowContext, orderExpressions []parsers.OrderExpression) {
// Resolve FROM less := func(i, j int) bool {
var iter rowIterator = &fromIterator{ for _, order := range orderExpressions {
documents: documents, val1 := documents[i].resolveSelectItem(order.SelectItem)
table: query.Table, val2 := documents[j].resolveSelectItem(order.SelectItem)
cmp := compareValues(val1, val2)
if cmp != 0 {
if order.Direction == parsers.OrderDirectionDesc {
return cmp > 0
}
return cmp < 0
}
}
return i < j
} }
// Apply JOIN sort.SliceStable(documents, less)
if len(query.JoinItems) > 0 { }
iter = &joinIterator{
documents: iter, func applyGroupBy(documents []rowContext, groupBy []parsers.SelectItem) []rowContext {
query: query, groupedRows := make(map[string][]rowContext)
groupedKeys := make([]string, 0)
for _, row := range documents {
key := row.generateGroupByKey(groupBy)
if _, ok := groupedRows[key]; !ok {
groupedKeys = append(groupedKeys, key)
}
groupedRows[key] = append(groupedRows[key], row)
}
grouppedRows := make([]rowContext, 0)
for _, key := range groupedKeys {
grouppedRowContext := rowContext{
tables: groupedRows[key][0].tables,
parameters: groupedRows[key][0].parameters,
grouppedRows: groupedRows[key],
}
grouppedRows = append(grouppedRows, grouppedRowContext)
}
return grouppedRows
}
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
var keyBuilder strings.Builder
for _, selectItem := range groupBy {
value := r.resolveSelectItem(selectItem)
keyBuilder.WriteString(fmt.Sprintf("%v", value))
keyBuilder.WriteString(":")
}
return keyBuilder.String()
}
func applyProjection(documents []rowContext, selectItems []parsers.SelectItem, groupBy []parsers.SelectItem) []RowType {
if len(documents) == 0 {
return []RowType{}
}
if hasAggregateFunctions(selectItems) && len(groupBy) == 0 {
// A query can have aggregate functions without a GROUP BY clause;
// in that case all rows are aggregated together
rowContext := rowContext{
tables: documents[0].tables,
parameters: documents[0].parameters,
grouppedRows: documents,
}
return []RowType{rowContext.applyProjection(selectItems)}
}
projectedDocuments := make([]RowType, len(documents))
for index, row := range documents {
projectedDocuments[index] = row.applyProjection(selectItems)
}
return projectedDocuments
}
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
// When the first select item is top level, return its value instead of building a row
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
return r.resolveSelectItem(selectItems[0])
}
// Construct a new row based on the selected columns
row := make(map[string]interface{})
for index, selectItem := range selectItems {
destinationName := selectItem.Alias
if destinationName == "" {
if len(selectItem.Path) > 0 {
destinationName = selectItem.Path[len(selectItem.Path)-1]
} else {
destinationName = fmt.Sprintf("$%d", index+1)
}
if destinationName[0] == '@' {
destinationName = r.parameters[destinationName].(string)
} }
} }
// Apply WHERE row[destinationName] = r.resolveSelectItem(selectItem)
if query.Filters != nil { }
iter = &filterIterator{
documents: iter, return row
filters: query.Filters, }
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
if selectItem.Type == parsers.SelectItemTypeArray {
return r.selectItem_SelectItemTypeArray(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeObject {
return r.selectItem_SelectItemTypeObject(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeConstant {
return r.selectItem_SelectItemTypeConstant(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeSubQuery {
return r.selectItem_SelectItemTypeSubQuery(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
return nil
}
return r.selectItem_SelectItemTypeField(selectItem)
}
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
arrayValue := make([]interface{}, 0)
for _, subSelectItem := range selectItem.SelectItems {
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
}
return arrayValue
}
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
objectValue := make(map[string]interface{})
for _, subSelectItem := range selectItem.SelectItems {
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
}
return objectValue
}
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
var typedValue parsers.Constant
var ok bool
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
// TODO: Handle error
logger.ErrorLn("parsers.Constant has incorrect Value type")
}
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
r.parameters != nil {
if key, ok := typedValue.Value.(string); ok {
return r.parameters[key]
} }
} }
// Apply ORDER BY return typedValue.Value
if len(query.OrderExpressions) > 0 { }
iter = &orderIterator{
documents: iter, func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
orderExpressions: query.OrderExpressions, subQuery := selectItem.Value.(parsers.SelectStmt)
subQueryResult := ExecuteQuery(
subQuery,
[]RowType{r},
)
if subQuery.Exists {
return len(subQueryResult) > 0
}
return subQueryResult
}
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
switch functionCall.Type {
case parsers.FunctionCallStringEquals:
return r.strings_StringEquals(functionCall.Arguments)
case parsers.FunctionCallContains:
return r.strings_Contains(functionCall.Arguments)
case parsers.FunctionCallEndsWith:
return r.strings_EndsWith(functionCall.Arguments)
case parsers.FunctionCallStartsWith:
return r.strings_StartsWith(functionCall.Arguments)
case parsers.FunctionCallConcat:
return r.strings_Concat(functionCall.Arguments)
case parsers.FunctionCallIndexOf:
return r.strings_IndexOf(functionCall.Arguments)
case parsers.FunctionCallToString:
return r.strings_ToString(functionCall.Arguments)
case parsers.FunctionCallUpper:
return r.strings_Upper(functionCall.Arguments)
case parsers.FunctionCallLower:
return r.strings_Lower(functionCall.Arguments)
case parsers.FunctionCallLeft:
return r.strings_Left(functionCall.Arguments)
case parsers.FunctionCallLength:
return r.strings_Length(functionCall.Arguments)
case parsers.FunctionCallLTrim:
return r.strings_LTrim(functionCall.Arguments)
case parsers.FunctionCallReplace:
return r.strings_Replace(functionCall.Arguments)
case parsers.FunctionCallReplicate:
return r.strings_Replicate(functionCall.Arguments)
case parsers.FunctionCallReverse:
return r.strings_Reverse(functionCall.Arguments)
case parsers.FunctionCallRight:
return r.strings_Right(functionCall.Arguments)
case parsers.FunctionCallRTrim:
return r.strings_RTrim(functionCall.Arguments)
case parsers.FunctionCallSubstring:
return r.strings_Substring(functionCall.Arguments)
case parsers.FunctionCallTrim:
return r.strings_Trim(functionCall.Arguments)
case parsers.FunctionCallIsDefined:
return r.typeChecking_IsDefined(functionCall.Arguments)
case parsers.FunctionCallIsArray:
return r.typeChecking_IsArray(functionCall.Arguments)
case parsers.FunctionCallIsBool:
return r.typeChecking_IsBool(functionCall.Arguments)
case parsers.FunctionCallIsFiniteNumber:
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
case parsers.FunctionCallIsInteger:
return r.typeChecking_IsInteger(functionCall.Arguments)
case parsers.FunctionCallIsNull:
return r.typeChecking_IsNull(functionCall.Arguments)
case parsers.FunctionCallIsNumber:
return r.typeChecking_IsNumber(functionCall.Arguments)
case parsers.FunctionCallIsObject:
return r.typeChecking_IsObject(functionCall.Arguments)
case parsers.FunctionCallIsPrimitive:
return r.typeChecking_IsPrimitive(functionCall.Arguments)
case parsers.FunctionCallIsString:
return r.typeChecking_IsString(functionCall.Arguments)
case parsers.FunctionCallArrayConcat:
return r.array_Concat(functionCall.Arguments)
case parsers.FunctionCallArrayContains:
return r.array_Contains(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAny:
return r.array_Contains_Any(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAll:
return r.array_Contains_All(functionCall.Arguments)
case parsers.FunctionCallArrayLength:
return r.array_Length(functionCall.Arguments)
case parsers.FunctionCallArraySlice:
return r.array_Slice(functionCall.Arguments)
case parsers.FunctionCallSetIntersect:
return r.set_Intersect(functionCall.Arguments)
case parsers.FunctionCallSetUnion:
return r.set_Union(functionCall.Arguments)
case parsers.FunctionCallMathAbs:
return r.math_Abs(functionCall.Arguments)
case parsers.FunctionCallMathAcos:
return r.math_Acos(functionCall.Arguments)
case parsers.FunctionCallMathAsin:
return r.math_Asin(functionCall.Arguments)
case parsers.FunctionCallMathAtan:
return r.math_Atan(functionCall.Arguments)
case parsers.FunctionCallMathCeiling:
return r.math_Ceiling(functionCall.Arguments)
case parsers.FunctionCallMathCos:
return r.math_Cos(functionCall.Arguments)
case parsers.FunctionCallMathCot:
return r.math_Cot(functionCall.Arguments)
case parsers.FunctionCallMathDegrees:
return r.math_Degrees(functionCall.Arguments)
case parsers.FunctionCallMathExp:
return r.math_Exp(functionCall.Arguments)
case parsers.FunctionCallMathFloor:
return r.math_Floor(functionCall.Arguments)
case parsers.FunctionCallMathIntBitNot:
return r.math_IntBitNot(functionCall.Arguments)
case parsers.FunctionCallMathLog10:
return r.math_Log10(functionCall.Arguments)
case parsers.FunctionCallMathRadians:
return r.math_Radians(functionCall.Arguments)
case parsers.FunctionCallMathRound:
return r.math_Round(functionCall.Arguments)
case parsers.FunctionCallMathSign:
return r.math_Sign(functionCall.Arguments)
case parsers.FunctionCallMathSin:
return r.math_Sin(functionCall.Arguments)
case parsers.FunctionCallMathSqrt:
return r.math_Sqrt(functionCall.Arguments)
case parsers.FunctionCallMathSquare:
return r.math_Square(functionCall.Arguments)
case parsers.FunctionCallMathTan:
return r.math_Tan(functionCall.Arguments)
case parsers.FunctionCallMathTrunc:
return r.math_Trunc(functionCall.Arguments)
case parsers.FunctionCallMathAtn2:
return r.math_Atn2(functionCall.Arguments)
case parsers.FunctionCallMathIntAdd:
return r.math_IntAdd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitAnd:
return r.math_IntBitAnd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitLeftShift:
return r.math_IntBitLeftShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitOr:
return r.math_IntBitOr(functionCall.Arguments)
case parsers.FunctionCallMathIntBitRightShift:
return r.math_IntBitRightShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitXor:
return r.math_IntBitXor(functionCall.Arguments)
case parsers.FunctionCallMathIntDiv:
return r.math_IntDiv(functionCall.Arguments)
case parsers.FunctionCallMathIntMod:
return r.math_IntMod(functionCall.Arguments)
case parsers.FunctionCallMathIntMul:
return r.math_IntMul(functionCall.Arguments)
case parsers.FunctionCallMathIntSub:
return r.math_IntSub(functionCall.Arguments)
case parsers.FunctionCallMathPower:
return r.math_Power(functionCall.Arguments)
case parsers.FunctionCallMathLog:
return r.math_Log(functionCall.Arguments)
case parsers.FunctionCallMathNumberBin:
return r.math_NumberBin(functionCall.Arguments)
case parsers.FunctionCallMathPi:
return r.math_Pi()
case parsers.FunctionCallMathRand:
return r.math_Rand()
case parsers.FunctionCallAggregateAvg:
return r.aggregate_Avg(functionCall.Arguments)
case parsers.FunctionCallAggregateCount:
return r.aggregate_Count(functionCall.Arguments)
case parsers.FunctionCallAggregateMax:
return r.aggregate_Max(functionCall.Arguments)
case parsers.FunctionCallAggregateMin:
return r.aggregate_Min(functionCall.Arguments)
case parsers.FunctionCallAggregateSum:
return r.aggregate_Sum(functionCall.Arguments)
case parsers.FunctionCallIn:
return r.misc_In(functionCall.Arguments)
}
logger.Errorf("Unknown function call type: %v", functionCall.Type)
return nil
}
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
value := r.tables[selectItem.Path[0]]
if len(selectItem.Path) > 1 {
for _, pathSegment := range selectItem.Path[1:] {
if pathSegment[0] == '@' {
pathSegment = r.parameters[pathSegment].(string)
}
switch nestedValue := value.(type) {
case map[string]interface{}:
value = nestedValue[pathSegment]
case map[string]RowType:
value = nestedValue[pathSegment]
case []int, []string, []interface{}:
slice := reflect.ValueOf(nestedValue)
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
value = slice.Index(arrayIndex).Interface()
} else {
return nil
}
default:
return nil
}
} }
} }
// Apply GROUP BY return value
if len(query.GroupBy) > 0 { }
iter = &groupByIterator{
documents: iter, func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
groupBy: query.GroupBy, if selectItems == nil {
return false
}
for _, selectItem := range selectItems {
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
return true
} }
} }
// Apply SELECT if hasAggregateFunctions(selectItem.SelectItems) {
var projectedIterator rowTypeIterator = &projectIterator{ return true
documents: iter,
selectItems: query.SelectItems,
groupBy: query.GroupBy,
}
// Apply DISTINCT
if query.Distinct {
projectedIterator = &distinctIterator{
documents: projectedIterator,
} }
} }
// Apply OFFSET return false
if query.Offset > 0 { }
projectedIterator = &offsetIterator{
documents: projectedIterator, func compareValues(val1, val2 interface{}) int {
offset: query.Offset, if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
return 1
}
switch val1 := val1.(type) {
case int:
val2 := val2.(int)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case float64:
val2 := val2.(float64)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case string:
val2 := val2.(string)
return strings.Compare(val1, val2)
case bool:
val2 := val2.(bool)
if val1 == val2 {
return 0
} else if val1 {
return 1
} else {
return -1
}
// TODO: Add more types
default:
if reflect.DeepEqual(val1, val2) {
return 0
}
return 1
} }
} }
// Apply LIMIT func deduplicate[T RowType | interface{}](slice []T) []T {
if query.Count > 0 { var result []T
projectedIterator = &limitIterator{ result = make([]T, 0)
documents: projectedIterator,
limit: query.Count, for i := 0; i < len(slice); i++ {
unique := true
for j := 0; j < len(result); j++ {
if compareValues(slice[i], result[j]) == 0 {
unique = false
break
} }
} }
return projectedIterator if unique {
result = append(result, slice[i])
}
}
return result
}
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
targetMap := make(map[string]T)
for k, v := range originalMap {
targetMap[k] = v
}
return targetMap
} }

Some files were not shown because too many files have changed in this diff.