25 Commits

Author SHA1 Message Date
Pijus Kamandulis b2516eda9f Stability improvements 2025-03-12 22:00:30 +02:00
Pijus Kamandulis 813b9faeaa Added support for Badger as an alternative storage backend 2025-03-12 21:06:10 +02:00
Pijus Kamandulis e526b2269e Refactored query engine utilizing iterators 2025-03-11 17:36:28 +02:00
Pijus Kamandulis 221f029a1d DataStore is interface now. Liskov would be proud. 2025-03-09 18:34:07 +02:00
Pijus Kamandulis bd4fe5abec Update azcosmos package 2025-02-25 20:43:23 +02:00
Pijus Kamandulis f062e03f0c Update packages 2025-02-25 19:56:02 +02:00
Pijus Kamandulis 058b3271b7 OrderBy should bring NULL values to front 2025-02-25 19:47:29 +02:00
Pijus Kamandulis 1711c8fb5c Implement NOT logical operator 2025-02-25 19:33:32 +02:00
Pijus Kamandulis 851b3ca3a8 Fix IN clause with function calls 2025-02-20 18:45:20 +02:00
Pijus Kamandulis d27c633e1d Better handling when passing null to string functions 2025-02-18 20:11:11 +02:00
Pijus Kamandulis 3987df89c0 Upgrade to golang 1.24.0 2025-02-18 19:16:21 +02:00
Pijus Kamandulis 6e3f4169a1 Fix 'ComparisonOperator' parsing 2025-02-18 19:12:08 +02:00
Pijus Kamandulis 14c5400d23 Keep old explorer images tagged with version 2025-02-09 22:42:51 +02:00
Pijus Kamandulis 1cf5ae92f4 Shared library stability improvements 2025-02-09 11:45:10 +02:00
Pijus Kamandulis 5d99b653cc Generate more realistic resource ids 2025-02-09 00:36:35 +02:00
Pijus Kamandulis 787cdb33cf Fix OFFSET clause 2025-02-08 15:28:06 +02:00
Pijus Kamandulis 5caa829ac1 Implement 'Transactional batch operations' 2025-02-04 20:35:15 +02:00
Pijus Kamandulis 887d456ad4 Return error code if server fails to start 2025-02-03 22:58:45 +02:00
Pijus Kamandulis da1566875b Wait for server shutdown when stopping server 2025-02-03 22:21:54 +02:00
Pijus Kamandulis 3fee3bc816 Fix ARRAY_CONTAINS partial matches for nested objects 2025-02-03 19:29:29 +02:00
Pijus Kamandulis 8657c48fc8 Added support for table alias; Make AS keyword optional #9 2025-02-03 19:02:12 +02:00
Pijus Kamandulis e080888c20 Add shared-libraries to release 2025-01-28 23:28:23 +02:00
Pijus Kamandulis b8d79fd945 Upgrade upload-artifact pipeline action 2025-01-28 21:31:05 +02:00
Pijus Kamandulis f25cb7fb03 Stamp binaries with version control information 2025-01-28 21:15:49 +02:00
Pijus Kamandulis 125f10d8a2 Add more error handling and mutex guards 2025-01-27 21:09:37 +02:00
110 changed files with 6685 additions and 3994 deletions

View File

@@ -15,16 +15,17 @@ jobs:
uses: crazy-max/ghaction-xgo@v3.1.0
with:
xgo_version: latest
go_version: 1.22.0
go_version: 1.24.0
dest: dist
pkg: sharedlibrary
prefix: cosmium
targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
v: true
buildmode: c-shared
buildvcs: true
- name: Upload artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: shared-libraries
path: dist/*

View File

@@ -17,16 +17,32 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: 1.22.0
go-version: 1.24.0
- name: Cross-Compile with xgo
uses: crazy-max/ghaction-xgo@v3.1.0
with:
xgo_version: latest
go_version: 1.24.0
dest: sharedlibrary_dist
pkg: sharedlibrary
prefix: cosmium
targets: linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
v: true
buildmode: c-shared
buildvcs: true
- name: Docker Login
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v5
with:

1 .gitignore (vendored)
View File

@@ -1,4 +1,5 @@
dist/
sharedlibrary_dist/
ignored/
explorer_www/
main

View File

@@ -26,6 +26,23 @@ brews:
commit_author:
name: pikami
email: git@pikami.org
skip_upload: auto
archives:
- id: bundle
format: tar.gz
format_overrides:
- goos: windows
format: zip
- id: shared-libraries
meta: true
format: "tar.gz"
wrap_in_directory: true
name_template: "{{ .ProjectName }}_{{ .Version }}_shared-libraries"
files:
- LICENSE
- README.md
- sharedlibrary_dist/**
dockers:
- id: docker-linux-amd64
@@ -33,7 +50,6 @@ dockers:
goarch: amd64
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:latest-amd64"
dockerfile: Dockerfile
use: buildx
build_flag_templates:
@@ -51,9 +67,7 @@ dockers:
goarch: arm64
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64v8"
dockerfile: Dockerfile
use: buildx
build_flag_templates:
@@ -104,16 +118,25 @@ dockers:
docker_manifests:
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:latest'
skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:latest-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:latest-arm64v8"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}'
skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-arm64v8"
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:explorer'
skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer'
skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"

View File

@@ -9,7 +9,7 @@ SERVER_LOCATION=./cmd/server
SHARED_LIB_LOCATION=./sharedlibrary
SHARED_LIB_OPT=-buildmode=c-shared
XGO_TARGETS=linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
GOVERSION=1.22.0
GOVERSION=1.24.0
DIST_DIR=dist
@@ -51,6 +51,10 @@ build-sharedlib-linux-amd64:
@echo "Building shared library for Linux x64..."
@GOOS=linux GOARCH=amd64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so $(SHARED_LIB_LOCATION)
build-sharedlib-darwin-arm64:
@echo "Building shared library for macOS ARM..."
@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64.so $(SHARED_LIB_LOCATION)
build-sharedlib-tests: build-sharedlib-linux-amd64
@echo "Building shared library tests..."
@$(SHARED_LIB_TEST_CC) $(SHARED_LIB_TEST_CFLAGS) -o $(SHARED_LIB_TEST_TARGET) $(SHARED_LIB_TEST_SOURCES)

24 api/api_models/models.go (normal file)
View File

@@ -0,0 +1,24 @@
package apimodels
const (
BatchOperationTypeCreate = "Create"
BatchOperationTypeDelete = "Delete"
BatchOperationTypeReplace = "Replace"
BatchOperationTypeUpsert = "Upsert"
BatchOperationTypeRead = "Read"
BatchOperationTypePatch = "Patch"
)
type BatchOperation struct {
OperationType string `json:"operationType"`
Id string `json:"id"`
ResourceBody map[string]interface{} `json:"resourceBody"`
}
type BatchOperationResult struct {
StatusCode int `json:"statusCode"`
RequestCharge float64 `json:"requestCharge"`
ResourceBody map[string]interface{} `json:"resourceBody"`
Etag string `json:"etag"`
Message string `json:"message"`
}
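
The batch handler added in documents.go further down binds the request body directly into a slice of these BatchOperation values, so a batch request is simply a JSON array of operations sent with the x-ms-cosmos-is-batch-request header. A minimal sketch of building such a payload, mirroring the struct above (the document fields are illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors apimodels.BatchOperation above.
type BatchOperation struct {
	OperationType string                 `json:"operationType"`
	Id            string                 `json:"id"`
	ResourceBody  map[string]interface{} `json:"resourceBody"`
}

func main() {
	ops := []BatchOperation{
		{OperationType: "Create", ResourceBody: map[string]interface{}{"id": "doc1", "pk": "123"}},
		{OperationType: "Read", Id: "doc1"},
		{OperationType: "Delete", Id: "doc1"},
	}

	payload, err := json.Marshal(ops)
	if err != nil {
		panic(err)
	}
	// POSTed to the documents endpoint with x-ms-cosmos-is-batch-request: true
	fmt.Println(string(payload))
}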

View File

@@ -3,25 +3,28 @@ package api
import (
"github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
)
type ApiServer struct {
stopServer chan interface{}
isActive bool
router *gin.Engine
config config.ServerConfig
stopServer chan interface{}
onServerShutdown chan interface{}
isActive bool
router *gin.Engine
config *config.ServerConfig
}
func NewApiServer(dataRepository *repositories.DataRepository, config config.ServerConfig) *ApiServer {
func NewApiServer(dataStore datastore.DataStore, config *config.ServerConfig) *ApiServer {
stopChan := make(chan interface{})
onServerShutdownChan := make(chan interface{})
apiServer := &ApiServer{
stopServer: stopChan,
config: config,
stopServer: stopChan,
onServerShutdown: onServerShutdownChan,
config: config,
}
apiServer.CreateRouter(dataRepository)
apiServer.CreateRouter(dataStore)
return apiServer
}
@@ -32,4 +35,5 @@ func (s *ApiServer) GetRouter() *gin.Engine {
func (s *ApiServer) Stop() {
s.stopServer <- true
<-s.onServerShutdown
}
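
For reference, a minimal sketch of wiring the refactored constructor together; it assumes the map-backed store from internal/datastore/map_datastore (the same one the tests below use) and elides any real application logic:

package main

import (
	"log"

	"github.com/pikami/cosmium/api"
	"github.com/pikami/cosmium/api/config"
	mapdatastore "github.com/pikami/cosmium/internal/datastore/map_datastore"
)

func main() {
	cfg := config.ParseFlags()

	// Any datastore.DataStore implementation can be injected here.
	store := mapdatastore.NewMapDataStore(mapdatastore.MapDataStoreOptions{})

	server := api.NewApiServer(store, &cfg)
	if err := server.Start(); err != nil { // Start now reports listener failures
		log.Fatalf("failed to start server: %v", err)
	}

	// ... serve requests ...

	server.Stop() // blocks until the HTTP server has shut down
}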

View File

@@ -15,6 +15,11 @@ const (
ExplorerBaseUrlLocation = "/_explorer"
)
const (
DataStoreMap = "map"
DataStoreBadger = "badger"
)
func ParseFlags() ServerConfig {
host := flag.String("Host", "localhost", "Hostname")
port := flag.Int("Port", 8081, "Listen port")
@@ -28,6 +33,8 @@ func ParseFlags() ServerConfig {
persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
logLevel := NewEnumValue("info", []string{"debug", "info", "error", "silent"})
flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))
dataStore := NewEnumValue("map", []string{DataStoreMap, DataStoreBadger})
flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %s, (badger is currently in the experimental phase)", dataStore.AllowedValuesList()))
flag.Parse()
setFlagsFromEnvironment()
@@ -44,6 +51,7 @@ func ParseFlags() ServerConfig {
config.DisableTls = *disableTls
config.AccountKey = *accountKey
config.LogLevel = logLevel.value
config.DataStore = dataStore.value
config.PopulateCalculatedFields()
@@ -58,15 +66,22 @@ func (c *ServerConfig) PopulateCalculatedFields() {
switch c.LogLevel {
case "debug":
logger.LogLevel = logger.LogLevelDebug
logger.SetLogLevel(logger.LogLevelDebug)
case "info":
logger.LogLevel = logger.LogLevelInfo
logger.SetLogLevel(logger.LogLevelInfo)
case "error":
logger.LogLevel = logger.LogLevelError
logger.SetLogLevel(logger.LogLevelError)
case "silent":
logger.LogLevel = logger.LogLevelSilent
logger.SetLogLevel(logger.LogLevelSilent)
default:
logger.LogLevel = logger.LogLevelInfo
logger.SetLogLevel(logger.LogLevelInfo)
}
if c.DataStore == DataStoreBadger &&
(c.InitialDataFilePath != "" || c.PersistDataFilePath != "") {
logger.ErrorLn("InitialData and Persist options are currently not supported with Badger data store")
c.InitialDataFilePath = ""
c.PersistDataFilePath = ""
}
}

View File

@@ -17,4 +17,6 @@ type ServerConfig struct {
DisableTls bool `json:"disableTls"`
LogLevel string `json:"logLevel"`
ExplorerBaseUrlLocation string `json:"explorerBaseUrlLocation"`
DataStore string `json:"dataStore"`
}
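
A hedged sketch of selecting the new option from code rather than flags; the field names come from the ServerConfig diff above, and PopulateCalculatedFields clears the InitialData/Persist paths when Badger is chosen, as shown earlier:

package example

import "github.com/pikami/cosmium/api/config"

func badgerConfig() *config.ServerConfig {
	cfg := &config.ServerConfig{
		AccountKey: config.DefaultAccountKey,
		LogLevel:   "info",
		DataStore:  config.DataStoreBadger, // experimental Badger backend
	}
	cfg.PopulateCalculatedFields()
	return cfg
}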

View File

@@ -5,15 +5,15 @@ import (
"net/http"
"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllCollections(c *gin.Context) {
databaseId := c.Param("databaseId")
collections, status := h.repository.GetAllCollections(databaseId)
if status == repositorymodels.StatusOk {
database, _ := h.repository.GetDatabase(databaseId)
collections, status := h.dataStore.GetAllCollections(databaseId)
if status == datastore.StatusOk {
database, _ := h.dataStore.GetDatabase(databaseId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections)))
c.IndentedJSON(http.StatusOK, gin.H{
@@ -31,13 +31,13 @@ func (h *Handlers) GetCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
id := c.Param("collId")
collection, status := h.repository.GetCollection(databaseId, id)
if status == repositorymodels.StatusOk {
collection, status := h.dataStore.GetCollection(databaseId, id)
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, collection)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -49,13 +49,13 @@ func (h *Handlers) DeleteCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
id := c.Param("collId")
status := h.repository.DeleteCollection(databaseId, id)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteCollection(databaseId, id)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -65,7 +65,7 @@ func (h *Handlers) DeleteCollection(c *gin.Context) {
func (h *Handlers) CreateCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
var newCollection repositorymodels.Collection
var newCollection datastore.Collection
if err := c.BindJSON(&newCollection); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -77,13 +77,13 @@ func (h *Handlers) CreateCollection(c *gin.Context) {
return
}
createdCollection, status := h.repository.CreateCollection(databaseId, newCollection)
if status == repositorymodels.Conflict {
createdCollection, status := h.dataStore.CreateCollection(databaseId, newCollection)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdCollection)
return
}

View File

@@ -7,11 +7,11 @@ import (
)
func (h *Handlers) CosmiumExport(c *gin.Context) {
repositoryState, err := h.repository.GetState()
dataStoreState, err := h.dataStore.DumpToJson()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
c.Data(http.StatusOK, "application/json", []byte(repositoryState))
c.Data(http.StatusOK, "application/json", []byte(dataStoreState))
}

View File

@@ -5,12 +5,12 @@ import (
"net/http"
"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllDatabases(c *gin.Context) {
databases, status := h.repository.GetAllDatabases()
if status == repositorymodels.StatusOk {
databases, status := h.dataStore.GetAllDatabases()
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases)))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": "",
@@ -26,13 +26,13 @@ func (h *Handlers) GetAllDatabases(c *gin.Context) {
func (h *Handlers) GetDatabase(c *gin.Context) {
id := c.Param("databaseId")
database, status := h.repository.GetDatabase(id)
if status == repositorymodels.StatusOk {
database, status := h.dataStore.GetDatabase(id)
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, database)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -43,13 +43,13 @@ func (h *Handlers) GetDatabase(c *gin.Context) {
func (h *Handlers) DeleteDatabase(c *gin.Context) {
id := c.Param("databaseId")
status := h.repository.DeleteDatabase(id)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteDatabase(id)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -58,7 +58,7 @@ func (h *Handlers) DeleteDatabase(c *gin.Context) {
}
func (h *Handlers) CreateDatabase(c *gin.Context) {
var newDatabase repositorymodels.Database
var newDatabase datastore.Database
if err := c.BindJSON(&newDatabase); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -70,13 +70,13 @@ func (h *Handlers) CreateDatabase(c *gin.Context) {
return
}
createdDatabase, status := h.repository.CreateDatabase(newDatabase)
if status == repositorymodels.Conflict {
createdDatabase, status := h.dataStore.CreateDatabase(newDatabase)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDatabase)
return
}

View File

@@ -8,18 +8,23 @@ import (
jsonpatch "github.com/cosmiumdev/json-patch/v5"
"github.com/gin-gonic/gin"
apimodels "github.com/pikami/cosmium/api/api_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/converters"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/parsers"
"github.com/pikami/cosmium/parsers/nosql"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)
func (h *Handlers) GetAllDocuments(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
documents, status := h.repository.GetAllDocuments(databaseId, collectionId)
if status == repositorymodels.StatusOk {
collection, _ := h.repository.GetCollection(databaseId, collectionId)
documents, status := h.dataStore.GetAllDocuments(databaseId, collectionId)
if status == datastore.StatusOk {
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(documents)))
c.IndentedJSON(http.StatusOK, gin.H{
@@ -38,13 +43,13 @@ func (h *Handlers) GetDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")
document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusOk {
document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, document)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -57,13 +62,13 @@ func (h *Handlers) DeleteDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")
status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -71,7 +76,7 @@ func (h *Handlers) DeleteDocument(c *gin.Context) {
c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
}
// TODO: Maybe move "replace" logic to repository
// TODO: Maybe move "replace" logic to data store
func (h *Handlers) ReplaceDocument(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
@@ -83,19 +88,19 @@ func (h *Handlers) ReplaceDocument(c *gin.Context) {
return
}
status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusNotFound {
status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
if status == repositorymodels.Conflict {
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}
@@ -108,8 +113,8 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")
document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusNotFound {
document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -159,19 +164,19 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
return
}
status = h.repository.DeleteDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusNotFound {
status = h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, modifiedDocument)
if status == repositorymodels.Conflict {
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, modifiedDocument)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}
@@ -183,6 +188,13 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
// Handle batch requests
isBatchRequest, _ := strconv.ParseBool(c.GetHeader("x-ms-cosmos-is-batch-request"))
if isBatchRequest {
h.handleBatchRequest(c)
return
}
var requestBody map[string]interface{}
if err := c.BindJSON(&requestBody); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -191,30 +203,7 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
query := requestBody["query"]
if query != nil {
if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
return
}
var queryParameters map[string]interface{}
if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
queryParameters = parametersToMap(paramsArray)
}
docs, status := h.repository.ExecuteQueryDocuments(databaseId, collectionId, query.(string), queryParameters)
if status != repositorymodels.StatusOk {
// TODO: Currently we return everything if the query fails
h.GetAllDocuments(c)
return
}
collection, _ := h.repository.GetCollection(databaseId, collectionId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": collection.ResourceID,
"Documents": docs,
"_count": len(docs),
})
h.handleDocumentQuery(c, requestBody)
return
}
@@ -225,16 +214,16 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
isUpsert, _ := strconv.ParseBool(c.GetHeader("x-ms-documentdb-is-upsert"))
if isUpsert {
h.repository.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
h.dataStore.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
}
createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
if status == repositorymodels.Conflict {
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}
@@ -253,3 +242,155 @@ func parametersToMap(pairs []interface{}) map[string]interface{} {
return result
}
func (h *Handlers) handleDocumentQuery(c *gin.Context, requestBody map[string]interface{}) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
return
}
var queryParameters map[string]interface{}
if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
queryParameters = parametersToMap(paramsArray)
}
queryText := requestBody["query"].(string)
docs, status := h.executeQueryDocuments(databaseId, collectionId, queryText, queryParameters)
if status != datastore.StatusOk {
// TODO: Currently we return everything if the query fails
h.GetAllDocuments(c)
return
}
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": collection.ResourceID,
"Documents": docs,
"_count": len(docs),
})
}
func (h *Handlers) handleBatchRequest(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
batchOperations := make([]apimodels.BatchOperation, 0)
if err := c.BindJSON(&batchOperations); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
return
}
batchOperationResults := make([]apimodels.BatchOperationResult, len(batchOperations))
for idx, operation := range batchOperations {
switch operation.OperationType {
case apimodels.BatchOperationTypeCreate:
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := dataStoreStatusToResponseCode(status)
if status == datastore.StatusOk {
responseCode = http.StatusCreated
}
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: responseCode,
ResourceBody: createdDocument,
}
case apimodels.BatchOperationTypeDelete:
status := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
responseCode := dataStoreStatusToResponseCode(status)
if status == datastore.StatusOk {
responseCode = http.StatusNoContent
}
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: responseCode,
}
case apimodels.BatchOperationTypeReplace:
deleteStatus := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
if deleteStatus == datastore.StatusNotFound {
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: http.StatusNotFound,
}
continue
}
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := dataStoreStatusToResponseCode(createStatus)
if createStatus == datastore.StatusOk {
responseCode = http.StatusCreated
}
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: responseCode,
ResourceBody: createdDocument,
}
case apimodels.BatchOperationTypeUpsert:
documentId := operation.ResourceBody["id"].(string)
h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := dataStoreStatusToResponseCode(createStatus)
if createStatus == datastore.StatusOk {
responseCode = http.StatusCreated
}
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: responseCode,
ResourceBody: createdDocument,
}
case apimodels.BatchOperationTypeRead:
document, status := h.dataStore.GetDocument(databaseId, collectionId, operation.Id)
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: dataStoreStatusToResponseCode(status),
ResourceBody: document,
}
case apimodels.BatchOperationTypePatch:
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: http.StatusNotImplemented,
Message: "Patch operation is not implemented",
}
default:
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: http.StatusBadRequest,
Message: "Unknown operation type",
}
}
}
c.JSON(http.StatusOK, batchOperationResults)
}
func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
switch status {
case datastore.StatusOk:
return http.StatusOK
case datastore.StatusNotFound:
return http.StatusNotFound
case datastore.Conflict:
return http.StatusConflict
case datastore.BadRequest:
return http.StatusBadRequest
default:
return http.StatusInternalServerError
}
}
func (h *Handlers) executeQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, datastore.DataStoreStatus) {
parsedQuery, err := nosql.Parse("", []byte(query))
if err != nil {
logger.Errorf("Failed to parse query: %s\nerr: %v", query, err)
return nil, datastore.BadRequest
}
allDocumentsIterator, status := h.dataStore.GetDocumentIterator(databaseId, collectionId)
if status != datastore.StatusOk {
return nil, status
}
defer allDocumentsIterator.Close()
rowsIterator := converters.NewDocumentToRowTypeIterator(allDocumentsIterator)
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
typedQuery.Parameters = queryParameters
return memoryexecutor.ExecuteQuery(typedQuery, rowsIterator), datastore.StatusOk
}
return nil, datastore.BadRequest
}
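
To round out the picture, a hedged sketch of exercising the new batch path end to end through the azcosmos SDK (the client library this repository's tests use); the container setup is assumed to exist already, the method names are from the SDK rather than this diff, and per handleBatchRequest above a Patch operation would come back as 501:

package example

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
)

// runBatch issues a create/read/delete batch against one partition key.
func runBatch(container *azcosmos.ContainerClient) error {
	batch := container.NewTransactionalBatch(azcosmos.NewPartitionKeyString("123"))

	doc, err := json.Marshal(map[string]interface{}{"id": "doc1", "pk": "123"})
	if err != nil {
		return err
	}
	batch.CreateItem(doc, nil)
	batch.ReadItem("doc1", nil)
	batch.DeleteItem("doc1", nil)

	response, err := container.ExecuteTransactionalBatch(context.TODO(), batch, nil)
	if err != nil {
		return err
	}
	for _, result := range response.OperationResults {
		// Status codes are produced server-side by dataStoreStatusToResponseCode.
		fmt.Println(result.StatusCode)
	}
	return nil
}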

View File

@@ -2,17 +2,17 @@ package handlers
import (
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
)
type Handlers struct {
repository *repositories.DataRepository
config config.ServerConfig
dataStore datastore.DataStore
config *config.ServerConfig
}
func NewHandlers(dataRepository *repositories.DataRepository, config config.ServerConfig) *Handlers {
func NewHandlers(dataStore datastore.DataStore, config *config.ServerConfig) *Handlers {
return &Handlers{
repository: dataRepository,
config: config,
dataStore: dataStore,
config: config,
}
}

View File

@@ -10,7 +10,7 @@ import (
"github.com/pikami/cosmium/internal/logger"
)
func Authentication(config config.ServerConfig) gin.HandlerFunc {
func Authentication(config *config.ServerConfig) gin.HandlerFunc {
return func(c *gin.Context) {
requestUrl := c.Request.URL.String()
if config.DisableAuth ||
@@ -75,8 +75,7 @@ func requestToResourceId(c *gin.Context) string {
isFeed := c.Request.Header.Get("A-Im") == "Incremental Feed"
if resourceType == "pkranges" && isFeed {
// CosmosSDK replaces '/' with '-' in resource id requests
resourceId = strings.Replace(collId, "-", "/", -1)
resourceId = collId
}
return resourceId

View File

@@ -7,7 +7,7 @@ import (
"github.com/pikami/cosmium/api/config"
)
func StripTrailingSlashes(r *gin.Engine, config config.ServerConfig) gin.HandlerFunc {
func StripTrailingSlashes(r *gin.Engine, config *config.ServerConfig) gin.HandlerFunc {
return func(c *gin.Context) {
path := c.Request.URL.Path
if len(path) > 1 && path[len(path)-1] == '/' && !strings.Contains(path, config.ExplorerBaseUrlLocation) {

View File

@@ -5,7 +5,8 @@ import (
"net/http"
"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
)
func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
@@ -17,8 +18,8 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
return
}
partitionKeyRanges, status := h.repository.GetPartitionKeyRanges(databaseId, collectionId)
if status == repositorymodels.StatusOk {
partitionKeyRanges, status := h.dataStore.GetPartitionKeyRanges(databaseId, collectionId)
if status == datastore.StatusOk {
c.Header("etag", "\"420\"")
c.Header("lsn", "420")
c.Header("x-ms-cosmos-llsn", "420")
@@ -26,20 +27,21 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
collectionRid := collectionId
collection, _ := h.repository.GetCollection(databaseId, collectionId)
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
if collection.ResourceID != "" {
collectionRid = collection.ResourceID
}
rid := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": collectionRid,
"_rid": rid,
"_count": len(partitionKeyRanges),
"PartitionKeyRanges": partitionKeyRanges,
})
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}

View File

@@ -27,7 +27,9 @@ func (h *Handlers) GetServerInfo(c *gin.Context) {
"databaseAccountEndpoint": h.config.DatabaseEndpoint,
},
},
"enableMultipleWriteLocations": false,
"enableMultipleWriteLocations": false,
"continuousBackupEnabled": false,
"enableNRegionSynchronousCommit": false,
"userReplicationPolicy": map[string]interface{}{
"asyncReplication": false,
"minReplicaSetSize": 1,

View File

@@ -5,16 +5,16 @@ import (
"net/http"
"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllStoredProcedures(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
sps, status := h.repository.GetAllStoredProcedures(databaseId, collectionId)
sps, status := h.dataStore.GetAllStoredProcedures(databaseId, collectionId)
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
return
@@ -28,14 +28,14 @@ func (h *Handlers) GetStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")
sp, status := h.repository.GetStoredProcedure(databaseId, collectionId, spId)
sp, status := h.dataStore.GetStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, sp)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -48,13 +48,13 @@ func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")
status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -67,25 +67,25 @@ func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")
var sp repositorymodels.StoredProcedure
var sp datastore.StoredProcedure
if err := c.BindJSON(&sp); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusNotFound {
status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp)
if status == repositorymodels.Conflict {
createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdSP)
return
}
@@ -97,19 +97,19 @@ func (h *Handlers) CreateStoredProcedure(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
var sp repositorymodels.StoredProcedure
var sp datastore.StoredProcedure
if err := c.BindJSON(&sp); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp)
if status == repositorymodels.Conflict {
createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdSP)
return
}

View File

@@ -5,16 +5,16 @@ import (
"net/http"
"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllTriggers(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
triggers, status := h.repository.GetAllTriggers(databaseId, collectionId)
triggers, status := h.dataStore.GetAllTriggers(databaseId, collectionId)
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
return
@@ -28,14 +28,14 @@ func (h *Handlers) GetTrigger(c *gin.Context) {
collectionId := c.Param("collId")
triggerId := c.Param("triggerId")
trigger, status := h.repository.GetTrigger(databaseId, collectionId, triggerId)
trigger, status := h.dataStore.GetTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, trigger)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -48,13 +48,13 @@ func (h *Handlers) DeleteTrigger(c *gin.Context) {
collectionId := c.Param("collId")
triggerId := c.Param("triggerId")
status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -67,25 +67,25 @@ func (h *Handlers) ReplaceTrigger(c *gin.Context) {
collectionId := c.Param("collId")
triggerId := c.Param("triggerId")
var trigger repositorymodels.Trigger
var trigger datastore.Trigger
if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusNotFound {
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger)
if status == repositorymodels.Conflict {
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdTrigger)
return
}
@@ -97,19 +97,19 @@ func (h *Handlers) CreateTrigger(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
var trigger repositorymodels.Trigger
var trigger datastore.Trigger
if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger)
if status == repositorymodels.Conflict {
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdTrigger)
return
}

View File

@@ -5,16 +5,16 @@ import (
"net/http"
"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)
func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
udfs, status := h.repository.GetAllUserDefinedFunctions(databaseId, collectionId)
udfs, status := h.dataStore.GetAllUserDefinedFunctions(databaseId, collectionId)
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
return
@@ -28,14 +28,14 @@ func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId")
udfId := c.Param("udfId")
udf, status := h.repository.GetUserDefinedFunction(databaseId, collectionId, udfId)
udf, status := h.dataStore.GetUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, udf)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -48,13 +48,13 @@ func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId")
udfId := c.Param("udfId")
status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}
if status == repositorymodels.StatusNotFound {
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
@@ -67,25 +67,25 @@ func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId")
udfId := c.Param("udfId")
var udf repositorymodels.UserDefinedFunction
var udf datastore.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusNotFound {
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return
}
createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == repositorymodels.Conflict {
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdUdf)
return
}
@@ -97,19 +97,19 @@ func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")
var udf repositorymodels.UserDefinedFunction
var udf datastore.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return
}
createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == repositorymodels.Conflict {
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return
}
if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdUdf)
return
}

View File

@@ -4,24 +4,30 @@ import (
"context"
"fmt"
"net/http"
"sync"
"time"
"github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/handlers"
"github.com/pikami/cosmium/api/handlers/middleware"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/repositories"
tlsprovider "github.com/pikami/cosmium/internal/tls_provider"
)
func (s *ApiServer) CreateRouter(repository *repositories.DataRepository) {
routeHandlers := handlers.NewHandlers(repository, s.config)
var ginMux sync.Mutex
func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
routeHandlers := handlers.NewHandlers(dataStore, s.config)
ginMux.Lock()
gin.DefaultWriter = logger.InfoWriter()
gin.DefaultErrorWriter = logger.ErrorWriter()
if s.config.LogLevel != "debug" {
gin.SetMode(gin.ReleaseMode)
}
ginMux.Unlock()
router := gin.Default(func(e *gin.Engine) {
e.RedirectTrailingSlash = false
@@ -81,7 +87,7 @@ func (s *ApiServer) CreateRouter(repository *repositories.DataRepository) {
s.router = router
}
func (s *ApiServer) Start() {
func (s *ApiServer) Start() error {
listenAddress := fmt.Sprintf(":%d", s.config.Port)
s.isActive = true
@@ -90,6 +96,8 @@ func (s *ApiServer) Start() {
Handler: s.router.Handler(),
}
errChan := make(chan error, 1)
go func() {
<-s.stopServer
logger.InfoLn("Shutting down server...")
@@ -97,35 +105,40 @@ func (s *ApiServer) Start() {
if err != nil {
logger.ErrorLn("Failed to shutdown server:", err)
}
s.onServerShutdown <- true
}()
go func() {
var err error
if s.config.DisableTls {
logger.Infof("Listening and serving HTTP on %s\n", server.Addr)
err := server.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTP server:", err)
}
s.isActive = false
err = server.ListenAndServe()
} else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" {
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
err := server.ListenAndServeTLS(
err = server.ListenAndServeTLS(
s.config.TLS_CertificatePath,
s.config.TLS_CertificateKey)
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTPS server:", err)
}
s.isActive = false
} else {
tlsConfig := tlsprovider.GetDefaultTlsConfig()
server.TLSConfig = tlsConfig
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
err := server.ListenAndServeTLS("", "")
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTPS server:", err)
}
s.isActive = false
err = server.ListenAndServeTLS("", "")
}
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start server:", err)
errChan <- err
} else {
errChan <- nil
}
s.isActive = false
}()
select {
case err := <-errChan:
return err
case <-time.After(50 * time.Millisecond):
return nil
}
}

View File

@@ -2,13 +2,11 @@ package tests_test
import (
"context"
"errors"
"fmt"
"io"
"net/http"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config"
"github.com/stretchr/testify/assert"
@@ -19,7 +17,7 @@ func Test_Authentication(t *testing.T) {
defer ts.Server.Close()
t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
@@ -35,7 +33,7 @@ func Test_Authentication(t *testing.T) {
})
t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
&azcosmos.ClientOptions{},
@@ -47,12 +45,7 @@ func Test_Authentication(t *testing.T) {
azcosmos.DatabaseProperties{ID: testDatabaseName},
&azcosmos.CreateDatabaseOptions{})
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, respErr.StatusCode, http.StatusUnauthorized)
} else {
panic(err)
}
assert.Contains(t, err.Error(), "401 Unauthorized")
})
t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) {
@@ -68,7 +61,7 @@ func Test_Authentication(t *testing.T) {
}
func Test_Authentication_Disabled(t *testing.T) {
ts := runTestServerCustomConfig(config.ServerConfig{
ts := runTestServerCustomConfig(&config.ServerConfig{
AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
@@ -77,7 +70,7 @@ func Test_Authentication_Disabled(t *testing.T) {
defer ts.Server.Close()
t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
&azcosmos.ClientOptions{},

View File

@@ -3,32 +3,29 @@ package tests_test
import (
"context"
"errors"
"fmt"
"net/http"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
"github.com/stretchr/testify/assert"
)
func Test_Collections(t *testing.T) {
ts := runTestServer()
defer ts.Server.Close()
presets := []testPreset{PresetMapStore, PresetBadgerStore}
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
setUp := func(ts *TestServer, client *azcosmos.Client) *azcosmos.DatabaseClient {
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
databaseClient, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err)
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
databaseClient, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err)
return databaseClient
}
runTestsWithPresets(t, "Collection Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Collection Create", func(t *testing.T) {
t.Run("Should create collection", func(t *testing.T) {
createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
ID: testCollectionName,
@@ -39,7 +36,7 @@ func Test_Collections(t *testing.T) {
})
t.Run("Should return conflict when collection exists", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
})
@@ -57,9 +54,11 @@ func Test_Collections(t *testing.T) {
})
})
t.Run("Collection Read", func(t *testing.T) {
runTestsWithPresets(t, "Collection Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Should read collection", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
})
@@ -73,7 +72,7 @@ func Test_Collections(t *testing.T) {
})
t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.Repository.DeleteCollection(testDatabaseName, testCollectionName)
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err)
@@ -90,9 +89,11 @@ func Test_Collections(t *testing.T) {
})
})
t.Run("Collection Delete", func(t *testing.T) {
runTestsWithPresets(t, "Collection Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Should delete collection", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
})
@@ -105,7 +106,7 @@ func Test_Collections(t *testing.T) {
})
t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.Repository.DeleteCollection(testDatabaseName, testCollectionName)
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err)

View File

@@ -1,39 +1,62 @@
package tests_test
import (
"fmt"
"net/http/httptest"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
mapdatastore "github.com/pikami/cosmium/internal/datastore/map_datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/stretchr/testify/assert"
)
type TestServer struct {
Server *httptest.Server
Repository *repositories.DataRepository
URL string
Server *httptest.Server
DataStore datastore.DataStore
URL string
}
func runTestServerCustomConfig(config config.ServerConfig) *TestServer {
repository := repositories.NewDataRepository(repositories.RepositoryOptions{})
func getDefaultTestServerConfig() *config.ServerConfig {
return &config.ServerConfig{
AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
DataStore: "map",
}
}
api := api.NewApiServer(repository, config)
func runTestServerCustomConfig(configuration *config.ServerConfig) *TestServer {
var dataStore datastore.DataStore
switch configuration.DataStore {
case config.DataStoreBadger:
dataStore = badgerdatastore.NewBadgerDataStore()
default:
dataStore = mapdatastore.NewMapDataStore(mapdatastore.MapDataStoreOptions{})
}
api := api.NewApiServer(dataStore, configuration)
server := httptest.NewServer(api.GetRouter())
configuration.DatabaseEndpoint = server.URL
return &TestServer{
Server: server,
Repository: repository,
URL: server.URL,
Server: server,
DataStore: dataStore,
URL: server.URL,
}
}
func runTestServer() *TestServer {
config := config.ServerConfig{
AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
}
config := getDefaultTestServerConfig()
config.LogLevel = "debug"
logger.SetLogLevel(logger.LogLevelDebug)
return runTestServerCustomConfig(config)
}
@@ -43,3 +66,47 @@ const (
testDatabaseName = "test-db"
testCollectionName = "test-coll"
)
type testFunc func(t *testing.T, ts *TestServer, cosmosClient *azcosmos.Client)
type testPreset string
const (
PresetMapStore testPreset = "MapDS"
PresetBadgerStore testPreset = "BadgerDS"
)
func runTestsWithPreset(t *testing.T, name string, testPreset testPreset, f testFunc) {
serverConfig := getDefaultTestServerConfig()
serverConfig.LogLevel = "debug"
logger.SetLogLevel(logger.LogLevelDebug)
switch testPreset {
case PresetBadgerStore:
serverConfig.DataStore = config.DataStoreBadger
case PresetMapStore:
serverConfig.DataStore = config.DataStoreMap
}
ts := runTestServerCustomConfig(serverConfig)
defer ts.Server.Close()
defer ts.DataStore.Close()
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
testName := fmt.Sprintf("%s_%s", testPreset, name)
t.Run(testName, func(t *testing.T) {
f(t, ts, client)
})
}
func runTestsWithPresets(t *testing.T, name string, testPresets []testPreset, f testFunc) {
for _, testPreset := range testPresets {
runTestsWithPreset(t, name, testPreset, f)
}
}
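
For illustration, a hypothetical test written against these helpers; it would sit in the same tests_test package, so the helpers and imports above are in scope, and the test name and assertions are only a sketch of the pattern the collection and database tests follow:

func Test_Example_Presets(t *testing.T) {
	presets := []testPreset{PresetMapStore, PresetBadgerStore}

	runTestsWithPresets(t, "Example", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
		t.Run("Should create database on both stores", func(t *testing.T) {
			ts.DataStore.DeleteDatabase(testDatabaseName)

			_, err := client.CreateDatabase(
				context.TODO(),
				azcosmos.DatabaseProperties{ID: testDatabaseName},
				&azcosmos.CreateDatabaseOptions{})
			assert.Nil(t, err)
		})
	})
}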

View File

@@ -3,30 +3,21 @@ package tests_test
import (
"context"
"errors"
"fmt"
"net/http"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
"github.com/stretchr/testify/assert"
)
func Test_Databases(t *testing.T) {
ts := runTestServer()
defer ts.Server.Close()
presets := []testPreset{PresetMapStore, PresetBadgerStore}
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
t.Run("Database Create", func(t *testing.T) {
runTestsWithPresets(t, "Database Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
t.Run("Should create database", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
ID: testDatabaseName,
@@ -37,7 +28,7 @@ func Test_Databases(t *testing.T) {
})
t.Run("Should return conflict when database exists", func(t *testing.T) {
ts.Repository.CreateDatabase(repositorymodels.Database{
ts.DataStore.CreateDatabase(datastore.Database{
ID: testDatabaseName,
})
@@ -55,9 +46,9 @@ func Test_Databases(t *testing.T) {
})
})
t.Run("Database Read", func(t *testing.T) {
runTestsWithPresets(t, "Database Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
t.Run("Should read database", func(t *testing.T) {
ts.Repository.CreateDatabase(repositorymodels.Database{
ts.DataStore.CreateDatabase(datastore.Database{
ID: testDatabaseName,
})
@@ -71,7 +62,7 @@ func Test_Databases(t *testing.T) {
})
t.Run("Should return not found when database does not exist", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
databaseResponse, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err)
@@ -88,9 +79,9 @@ func Test_Databases(t *testing.T) {
})
})
t.Run("Database Delete", func(t *testing.T) {
runTestsWithPresets(t, "Database Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
t.Run("Should delete database", func(t *testing.T) {
ts.Repository.CreateDatabase(repositorymodels.Database{
ts.DataStore.CreateDatabase(datastore.Database{
ID: testDatabaseName,
})
@@ -103,7 +94,7 @@ func Test_Databases(t *testing.T) {
})
t.Run("Should return not found when database does not exist", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
databaseResponse, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err)


@@ -14,7 +14,7 @@ import (
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
"github.com/stretchr/testify/assert"
)
@@ -53,11 +53,9 @@ func testCosmosQuery(t *testing.T,
}
}
func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClient) {
ts := runTestServer()
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerClient {
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
PartitionKey: struct {
Paths []string "json:\"paths\""
@@ -67,8 +65,8 @@ func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClien
Paths: []string{"/pk"},
},
})
ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
@@ -79,303 +77,439 @@ func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClien
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
assert.Nil(t, err)
return ts, collectionClient
return collectionClient
}
func Test_Documents(t *testing.T) {
ts, collectionClient := documents_InitializeDb(t)
defer ts.Server.Close()
presets := []testPreset{PresetMapStore, PresetBadgerStore}
t.Run("Should query document", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "pk": "123"},
map[string]interface{}{"id": "67890", "pk": "456"},
},
)
runTestsWithPresets(t, "Test_Documents", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
collectionClient := documents_InitializeDb(t, ts)
t.Run("Should query document", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "pk": "123"},
map[string]interface{}{"id": "67890", "pk": "456"},
},
)
})
t.Run("Should query VALUE array", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
nil,
[]interface{}{
[]interface{}{"12345", "123"},
[]interface{}{"67890", "456"},
},
)
})
t.Run("Should query VALUE object", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "_pk": "123"},
map[string]interface{}{"id": "67890", "_pk": "456"},
},
)
})
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c.isCool=true
ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query document with query parameters", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c.id=@param_id
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param_id", Value: "67890"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c[@param]="67890"
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param", Value: "id"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query array accessor", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`SELECT c.id,
c["arr"][0] AS arr0,
c["arr"][1] AS arr1,
c["arr"][2] AS arr2,
c["arr"][3] AS arr3
FROM c ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
},
)
})
t.Run("Should handle parallel writes", func(t *testing.T) {
var wg sync.WaitGroup
rutineCount := 100
results := make(chan error, rutineCount)
createCall := func(i int) {
defer wg.Done()
item := map[string]interface{}{
"id": fmt.Sprintf("id-%d", i),
"pk": fmt.Sprintf("pk-%d", i),
"val": i,
}
bytes, err := json.Marshal(item)
if err != nil {
results <- err
return
}
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
_, err = collectionClient.CreateItem(
ctx,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
results <- err
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
}
for i := 0; i < rutineCount; i++ {
wg.Add(1)
go createCall(i)
}
wg.Wait()
close(results)
for err := range results {
if err != nil {
t.Errorf("Error creating item: %v", err)
}
}
})
})
t.Run("Should query VALUE array", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
nil,
[]interface{}{
[]interface{}{"12345", "123"},
[]interface{}{"67890", "456"},
},
)
})
runTestsWithPresets(t, "Test_Documents_Patch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
collectionClient := documents_InitializeDb(t, ts)
t.Run("Should query VALUE object", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "_pk": "123"},
map[string]interface{}{"id": "67890", "_pk": "456"},
},
)
})
t.Run("Should PATCH document", func(t *testing.T) {
context := context.TODO()
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c.isCool=true
ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
patch := azcosmos.PatchOperations{}
patch.AppendAdd("/newField", "newValue")
patch.AppendIncrement("/incr", 15)
patch.AppendRemove("/isCool")
patch.AppendReplace("/pk", "666")
patch.AppendSet("/setted", "isSet")
t.Run("Should query document with query parameters", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c.id=@param_id
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param_id", Value: "67890"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
itemResponse, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.Nil(t, err)
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c[@param]="67890"
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param", Value: "id"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
var itemResponseBody map[string]interface{}
json.Unmarshal(itemResponse.Value, &itemResponseBody)
t.Run("Should query array accessor", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`SELECT c.id,
c["arr"][0] AS arr0,
c["arr"][1] AS arr1,
c["arr"][2] AS arr2,
c["arr"][3] AS arr3
FROM c ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
},
)
})
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
assert.Empty(t, itemResponseBody["isCool"])
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
})
t.Run("Should handle parallel writes", func(t *testing.T) {
var wg sync.WaitGroup
rutineCount := 100
results := make(chan error, rutineCount)
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
context := context.TODO()
patch := azcosmos.PatchOperations{}
patch.AppendReplace("/id", "newValue")
_, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
} else {
panic(err)
}
})
t.Run("CreateItem", func(t *testing.T) {
context := context.TODO()
createCall := func(i int) {
defer wg.Done()
item := map[string]interface{}{
"id": fmt.Sprintf("id-%d", i),
"pk": fmt.Sprintf("pk-%d", i),
"val": i,
"Id": "6789011",
"pk": "456",
"newField": "newValue2",
}
bytes, err := json.Marshal(item)
if err != nil {
results <- err
return
}
assert.Nil(t, err)
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
_, err = collectionClient.CreateItem(
ctx,
r, err2 := collectionClient.CreateItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
results <- err
assert.NotNil(t, r)
assert.Nil(t, err2)
})
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
}
t.Run("CreateItem that already exists", func(t *testing.T) {
context := context.TODO()
for i := 0; i < rutineCount; i++ {
wg.Add(1)
go createCall(i)
}
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
wg.Wait()
close(results)
r, err := collectionClient.CreateItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.NotNil(t, err)
for err := range results {
if err != nil {
t.Errorf("Error creating item: %v", err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
} else {
panic(err)
}
}
})
t.Run("UpsertItem new", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
t.Run("UpsertItem that already exists", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
})
runTestsWithPresets(t, "Test_Documents_TransactionalBatch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
collectionClient := documents_InitializeDb(t, ts)
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "678901",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.CreateItem(bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
createdDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], createdDoc["id"])
})
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
batch.DeleteItem("12345", nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
_, status := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, "12345")
assert.Equal(t, datastore.StatusNotFound, int(status))
})
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "67890",
"pk": "666",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.ReplaceItem("67890", bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], updatedDoc["id"])
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
})
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "678901",
"pk": "666",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.UpsertItem(bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], updatedDoc["id"])
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
})
t.Run("Should execute READ transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
batch.ReadItem("67890", nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, "67890", itemResponseBody["id"])
})
})
}
func Test_Documents_Patch(t *testing.T) {
ts, collectionClient := documents_InitializeDb(t)
defer ts.Server.Close()
t.Run("Should PATCH document", func(t *testing.T) {
context := context.TODO()
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
patch := azcosmos.PatchOperations{}
patch.AppendAdd("/newField", "newValue")
patch.AppendIncrement("/incr", 15)
patch.AppendRemove("/isCool")
patch.AppendReplace("/pk", "666")
patch.AppendSet("/setted", "isSet")
itemResponse, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.Nil(t, err)
var itemResponseBody map[string]interface{}
json.Unmarshal(itemResponse.Value, &itemResponseBody)
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
assert.Empty(t, itemResponseBody["isCool"])
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
})
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
context := context.TODO()
patch := azcosmos.PatchOperations{}
patch.AppendReplace("/id", "newValue")
_, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
} else {
panic(err)
}
})
t.Run("CreateItem", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{
"Id": "6789011",
"pk": "456",
"newField": "newValue2",
}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.CreateItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
t.Run("CreateItem that already exists", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err := collectionClient.CreateItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.NotNil(t, err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
} else {
panic(err)
}
})
t.Run("UpsertItem new", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
t.Run("UpsertItem that already exists", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
}


@@ -14,7 +14,8 @@ import (
// Request the document with a trailing slash, like the Python Cosmos DB client does.
func Test_Documents_Read_Trailing_Slash(t *testing.T) {
ts, _ := documents_InitializeDb(t)
ts := runTestServer()
documents_InitializeDb(t, ts)
defer ts.Server.Close()
t.Run("Read doc with client that appends slash to path", func(t *testing.T) {


@@ -7,24 +7,38 @@ import (
"github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
mapdatastore "github.com/pikami/cosmium/internal/datastore/map_datastore"
"github.com/pikami/cosmium/internal/logger"
)
func main() {
configuration := config.ParseFlags()
repository := repositories.NewDataRepository(repositories.RepositoryOptions{
InitialDataFilePath: configuration.InitialDataFilePath,
PersistDataFilePath: configuration.PersistDataFilePath,
})
var dataStore datastore.DataStore
switch configuration.DataStore {
case config.DataStoreBadger:
dataStore = badgerdatastore.NewBadgerDataStore()
logger.InfoLn("Using Badger data store")
default:
dataStore = mapdatastore.NewMapDataStore(mapdatastore.MapDataStoreOptions{
InitialDataFilePath: configuration.InitialDataFilePath,
PersistDataFilePath: configuration.PersistDataFilePath,
})
logger.InfoLn("Using in-memory data store")
}
server := api.NewApiServer(repository, configuration)
server.Start()
server := api.NewApiServer(dataStore, &configuration)
err := server.Start()
if err != nil {
panic(err)
}
waitForExit(server, repository, configuration)
waitForExit(server, dataStore)
}
func waitForExit(server *api.ApiServer, repository *repositories.DataRepository, config config.ServerConfig) {
func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
@@ -34,7 +48,5 @@ func waitForExit(server *api.ApiServer, repository *repositories.DataRepository,
// Stop the server
server.Stop()
if config.PersistDataFilePath != "" {
repository.SaveStateFS(config.PersistDataFilePath)
}
dataStore.Close()
}


@@ -204,6 +204,19 @@ Cosmium strives to support the core features of Cosmos DB, including:
| IS_PRIMITIVE | Yes |
| IS_STRING | Yes |
### Transactional batch operations
Note: there is no real transaction here; treat this as a bulk operation that can partially succeed.
| Operation | Implemented |
| --------- | ----------- |
| Create | Yes |
| Delete | Yes |
| Replace | Yes |
| Upsert | Yes |
| Read | Yes |
| Patch | No |
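A minimal sketch of issuing a batch through the azcosmos Go SDK (the endpoint, account key, database and container names below are placeholders; the calls mirror the ones exercised in Cosmium's test suite):

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
)

func main() {
	// Placeholder connection details for a running Cosmium instance.
	connStr := "AccountEndpoint=https://localhost:8081/;AccountKey=<account-key>"
	client, err := azcosmos.NewClientFromConnectionString(connStr, &azcosmos.ClientOptions{})
	if err != nil {
		panic(err)
	}

	container, err := client.NewContainer("test-db", "test-coll")
	if err != nil {
		panic(err)
	}

	// Queue a create and a delete for the same partition key in one batch.
	batch := container.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
	doc, _ := json.Marshal(map[string]interface{}{"id": "item-1", "pk": "pk"})
	batch.CreateItem(doc, nil)
	batch.DeleteItem("old-item", nil)

	response, err := container.ExecuteTransactionalBatch(context.TODO(), batch, &azcosmos.TransactionalBatchOptions{})
	if err != nil {
		panic(err)
	}

	// Each queued operation reports its own status code; the batch may partially succeed.
	fmt.Println("success:", response.Success)
	for _, op := range response.OperationResults {
		fmt.Println("operation status:", op.StatusCode)
	}
}
```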
## Known Differences
While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of:

go.mod

@@ -1,32 +1,40 @@
module github.com/pikami/cosmium
go 1.22.0
go 1.24.0
require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0
github.com/cosmiumdev/json-patch/v5 v5.9.3
github.com/dgraph-io/badger/v4 v4.6.0
github.com/gin-gonic/gin v1.10.0
github.com/google/uuid v1.6.0
github.com/stretchr/testify v1.10.0
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa
)
require (
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/bytedance/sonic v1.12.7 // indirect
github.com/bytedance/sonic v1.12.9 // indirect
github.com/bytedance/sonic/loader v0.2.3 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgraph-io/ristretto/v2 v2.1.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gin-contrib/sse v1.0.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.24.0 // indirect
github.com/goccy/go-json v0.10.4 // indirect
github.com/go-playground/validator/v10 v10.25.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/google/flatbuffers v25.2.10+incompatible // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.9 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@@ -36,11 +44,15 @@ require (
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
golang.org/x/arch v0.13.0 // indirect
golang.org/x/crypto v0.32.0 // indirect
golang.org/x/net v0.34.0 // indirect
golang.org/x/sys v0.29.0 // indirect
golang.org/x/text v0.21.0 // indirect
google.golang.org/protobuf v1.36.4 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/otel v1.34.0 // indirect
go.opentelemetry.io/otel/metric v1.34.0 // indirect
go.opentelemetry.io/otel/trace v1.34.0 // indirect
golang.org/x/arch v0.14.0 // indirect
golang.org/x/crypto v0.35.0 // indirect
golang.org/x/net v0.35.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/text v0.22.0 // indirect
google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

go.sum

@@ -1,20 +1,22 @@
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0 h1:1nGuui+4POelzDwI7RG56yfQJHCnKvwfMoU7VsEp+Zg=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0/go.mod h1:99EvauvlcJ1U06amZiksfYz/3aFGyIhWGHVyiZXtBAI=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6 h1:oBqQLSI1pZwGOdXJAoJJSzmff9tlfD4KroVfjQQmd0g=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6/go.mod h1:Beh5cHIXJ0oWEDWk9lNFtuklCojLLQ5hl+LqSNTTs0I=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0 h1:RGcdpSElvcXCwxydI0xzOBu1Gvp88OoiTGfbtO/z1m0=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0/go.mod h1:YwUyrNUtcZcibA99JcfCP6UUp95VVQKO2MJfBzgJDwA=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/bytedance/sonic v1.12.7 h1:CQU8pxOy9HToxhndH0Kx/S1qU/CuS9GnKYrGioDcU1Q=
github.com/bytedance/sonic v1.12.7/go.mod h1:tnbal4mxOMju17EGfknm2XyYcpyCnIROYOEYuemj13I=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/bytedance/sonic v1.12.9 h1:Od1BvK55NnewtGaJsTDeAOSnLVO2BTSLOe0+ooKokmQ=
github.com/bytedance/sonic v1.12.9/go.mod h1:uVvFidNmlt9+wa31S1urfwwthTWteBgG0hWuoKAXTx8=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.3 h1:yctD0Q3v2NOGfSWPLPvG2ggA2kV6TS6s4wioyEqssH0=
github.com/bytedance/sonic/loader v0.2.3/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
@@ -23,25 +25,39 @@ github.com/cosmiumdev/json-patch/v5 v5.9.3/go.mod h1:WzSTCdia0WrlZtjnL19P4RiwWtf
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgraph-io/badger/v4 v4.6.0 h1:acOwfOOZ4p1dPRnYzvkVm7rUk2Y21TgPVepCy5dJdFQ=
github.com/dgraph-io/badger/v4 v4.6.0/go.mod h1:KSJ5VTuZNC3Sd+YhvVjk2nYua9UZnnTr/SkXvdtiPgI=
github.com/dgraph-io/ristretto/v2 v2.1.0 h1:59LjpOJLNDULHh8MC4UaegN52lC4JnO2dITsie/Pa8I=
github.com/dgraph-io/ristretto/v2 v2.1.0/go.mod h1:uejeqfYXpUomfse0+lO+13ATz4TypQYLJZzBSAemuB4=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg=
github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c=
github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8=
github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -49,9 +65,11 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -76,8 +94,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@@ -94,21 +112,29 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
golang.org/x/arch v0.13.0 h1:KCkqVVV1kGg0X87TFysjCJ8MxtZEIU4Ja/yXGeoECdA=
golang.org/x/arch v0.13.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
golang.org/x/arch v0.14.0 h1:z9JUEZWr8x4rR0OU6c4/4t6E6jOZ8/QBS2bBYBm4tx4=
golang.org/x/arch v0.14.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=


@@ -0,0 +1,20 @@
package converters
import (
"github.com/pikami/cosmium/internal/datastore"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)
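// DocumentToRowTypeIterator adapts a datastore.DocumentIterator to the RowType iterator
// expected by the in-memory query executor; Next forwards the underlying document and status unchanged.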
type DocumentToRowTypeIterator struct {
documents datastore.DocumentIterator
}
func NewDocumentToRowTypeIterator(documents datastore.DocumentIterator) *DocumentToRowTypeIterator {
return &DocumentToRowTypeIterator{
documents: documents,
}
}
func (di *DocumentToRowTypeIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
return di.documents.Next()
}


@@ -0,0 +1,37 @@
package badgerdatastore
import (
"encoding/gob"
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/logger"
)
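// BadgerDataStore implements the DataStore interface on top of a Badger instance running in
// in-memory mode; records are stored as gob-encoded values under typed key prefixes (DB:, COL:, DOC:, ...).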
type BadgerDataStore struct {
db *badger.DB
}
func NewBadgerDataStore() *BadgerDataStore {
gob.Register([]interface{}{})
badgerOpts := badger.DefaultOptions("").WithInMemory(true)
db, err := badger.Open(badgerOpts)
if err != nil {
panic(err)
}
return &BadgerDataStore{
db: db,
}
}
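// Close shuts down the underlying Badger instance; the data store must not be used afterwards.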
func (r *BadgerDataStore) Close() {
r.db.Close()
r.db = nil
}
func (r *BadgerDataStore) DumpToJson() (string, error) {
logger.ErrorLn("Badger datastore does not support state export currently.")
return "{}", nil
}


@@ -0,0 +1,103 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
)
func (r *BadgerDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
exists, err := keyExists(r.db.NewTransaction(false), generateDatabaseKey(databaseId))
if err != nil {
logger.ErrorLn("Error while checking if database exists:", err)
return nil, datastore.Unknown
}
if !exists {
return nil, datastore.StatusNotFound
}
colls, status := listByPrefix[datastore.Collection](r.db, generateKey(resourceid.ResourceTypeCollection, databaseId, "", ""))
if status == datastore.StatusOk {
return colls, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
collectionKey := generateCollectionKey(databaseId, collectionId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var collection datastore.Collection
status := getKey(txn, collectionKey, &collection)
return collection, status
}
func (r *BadgerDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
collectionKey := generateCollectionKey(databaseId, collectionId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
prefixes := []string{
generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""),
collectionKey,
}
for _, prefix := range prefixes {
if err := deleteKeysByPrefix(txn, prefix); err != nil {
return datastore.Unknown
}
}
err := txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
collectionKey := generateCollectionKey(databaseId, newCollection.ID)
txn := r.db.NewTransaction(true)
defer txn.Discard()
collectionExists, err := keyExists(txn, collectionKey)
if err != nil || collectionExists {
return datastore.Collection{}, datastore.Conflict
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Collection{}, status
}
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
status = insertKey(txn, collectionKey, newCollection)
if status != datastore.StatusOk {
return datastore.Collection{}, status
}
return newCollection, datastore.StatusOk
}


@@ -0,0 +1,80 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
dbs, status := listByPrefix[datastore.Database](r.db, DatabaseKeyPrefix)
if status == datastore.StatusOk {
return dbs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
databaseKey := generateDatabaseKey(id)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, databaseKey, &database)
return database, status
}
func (r *BadgerDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
databaseKey := generateDatabaseKey(id)
txn := r.db.NewTransaction(true)
defer txn.Discard()
prefixes := []string{
generateKey(resourceid.ResourceTypeCollection, id, "", ""),
generateKey(resourceid.ResourceTypeDocument, id, "", ""),
generateKey(resourceid.ResourceTypeTrigger, id, "", ""),
generateKey(resourceid.ResourceTypeStoredProcedure, id, "", ""),
generateKey(resourceid.ResourceTypeUserDefinedFunction, id, "", ""),
databaseKey,
}
for _, prefix := range prefixes {
if err := deleteKeysByPrefix(txn, prefix); err != nil {
return datastore.Unknown
}
}
err := txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
databaseKey := generateDatabaseKey(newDatabase.ID)
txn := r.db.NewTransaction(true)
defer txn.Discard()
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
status := insertKey(txn, databaseKey, newDatabase)
if status != datastore.StatusOk {
return datastore.Database{}, status
}
return newDatabase, datastore.StatusOk
}


@@ -0,0 +1,207 @@
package badgerdatastore
import (
"bytes"
"encoding/gob"
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
const (
DatabaseKeyPrefix = "DB:"
CollectionKeyPrefix = "COL:"
DocumentKeyPrefix = "DOC:"
TriggerKeyPrefix = "TRG:"
StoredProcedureKeyPrefix = "SP:"
UserDefinedFunctionKeyPrefix = "UDF:"
)
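// generateKey builds the storage key for a resource, for example "DB:db1", "COL:db1/colls/coll1"
// or "DOC:db1/colls/coll1/doc1"; the typed prefixes keep every resource kind in its own key range.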
func generateKey(
resourceType resourceid.ResourceType,
databaseId string,
collectionId string,
resourceId string,
) string {
result := ""
switch resourceType {
case resourceid.ResourceTypeDatabase:
result += DatabaseKeyPrefix
case resourceid.ResourceTypeCollection:
result += CollectionKeyPrefix
case resourceid.ResourceTypeDocument:
result += DocumentKeyPrefix
case resourceid.ResourceTypeTrigger:
result += TriggerKeyPrefix
case resourceid.ResourceTypeStoredProcedure:
result += StoredProcedureKeyPrefix
case resourceid.ResourceTypeUserDefinedFunction:
result += UserDefinedFunctionKeyPrefix
}
if databaseId != "" {
result += databaseId
}
if collectionId != "" {
result += "/colls/" + collectionId
}
if resourceId != "" {
result += "/" + resourceId
}
return result
}
func generateDatabaseKey(databaseId string) string {
return generateKey(resourceid.ResourceTypeDatabase, databaseId, "", "")
}
func generateCollectionKey(databaseId string, collectionId string) string {
return generateKey(resourceid.ResourceTypeCollection, databaseId, collectionId, "")
}
func generateDocumentKey(databaseId string, collectionId string, documentId string) string {
return generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, documentId)
}
func generateTriggerKey(databaseId string, collectionId string, triggerId string) string {
return generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, triggerId)
}
func generateStoredProcedureKey(databaseId string, collectionId string, storedProcedureId string) string {
return generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, storedProcedureId)
}
func generateUserDefinedFunctionKey(databaseId string, collectionId string, udfId string) string {
return generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, udfId)
}
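// insertKey gob-encodes value and stores it under key, returning Conflict if the key already
// exists. Note that it commits the supplied transaction itself.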
func insertKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
_, err := txn.Get([]byte(key))
if err == nil {
return datastore.Conflict
}
if err != badger.ErrKeyNotFound {
logger.ErrorLn("Error while checking if key exists:", err)
return datastore.Unknown
}
var buf bytes.Buffer
err = gob.NewEncoder(&buf).Encode(value)
if err != nil {
logger.ErrorLn("Error while encoding value:", err)
return datastore.Unknown
}
err = txn.Set([]byte(key), buf.Bytes())
if err != nil {
logger.ErrorLn("Error while setting key:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func getKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
item, err := txn.Get([]byte(key))
if err != nil {
if err == badger.ErrKeyNotFound {
return datastore.StatusNotFound
}
logger.ErrorLn("Error while getting key:", err)
return datastore.Unknown
}
val, err := item.ValueCopy(nil)
if err != nil {
logger.ErrorLn("Error while copying value:", err)
return datastore.Unknown
}
if value == nil {
logger.ErrorLn("getKey called with nil value")
return datastore.Unknown
}
err = gob.NewDecoder(bytes.NewReader(val)).Decode(value)
if err != nil {
logger.ErrorLn("Error while decoding value:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func keyExists(txn *badger.Txn, key string) (bool, error) {
_, err := txn.Get([]byte(key))
if err == nil {
return true, nil
}
if err == badger.ErrKeyNotFound {
return false, nil
}
return false, err
}
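// listByPrefix scans all keys under the given prefix in a read-only view and gob-decodes each
// value into T; entries that fail to decode are logged and skipped.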
func listByPrefix[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
var results []T
err := db.View(func(txn *badger.Txn) error {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
item := it.Item()
var entry T
status := getKey(txn, string(item.Key()), &entry)
if status != datastore.StatusOk {
logger.ErrorLn("Failed to retrieve entry:", string(item.Key()))
continue
}
results = append(results, entry)
}
return nil
})
if err != nil {
logger.ErrorLn("Error while listing entries:", err)
return nil, datastore.Unknown
}
return results, datastore.StatusOk
}
func deleteKeysByPrefix(txn *badger.Txn, prefix string) error {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
key := it.Item().KeyCopy(nil)
if err := txn.Delete(key); err != nil {
logger.ErrorLn("Failed to delete key:", string(key), "Error:", err)
return err
}
}
return nil
}


@@ -0,0 +1,60 @@
package badgerdatastore
import (
"bytes"
"encoding/gob"
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
)
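// BadgerDocumentIterator streams gob-encoded documents stored under a key prefix. Next returns
// IterEOF once the underlying Badger iterator is exhausted; Close releases the iterator and discards its transaction.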
type BadgerDocumentIterator struct {
txn *badger.Txn
it *badger.Iterator
prefix string
}
func NewBadgerDocumentIterator(txn *badger.Txn, prefix string) *BadgerDocumentIterator {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
it.Rewind()
return &BadgerDocumentIterator{
txn: txn,
it: it,
prefix: prefix,
}
}
func (i *BadgerDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
if !i.it.Valid() {
i.it.Close()
return datastore.Document{}, datastore.IterEOF
}
item := i.it.Item()
val, err := item.ValueCopy(nil)
if err != nil {
logger.ErrorLn("Error while copying value:", err)
return datastore.Document{}, datastore.Unknown
}
current := &datastore.Document{}
err = gob.NewDecoder(bytes.NewReader(val)).Decode(current)
if err != nil {
logger.ErrorLn("Error while decoding value:", err)
return datastore.Document{}, datastore.Unknown
}
i.it.Next()
return *current, datastore.StatusOk
}
func (i *BadgerDocumentIterator) Close() {
i.it.Close()
i.txn.Discard()
}


@@ -0,0 +1,127 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
docs, status := listByPrefix[datastore.Document](r.db, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return docs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
iter := NewBadgerDocumentIterator(txn, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
return iter, datastore.StatusOk
}
func (r *BadgerDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var document datastore.Document
status := getKey(txn, documentKey, &document)
return document, status
}
func (r *BadgerDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, documentKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(documentKey))
if err != nil {
logger.ErrorLn("Error while deleting document:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
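// CreateDocument generates an id when the document does not provide one and stamps the Cosmos
// system properties (_ts, _rid, _etag, _self) before persisting it.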
func (r *BadgerDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
var ok bool
var documentId string
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
status = insertKey(txn, generateDocumentKey(databaseId, collectionId, documentId), document)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
return document, datastore.StatusOk
}
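
The listByPrefix helper used above is not included in this hunk. For orientation only, below is a rough sketch of what a generic Badger prefix scan of that shape could look like; the gob encoding, the badger/v4 import path, and the exact signature are assumptions inferred from the BadgerDocumentIterator earlier in this change, not the project's actual helper.

package badgerdatastore

import (
	"bytes"
	"encoding/gob"

	badger "github.com/dgraph-io/badger/v4"

	"github.com/pikami/cosmium/internal/datastore"
)

// listByPrefixSketch is a hypothetical stand-in for the unexported
// listByPrefix helper referenced above. It assumes stored values are
// gob-encoded, as the iterator code in this change suggests.
func listByPrefixSketch[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
	results := make([]T, 0)
	err := db.View(func(txn *badger.Txn) error {
		opts := badger.DefaultIteratorOptions
		opts.Prefix = []byte(prefix)
		it := txn.NewIterator(opts)
		defer it.Close()
		for it.Rewind(); it.Valid(); it.Next() {
			val, err := it.Item().ValueCopy(nil)
			if err != nil {
				return err
			}
			var entry T
			if err := gob.NewDecoder(bytes.NewReader(val)).Decode(&entry); err != nil {
				return err
			}
			results = append(results, entry)
		}
		return nil
	})
	if err != nil {
		return nil, datastore.Unknown
	}
	return results, datastore.StatusOk
}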

View File

@@ -0,0 +1,53 @@
package badgerdatastore
import (
"fmt"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
)
// I have no idea what this is tbh
func (r *BadgerDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
databaseRid := databaseId
collectionRid := collectionId
var timestamp int64 = 0
txn := r.db.NewTransaction(false)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status == datastore.StatusOk {
databaseRid = database.ResourceID
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status == datastore.StatusOk {
collectionRid = collection.ResourceID
timestamp = collection.TimeStamp
}
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
etag := fmt.Sprintf("\"%s\"", uuid.New())
return []datastore.PartitionKeyRange{
{
ResourceID: pkrResourceId,
ID: "0",
Etag: etag,
MinInclusive: "",
MaxExclusive: "FF",
RidPrefix: 0,
Self: pkrSelf,
ThroughputFraction: 1,
Status: "online",
Parents: []interface{}{},
TimeStamp: timestamp,
Lsn: 17,
},
}, datastore.StatusOk
}

View File

@@ -0,0 +1,107 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
storedProcedures, status := listByPrefix[datastore.StoredProcedure](r.db, generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return storedProcedures, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var storedProcedure datastore.StoredProcedure
status := getKey(txn, storedProcedureKey, &storedProcedure)
return storedProcedure, status
}
func (r *BadgerDataStore) DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) datastore.DataStoreStatus {
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, storedProcedureKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(storedProcedureKey))
if err != nil {
logger.ErrorLn("Error while deleting stored procedure:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateStoredProcedure(databaseId string, collectionId string, storedProcedure datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if storedProcedure.ID == "" {
return datastore.StoredProcedure{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
storedProcedure.TimeStamp = time.Now().Unix()
storedProcedure.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
storedProcedure.ETag = fmt.Sprintf("\"%s\"", uuid.New())
storedProcedure.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, storedProcedure.ResourceID)
status = insertKey(txn, generateStoredProcedureKey(databaseId, collectionId, storedProcedure.ID), storedProcedure)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
return storedProcedure, datastore.StatusOk
}

View File

@@ -0,0 +1,107 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
triggers, status := listByPrefix[datastore.Trigger](r.db, generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return triggers, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var trigger datastore.Trigger
status := getKey(txn, triggerKey, &trigger)
return trigger, status
}
func (r *BadgerDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, triggerKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(triggerKey))
if err != nil {
logger.ErrorLn("Error while deleting trigger:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if trigger.ID == "" {
return datastore.Trigger{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
status = insertKey(txn, generateTriggerKey(databaseId, collectionId, trigger.ID), trigger)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
return trigger, datastore.StatusOk
}

View File

@@ -0,0 +1,107 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
udfs, status := listByPrefix[datastore.UserDefinedFunction](r.db, generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return udfs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var udf datastore.UserDefinedFunction
status := getKey(txn, udfKey, &udf)
return udf, status
}
func (r *BadgerDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, udfKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(udfKey))
if err != nil {
logger.ErrorLn("Error while deleting user defined function:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if udf.ID == "" {
return datastore.UserDefinedFunction{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
status = insertKey(txn, generateUserDefinedFunctionKey(databaseId, collectionId, udf.ID), udf)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
return udf, datastore.StatusOk
}

View File

@@ -0,0 +1,44 @@
package datastore
type DataStore interface {
GetAllDatabases() ([]Database, DataStoreStatus)
GetDatabase(databaseId string) (Database, DataStoreStatus)
DeleteDatabase(databaseId string) DataStoreStatus
CreateDatabase(newDatabase Database) (Database, DataStoreStatus)
GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
GetCollection(databaseId string, collectionId string) (Collection, DataStoreStatus)
DeleteCollection(databaseId string, collectionId string) DataStoreStatus
CreateCollection(databaseId string, newCollection Collection) (Collection, DataStoreStatus)
GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
GetDocumentIterator(databaseId string, collectionId string) (DocumentIterator, DataStoreStatus)
GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (Document, DataStoreStatus)
GetAllTriggers(databaseId string, collectionId string) ([]Trigger, DataStoreStatus)
GetTrigger(databaseId string, collectionId string, triggerId string) (Trigger, DataStoreStatus)
DeleteTrigger(databaseId string, collectionId string, triggerId string) DataStoreStatus
CreateTrigger(databaseId string, collectionId string, trigger Trigger) (Trigger, DataStoreStatus)
GetAllStoredProcedures(databaseId string, collectionId string) ([]StoredProcedure, DataStoreStatus)
GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (StoredProcedure, DataStoreStatus)
DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) DataStoreStatus
CreateStoredProcedure(databaseId string, collectionId string, storedProcedure StoredProcedure) (StoredProcedure, DataStoreStatus)
GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]UserDefinedFunction, DataStoreStatus)
GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (UserDefinedFunction, DataStoreStatus)
DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) DataStoreStatus
CreateUserDefinedFunction(databaseId string, collectionId string, udf UserDefinedFunction) (UserDefinedFunction, DataStoreStatus)
GetPartitionKeyRanges(databaseId string, collectionId string) ([]PartitionKeyRange, DataStoreStatus)
Close()
DumpToJson() (string, error)
}
type DocumentIterator interface {
Next() (Document, DataStoreStatus)
Close()
}
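
To illustrate the point of extracting this interface, here is a hypothetical backend-agnostic helper; the example package and countDocuments name are assumptions for illustration only.

package example

import "github.com/pikami/cosmium/internal/datastore"

// countDocuments depends only on the DataStore and DocumentIterator
// interfaces, so it works unchanged against both the map-backed and the
// Badger-backed implementations.
func countDocuments(store datastore.DataStore, databaseId, collectionId string) (int, datastore.DataStoreStatus) {
	iter, status := store.GetDocumentIterator(databaseId, collectionId)
	if status != datastore.StatusOk {
		return 0, status
	}
	defer iter.Close()

	count := 0
	for {
		_, status := iter.Next()
		if status == datastore.IterEOF {
			return count, datastore.StatusOk
		}
		if status != datastore.StatusOk {
			return count, status
		}
		count++
	}
}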

View File

@@ -0,0 +1,21 @@
package mapdatastore
import "github.com/pikami/cosmium/internal/datastore"
type ArrayDocumentIterator struct {
documents []datastore.Document
index int
}
func (i *ArrayDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return datastore.Document{}, datastore.IterEOF
}
return i.documents[i.index], datastore.StatusOk
}
func (i *ArrayDocumentIterator) Close() {
i.documents = []datastore.Document{}
}
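
A small illustrative test sketch of the iterator contract (every stored document is yielded once, then IterEOF signals the end); this test is not part of the change and assumes it would live alongside this file in the mapdatastore package.

package mapdatastore

import (
	"testing"

	"github.com/pikami/cosmium/internal/datastore"
)

// Illustrative only: two documents are yielded in order, then IterEOF.
func TestArrayDocumentIterator_YieldsAllThenEOF(t *testing.T) {
	docs := []datastore.Document{{"id": "a"}, {"id": "b"}}
	iter := &ArrayDocumentIterator{documents: docs, index: -1}
	for i := range docs {
		doc, status := iter.Next()
		if status != datastore.StatusOk || doc["id"] != docs[i]["id"] {
			t.Fatalf("unexpected document %v (status %d)", doc, status)
		}
	}
	if _, status := iter.Next(); status != datastore.IterEOF {
		t.Fatalf("expected IterEOF, got %d", status)
	}
}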

View File

@@ -0,0 +1,89 @@
package mapdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
"golang.org/x/exp/maps"
)
func (r *MapDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]datastore.Collection, 0), datastore.StatusNotFound
}
return maps.Values(r.storeState.Collections[databaseId]), datastore.StatusOk
}
func (r *MapDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
return r.storeState.Collections[databaseId][collectionId], datastore.StatusOk
}
func (r *MapDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Collections[databaseId], collectionId)
delete(r.storeState.Documents[databaseId], collectionId)
delete(r.storeState.Triggers[databaseId], collectionId)
delete(r.storeState.StoredProcedures[databaseId], collectionId)
delete(r.storeState.UserDefinedFunctions[databaseId], collectionId)
return datastore.StatusOk
}
func (r *MapDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
return datastore.Collection{}, datastore.Conflict
}
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]datastore.Document)
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]datastore.Trigger)
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]datastore.StoredProcedure)
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]datastore.UserDefinedFunction)
return newCollection, datastore.StatusOk
}

View File

@@ -0,0 +1,70 @@
package mapdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *MapDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Databases), datastore.StatusOk
}
func (r *MapDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if database, ok := r.storeState.Databases[id]; ok {
return database, datastore.StatusOk
}
return datastore.Database{}, datastore.StatusNotFound
}
func (r *MapDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[id]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Databases, id)
delete(r.storeState.Collections, id)
delete(r.storeState.Documents, id)
delete(r.storeState.Triggers, id)
delete(r.storeState.StoredProcedures, id)
delete(r.storeState.UserDefinedFunctions, id)
return datastore.StatusOk
}
func (r *MapDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
return datastore.Database{}, datastore.Conflict
}
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
r.storeState.Databases[newDatabase.ID] = newDatabase
r.storeState.Collections[newDatabase.ID] = make(map[string]datastore.Collection)
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]datastore.Document)
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]datastore.Trigger)
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]datastore.StoredProcedure)
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]datastore.UserDefinedFunction)
return newDatabase, datastore.StatusOk
}
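
As a usage note, CreateDatabase reports Conflict for duplicate IDs; a hypothetical get-or-create wrapper (the example package and ensureDatabase name are illustrative, not part of this change) could look like this:

package example

import "github.com/pikami/cosmium/internal/datastore"

// ensureDatabase creates the database if it does not exist yet and falls back
// to reading it when CreateDatabase reports a Conflict for a duplicate ID.
func ensureDatabase(store datastore.DataStore, id string) (datastore.Database, datastore.DataStoreStatus) {
	db, status := store.CreateDatabase(datastore.Database{ID: id})
	if status == datastore.Conflict {
		return store.GetDatabase(id)
	}
	return db, status
}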

View File

@@ -0,0 +1,113 @@
package mapdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *MapDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]datastore.Document, 0), datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return make([]datastore.Document, 0), datastore.StatusNotFound
}
return maps.Values(r.storeState.Documents[databaseId][collectionId]), datastore.StatusOk
}
func (r *MapDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
return r.storeState.Documents[databaseId][collectionId][documentId], datastore.StatusOk
}
func (r *MapDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Documents[databaseId][collectionId], documentId)
return datastore.StatusOk
}
func (r *MapDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var documentId string
var database datastore.Database
var collection datastore.Collection
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
return datastore.Document{}, datastore.Conflict
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
r.storeState.Documents[databaseId][collectionId][documentId] = document
return document, datastore.StatusOk
}
func (r *MapDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
documents, status := r.GetAllDocuments(databaseId, collectionId)
if status != datastore.StatusOk {
return nil, status
}
return &ArrayDocumentIterator{
documents: documents,
index: -1,
}, datastore.StatusOk
}

View File

@@ -0,0 +1,34 @@
package mapdatastore
import "github.com/pikami/cosmium/internal/datastore"
type MapDataStore struct {
storeState State
initialDataFilePath string
persistDataFilePath string
}
type MapDataStoreOptions struct {
InitialDataFilePath string
PersistDataFilePath string
}
func NewMapDataStore(options MapDataStoreOptions) *MapDataStore {
dataStore := &MapDataStore{
storeState: State{
Databases: make(map[string]datastore.Database),
Collections: make(map[string]map[string]datastore.Collection),
Documents: make(map[string]map[string]map[string]datastore.Document),
Triggers: make(map[string]map[string]map[string]datastore.Trigger),
StoredProcedures: make(map[string]map[string]map[string]datastore.StoredProcedure),
UserDefinedFunctions: make(map[string]map[string]map[string]datastore.UserDefinedFunction),
},
initialDataFilePath: options.InitialDataFilePath,
persistDataFilePath: options.PersistDataFilePath,
}
dataStore.InitializeDataStore()
return dataStore
}
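
For reference, constructing the map-backed store from these options might look like the sketch below; the newMapStoreSketch name and the "state.json" path are placeholders, not defaults used by the project.

package mapdatastore

import "github.com/pikami/cosmium/internal/datastore"

// newMapStoreSketch is illustrative only. Empty paths disable seeding and
// persistence, matching the InitializeDataStore and Close methods in this change.
func newMapStoreSketch() datastore.DataStore {
	return NewMapDataStore(MapDataStoreOptions{
		InitialDataFilePath: "",           // no seed data loaded on start
		PersistDataFilePath: "state.json", // hypothetical path; written by Close()
	})
}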

View File

@@ -1,15 +1,15 @@
package repositories
package mapdatastore
import (
"fmt"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
)
// I have no idea what this is tbh
func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
func (r *MapDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
@@ -26,11 +26,11 @@ func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId s
timestamp = collection.TimeStamp
}
pkrResourceId := resourceid.NewCombined(databaseRid, collectionRid, resourceid.New())
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
etag := fmt.Sprintf("\"%s\"", uuid.New())
return []repositorymodels.PartitionKeyRange{
return []datastore.PartitionKeyRange{
{
ResourceID: pkrResourceId,
ID: "0",
@@ -45,5 +45,5 @@ func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId s
TimeStamp: timestamp,
Lsn: 17,
},
}, repositorymodels.StatusOk
}, datastore.StatusOk
}

View File

@@ -1,16 +1,39 @@
package repositories
package mapdatastore
import (
"encoding/json"
"log"
"os"
"reflect"
"sync"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
)
func (r *DataRepository) InitializeRepository() {
type State struct {
sync.RWMutex
// Map databaseId -> Database
Databases map[string]datastore.Database `json:"databases"`
// Map databaseId -> collectionId -> Collection
Collections map[string]map[string]datastore.Collection `json:"collections"`
// Map databaseId -> collectionId -> documentId -> Documents
Documents map[string]map[string]map[string]datastore.Document `json:"documents"`
// Map databaseId -> collectionId -> triggerId -> Trigger
Triggers map[string]map[string]map[string]datastore.Trigger `json:"triggers"`
// Map databaseId -> collectionId -> spId -> StoredProcedure
StoredProcedures map[string]map[string]map[string]datastore.StoredProcedure `json:"sprocs"`
// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
UserDefinedFunctions map[string]map[string]map[string]datastore.UserDefinedFunction `json:"udfs"`
}
func (r *MapDataStore) InitializeDataStore() {
if r.initialDataFilePath != "" {
r.LoadStateFS(r.initialDataFilePath)
return
@@ -32,7 +55,7 @@ func (r *DataRepository) InitializeRepository() {
}
}
func (r *DataRepository) LoadStateFS(filePath string) {
func (r *MapDataStore) LoadStateFS(filePath string) {
data, err := os.ReadFile(filePath)
if err != nil {
log.Fatalf("Error reading state JSON file: %v", err)
@@ -45,11 +68,11 @@ func (r *DataRepository) LoadStateFS(filePath string) {
}
}
func (r *DataRepository) LoadStateJSON(jsonData string) error {
r.storeState.RLock()
defer r.storeState.RUnlock()
func (r *MapDataStore) LoadStateJSON(jsonData string) error {
r.storeState.Lock()
defer r.storeState.Unlock()
var state repositorymodels.State
var state State
if err := json.Unmarshal([]byte(jsonData), &state); err != nil {
return err
}
@@ -71,7 +94,7 @@ func (r *DataRepository) LoadStateJSON(jsonData string) error {
return nil
}
func (r *DataRepository) SaveStateFS(filePath string) {
func (r *MapDataStore) SaveStateFS(filePath string) {
r.storeState.RLock()
defer r.storeState.RUnlock()
@@ -92,7 +115,7 @@ func (r *DataRepository) SaveStateFS(filePath string) {
logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
}
func (r *DataRepository) GetState() (string, error) {
func (r *MapDataStore) DumpToJson() (string, error) {
r.storeState.RLock()
defer r.storeState.RUnlock()
@@ -103,16 +126,23 @@ func (r *DataRepository) GetState() (string, error) {
}
return string(data), nil
}
func (r *MapDataStore) Close() {
if r.persistDataFilePath != "" {
r.SaveStateFS(r.persistDataFilePath)
}
}
func getLength(v interface{}) int {
switch v.(type) {
case repositorymodels.Database,
repositorymodels.Collection,
repositorymodels.Document,
repositorymodels.Trigger,
repositorymodels.StoredProcedure,
repositorymodels.UserDefinedFunction:
case datastore.Database,
datastore.Collection,
datastore.Document,
datastore.Trigger,
datastore.StoredProcedure,
datastore.UserDefinedFunction:
return 1
}
@@ -133,55 +163,55 @@ func getLength(v interface{}) int {
return count
}
func (r *DataRepository) ensureStoreStateNoNullReferences() {
func (r *MapDataStore) ensureStoreStateNoNullReferences() {
if r.storeState.Databases == nil {
r.storeState.Databases = make(map[string]repositorymodels.Database)
r.storeState.Databases = make(map[string]datastore.Database)
}
if r.storeState.Collections == nil {
r.storeState.Collections = make(map[string]map[string]repositorymodels.Collection)
r.storeState.Collections = make(map[string]map[string]datastore.Collection)
}
if r.storeState.Documents == nil {
r.storeState.Documents = make(map[string]map[string]map[string]repositorymodels.Document)
r.storeState.Documents = make(map[string]map[string]map[string]datastore.Document)
}
if r.storeState.Triggers == nil {
r.storeState.Triggers = make(map[string]map[string]map[string]repositorymodels.Trigger)
r.storeState.Triggers = make(map[string]map[string]map[string]datastore.Trigger)
}
if r.storeState.StoredProcedures == nil {
r.storeState.StoredProcedures = make(map[string]map[string]map[string]repositorymodels.StoredProcedure)
r.storeState.StoredProcedures = make(map[string]map[string]map[string]datastore.StoredProcedure)
}
if r.storeState.UserDefinedFunctions == nil {
r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction)
r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]datastore.UserDefinedFunction)
}
for database := range r.storeState.Databases {
if r.storeState.Collections[database] == nil {
r.storeState.Collections[database] = make(map[string]repositorymodels.Collection)
r.storeState.Collections[database] = make(map[string]datastore.Collection)
}
if r.storeState.Documents[database] == nil {
r.storeState.Documents[database] = make(map[string]map[string]repositorymodels.Document)
r.storeState.Documents[database] = make(map[string]map[string]datastore.Document)
}
if r.storeState.Triggers[database] == nil {
r.storeState.Triggers[database] = make(map[string]map[string]repositorymodels.Trigger)
r.storeState.Triggers[database] = make(map[string]map[string]datastore.Trigger)
}
if r.storeState.StoredProcedures[database] == nil {
r.storeState.StoredProcedures[database] = make(map[string]map[string]repositorymodels.StoredProcedure)
r.storeState.StoredProcedures[database] = make(map[string]map[string]datastore.StoredProcedure)
}
if r.storeState.UserDefinedFunctions[database] == nil {
r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]datastore.UserDefinedFunction)
}
for collection := range r.storeState.Collections[database] {
if r.storeState.Documents[database][collection] == nil {
r.storeState.Documents[database][collection] = make(map[string]repositorymodels.Document)
r.storeState.Documents[database][collection] = make(map[string]datastore.Document)
}
for document := range r.storeState.Documents[database][collection] {
@@ -191,15 +221,15 @@ func (r *DataRepository) ensureStoreStateNoNullReferences() {
}
if r.storeState.Triggers[database][collection] == nil {
r.storeState.Triggers[database][collection] = make(map[string]repositorymodels.Trigger)
r.storeState.Triggers[database][collection] = make(map[string]datastore.Trigger)
}
if r.storeState.StoredProcedures[database][collection] == nil {
r.storeState.StoredProcedures[database][collection] = make(map[string]repositorymodels.StoredProcedure)
r.storeState.StoredProcedures[database][collection] = make(map[string]datastore.StoredProcedure)
}
if r.storeState.UserDefinedFunctions[database][collection] == nil {
r.storeState.UserDefinedFunctions[database][collection] = make(map[string]repositorymodels.UserDefinedFunction)
r.storeState.UserDefinedFunctions[database][collection] = make(map[string]datastore.UserDefinedFunction)
}
}
}

View File

@@ -0,0 +1,91 @@
package mapdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *MapDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), datastore.StatusOk
}
func (r *MapDataStore) GetStoredProcedure(databaseId string, collectionId string, spId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
return sp, datastore.StatusOk
}
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
func (r *MapDataStore) DeleteStoredProcedure(databaseId string, collectionId string, spId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
return datastore.StatusOk
}
func (r *MapDataStore) CreateStoredProcedure(databaseId string, collectionId string, sp datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if sp.ID == "" {
return datastore.StoredProcedure{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
return datastore.StoredProcedure{}, datastore.Conflict
}
sp.TimeStamp = time.Now().Unix()
sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
return sp, datastore.StatusOk
}

View File

@@ -0,0 +1,91 @@
package mapdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *MapDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), datastore.StatusOk
}
func (r *MapDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
return trigger, datastore.StatusOk
}
return datastore.Trigger{}, datastore.StatusNotFound
}
func (r *MapDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
return datastore.StatusOk
}
func (r *MapDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if trigger.ID == "" {
return datastore.Trigger{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
return datastore.Trigger{}, datastore.Conflict
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
return trigger, datastore.StatusOk
}

View File

@@ -0,0 +1,91 @@
package mapdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *MapDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), datastore.StatusOk
}
func (r *MapDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
return udf, datastore.StatusOk
}
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
func (r *MapDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
return datastore.StatusOk
}
func (r *MapDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if udf.ID == "" {
return datastore.UserDefinedFunction{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
return datastore.UserDefinedFunction{}, datastore.Conflict
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
return udf, datastore.StatusOk
}

View File

@@ -1,6 +1,4 @@
package repositorymodels
import "sync"
package datastore
type Database struct {
ID string `json:"id"`
@@ -10,13 +8,15 @@ type Database struct {
Self string `json:"_self"`
}
type RepositoryStatus int
type DataStoreStatus int
const (
StatusOk = 1
StatusNotFound = 2
Conflict = 3
BadRequest = 4
IterEOF = 5
Unknown = 6
)
type TriggerOperation string
@@ -117,25 +117,3 @@ type PartitionKeyRange struct {
TimeStamp int64 `json:"_ts"`
Lsn int `json:"lsn"`
}
type State struct {
sync.RWMutex
// Map databaseId -> Database
Databases map[string]Database `json:"databases"`
// Map databaseId -> collectionId -> Collection
Collections map[string]map[string]Collection `json:"collections"`
// Map databaseId -> collectionId -> documentId -> Documents
Documents map[string]map[string]map[string]Document `json:"documents"`
// Map databaseId -> collectionId -> triggerId -> Trigger
Triggers map[string]map[string]map[string]Trigger `json:"triggers"`
// Map databaseId -> collectionId -> spId -> StoredProcedure
StoredProcedures map[string]map[string]map[string]StoredProcedure `json:"sprocs"`
// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
UserDefinedFunctions map[string]map[string]map[string]UserDefinedFunction `json:"udfs"`
}

View File

@@ -6,6 +6,7 @@ import (
"os"
"runtime"
"strings"
"sync"
)
type LogLevelType int
@@ -21,67 +22,68 @@ type LogWriter struct {
WriterLevel LogLevelType
}
var LogLevel = LogLevelInfo
var logLevelMutex sync.RWMutex
var logLevel = LogLevelInfo
var DebugLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
var InfoLogger = log.New(os.Stdout, "", log.Ldate|log.Ltime)
var ErrorLogger = log.New(os.Stderr, "", log.Ldate|log.Ltime)
func DebugLn(v ...any) {
if LogLevel <= LogLevelDebug {
if GetLogLevel() <= LogLevelDebug {
prefix := getCallerPrefix()
DebugLogger.Println(append([]interface{}{prefix}, v...)...)
}
}
func Debug(v ...any) {
if LogLevel <= LogLevelDebug {
if GetLogLevel() <= LogLevelDebug {
prefix := getCallerPrefix()
DebugLogger.Println(append([]interface{}{prefix}, v...)...)
}
}
func Debugf(format string, v ...any) {
if LogLevel <= LogLevelDebug {
if GetLogLevel() <= LogLevelDebug {
prefix := getCallerPrefix()
DebugLogger.Printf(prefix+format, v...)
}
}
func InfoLn(v ...any) {
if LogLevel <= LogLevelInfo {
if GetLogLevel() <= LogLevelInfo {
InfoLogger.Println(v...)
}
}
func Info(v ...any) {
if LogLevel <= LogLevelInfo {
if GetLogLevel() <= LogLevelInfo {
InfoLogger.Print(v...)
}
}
func Infof(format string, v ...any) {
if LogLevel <= LogLevelInfo {
if GetLogLevel() <= LogLevelInfo {
InfoLogger.Printf(format, v...)
}
}
func ErrorLn(v ...any) {
if LogLevel <= LogLevelError {
if GetLogLevel() <= LogLevelError {
prefix := getCallerPrefix()
ErrorLogger.Println(append([]interface{}{prefix}, v...)...)
}
}
func Error(v ...any) {
if LogLevel <= LogLevelError {
if GetLogLevel() <= LogLevelError {
prefix := getCallerPrefix()
ErrorLogger.Print(append([]interface{}{prefix}, v...)...)
}
}
func Errorf(format string, v ...any) {
if LogLevel <= LogLevelError {
if GetLogLevel() <= LogLevelError {
prefix := getCallerPrefix()
ErrorLogger.Printf(prefix+format, v...)
}
@@ -112,11 +114,25 @@ func DebugWriter() *LogWriter {
return &LogWriter{WriterLevel: LogLevelDebug}
}
func SetLogLevel(level LogLevelType) {
logLevelMutex.Lock()
defer logLevelMutex.Unlock()
logLevel = level
}
func GetLogLevel() LogLevelType {
logLevelMutex.RLock()
defer logLevelMutex.RUnlock()
return logLevel
}
func getCallerPrefix() string {
_, file, line, ok := runtime.Caller(2)
if ok {
parts := strings.Split(file, "/")
file = parts[len(parts)-1]
if len(parts) > 0 {
file = parts[len(parts)-1]
}
return fmt.Sprintf("%s:%d - ", file, line)
}
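
Since the level now sits behind a mutex, callers can adjust it at runtime from any goroutine; a hypothetical caller (the example package and configureLogging name are illustrative, not part of this change) would look like:

package example

import "github.com/pikami/cosmium/internal/logger"

// configureLogging is illustrative only. SetLogLevel and GetLogLevel are safe
// to call from concurrent goroutines because the level is guarded by logLevelMutex.
func configureLogging(verbose bool) {
	if verbose {
		logger.SetLogLevel(logger.LogLevelDebug)
	} else {
		logger.SetLogLevel(logger.LogLevelInfo)
	}
	logger.Debugf("effective log level: %d", logger.GetLogLevel())
}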

View File

@@ -1,85 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllCollections(databaseId string) ([]repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]repositorymodels.Collection, 0), repositorymodels.StatusNotFound
}
return maps.Values(r.storeState.Collections[databaseId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetCollection(databaseId string, collectionId string) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
return r.storeState.Collections[databaseId][collectionId], repositorymodels.StatusOk
}
func (r *DataRepository) DeleteCollection(databaseId string, collectionId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Collections[databaseId], collectionId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
return repositorymodels.Collection{}, repositorymodels.Conflict
}
newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New())
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]repositorymodels.Trigger)
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]repositorymodels.StoredProcedure)
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]repositorymodels.UserDefinedFunction)
return newCollection, repositorymodels.StatusOk
}

View File

@@ -1,65 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllDatabases() ([]repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Databases), repositorymodels.StatusOk
}
func (r *DataRepository) GetDatabase(id string) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if database, ok := r.storeState.Databases[id]; ok {
return database, repositorymodels.StatusOk
}
return repositorymodels.Database{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteDatabase(id string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[id]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Databases, id)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
return repositorymodels.Database{}, repositorymodels.Conflict
}
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New()
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
r.storeState.Databases[newDatabase.ID] = newDatabase
r.storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]repositorymodels.Trigger)
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]repositorymodels.StoredProcedure)
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
return newDatabase, repositorymodels.StatusOk
}

View File

@@ -1,130 +0,0 @@
package repositories
import (
"fmt"
"log"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"github.com/pikami/cosmium/parsers"
"github.com/pikami/cosmium/parsers/nosql"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllDocuments(databaseId string, collectionId string) ([]repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
}
return maps.Values(r.storeState.Documents[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetDocument(databaseId string, collectionId string, documentId string) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
return r.storeState.Documents[databaseId][collectionId][documentId], repositorymodels.StatusOk
}
func (r *DataRepository) DeleteDocument(databaseId string, collectionId string, documentId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Documents[databaseId][collectionId], documentId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var documentId string
var database repositorymodels.Database
var collection repositorymodels.Collection
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
return repositorymodels.Document{}, repositorymodels.Conflict
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
r.storeState.Documents[databaseId][collectionId][documentId] = document
return document, repositorymodels.StatusOk
}
func (r *DataRepository) ExecuteQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, repositorymodels.RepositoryStatus) {
parsedQuery, err := nosql.Parse("", []byte(query))
if err != nil {
log.Printf("Failed to parse query: %s\nerr: %v", query, err)
return nil, repositorymodels.BadRequest
}
collectionDocuments, status := r.GetAllDocuments(databaseId, collectionId)
if status != repositorymodels.StatusOk {
return nil, status
}
covDocs := make([]memoryexecutor.RowType, 0)
for _, doc := range collectionDocuments {
covDocs = append(covDocs, map[string]interface{}(doc))
}
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
typedQuery.Parameters = queryParameters
return memoryexecutor.ExecuteQuery(typedQuery, covDocs), repositorymodels.StatusOk
}
return nil, repositorymodels.BadRequest
}

View File

@@ -1,34 +0,0 @@
package repositories
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
type DataRepository struct {
storeState repositorymodels.State
initialDataFilePath string
persistDataFilePath string
}
type RepositoryOptions struct {
InitialDataFilePath string
PersistDataFilePath string
}
func NewDataRepository(options RepositoryOptions) *DataRepository {
repository := &DataRepository{
storeState: repositorymodels.State{
Databases: make(map[string]repositorymodels.Database),
Collections: make(map[string]map[string]repositorymodels.Collection),
Documents: make(map[string]map[string]map[string]repositorymodels.Document),
Triggers: make(map[string]map[string]map[string]repositorymodels.Trigger),
StoredProcedures: make(map[string]map[string]map[string]repositorymodels.StoredProcedure),
UserDefinedFunctions: make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction),
},
initialDataFilePath: options.InitialDataFilePath,
persistDataFilePath: options.PersistDataFilePath,
}
repository.InitializeRepository()
return repository
}
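
A construction sketch for this repository type; both file paths are placeholders, not values taken from the project:

    repo := repositories.NewDataRepository(repositories.RepositoryOptions{
        InitialDataFilePath: "testdata/seed.json",      // placeholder
        PersistDataFilePath: "/tmp/cosmium-state.json", // placeholder
    })
    _ = repo // ready for the CRUD and query calls shown above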

View File

@@ -1,91 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllStoredProcedures(databaseId string, collectionId string) ([]repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetStoredProcedure(databaseId string, collectionId string, spId string) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
return sp, repositorymodels.StatusOk
}
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteStoredProcedure(databaseId string, collectionId string, spId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateStoredProcedure(databaseId string, collectionId string, sp repositorymodels.StoredProcedure) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if sp.ID == "" {
return repositorymodels.StoredProcedure{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
return repositorymodels.StoredProcedure{}, repositorymodels.Conflict
}
sp.TimeStamp = time.Now().Unix()
sp.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
return sp, repositorymodels.StatusOk
}

View File

@@ -1,91 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllTriggers(databaseId string, collectionId string) ([]repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetTrigger(databaseId string, collectionId string, triggerId string) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
return trigger, repositorymodels.StatusOk
}
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteTrigger(databaseId string, collectionId string, triggerId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateTrigger(databaseId string, collectionId string, trigger repositorymodels.Trigger) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if trigger.ID == "" {
return repositorymodels.Trigger{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
return repositorymodels.Trigger{}, repositorymodels.Conflict
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
return trigger, repositorymodels.StatusOk
}

View File

@@ -1,91 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
return udf, repositorymodels.StatusOk
}
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateUserDefinedFunction(databaseId string, collectionId string, udf repositorymodels.UserDefinedFunction) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if udf.ID == "" {
return repositorymodels.UserDefinedFunction{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.Conflict
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
return udf, repositorymodels.StatusOk
}
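
The stored procedure, trigger and UDF files above share one pattern: the caller supplies the ID (an empty ID returns BadRequest), a duplicate ID returns Conflict, and the store stamps TimeStamp, ResourceID, ETag and Self itself. A hedged sketch using the UDF variant; the Body field name is an assumption about the model, and "repo" is again an initialized *DataRepository:

    udf := repositorymodels.UserDefinedFunction{
        ID:   "toUpper", // required; empty ID yields BadRequest
        Body: "function toUpper(s) { return s.toUpperCase(); }", // assumed field name
    }
    created, status := repo.CreateUserDefinedFunction("db1", "orders", udf)
    if status == repositorymodels.Conflict {
        // a UDF with this ID already exists in the collection
    }
    _ = created // TimeStamp, ResourceID, ETag and Self are filled in by the store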

View File

@@ -3,32 +3,76 @@ package resourceid
import (
"encoding/base64"
"math/rand"
"strings"
"github.com/google/uuid"
)
func New() string {
id := uuid.New().ID()
idBytes := uintToBytes(id)
type ResourceType int
// first byte should be bigger than 0x80 for collection ids
// clients classify this id as "user" otherwise
if (idBytes[0] & 0x80) <= 0 {
idBytes[0] = byte(rand.Intn(0x80) + 0x80)
const (
ResourceTypeDatabase ResourceType = iota
ResourceTypeCollection
ResourceTypeDocument
ResourceTypeStoredProcedure
ResourceTypeTrigger
ResourceTypeUserDefinedFunction
ResourceTypeConflict
ResourceTypePartitionKeyRange
ResourceTypeSchema
)
func New(resourceType ResourceType) string {
var idBytes []byte
switch resourceType {
case ResourceTypeDatabase:
idBytes = randomBytes(4)
case ResourceTypeCollection:
idBytes = randomBytes(4)
// first byte should be bigger than 0x80 for collection ids
// clients classify this id as "user" otherwise
if (idBytes[0] & 0x80) <= 0 {
idBytes[0] = byte(rand.Intn(0x80) + 0x80)
}
case ResourceTypeDocument:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) // Upper 4 bits = 0
case ResourceTypeStoredProcedure:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x08 // Upper 4 bits = 0x08
case ResourceTypeTrigger:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x07 // Upper 4 bits = 0x07
case ResourceTypeUserDefinedFunction:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x06 // Upper 4 bits = 0x06
case ResourceTypeConflict:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x04 // Upper 4 bits = 0x04
case ResourceTypePartitionKeyRange:
// we don't do partitions yet, so just use a fixed id
idBytes = []byte{0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x50}
case ResourceTypeSchema:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x09 // Upper 4 bits = 0x09
default:
idBytes = randomBytes(4)
}
return base64.StdEncoding.EncodeToString(idBytes)
encoded := base64.StdEncoding.EncodeToString(idBytes)
return strings.ReplaceAll(encoded, "/", "-")
}
func NewCombined(ids ...string) string {
combinedIdBytes := make([]byte, 0)
for _, id := range ids {
idBytes, _ := base64.StdEncoding.DecodeString(id)
idBytes, _ := base64.StdEncoding.DecodeString(strings.ReplaceAll(id, "-", "/"))
combinedIdBytes = append(combinedIdBytes, idBytes...)
}
return base64.StdEncoding.EncodeToString(combinedIdBytes)
encoded := base64.StdEncoding.EncodeToString(combinedIdBytes)
return strings.ReplaceAll(encoded, "/", "-")
}
func uintToBytes(id uint32) []byte {
@@ -39,3 +83,13 @@ func uintToBytes(id uint32) []byte {
return buf
}
func randomBytes(count int) []byte {
buf := make([]byte, count)
for i := 0; i < count; i += 4 {
id := uuid.New().ID()
idBytes := uintToBytes(id)
copy(buf[i:], idBytes)
}
return buf
}
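
A sketch of how the reworked generator composes Cosmos-style resource ids; values are random per run, and the constants come straight from the declarations above:

    dbRid := resourceid.New(resourceid.ResourceTypeDatabase)     // 4 random bytes
    collRid := resourceid.New(resourceid.ResourceTypeCollection) // high bit of byte 0 forced on
    docRid := resourceid.New(resourceid.ResourceTypeDocument)    // 8 bytes, upper nibble of byte 7 = 0
    combined := resourceid.NewCombined(dbRid, collRid, docRid)   // 16 bytes once decoded
    fmt.Println(combined) // base64 with "/" swapped for "-", presumably to keep ids URL-path-safe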

View File

@@ -1,21 +1,19 @@
package structhidrators
import (
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
)
import "github.com/pikami/cosmium/internal/datastore"
var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
IndexingPolicy: repositorymodels.CollectionIndexingPolicy{
var defaultCollection datastore.Collection = datastore.Collection{
IndexingPolicy: datastore.CollectionIndexingPolicy{
IndexingMode: "consistent",
Automatic: true,
IncludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
IncludedPaths: []datastore.CollectionIndexingPolicyPath{
{Path: "/*"},
},
ExcludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
ExcludedPaths: []datastore.CollectionIndexingPolicyPath{
{Path: "/\"_etag\"/?"},
},
},
PartitionKey: repositorymodels.CollectionPartitionKey{
PartitionKey: datastore.CollectionPartitionKey{
Paths: []string{"/_partitionKey"},
Kind: "Hash",
Version: 2,

View File

@@ -3,11 +3,11 @@ package structhidrators
import (
"reflect"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)
func Hidrate(input interface{}) interface{} {
if reflect.TypeOf(input) == reflect.TypeOf(repositorymodels.Collection{}) {
if reflect.TypeOf(input) == reflect.TypeOf(datastore.Collection{}) {
return hidrate(input, defaultCollection)
}
return input
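
In effect, Hidrate backfills a sparsely specified collection with the defaults shown in the previous file. A rough sketch; the ID field name is an assumption about datastore.Collection:

    col := structhidrators.Hidrate(datastore.Collection{ID: "orders"}).(datastore.Collection)
    _ = col // expected to carry the consistent indexing policy and /_partitionKey hash key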

View File

@@ -17,6 +17,7 @@ type SelectStmt struct {
type Table struct {
Value string
SelectItem SelectItem
IsInSelect bool
}
type JoinItem struct {
@@ -41,6 +42,7 @@ type SelectItem struct {
SelectItems []SelectItem
Type SelectItemType
Value interface{}
Invert bool
IsTopLevel bool
}
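
The two new fields map onto the grammar changes later in this diff: Table.IsInSelect marks FROM ... IN ... sources and SelectItem.Invert marks a NOT-prefixed boolean expression. Roughly, a query such as the one in the comment is expected to produce values like the following (a sketch, not the exact AST the tests assert):

    // SELECT t FROM t IN c.tags WHERE NOT c.archived
    stmt := parsers.SelectStmt{
        Table: parsers.Table{
            SelectItem: parsers.SelectItem{Path: []string{"c", "tags"}},
            IsInSelect: true,
        },
        Filters: parsers.SelectItem{
            Path:   []string{"c", "archived"},
            Invert: true,
        },
    }
    _ = stmt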

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Parse_AggregateFunctions(t *testing.T) {
@@ -27,7 +28,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -51,7 +52,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -75,7 +76,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -99,7 +100,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -123,7 +124,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

View File

@@ -32,7 +32,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -58,7 +58,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -87,7 +87,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -116,7 +116,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -145,7 +145,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -169,7 +169,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -195,7 +195,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -223,7 +223,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -251,7 +251,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Parse_Join(t *testing.T) {
@@ -17,7 +18,7 @@ func Test_Parse_Join(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{
@@ -40,7 +41,7 @@ func Test_Parse_Join(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"cc"}, IsTopLevel: true},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_MathFunctions(t *testing.T) {
@@ -644,7 +645,7 @@ func testMathFunctionParse(
},
},
},
Table: parsers.Table{Value: expectedTable},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path(expectedTable)},
},
)
}

View File

@@ -49,7 +49,7 @@ func Test_Parse(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
OrderExpressions: []parsers.OrderExpression{
{
SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
@@ -73,7 +73,7 @@ func Test_Parse(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
@@ -93,7 +93,7 @@ func Test_Parse(t *testing.T) {
Type: parsers.SelectItemTypeField,
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -112,6 +112,38 @@ func Test_Parse(t *testing.T) {
)
})
t.Run("Should parse IN function with function call", func(t *testing.T) {
testQueryParse(
t,
`Select c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallIn,
Arguments: []interface{}{
parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallToString,
Arguments: []interface{}{
testutils.SelectItem_Path("c", "id"),
},
},
},
testutils.SelectItem_Constant_String("123"),
testutils.SelectItem_Constant_String("456"),
},
},
},
},
)
})
t.Run("Should parse IN selector", func(t *testing.T) {
testQueryParse(
t,
@@ -124,10 +156,9 @@ func Test_Parse(t *testing.T) {
},
},
Table: parsers.Table{
Value: "c",
SelectItem: parsers.SelectItem{
Path: []string{"c", "tags"},
},
Value: "c",
SelectItem: testutils.SelectItem_Path("c", "tags"),
IsInSelect: true,
},
},
)

File diff suppressed because it is too large

View File

@@ -204,14 +204,22 @@ TopClause <- Top ws count:Integer {
return count, nil
}
FromClause <- From ws table:TableName selectItem:(ws "IN"i ws column:SelectItem { return column, nil })? {
FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItem { return column, nil }) {
tableTyped := table.(parsers.Table)
if selectItem != nil {
tableTyped.SelectItem = selectItem.(parsers.SelectItem)
tableTyped.IsInSelect = true
}
return tableTyped, nil
} / From ws column:SelectItem {
tableSelectItem := column.(parsers.SelectItem)
table := parsers.Table{
Value: tableSelectItem.Alias,
SelectItem: tableSelectItem,
}
return table, nil
} / From ws subQuery:SubQuerySelectItem {
subQueryTyped := subQuery.(parsers.SelectItem)
table := parsers.Table{
@@ -243,13 +251,13 @@ SubQuerySelectItem <- subQuery:SubQuery asClause:(ws alias:AsClause { return ali
return selectItem, nil
}
JoinClause <- Join ws table:TableName ws "IN"i ws column:SelectItem {
JoinClause <- Join ws table:TableName ws In ws column:SelectItem {
return makeJoin(table, column)
} / Join ws subQuery:SubQuerySelectItem {
return makeJoin(nil, subQuery)
}
OffsetClause <- "OFFSET"i ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
OffsetClause <- Offset ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
return []interface{}{offset.(parsers.Constant).Value, limit.(parsers.Constant).Value}, nil
}
@@ -317,7 +325,11 @@ SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectAr
return itemResult, nil
}
AsClause <- ws As ws alias:Identifier { return alias, nil }
AsClause <- (ws As)? ws !ExcludedKeywords alias:Identifier {
return alias, nil
}
ExcludedKeywords <- Select / Top / As / From / In / Join / Exists / Where / And / Or / Not / GroupBy / OrderBy / Offset
DotFieldAccess <- "." id:Identifier {
return id, nil
@@ -346,8 +358,14 @@ AndExpression <- ex1:ComparisonExpression ex2:(ws And ws ex:ComparisonExpression
ComparisonExpression <- "(" ws ex:OrExpression ws ")" { return ex, nil }
/ left:SelectItem ws op:ComparisonOperator ws right:SelectItem {
return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
} / inv:(Not ws)? ex:SelectItem {
if inv != nil {
ex1 := ex.(parsers.SelectItem)
ex1.Invert = true
return ex1, nil
}
return ex, nil
} / ex:BooleanLiteral { return ex, nil }
/ ex:SelectItem { return ex, nil }
OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* {
return makeOrderByClause(ex1, others)
@@ -373,6 +391,8 @@ As <- "AS"i
From <- "FROM"i
In <- "IN"i
Join <- "JOIN"i
Exists <- "EXISTS"i
@@ -383,11 +403,15 @@ And <- "AND"i
Or <- "OR"i wss
Not <- "NOT"i
GroupBy <- "GROUP"i ws "BY"i
OrderBy <- "ORDER"i ws "BY"i
ComparisonOperator <- ("=" / "!=" / "<" / "<=" / ">" / ">=") {
Offset <- "OFFSET"i
ComparisonOperator <- ("<=" / ">=" / "=" / "!=" / "<" / ">") {
return string(c.text), nil
}
@@ -700,7 +724,9 @@ MathNumberBinExpression <- "NumberBin"i ws "(" ws ex1:SelectItem others:(ws ","
MathPiExpression <- "PI"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathPi, []interface{}{}) }
MathRandExpression <- "RAND"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathRand, []interface{}{}) }
InFunction <- ex1:SelectProperty ws "IN"i ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
InFunction <- ex1:SelectProperty ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
} / "(" ws ex1:SelectItem ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" ws ")" {
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
}
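
Taken together, the grammar changes above are meant to accept query shapes like the following, taken or lightly adapted from the tests elsewhere in this diff:

    q1 := `SELECT c.id AS aliasWithAs, c.pk aliasWithoutAs FROM root c` // AS is now optional
    q2 := `SELECT c.id FROM c WHERE NOT c.boolean`                      // NOT as a boolean prefix
    q3 := `SELECT c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))` // IN over a function call
    q4 := `SELECT c.id FROM c WHERE c.id <= "456"`                      // "<=" now matched before "<"
    _, _, _, _ = q1, q2, q3, q4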

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Parse_Select(t *testing.T) {
@@ -17,7 +18,7 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -31,7 +32,7 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "@param"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -44,7 +45,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Distinct: true,
},
)
@@ -58,7 +59,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 1,
},
)
@@ -72,7 +73,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 5,
Offset: 3,
},
@@ -87,7 +88,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}, IsTopLevel: true},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -100,7 +101,20 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: true},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse SELECT c", func(t *testing.T) {
testQueryParse(
t,
`SELECT c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: false},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -120,7 +134,27 @@ func Test_Parse_Select(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse SELECT with alias", func(t *testing.T) {
testQueryParse(
t,
`SELECT
c.id AS aliasWithAs,
c.pk aliasWithoutAs
FROM root c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Alias: "aliasWithAs", Path: []string{"c", "id"}},
{Alias: "aliasWithoutAs", Path: []string{"c", "pk"}},
},
Table: parsers.Table{
Value: "c",
SelectItem: parsers.SelectItem{Alias: "c", Path: []string{"root"}},
},
},
)
})
@@ -140,7 +174,7 @@ func Test_Parse_Select(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

View File

@@ -30,7 +30,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -56,7 +56,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -85,7 +85,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -111,7 +111,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -137,7 +137,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -163,7 +163,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -189,7 +189,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -213,7 +213,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -237,7 +237,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -261,7 +261,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -286,7 +286,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -310,7 +310,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -334,7 +334,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -360,7 +360,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -385,7 +385,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -409,7 +409,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -434,7 +434,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -458,7 +458,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -484,7 +484,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -508,7 +508,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Parse_SubQuery(t *testing.T) {
@@ -22,7 +23,7 @@ func Test_Parse_SubQuery(t *testing.T) {
Alias: "c",
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
Table: parsers.Table{Value: "cc"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("cc")},
SelectItems: []parsers.SelectItem{
{Path: []string{"cc", "info"}, IsTopLevel: true},
},
@@ -42,9 +43,7 @@ func Test_Parse_SubQuery(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}},
},
Table: parsers.Table{
Value: "c",
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{
@@ -55,13 +54,12 @@ func Test_Parse_SubQuery(t *testing.T) {
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"tag", "name"}},
testutils.SelectItem_Path("tag", "name"),
},
Table: parsers.Table{
Value: "tag",
SelectItem: parsers.SelectItem{
Path: []string{"c", "tags"},
},
Value: "tag",
SelectItem: testutils.SelectItem_Path("c", "tags"),
IsInSelect: true,
},
},
},
@@ -82,10 +80,10 @@ func Test_Parse_SubQuery(t *testing.T) {
WHERE hasTags`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{
Value: "c",
SelectItem: testutils.SelectItem_Path("c"),
},
JoinItems: []parsers.JoinItem{
{
@@ -100,13 +98,12 @@ func Test_Parse_SubQuery(t *testing.T) {
Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"tag", "name"}},
testutils.SelectItem_Path("tag", "name"),
},
Table: parsers.Table{
Value: "tag",
SelectItem: parsers.SelectItem{
Path: []string{"c", "tags"},
},
Value: "tag",
SelectItem: testutils.SelectItem_Path("c", "tags"),
IsInSelect: true,
},
Exists: true,
},

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_TypeCheckingFunctions(t *testing.T) {
@@ -27,7 +28,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -63,7 +64,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -99,7 +100,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -135,7 +136,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -171,7 +172,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -207,7 +208,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -243,7 +244,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -279,7 +280,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -315,7 +316,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -351,7 +352,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{

View File

@@ -19,7 +19,7 @@ func Test_Parse_Were(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.ComparisonExpression{
Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
@@ -42,7 +42,7 @@ func Test_Parse_Were(t *testing.T) {
{Path: []string{"c", "_rid"}},
{Path: []string{"c", "_ts"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeOr,
Expressions: []interface{}{
@@ -67,12 +67,12 @@ func Test_Parse_Were(t *testing.T) {
t,
`select c.id
FROM c
WHERE c.isCool=true AND (c.id = "123" OR c.id = "456")`,
WHERE c.isCool=true AND (c.id = "123" OR c.id <= "456")`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeAnd,
Expressions: []interface{}{
@@ -90,7 +90,7 @@ func Test_Parse_Were(t *testing.T) {
Right: testutils.SelectItem_Constant_String("123"),
},
parsers.ComparisonExpression{
Operation: "=",
Operation: "<=",
Left: parsers.SelectItem{Path: []string{"c", "id"}},
Right: testutils.SelectItem_Constant_String("456"),
},
@@ -114,7 +114,7 @@ func Test_Parse_Were(t *testing.T) {
AND c.param=@param_id1`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{
Expressions: []interface{}{
parsers.ComparisonExpression{
@@ -148,4 +148,21 @@ func Test_Parse_Were(t *testing.T) {
},
)
})
t.Run("Should correctly parse NOT conditions", func(t *testing.T) {
testQueryParse(
t,
`select c.id
FROM c
WHERE NOT c.boolean`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Path: []string{"c", "boolean"},
Invert: true,
},
},
)
})
}

View File

@@ -5,6 +5,7 @@ import (
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_AggregateFunctions(t *testing.T) {
@@ -38,7 +39,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -67,7 +68,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -99,7 +100,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -132,7 +133,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -165,7 +166,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -198,7 +199,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{

View File

@@ -196,6 +196,10 @@ func (r rowContext) parseArray(argument interface{}) []interface{} {
ex := r.resolveSelectItem(exItem)
arrValue := reflect.ValueOf(ex)
if arrValue.Kind() == reflect.Invalid {
return nil
}
if arrValue.Kind() != reflect.Slice {
logger.ErrorLn("parseArray got parameters of wrong type")
return nil
@@ -220,7 +224,7 @@ func (r rowContext) partialMatch(item interface{}, exprToSearch interface{}) boo
}
for _, key := range exprValue.MapKeys() {
if itemValue.MapIndex(key).Interface() != exprValue.MapIndex(key).Interface() {
if !reflect.DeepEqual(itemValue.MapIndex(key).Interface(), exprValue.MapIndex(key).Interface()) {
return false
}
}
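
The switch to reflect.DeepEqual matters because comparing interface values with "!=" only works for comparable dynamic types; once both sides hold a nested map at the same key, the old check panics at runtime, so partial matches against nested objects could not succeed. A tiny standalone illustration:

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        a := map[string]interface{}{"nestedObject": map[string]interface{}{"size": "M"}}
        b := map[string]interface{}{"nestedObject": map[string]interface{}{"size": "M"}}
        // a["nestedObject"] != b["nestedObject"] would panic: maps are not comparable.
        fmt.Println(reflect.DeepEqual(a["nestedObject"], b["nestedObject"])) // true
    }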

View File

@@ -42,7 +42,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -59,10 +59,11 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
parsers.SelectStmt{
Parameters: map[string]interface{}{
"@categories": []interface{}{"coats", "jackets", "sweatshirts"},
"@objectArray": []interface{}{map[string]interface{}{"category": "shirts", "color": "blue"}},
"@fullMatchObject": map[string]interface{}{"category": "shirts", "color": "blue"},
"@objectArray": []interface{}{map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}}},
"@fullMatchObject": map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}},
"@partialMatchObject": map[string]interface{}{"category": "shirts"},
"@missingPartialMatchObject": map[string]interface{}{"category": "shorts", "color": "blue"},
"@nestedPartialMatchObject": map[string]interface{}{"nestedObject": map[string]interface{}{"size": "M"}},
},
SelectItems: []parsers.SelectItem{
{
@@ -133,17 +134,30 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
},
},
},
{
Alias: "ContainsNestedPartialMatchObject",
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallArrayContains,
Arguments: []interface{}{
testutils.SelectItem_Constant_Parameter("@objectArray"),
testutils.SelectItem_Constant_Parameter("@nestedPartialMatchObject"),
testutils.SelectItem_Constant_Bool(true),
},
},
},
},
},
[]memoryexecutor.RowType{map[string]interface{}{"id": "123"}},
[]memoryexecutor.RowType{
map[string]interface{}{
"ContainsItem": true,
"MissingItem": false,
"ContainsFullMatchObject": true,
"MissingFullMatchObject": false,
"ContainsPartialMatchObject": true,
"MissingPartialMatchObject": false,
"ContainsItem": true,
"MissingItem": false,
"ContainsFullMatchObject": true,
"MissingFullMatchObject": false,
"ContainsPartialMatchObject": true,
"MissingPartialMatchObject": false,
"ContainsNestedPartialMatchObject": true,
},
},
)
@@ -356,7 +370,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -392,7 +406,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -430,7 +444,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -468,7 +482,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{

View File

@@ -0,0 +1,27 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type rowArrayIterator struct {
documents []rowContext
index int
}
func NewRowArrayIterator(documents []rowContext) *rowArrayIterator {
return &rowArrayIterator{
documents: documents,
index: -1,
}
}
func (i *rowArrayIterator) Next() (rowContext, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return rowContext{}, datastore.IterEOF
}
row := i.documents[i.index]
i.documents[i.index] = rowContext{} // Help GC reclaim memory
return row, datastore.StatusOk
}
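
Every executor stage introduced in this change follows the same pull-based contract: call Next until it stops returning StatusOk. A same-package sketch of draining this iterator, with the row contents made up:

    it := NewRowArrayIterator([]rowContext{
        {tables: map[string]RowType{"c": map[string]interface{}{"id": "1"}}},
        {tables: map[string]RowType{"c": map[string]interface{}{"id": "2"}}},
    })
    for {
        row, status := it.Next()
        if status != datastore.StatusOk {
            break // datastore.IterEOF once the slice is drained
        }
        _ = row // hand the row to the next stage (filter, projection, ...)
    }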

View File

@@ -0,0 +1,397 @@
package memoryexecutor
import (
"fmt"
"reflect"
"strconv"
"strings"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers"
)
type RowType interface{}
type rowContext struct {
tables map[string]RowType
parameters map[string]interface{}
grouppedRows []rowContext
}
type rowIterator interface {
Next() (rowContext, datastore.DataStoreStatus)
}
type rowTypeIterator interface {
Next() (RowType, datastore.DataStoreStatus)
}
func resolveDestinationColumnName(selectItem parsers.SelectItem, itemIndex int, queryParameters map[string]interface{}) string {
if selectItem.Alias != "" {
return selectItem.Alias
}
destinationName := fmt.Sprintf("$%d", itemIndex+1)
if len(selectItem.Path) > 0 {
destinationName = selectItem.Path[len(selectItem.Path)-1]
}
if destinationName[0] == '@' {
destinationName = queryParameters[destinationName].(string)
}
return destinationName
}
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
if selectItem.Type == parsers.SelectItemTypeArray {
return r.selectItem_SelectItemTypeArray(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeObject {
return r.selectItem_SelectItemTypeObject(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeConstant {
return r.selectItem_SelectItemTypeConstant(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeSubQuery {
return r.selectItem_SelectItemTypeSubQuery(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
return nil
}
return r.selectItem_SelectItemTypeField(selectItem)
}
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
arrayValue := make([]interface{}, 0)
for _, subSelectItem := range selectItem.SelectItems {
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
}
return arrayValue
}
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
objectValue := make(map[string]interface{})
for _, subSelectItem := range selectItem.SelectItems {
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
}
return objectValue
}
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
var typedValue parsers.Constant
var ok bool
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
// TODO: Handle error
logger.ErrorLn("parsers.Constant has incorrect Value type")
}
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
r.parameters != nil {
if key, ok := typedValue.Value.(string); ok {
return r.parameters[key]
}
}
return typedValue.Value
}
func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
subQuery := selectItem.Value.(parsers.SelectStmt)
subQueryResult := executeQuery(
subQuery,
NewRowArrayIterator([]rowContext{r}),
)
if subQuery.Exists {
_, status := subQueryResult.Next()
return status == datastore.StatusOk
}
allDocuments := make([]RowType, 0)
for {
row, status := subQueryResult.Next()
if status != datastore.StatusOk {
break
}
allDocuments = append(allDocuments, row)
}
return allDocuments
}
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
switch functionCall.Type {
case parsers.FunctionCallStringEquals:
return r.strings_StringEquals(functionCall.Arguments)
case parsers.FunctionCallContains:
return r.strings_Contains(functionCall.Arguments)
case parsers.FunctionCallEndsWith:
return r.strings_EndsWith(functionCall.Arguments)
case parsers.FunctionCallStartsWith:
return r.strings_StartsWith(functionCall.Arguments)
case parsers.FunctionCallConcat:
return r.strings_Concat(functionCall.Arguments)
case parsers.FunctionCallIndexOf:
return r.strings_IndexOf(functionCall.Arguments)
case parsers.FunctionCallToString:
return r.strings_ToString(functionCall.Arguments)
case parsers.FunctionCallUpper:
return r.strings_Upper(functionCall.Arguments)
case parsers.FunctionCallLower:
return r.strings_Lower(functionCall.Arguments)
case parsers.FunctionCallLeft:
return r.strings_Left(functionCall.Arguments)
case parsers.FunctionCallLength:
return r.strings_Length(functionCall.Arguments)
case parsers.FunctionCallLTrim:
return r.strings_LTrim(functionCall.Arguments)
case parsers.FunctionCallReplace:
return r.strings_Replace(functionCall.Arguments)
case parsers.FunctionCallReplicate:
return r.strings_Replicate(functionCall.Arguments)
case parsers.FunctionCallReverse:
return r.strings_Reverse(functionCall.Arguments)
case parsers.FunctionCallRight:
return r.strings_Right(functionCall.Arguments)
case parsers.FunctionCallRTrim:
return r.strings_RTrim(functionCall.Arguments)
case parsers.FunctionCallSubstring:
return r.strings_Substring(functionCall.Arguments)
case parsers.FunctionCallTrim:
return r.strings_Trim(functionCall.Arguments)
case parsers.FunctionCallIsDefined:
return r.typeChecking_IsDefined(functionCall.Arguments)
case parsers.FunctionCallIsArray:
return r.typeChecking_IsArray(functionCall.Arguments)
case parsers.FunctionCallIsBool:
return r.typeChecking_IsBool(functionCall.Arguments)
case parsers.FunctionCallIsFiniteNumber:
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
case parsers.FunctionCallIsInteger:
return r.typeChecking_IsInteger(functionCall.Arguments)
case parsers.FunctionCallIsNull:
return r.typeChecking_IsNull(functionCall.Arguments)
case parsers.FunctionCallIsNumber:
return r.typeChecking_IsNumber(functionCall.Arguments)
case parsers.FunctionCallIsObject:
return r.typeChecking_IsObject(functionCall.Arguments)
case parsers.FunctionCallIsPrimitive:
return r.typeChecking_IsPrimitive(functionCall.Arguments)
case parsers.FunctionCallIsString:
return r.typeChecking_IsString(functionCall.Arguments)
case parsers.FunctionCallArrayConcat:
return r.array_Concat(functionCall.Arguments)
case parsers.FunctionCallArrayContains:
return r.array_Contains(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAny:
return r.array_Contains_Any(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAll:
return r.array_Contains_All(functionCall.Arguments)
case parsers.FunctionCallArrayLength:
return r.array_Length(functionCall.Arguments)
case parsers.FunctionCallArraySlice:
return r.array_Slice(functionCall.Arguments)
case parsers.FunctionCallSetIntersect:
return r.set_Intersect(functionCall.Arguments)
case parsers.FunctionCallSetUnion:
return r.set_Union(functionCall.Arguments)
case parsers.FunctionCallMathAbs:
return r.math_Abs(functionCall.Arguments)
case parsers.FunctionCallMathAcos:
return r.math_Acos(functionCall.Arguments)
case parsers.FunctionCallMathAsin:
return r.math_Asin(functionCall.Arguments)
case parsers.FunctionCallMathAtan:
return r.math_Atan(functionCall.Arguments)
case parsers.FunctionCallMathCeiling:
return r.math_Ceiling(functionCall.Arguments)
case parsers.FunctionCallMathCos:
return r.math_Cos(functionCall.Arguments)
case parsers.FunctionCallMathCot:
return r.math_Cot(functionCall.Arguments)
case parsers.FunctionCallMathDegrees:
return r.math_Degrees(functionCall.Arguments)
case parsers.FunctionCallMathExp:
return r.math_Exp(functionCall.Arguments)
case parsers.FunctionCallMathFloor:
return r.math_Floor(functionCall.Arguments)
case parsers.FunctionCallMathIntBitNot:
return r.math_IntBitNot(functionCall.Arguments)
case parsers.FunctionCallMathLog10:
return r.math_Log10(functionCall.Arguments)
case parsers.FunctionCallMathRadians:
return r.math_Radians(functionCall.Arguments)
case parsers.FunctionCallMathRound:
return r.math_Round(functionCall.Arguments)
case parsers.FunctionCallMathSign:
return r.math_Sign(functionCall.Arguments)
case parsers.FunctionCallMathSin:
return r.math_Sin(functionCall.Arguments)
case parsers.FunctionCallMathSqrt:
return r.math_Sqrt(functionCall.Arguments)
case parsers.FunctionCallMathSquare:
return r.math_Square(functionCall.Arguments)
case parsers.FunctionCallMathTan:
return r.math_Tan(functionCall.Arguments)
case parsers.FunctionCallMathTrunc:
return r.math_Trunc(functionCall.Arguments)
case parsers.FunctionCallMathAtn2:
return r.math_Atn2(functionCall.Arguments)
case parsers.FunctionCallMathIntAdd:
return r.math_IntAdd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitAnd:
return r.math_IntBitAnd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitLeftShift:
return r.math_IntBitLeftShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitOr:
return r.math_IntBitOr(functionCall.Arguments)
case parsers.FunctionCallMathIntBitRightShift:
return r.math_IntBitRightShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitXor:
return r.math_IntBitXor(functionCall.Arguments)
case parsers.FunctionCallMathIntDiv:
return r.math_IntDiv(functionCall.Arguments)
case parsers.FunctionCallMathIntMod:
return r.math_IntMod(functionCall.Arguments)
case parsers.FunctionCallMathIntMul:
return r.math_IntMul(functionCall.Arguments)
case parsers.FunctionCallMathIntSub:
return r.math_IntSub(functionCall.Arguments)
case parsers.FunctionCallMathPower:
return r.math_Power(functionCall.Arguments)
case parsers.FunctionCallMathLog:
return r.math_Log(functionCall.Arguments)
case parsers.FunctionCallMathNumberBin:
return r.math_NumberBin(functionCall.Arguments)
case parsers.FunctionCallMathPi:
return r.math_Pi()
case parsers.FunctionCallMathRand:
return r.math_Rand()
case parsers.FunctionCallAggregateAvg:
return r.aggregate_Avg(functionCall.Arguments)
case parsers.FunctionCallAggregateCount:
return r.aggregate_Count(functionCall.Arguments)
case parsers.FunctionCallAggregateMax:
return r.aggregate_Max(functionCall.Arguments)
case parsers.FunctionCallAggregateMin:
return r.aggregate_Min(functionCall.Arguments)
case parsers.FunctionCallAggregateSum:
return r.aggregate_Sum(functionCall.Arguments)
case parsers.FunctionCallIn:
return r.misc_In(functionCall.Arguments)
}
logger.Errorf("Unknown function call type: %v", functionCall.Type)
return nil
}
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
value := r.tables[selectItem.Path[0]]
if len(selectItem.Path) > 1 {
for _, pathSegment := range selectItem.Path[1:] {
if pathSegment[0] == '@' {
pathSegment = r.parameters[pathSegment].(string)
}
switch nestedValue := value.(type) {
case map[string]interface{}:
value = nestedValue[pathSegment]
case map[string]RowType:
value = nestedValue[pathSegment]
case datastore.Document:
value = nestedValue[pathSegment]
case map[string]datastore.Document:
value = nestedValue[pathSegment]
case []int, []string, []interface{}:
slice := reflect.ValueOf(nestedValue)
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
value = slice.Index(arrayIndex).Interface()
} else {
return nil
}
default:
return nil
}
}
}
return value
}
func compareValues(val1, val2 interface{}) int {
if val1 == nil && val2 == nil {
return 0
} else if val1 == nil {
return -1
} else if val2 == nil {
return 1
}
if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
return 1
}
switch val1 := val1.(type) {
case int:
val2 := val2.(int)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case float64:
val2 := val2.(float64)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case string:
val2 := val2.(string)
return strings.Compare(val1, val2)
case bool:
val2 := val2.(bool)
if val1 == val2 {
return 0
} else if val1 {
return 1
} else {
return -1
}
// TODO: Add more types
default:
if reflect.DeepEqual(val1, val2) {
return 0
}
return 1
}
}
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
targetMap := make(map[string]T)
for k, v := range originalMap {
targetMap[k] = v
}
return targetMap
}
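
compareValues is the single ordering primitive the executor leans on: nil sorts before any concrete value and mismatched types collapse to 1, which is what lets missing fields surface first when results are ordered. A few same-package calls (fmt assumed for printing):

    fmt.Println(compareValues(nil, 42))     // -1: nil sorts before any concrete value
    fmt.Println(compareValues("a", "b"))    // -1: strings use strings.Compare
    fmt.Println(compareValues(1, "1"))      // 1: mismatched types are treated as "greater"
    fmt.Println(compareValues(true, false)) // 1: true sorts after false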

View File

@@ -0,0 +1,36 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type distinctIterator struct {
documents rowTypeIterator
seenDocs []RowType
}
func (di *distinctIterator) Next() (RowType, datastore.DataStoreStatus) {
if di.documents == nil {
return rowContext{}, datastore.IterEOF
}
for {
row, status := di.documents.Next()
if status != datastore.StatusOk {
di.documents = nil
return rowContext{}, status
}
if !di.seen(row) {
di.seenDocs = append(di.seenDocs, row)
return row, status
}
}
}
func (di *distinctIterator) seen(row RowType) bool {
for _, seenRow := range di.seenDocs {
if compareValues(seenRow, row) == 0 {
return true
}
}
return false
}
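
A hedged usage sketch for distinctIterator, assuming the same package; the sliceRowTypeIterator helper and distinctSketch function below are hypothetical and exist only to feed rows into the iterator:

package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

// sliceRowTypeIterator is a hypothetical helper that yields RowType values
// from a plain slice, ending with IterEOF.
type sliceRowTypeIterator struct {
	rows  []RowType
	index int
}

func (s *sliceRowTypeIterator) Next() (RowType, datastore.DataStoreStatus) {
	if s.index >= len(s.rows) {
		return nil, datastore.IterEOF
	}
	row := s.rows[s.index]
	s.index++
	return row, datastore.StatusOk
}

// distinctSketch drains a distinctIterator; duplicate projected rows are
// dropped via compareValues, so the result holds two rows (pk 123 and pk 456).
func distinctSketch() []RowType {
	source := &sliceRowTypeIterator{rows: []RowType{
		map[string]interface{}{"pk": 123},
		map[string]interface{}{"pk": 123},
		map[string]interface{}{"pk": 456},
	}}
	di := &distinctIterator{documents: source}

	result := make([]RowType, 0)
	for {
		row, status := di.Next()
		if status != datastore.StatusOk {
			break
		}
		result = append(result, row)
	}
	return result
}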

View File

@@ -0,0 +1,143 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers"
)
type filterIterator struct {
documents rowIterator
filters interface{}
}
func (fi *filterIterator) Next() (rowContext, datastore.DataStoreStatus) {
if fi.documents == nil {
return rowContext{}, datastore.IterEOF
}
for {
row, status := fi.documents.Next()
if status != datastore.StatusOk {
fi.documents = nil
return rowContext{}, status
}
if fi.evaluateFilters(row) {
return row, status
}
}
}
func (fi *filterIterator) evaluateFilters(row rowContext) bool {
if fi.filters == nil {
return true
}
switch typedFilters := fi.filters.(type) {
case parsers.ComparisonExpression:
return row.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return row.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := row.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
if typedFilters.Invert {
return !value
}
return value
}
}
return false
}
func (r rowContext) applyFilters(filters interface{}) bool {
if filters == nil {
return true
}
switch typedFilters := filters.(type) {
case parsers.ComparisonExpression:
return r.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return r.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := r.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
if typedFilters.Invert {
return !value
}
return value
}
}
return false
}
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
if !leftExpressionOk || !rightExpressionOk {
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
return false
}
leftValue := r.resolveSelectItem(leftExpression)
rightValue := r.resolveSelectItem(rightExpression)
cmp := compareValues(leftValue, rightValue)
switch expression.Operation {
case "=":
return cmp == 0
case "!=":
return cmp != 0
case "<":
return cmp < 0
case ">":
return cmp > 0
case "<=":
return cmp <= 0
case ">=":
return cmp >= 0
}
return false
}
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
var result bool
for i, subExpression := range expression.Expressions {
expressionResult := r.applyFilters(subExpression)
if i == 0 {
result = expressionResult
}
switch expression.Operation {
case parsers.LogicalExpressionTypeAnd:
result = result && expressionResult
if !result {
return false
}
case parsers.LogicalExpressionTypeOr:
result = result || expressionResult
if result {
return true
}
}
}
return result
}
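
A small sketch of how the SelectItem branch above gives NOT its semantics, assuming the same package; notFilterSketch is a hypothetical helper name:

package memoryexecutor

import "github.com/pikami/cosmium/parsers"

// notFilterSketch evaluates the equivalent of "WHERE NOT c.isCool" against a
// row: the boolean field is resolved first and Invert flips it, so the filter
// passes only when c.isCool resolves to false.
func notFilterSketch(row rowContext) bool {
	filter := parsers.SelectItem{
		Path:   []string{"c", "isCool"},
		Invert: true,
	}
	return row.applyFilters(filter)
}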

View File

@@ -0,0 +1,73 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type fromIterator struct {
documents rowIterator
table parsers.Table
buffer []rowContext
bufferIndex int
}
func (fi *fromIterator) Next() (rowContext, datastore.DataStoreStatus) {
if fi.documents == nil {
return rowContext{}, datastore.IterEOF
}
// Return from buffer if available
if fi.bufferIndex < len(fi.buffer) {
result := fi.buffer[fi.bufferIndex]
fi.buffer[fi.bufferIndex] = rowContext{}
fi.bufferIndex++
return result, datastore.StatusOk
}
// Resolve next row from documents
row, status := fi.documents.Next()
if status != datastore.StatusOk {
fi.documents = nil
return row, status
}
if fi.table.SelectItem.Path != nil || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
destinationTableName := fi.table.SelectItem.Alias
if destinationTableName == "" {
destinationTableName = fi.table.Value
}
if destinationTableName == "" {
destinationTableName = resolveDestinationColumnName(fi.table.SelectItem, 0, row.parameters)
}
if fi.table.IsInSelect || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := row.parseArray(fi.table.SelectItem)
rowContexts := make([]rowContext, len(selectValue))
for i, newRowData := range selectValue {
rowContexts[i].parameters = row.parameters
rowContexts[i].tables = copyMap(row.tables)
rowContexts[i].tables[destinationTableName] = newRowData
}
fi.buffer = rowContexts
fi.bufferIndex = 0
return fi.Next()
}
if len(fi.table.SelectItem.Path) > 0 {
sourceTableName := fi.table.SelectItem.Path[0]
sourceTableData := row.tables[sourceTableName]
if sourceTableData == nil {
// When the source table is not found, assume it's the root document
row.tables[sourceTableName] = row.tables["$root"]
}
}
newRowData := row.resolveSelectItem(fi.table.SelectItem)
row.tables[destinationTableName] = newRowData
return row, status
}
return row, status
}

View File

@@ -0,0 +1,69 @@
package memoryexecutor
import (
"fmt"
"strings"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type groupByIterator struct {
documents rowIterator
groupBy []parsers.SelectItem
groupedRows []rowContext
}
func (gi *groupByIterator) Next() (rowContext, datastore.DataStoreStatus) {
if gi.groupedRows != nil {
if len(gi.groupedRows) == 0 {
return rowContext{}, datastore.IterEOF
}
row := gi.groupedRows[0]
gi.groupedRows = gi.groupedRows[1:]
return row, datastore.StatusOk
}
documents := make([]rowContext, 0)
for {
row, status := gi.documents.Next()
if status != datastore.StatusOk {
break
}
documents = append(documents, row)
}
gi.documents = nil
groupedRows := make(map[string][]rowContext)
groupedKeys := make([]string, 0)
for _, row := range documents {
key := row.generateGroupByKey(gi.groupBy)
if _, ok := groupedRows[key]; !ok {
groupedKeys = append(groupedKeys, key)
}
groupedRows[key] = append(groupedRows[key], row)
}
gi.groupedRows = make([]rowContext, 0)
for _, key := range groupedKeys {
gi.groupedRows = append(gi.groupedRows, rowContext{
tables: groupedRows[key][0].tables,
parameters: groupedRows[key][0].parameters,
grouppedRows: groupedRows[key],
})
}
return gi.Next()
}
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
var keyBuilder strings.Builder
for _, selectItem := range groupBy {
value := r.resolveSelectItem(selectItem)
keyBuilder.WriteString(fmt.Sprintf("%v", value))
keyBuilder.WriteString(":")
}
return keyBuilder.String()
}
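
A brief sketch of the key format generateGroupByKey produces, assuming the same package; groupKeySketch and its sample row are hypothetical:

package memoryexecutor

import "github.com/pikami/cosmium/parsers"

// groupKeySketch builds the grouping key for a row whose "c" table holds
// {"pk": 123, "isCool": true} grouped by c.pk and c.isCool; the result is
// "123:true:", so rows with equal group-by values land in the same bucket.
func groupKeySketch() string {
	row := rowContext{
		tables: map[string]RowType{
			"c": map[string]interface{}{"pk": 123, "isCool": true},
		},
	}
	groupBy := []parsers.SelectItem{
		{Path: []string{"c", "pk"}},
		{Path: []string{"c", "isCool"}},
	}
	return row.generateGroupByKey(groupBy)
}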

View File

@@ -0,0 +1,62 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type joinIterator struct {
documents rowIterator
query parsers.SelectStmt
buffer []rowContext
}
func (ji *joinIterator) Next() (rowContext, datastore.DataStoreStatus) {
if ji.documents == nil {
return rowContext{}, datastore.IterEOF
}
if len(ji.buffer) > 0 {
row := ji.buffer[0]
ji.buffer = ji.buffer[1:]
return row, datastore.StatusOk
}
doc, status := ji.documents.Next()
if status != datastore.StatusOk {
ji.documents = nil
return rowContext{}, status
}
ji.buffer = []rowContext{doc}
for _, joinItem := range ji.query.JoinItems {
nextDocuments := make([]rowContext, 0)
for _, row := range ji.buffer {
joinedItems := row.resolveJoinItemSelect(joinItem.SelectItem)
for _, joinedItem := range joinedItems {
tablesCopy := copyMap(row.tables)
tablesCopy[joinItem.Table.Value] = joinedItem
nextDocuments = append(nextDocuments, rowContext{
parameters: row.parameters,
tables: tablesCopy,
})
}
}
ji.buffer = nextDocuments
}
return ji.Next()
}
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := r.parseArray(selectItem)
documents := make([]RowType, len(selectValue))
for i, newRowData := range selectValue {
documents[i] = newRowData
}
return documents
}
return []RowType{}
}

View File

@@ -5,6 +5,7 @@ import (
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_Joins(t *testing.T) {
@@ -33,7 +34,7 @@ func Test_Execute_Joins(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{
@@ -62,7 +63,7 @@ func Test_Execute_Joins(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"cc"}, IsTopLevel: true},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{
{
Table: parsers.Table{

View File

@@ -0,0 +1,19 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type limitIterator struct {
documents rowTypeIterator
limit int
count int
}
func (li *limitIterator) Next() (RowType, datastore.DataStoreStatus) {
if li.count >= li.limit {
li.documents = nil
return rowContext{}, datastore.IterEOF
}
li.count++
return li.documents.Next()
}

View File

@@ -6,6 +6,7 @@ import (
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_MathFunctions(t *testing.T) {
@@ -261,7 +262,7 @@ func testMathFunctionExecute(
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
data,
expectedData,

View File

@@ -1,702 +1,92 @@
package memoryexecutor
import (
"fmt"
"reflect"
"sort"
"strconv"
"strings"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
"golang.org/x/exp/slices"
)
type RowType interface{}
type rowContext struct {
tables map[string]RowType
parameters map[string]interface{}
grouppedRows []rowContext
func ExecuteQuery(query parsers.SelectStmt, documents rowTypeIterator) []RowType {
resultIter := executeQuery(query, &rowTypeToRowContextIterator{documents: documents, query: query})
result := make([]RowType, 0)
for {
row, status := resultIter.Next()
if status != datastore.StatusOk {
break
}
result = append(result, row)
}
return result
}
func ExecuteQuery(query parsers.SelectStmt, documents []RowType) []RowType {
currentDocuments := make([]rowContext, 0)
for _, doc := range documents {
currentDocuments = append(currentDocuments, resolveFrom(query, doc)...)
func executeQuery(query parsers.SelectStmt, documents rowIterator) rowTypeIterator {
// Resolve FROM
var iter rowIterator = &fromIterator{
documents: documents,
table: query.Table,
}
// Handle JOINS
nextDocuments := make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
rowContexts := currentDocument.handleJoin(query)
nextDocuments = append(nextDocuments, rowContexts...)
}
currentDocuments = nextDocuments
// Apply filters
nextDocuments = make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
if currentDocument.applyFilters(query.Filters) {
nextDocuments = append(nextDocuments, currentDocument)
// Apply JOIN
if len(query.JoinItems) > 0 {
iter = &joinIterator{
documents: iter,
query: query,
}
}
currentDocuments = nextDocuments
// Apply order
// Apply WHERE
if query.Filters != nil {
iter = &filterIterator{
documents: iter,
filters: query.Filters,
}
}
// Apply ORDER BY
if len(query.OrderExpressions) > 0 {
applyOrder(currentDocuments, query.OrderExpressions)
iter = &orderIterator{
documents: iter,
orderExpressions: query.OrderExpressions,
}
}
// Apply group by
// Apply GROUP BY
if len(query.GroupBy) > 0 {
currentDocuments = applyGroupBy(currentDocuments, query.GroupBy)
iter = &groupByIterator{
documents: iter,
groupBy: query.GroupBy,
}
}
// Apply select
projectedDocuments := applyProjection(currentDocuments, query.SelectItems, query.GroupBy)
// Apply SELECT
var projectedIterator rowTypeIterator = &projectIterator{
documents: iter,
selectItems: query.SelectItems,
groupBy: query.GroupBy,
}
// Apply distinct
// Apply DISTINCT
if query.Distinct {
projectedDocuments = deduplicate(projectedDocuments)
projectedIterator = &distinctIterator{
documents: projectedIterator,
}
}
// Apply result limit
if query.Count > 0 && len(projectedDocuments) > query.Count {
projectedDocuments = projectedDocuments[:query.Count]
// Apply OFFSET
if query.Offset > 0 {
projectedIterator = &offsetIterator{
documents: projectedIterator,
offset: query.Offset,
}
}
return projectedDocuments
}
func resolveFrom(query parsers.SelectStmt, doc RowType) []rowContext {
initialRow, gotParentContext := doc.(rowContext)
if !gotParentContext {
var initialTableName string
if query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
initialTableName = query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value
}
if initialTableName == "" {
initialTableName = query.Table.Value
}
initialRow = rowContext{
parameters: query.Parameters,
tables: map[string]RowType{
initialTableName: doc,
},
}
}
if query.Table.SelectItem.Path != nil || query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
destinationTableName := query.Table.SelectItem.Alias
if destinationTableName == "" {
destinationTableName = query.Table.Value
}
selectValue := initialRow.parseArray(query.Table.SelectItem)
rowContexts := make([]rowContext, len(selectValue))
for i, newRowData := range selectValue {
rowContexts[i].parameters = initialRow.parameters
rowContexts[i].tables = copyMap(initialRow.tables)
rowContexts[i].tables[destinationTableName] = newRowData
}
return rowContexts
}
return []rowContext{initialRow}
}
func (r rowContext) handleJoin(query parsers.SelectStmt) []rowContext {
currentDocuments := []rowContext{r}
for _, joinItem := range query.JoinItems {
nextDocuments := make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
joinedItems := currentDocument.resolveJoinItemSelect(joinItem.SelectItem)
for _, joinedItem := range joinedItems {
tablesCopy := copyMap(currentDocument.tables)
tablesCopy[joinItem.Table.Value] = joinedItem
nextDocuments = append(nextDocuments, rowContext{
parameters: currentDocument.parameters,
tables: tablesCopy,
})
}
}
currentDocuments = nextDocuments
}
return currentDocuments
}
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := r.parseArray(selectItem)
documents := make([]RowType, len(selectValue))
for i, newRowData := range selectValue {
documents[i] = newRowData
}
return documents
}
return []RowType{}
}
func (r rowContext) applyFilters(filters interface{}) bool {
if filters == nil {
return true
}
switch typedFilters := filters.(type) {
case parsers.ComparisonExpression:
return r.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return r.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := r.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
return value
}
}
return false
}
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
if !leftExpressionOk || !rightExpressionOk {
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
return false
}
leftValue := r.resolveSelectItem(leftExpression)
rightValue := r.resolveSelectItem(rightExpression)
cmp := compareValues(leftValue, rightValue)
switch expression.Operation {
case "=":
return cmp == 0
case "!=":
return cmp != 0
case "<":
return cmp < 0
case ">":
return cmp > 0
case "<=":
return cmp <= 0
case ">=":
return cmp >= 0
}
return false
}
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
var result bool
for i, subExpression := range expression.Expressions {
expressionResult := r.applyFilters(subExpression)
if i == 0 {
result = expressionResult
}
switch expression.Operation {
case parsers.LogicalExpressionTypeAnd:
result = result && expressionResult
if !result {
return false
}
case parsers.LogicalExpressionTypeOr:
result = result || expressionResult
if result {
return true
}
}
}
return result
}
func applyOrder(documents []rowContext, orderExpressions []parsers.OrderExpression) {
less := func(i, j int) bool {
for _, order := range orderExpressions {
val1 := documents[i].resolveSelectItem(order.SelectItem)
val2 := documents[j].resolveSelectItem(order.SelectItem)
cmp := compareValues(val1, val2)
if cmp != 0 {
if order.Direction == parsers.OrderDirectionDesc {
return cmp > 0
}
return cmp < 0
}
}
return i < j
}
sort.SliceStable(documents, less)
}
func applyGroupBy(documents []rowContext, groupBy []parsers.SelectItem) []rowContext {
groupedRows := make(map[string][]rowContext)
groupedKeys := make([]string, 0)
for _, row := range documents {
key := row.generateGroupByKey(groupBy)
if _, ok := groupedRows[key]; !ok {
groupedKeys = append(groupedKeys, key)
}
groupedRows[key] = append(groupedRows[key], row)
}
grouppedRows := make([]rowContext, 0)
for _, key := range groupedKeys {
grouppedRowContext := rowContext{
tables: groupedRows[key][0].tables,
parameters: groupedRows[key][0].parameters,
grouppedRows: groupedRows[key],
}
grouppedRows = append(grouppedRows, grouppedRowContext)
}
return grouppedRows
}
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
var keyBuilder strings.Builder
for _, selectItem := range groupBy {
value := r.resolveSelectItem(selectItem)
keyBuilder.WriteString(fmt.Sprintf("%v", value))
keyBuilder.WriteString(":")
}
return keyBuilder.String()
}
func applyProjection(documents []rowContext, selectItems []parsers.SelectItem, groupBy []parsers.SelectItem) []RowType {
if len(documents) == 0 {
return []RowType{}
}
if hasAggregateFunctions(selectItems) && len(groupBy) == 0 {
// When we have aggregate functions without a GROUP BY clause,
// we should aggregate all rows in that case
rowContext := rowContext{
tables: documents[0].tables,
parameters: documents[0].parameters,
grouppedRows: documents,
}
return []RowType{rowContext.applyProjection(selectItems)}
}
projectedDocuments := make([]RowType, len(documents))
for index, row := range documents {
projectedDocuments[index] = row.applyProjection(selectItems)
}
return projectedDocuments
}
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
// When the first value is top level, select it instead
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
return r.resolveSelectItem(selectItems[0])
}
// Construct a new row based on the selected columns
row := make(map[string]interface{})
for index, selectItem := range selectItems {
destinationName := selectItem.Alias
if destinationName == "" {
if len(selectItem.Path) > 0 {
destinationName = selectItem.Path[len(selectItem.Path)-1]
} else {
destinationName = fmt.Sprintf("$%d", index+1)
}
if destinationName[0] == '@' {
destinationName = r.parameters[destinationName].(string)
}
}
row[destinationName] = r.resolveSelectItem(selectItem)
}
return row
}
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
if selectItem.Type == parsers.SelectItemTypeArray {
return r.selectItem_SelectItemTypeArray(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeObject {
return r.selectItem_SelectItemTypeObject(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeConstant {
return r.selectItem_SelectItemTypeConstant(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeSubQuery {
return r.selectItem_SelectItemTypeSubQuery(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
return nil
}
return r.selectItem_SelectItemTypeField(selectItem)
}
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
arrayValue := make([]interface{}, 0)
for _, subSelectItem := range selectItem.SelectItems {
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
}
return arrayValue
}
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
objectValue := make(map[string]interface{})
for _, subSelectItem := range selectItem.SelectItems {
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
}
return objectValue
}
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
var typedValue parsers.Constant
var ok bool
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
// TODO: Handle error
logger.ErrorLn("parsers.Constant has incorrect Value type")
}
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
r.parameters != nil {
if key, ok := typedValue.Value.(string); ok {
return r.parameters[key]
}
}
return typedValue.Value
}
func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
subQuery := selectItem.Value.(parsers.SelectStmt)
subQueryResult := ExecuteQuery(
subQuery,
[]RowType{r},
)
if subQuery.Exists {
return len(subQueryResult) > 0
}
return subQueryResult
}
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
switch functionCall.Type {
case parsers.FunctionCallStringEquals:
return r.strings_StringEquals(functionCall.Arguments)
case parsers.FunctionCallContains:
return r.strings_Contains(functionCall.Arguments)
case parsers.FunctionCallEndsWith:
return r.strings_EndsWith(functionCall.Arguments)
case parsers.FunctionCallStartsWith:
return r.strings_StartsWith(functionCall.Arguments)
case parsers.FunctionCallConcat:
return r.strings_Concat(functionCall.Arguments)
case parsers.FunctionCallIndexOf:
return r.strings_IndexOf(functionCall.Arguments)
case parsers.FunctionCallToString:
return r.strings_ToString(functionCall.Arguments)
case parsers.FunctionCallUpper:
return r.strings_Upper(functionCall.Arguments)
case parsers.FunctionCallLower:
return r.strings_Lower(functionCall.Arguments)
case parsers.FunctionCallLeft:
return r.strings_Left(functionCall.Arguments)
case parsers.FunctionCallLength:
return r.strings_Length(functionCall.Arguments)
case parsers.FunctionCallLTrim:
return r.strings_LTrim(functionCall.Arguments)
case parsers.FunctionCallReplace:
return r.strings_Replace(functionCall.Arguments)
case parsers.FunctionCallReplicate:
return r.strings_Replicate(functionCall.Arguments)
case parsers.FunctionCallReverse:
return r.strings_Reverse(functionCall.Arguments)
case parsers.FunctionCallRight:
return r.strings_Right(functionCall.Arguments)
case parsers.FunctionCallRTrim:
return r.strings_RTrim(functionCall.Arguments)
case parsers.FunctionCallSubstring:
return r.strings_Substring(functionCall.Arguments)
case parsers.FunctionCallTrim:
return r.strings_Trim(functionCall.Arguments)
case parsers.FunctionCallIsDefined:
return r.typeChecking_IsDefined(functionCall.Arguments)
case parsers.FunctionCallIsArray:
return r.typeChecking_IsArray(functionCall.Arguments)
case parsers.FunctionCallIsBool:
return r.typeChecking_IsBool(functionCall.Arguments)
case parsers.FunctionCallIsFiniteNumber:
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
case parsers.FunctionCallIsInteger:
return r.typeChecking_IsInteger(functionCall.Arguments)
case parsers.FunctionCallIsNull:
return r.typeChecking_IsNull(functionCall.Arguments)
case parsers.FunctionCallIsNumber:
return r.typeChecking_IsNumber(functionCall.Arguments)
case parsers.FunctionCallIsObject:
return r.typeChecking_IsObject(functionCall.Arguments)
case parsers.FunctionCallIsPrimitive:
return r.typeChecking_IsPrimitive(functionCall.Arguments)
case parsers.FunctionCallIsString:
return r.typeChecking_IsString(functionCall.Arguments)
case parsers.FunctionCallArrayConcat:
return r.array_Concat(functionCall.Arguments)
case parsers.FunctionCallArrayContains:
return r.array_Contains(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAny:
return r.array_Contains_Any(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAll:
return r.array_Contains_All(functionCall.Arguments)
case parsers.FunctionCallArrayLength:
return r.array_Length(functionCall.Arguments)
case parsers.FunctionCallArraySlice:
return r.array_Slice(functionCall.Arguments)
case parsers.FunctionCallSetIntersect:
return r.set_Intersect(functionCall.Arguments)
case parsers.FunctionCallSetUnion:
return r.set_Union(functionCall.Arguments)
case parsers.FunctionCallMathAbs:
return r.math_Abs(functionCall.Arguments)
case parsers.FunctionCallMathAcos:
return r.math_Acos(functionCall.Arguments)
case parsers.FunctionCallMathAsin:
return r.math_Asin(functionCall.Arguments)
case parsers.FunctionCallMathAtan:
return r.math_Atan(functionCall.Arguments)
case parsers.FunctionCallMathCeiling:
return r.math_Ceiling(functionCall.Arguments)
case parsers.FunctionCallMathCos:
return r.math_Cos(functionCall.Arguments)
case parsers.FunctionCallMathCot:
return r.math_Cot(functionCall.Arguments)
case parsers.FunctionCallMathDegrees:
return r.math_Degrees(functionCall.Arguments)
case parsers.FunctionCallMathExp:
return r.math_Exp(functionCall.Arguments)
case parsers.FunctionCallMathFloor:
return r.math_Floor(functionCall.Arguments)
case parsers.FunctionCallMathIntBitNot:
return r.math_IntBitNot(functionCall.Arguments)
case parsers.FunctionCallMathLog10:
return r.math_Log10(functionCall.Arguments)
case parsers.FunctionCallMathRadians:
return r.math_Radians(functionCall.Arguments)
case parsers.FunctionCallMathRound:
return r.math_Round(functionCall.Arguments)
case parsers.FunctionCallMathSign:
return r.math_Sign(functionCall.Arguments)
case parsers.FunctionCallMathSin:
return r.math_Sin(functionCall.Arguments)
case parsers.FunctionCallMathSqrt:
return r.math_Sqrt(functionCall.Arguments)
case parsers.FunctionCallMathSquare:
return r.math_Square(functionCall.Arguments)
case parsers.FunctionCallMathTan:
return r.math_Tan(functionCall.Arguments)
case parsers.FunctionCallMathTrunc:
return r.math_Trunc(functionCall.Arguments)
case parsers.FunctionCallMathAtn2:
return r.math_Atn2(functionCall.Arguments)
case parsers.FunctionCallMathIntAdd:
return r.math_IntAdd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitAnd:
return r.math_IntBitAnd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitLeftShift:
return r.math_IntBitLeftShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitOr:
return r.math_IntBitOr(functionCall.Arguments)
case parsers.FunctionCallMathIntBitRightShift:
return r.math_IntBitRightShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitXor:
return r.math_IntBitXor(functionCall.Arguments)
case parsers.FunctionCallMathIntDiv:
return r.math_IntDiv(functionCall.Arguments)
case parsers.FunctionCallMathIntMod:
return r.math_IntMod(functionCall.Arguments)
case parsers.FunctionCallMathIntMul:
return r.math_IntMul(functionCall.Arguments)
case parsers.FunctionCallMathIntSub:
return r.math_IntSub(functionCall.Arguments)
case parsers.FunctionCallMathPower:
return r.math_Power(functionCall.Arguments)
case parsers.FunctionCallMathLog:
return r.math_Log(functionCall.Arguments)
case parsers.FunctionCallMathNumberBin:
return r.math_NumberBin(functionCall.Arguments)
case parsers.FunctionCallMathPi:
return r.math_Pi()
case parsers.FunctionCallMathRand:
return r.math_Rand()
case parsers.FunctionCallAggregateAvg:
return r.aggregate_Avg(functionCall.Arguments)
case parsers.FunctionCallAggregateCount:
return r.aggregate_Count(functionCall.Arguments)
case parsers.FunctionCallAggregateMax:
return r.aggregate_Max(functionCall.Arguments)
case parsers.FunctionCallAggregateMin:
return r.aggregate_Min(functionCall.Arguments)
case parsers.FunctionCallAggregateSum:
return r.aggregate_Sum(functionCall.Arguments)
case parsers.FunctionCallIn:
return r.misc_In(functionCall.Arguments)
}
logger.Errorf("Unknown function call type: %v", functionCall.Type)
return nil
}
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
value := r.tables[selectItem.Path[0]]
if len(selectItem.Path) > 1 {
for _, pathSegment := range selectItem.Path[1:] {
if pathSegment[0] == '@' {
pathSegment = r.parameters[pathSegment].(string)
}
switch nestedValue := value.(type) {
case map[string]interface{}:
value = nestedValue[pathSegment]
case map[string]RowType:
value = nestedValue[pathSegment]
case []int, []string, []interface{}:
slice := reflect.ValueOf(nestedValue)
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
value = slice.Index(arrayIndex).Interface()
} else {
return nil
}
default:
return nil
}
}
}
return value
}
func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
if selectItems == nil {
return false
}
for _, selectItem := range selectItems {
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
return true
}
}
if hasAggregateFunctions(selectItem.SelectItems) {
return true
}
}
return false
}
func compareValues(val1, val2 interface{}) int {
if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
return 1
}
switch val1 := val1.(type) {
case int:
val2 := val2.(int)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case float64:
val2 := val2.(float64)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case string:
val2 := val2.(string)
return strings.Compare(val1, val2)
case bool:
val2 := val2.(bool)
if val1 == val2 {
return 0
} else if val1 {
return 1
} else {
return -1
}
// TODO: Add more types
default:
if reflect.DeepEqual(val1, val2) {
return 0
}
return 1
}
}
func deduplicate[T RowType | interface{}](slice []T) []T {
var result []T
result = make([]T, 0)
for i := 0; i < len(slice); i++ {
unique := true
for j := 0; j < len(result); j++ {
if compareValues(slice[i], result[j]) == 0 {
unique = false
break
}
}
if unique {
result = append(result, slice[i])
}
}
return result
}
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
targetMap := make(map[string]T)
for k, v := range originalMap {
targetMap[k] = v
}
return targetMap
// Apply LIMIT
if query.Count > 0 {
projectedIterator = &limitIterator{
documents: projectedIterator,
limit: query.Count,
}
}
return projectedIterator
}
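
A hedged end-to-end sketch of driving the iterator-based ExecuteQuery from another package inside the cosmium module (internal/datastore is not importable from outside the module). The sliceIterator adapter mirrors the TestDocumentIterator added in the test diff further down; the expected output assumes the bare path "c" resolves to the table name, as in the updated tests:

package main

import (
	"fmt"

	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/parsers"
	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)

// sliceIterator feeds a plain slice of documents into the executor one row at
// a time, ending with IterEOF.
type sliceIterator struct {
	docs  []memoryexecutor.RowType
	index int
}

func (s *sliceIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
	if s.index >= len(s.docs) {
		return nil, datastore.IterEOF
	}
	doc := s.docs[s.index]
	s.index++
	return doc, datastore.StatusOk
}

func main() {
	// SELECT c.id FROM c
	query := parsers.SelectStmt{
		SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}}},
		Table:       parsers.Table{SelectItem: parsers.SelectItem{Path: []string{"c"}}},
	}
	docs := []memoryexecutor.RowType{
		map[string]interface{}{"id": "1", "pk": 1},
		map[string]interface{}{"id": "2", "pk": 2},
	}
	result := memoryexecutor.ExecuteQuery(query, &sliceIterator{docs: docs})
	fmt.Println(result) // [map[id:1] map[id:2]]
}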

View File

@@ -4,18 +4,41 @@ import (
"reflect"
"testing"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
type TestDocumentIterator struct {
documents []memoryexecutor.RowType
index int
}
func NewTestDocumentIterator(documents []memoryexecutor.RowType) *TestDocumentIterator {
return &TestDocumentIterator{
documents: documents,
index: -1,
}
}
func (i *TestDocumentIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return nil, datastore.IterEOF
}
return i.documents[i.index], datastore.StatusOk
}
func testQueryExecute(
t *testing.T,
query parsers.SelectStmt,
data []memoryexecutor.RowType,
expectedData []memoryexecutor.RowType,
) {
result := memoryexecutor.ExecuteQuery(query, data)
iter := NewTestDocumentIterator(data)
result := memoryexecutor.ExecuteQuery(query, iter)
if !reflect.DeepEqual(result, expectedData) {
t.Errorf("execution result does not match expected data.\nExpected: %+v\nGot: %+v", expectedData, result)
@@ -50,7 +73,7 @@ func Test_Execute(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
OrderExpressions: []parsers.OrderExpression{
{
SelectItem: parsers.SelectItem{Path: []string{"c", "pk"}},
@@ -79,7 +102,7 @@ func Test_Execute(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
GroupBy: []parsers.SelectItem{
{Path: []string{"c", "pk"}},
},
@@ -102,7 +125,7 @@ func Test_Execute(t *testing.T) {
Type: parsers.SelectItemTypeField,
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
@@ -126,6 +149,42 @@ func Test_Execute(t *testing.T) {
)
})
t.Run("Should execute IN function with function call", func(t *testing.T) {
testQueryExecute(
t,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallIn,
Arguments: []interface{}{
parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallToString,
Arguments: []interface{}{
testutils.SelectItem_Path("c", "id"),
},
},
},
testutils.SelectItem_Constant_String("123"),
testutils.SelectItem_Constant_String("456"),
},
},
},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": "456"},
map[string]interface{}{"id": "123"},
},
)
})
t.Run("Should execute IN selector", func(t *testing.T) {
testQueryExecute(
t,
@@ -137,10 +196,9 @@ func Test_Execute(t *testing.T) {
},
},
Table: parsers.Table{
Value: "c",
SelectItem: parsers.SelectItem{
Path: []string{"c", "tags"},
},
Value: "c",
SelectItem: testutils.SelectItem_Path("c", "tags"),
IsInSelect: true,
},
},
mockData,

View File

@@ -0,0 +1,22 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type offsetIterator struct {
documents rowTypeIterator
offset int
skipped bool
}
func (oi *offsetIterator) Next() (RowType, datastore.DataStoreStatus) {
if oi.skipped {
return oi.documents.Next()
}
for i := 0; i < oi.offset; i++ {
oi.documents.Next()
}
oi.skipped = true
return oi.Next()
}
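
A small sketch of how executeQuery composes OFFSET before LIMIT, assuming the same package; offsetLimitSketch is a hypothetical helper:

package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

// offsetLimitSketch mirrors the OFFSET-then-LIMIT chaining in executeQuery:
// the offset skips rows first, so the limit counts only the rows that survive
// the skip. For an input of a, b, c, d with offset 1 and limit 2, draining the
// chain yields b, c.
func offsetLimitSketch(source rowTypeIterator) []RowType {
	var it rowTypeIterator = &offsetIterator{documents: source, offset: 1}
	it = &limitIterator{documents: it, limit: 2}

	out := make([]RowType, 0)
	for {
		row, status := it.Next()
		if status != datastore.StatusOk {
			break
		}
		out = append(out, row)
	}
	return out
}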

View File

@@ -0,0 +1,63 @@
package memoryexecutor
import (
"sort"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type orderIterator struct {
documents rowIterator
orderExpressions []parsers.OrderExpression
orderedDocs []rowContext
docsIndex int
}
func (oi *orderIterator) Next() (rowContext, datastore.DataStoreStatus) {
if oi.orderedDocs != nil {
if oi.docsIndex >= len(oi.orderedDocs) {
return rowContext{}, datastore.IterEOF
}
row := oi.orderedDocs[oi.docsIndex]
oi.orderedDocs[oi.docsIndex] = rowContext{}
oi.docsIndex++
return row, datastore.StatusOk
}
oi.orderedDocs = make([]rowContext, 0)
for {
row, status := oi.documents.Next()
if status != datastore.StatusOk {
break
}
oi.orderedDocs = append(oi.orderedDocs, row)
}
oi.documents = nil
less := func(i, j int) bool {
for _, order := range oi.orderExpressions {
val1 := oi.orderedDocs[i].resolveSelectItem(order.SelectItem)
val2 := oi.orderedDocs[j].resolveSelectItem(order.SelectItem)
cmp := compareValues(val1, val2)
if cmp != 0 {
if order.Direction == parsers.OrderDirectionDesc {
return cmp > 0
}
return cmp < 0
}
}
return i < j
}
sort.SliceStable(oi.orderedDocs, less)
if len(oi.orderedDocs) == 0 {
return rowContext{}, datastore.IterEOF
}
oi.docsIndex = 1
return oi.orderedDocs[0], datastore.StatusOk
}

View File

@@ -0,0 +1,90 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
"golang.org/x/exp/slices"
)
type projectIterator struct {
documents rowIterator
selectItems []parsers.SelectItem
groupBy []parsers.SelectItem
}
func (pi *projectIterator) Next() (RowType, datastore.DataStoreStatus) {
if pi.documents == nil {
return rowContext{}, datastore.IterEOF
}
row, status := pi.documents.Next()
if status != datastore.StatusOk {
pi.documents = nil
return rowContext{}, status
}
if hasAggregateFunctions(pi.selectItems) && len(pi.groupBy) == 0 {
// When we have aggregate functions without a GROUP BY clause,
// we should aggregate all rows in that case.
allDocuments := []rowContext{row}
for {
row, status := pi.documents.Next()
if status != datastore.StatusOk {
break
}
allDocuments = append(allDocuments, row)
}
if len(allDocuments) == 0 {
return rowContext{}, datastore.IterEOF
}
aggRow := rowContext{
tables: row.tables,
parameters: row.parameters,
grouppedRows: allDocuments,
}
return aggRow.applyProjection(pi.selectItems), datastore.StatusOk
}
return row.applyProjection(pi.selectItems), datastore.StatusOk
}
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
// When the first value is top level, select it instead
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
return r.resolveSelectItem(selectItems[0])
}
// Construct a new row based on the selected columns
row := make(map[string]interface{})
for index, selectItem := range selectItems {
destinationName := resolveDestinationColumnName(selectItem, index, r.parameters)
row[destinationName] = r.resolveSelectItem(selectItem)
}
return row
}
func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
if selectItems == nil {
return false
}
for _, selectItem := range selectItems {
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
return true
}
}
if hasAggregateFunctions(selectItem.SelectItems) {
return true
}
}
return false
}

View File

@@ -0,0 +1,44 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type rowTypeToRowContextIterator struct {
documents rowTypeIterator
query parsers.SelectStmt
}
func (di *rowTypeToRowContextIterator) Next() (rowContext, datastore.DataStoreStatus) {
if di.documents == nil {
return rowContext{}, datastore.IterEOF
}
doc, status := di.documents.Next()
if status != datastore.StatusOk {
di.documents = nil
return rowContext{}, status
}
var initialTableName string
if di.query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
initialTableName = di.query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value
}
if initialTableName == "" {
initialTableName = di.query.Table.Value
}
if initialTableName == "" {
initialTableName = resolveDestinationColumnName(di.query.Table.SelectItem, 0, di.query.Parameters)
}
return rowContext{
parameters: di.query.Parameters,
tables: map[string]RowType{
initialTableName: doc,
"$root": doc,
},
}, status
}

View File

@@ -5,14 +5,15 @@ import (
"github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
)
func Test_Execute_Select(t *testing.T) {
mockData := []memoryexecutor.RowType{
map[string]interface{}{"id": "12345", "pk": 123, "_self": "self1", "_rid": "rid1", "_ts": 123456, "isCool": false},
map[string]interface{}{"id": "67890", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true},
map[string]interface{}{"id": "456", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true},
map[string]interface{}{"id": "123", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true},
map[string]interface{}{"id": "12345", "pk": 123, "_self": "self1", "_rid": "rid1", "_ts": 123456, "isCool": false, "order": nil},
map[string]interface{}{"id": "67890", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true, "order": 1},
map[string]interface{}{"id": "456", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true, "order": 2},
map[string]interface{}{"id": "123", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true, "order": 3},
}
t.Run("Should execute simple SELECT", func(t *testing.T) {
@@ -23,7 +24,7 @@ func Test_Execute_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -43,7 +44,7 @@ func Test_Execute_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "@param"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Parameters: map[string]interface{}{
"@param": "pk",
},
@@ -65,7 +66,7 @@ func Test_Execute_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Distinct: true,
},
mockData,
@@ -84,7 +85,7 @@ func Test_Execute_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 1,
},
mockData,
@@ -102,20 +103,20 @@ func Test_Execute_Select(t *testing.T) {
{Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 2,
Offset: 1,
OrderExpressions: []parsers.OrderExpression{
{
SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
SelectItem: parsers.SelectItem{Path: []string{"c", "order"}},
Direction: parsers.OrderDirectionDesc,
},
},
},
mockData,
[]memoryexecutor.RowType{
map[string]interface{}{"id": "67890", "pk": 456},
map[string]interface{}{"id": "456", "pk": 456},
map[string]interface{}{"id": "67890", "pk": 456},
},
)
})
@@ -127,7 +128,7 @@ func Test_Execute_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}, IsTopLevel: true},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -146,7 +147,7 @@ func Test_Execute_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: true},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
mockData,
@@ -167,7 +168,7 @@ func Test_Execute_Select(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{
@@ -193,7 +194,7 @@ func Test_Execute_Select(t *testing.T) {
},
},
},
Table: parsers.Table{Value: "c"},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
mockData,
[]memoryexecutor.RowType{

View File

@@ -9,10 +9,14 @@ import (
)
func (r rowContext) strings_StringEquals(arguments []interface{}) bool {
str1 := r.parseString(arguments[0])
str2 := r.parseString(arguments[1])
str1, str1ok := r.parseString(arguments[0])
str2, str2ok := r.parseString(arguments[1])
ignoreCase := r.getBoolFlag(arguments)
if !str1ok || !str2ok {
return false
}
if ignoreCase {
return strings.EqualFold(str1, str2)
}
@@ -21,10 +25,14 @@ func (r rowContext) strings_StringEquals(arguments []interface{}) bool {
}
func (r rowContext) strings_Contains(arguments []interface{}) bool {
str1 := r.parseString(arguments[0])
str2 := r.parseString(arguments[1])
str1, str1ok := r.parseString(arguments[0])
str2, str2ok := r.parseString(arguments[1])
ignoreCase := r.getBoolFlag(arguments)
if !str1ok || !str2ok {
return false
}
if ignoreCase {
str1 = strings.ToLower(str1)
str2 = strings.ToLower(str2)
@@ -34,10 +42,14 @@ func (r rowContext) strings_Contains(arguments []interface{}) bool {
}
func (r rowContext) strings_EndsWith(arguments []interface{}) bool {
str1 := r.parseString(arguments[0])
str2 := r.parseString(arguments[1])
str1, str1ok := r.parseString(arguments[0])
str2, str2ok := r.parseString(arguments[1])
ignoreCase := r.getBoolFlag(arguments)
if !str1ok || !str2ok {
return false
}
if ignoreCase {
str1 = strings.ToLower(str1)
str2 = strings.ToLower(str2)
@@ -47,10 +59,14 @@ func (r rowContext) strings_EndsWith(arguments []interface{}) bool {
}
func (r rowContext) strings_StartsWith(arguments []interface{}) bool {
str1 := r.parseString(arguments[0])
str2 := r.parseString(arguments[1])
str1, str1ok := r.parseString(arguments[0])
str2, str2ok := r.parseString(arguments[1])
ignoreCase := r.getBoolFlag(arguments)
if !str1ok || !str2ok {
return false
}
if ignoreCase {
str1 = strings.ToLower(str1)
str2 = strings.ToLower(str2)
@@ -73,8 +89,12 @@ func (r rowContext) strings_Concat(arguments []interface{}) string {
}
func (r rowContext) strings_IndexOf(arguments []interface{}) int {
str1 := r.parseString(arguments[0])
str2 := r.parseString(arguments[1])
str1, str1ok := r.parseString(arguments[0])
str2, str2ok := r.parseString(arguments[1])
if !str1ok || !str2ok {
return -1
}
start := 0
if len(arguments) > 2 && arguments[2] != nil {
@@ -115,9 +135,13 @@ func (r rowContext) strings_Lower(arguments []interface{}) string {
func (r rowContext) strings_Left(arguments []interface{}) string {
var ok bool
var length int
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
lengthEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
if !strOk {
return ""
}
if length, ok = lengthEx.(int); !ok {
logger.ErrorLn("strings_Left - got parameters of wrong type")
return ""
@@ -135,28 +159,45 @@ func (r rowContext) strings_Left(arguments []interface{}) string {
}
func (r rowContext) strings_Length(arguments []interface{}) int {
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
if !strOk {
return 0
}
return len(str)
}
func (r rowContext) strings_LTrim(arguments []interface{}) string {
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
if !strOk {
return ""
}
return strings.TrimLeft(str, " ")
}
func (r rowContext) strings_Replace(arguments []interface{}) string {
str := r.parseString(arguments[0])
oldStr := r.parseString(arguments[1])
newStr := r.parseString(arguments[2])
str, strOk := r.parseString(arguments[0])
oldStr, oldStrOk := r.parseString(arguments[1])
newStr, newStrOk := r.parseString(arguments[2])
if !strOk || !oldStrOk || !newStrOk {
return ""
}
return strings.Replace(str, oldStr, newStr, -1)
}
func (r rowContext) strings_Replicate(arguments []interface{}) string {
var ok bool
var times int
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
timesEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
if !strOk {
return ""
}
if times, ok = timesEx.(int); !ok {
logger.ErrorLn("strings_Replicate - got parameters of wrong type")
return ""
@@ -174,9 +215,13 @@ func (r rowContext) strings_Replicate(arguments []interface{}) string {
}
func (r rowContext) strings_Reverse(arguments []interface{}) string {
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
runes := []rune(str)
if !strOk {
return ""
}
for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
runes[i], runes[j] = runes[j], runes[i]
}
@@ -187,9 +232,13 @@ func (r rowContext) strings_Reverse(arguments []interface{}) string {
func (r rowContext) strings_Right(arguments []interface{}) string {
var ok bool
var length int
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
lengthEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
if !strOk {
return ""
}
if length, ok = lengthEx.(int); !ok {
logger.ErrorLn("strings_Right - got parameters of wrong type")
return ""
@@ -207,7 +256,11 @@ func (r rowContext) strings_Right(arguments []interface{}) string {
}
func (r rowContext) strings_RTrim(arguments []interface{}) string {
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
if !strOk {
return ""
}
return strings.TrimRight(str, " ")
}
@@ -215,10 +268,14 @@ func (r rowContext) strings_Substring(arguments []interface{}) string {
var ok bool
var startPos int
var length int
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
startPosEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
lengthEx := r.resolveSelectItem(arguments[2].(parsers.SelectItem))
if !strOk {
return ""
}
if startPos, ok = startPosEx.(int); !ok {
logger.ErrorLn("strings_Substring - got start parameters of wrong type")
return ""
@@ -241,7 +298,11 @@ func (r rowContext) strings_Substring(arguments []interface{}) string {
}
func (r rowContext) strings_Trim(arguments []interface{}) string {
str := r.parseString(arguments[0])
str, strOk := r.parseString(arguments[0])
if !strOk {
return ""
}
return strings.TrimSpace(str)
}
@@ -257,15 +318,15 @@ func (r rowContext) getBoolFlag(arguments []interface{}) bool {
return ignoreCase
}
func (r rowContext) parseString(argument interface{}) string {
func (r rowContext) parseString(argument interface{}) (value string, ok bool) {
exItem := argument.(parsers.SelectItem)
ex := r.resolveSelectItem(exItem)
if str1, ok := ex.(string); ok {
return str1
return str1, true
}
logger.ErrorLn("StringEquals got parameters of wrong type")
return ""
return "", false
}
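
The parseString change above switches the string helpers to a (value, ok) return. A minimal sketch of the resulting caller pattern, assuming the same package; upperOrEmptySketch is a hypothetical helper:

package memoryexecutor

import "strings"

// upperOrEmptySketch resolves an argument with parseString and bails out with
// an empty string when the argument does not resolve to a string, instead of
// operating on a silently mis-parsed value.
func upperOrEmptySketch(r rowContext, argument interface{}) string {
	str, ok := r.parseString(argument)
	if !ok {
		return ""
	}
	return strings.ToUpper(str)
}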
func convertToString(value interface{}) string {
