26 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Pijus Kamandulis | 598f2837af | Fix issues with persist flag; Use custom logger for badger | 2025-04-03 23:48:20 +03:00 |
| Pijus Kamandulis | 28e3c0c3d8 | Rename 'MapDS' to 'JsonDS'; Added some docs | 2025-03-14 22:40:12 +02:00 |
| Pijus Kamandulis | 97eea30c97 | Use msgpack instead of gob; Added data persistance for badger data store | 2025-03-13 23:59:07 +02:00 |
| Pijus Kamandulis | 5fe60d831a | Pinned 3rd party Github Actions | 2025-03-12 23:48:42 +02:00 |
| Pijus Kamandulis | d309d99906 | Update dependancies | 2025-03-12 23:24:08 +02:00 |
| Pijus Kamandulis | b2516eda9f | Stability improvements | 2025-03-12 22:00:30 +02:00 |
| Pijus Kamandulis | 813b9faeaa | Added support for Badger as an alternative storage backend | 2025-03-12 21:06:10 +02:00 |
| Pijus Kamandulis | e526b2269e | Refactored query engine utilizing iterators | 2025-03-11 17:36:28 +02:00 |
| Pijus Kamandulis | 221f029a1d | DataStore is interface now. Liskov would be proud. | 2025-03-09 18:34:07 +02:00 |
| Pijus Kamandulis | bd4fe5abec | Update azcosmos package | 2025-02-25 20:43:23 +02:00 |
| Pijus Kamandulis | f062e03f0c | Update packages | 2025-02-25 19:56:02 +02:00 |
| Pijus Kamandulis | 058b3271b7 | OrderBy should bring NULL values to front | 2025-02-25 19:47:29 +02:00 |
| Pijus Kamandulis | 1711c8fb5c | Implement NOT logical operator | 2025-02-25 19:33:32 +02:00 |
| Pijus Kamandulis | 851b3ca3a8 | Fix IN clause with function calls | 2025-02-20 18:45:20 +02:00 |
| Pijus Kamandulis | d27c633e1d | Better handling when passing null to string functions | 2025-02-18 20:11:11 +02:00 |
| Pijus Kamandulis | 3987df89c0 | Upgrade to golang 1.24.0 | 2025-02-18 19:16:21 +02:00 |
| Pijus Kamandulis | 6e3f4169a1 | Fix 'ComparisonOperator' parsing | 2025-02-18 19:12:08 +02:00 |
| Pijus Kamandulis | 14c5400d23 | Keep old explorer images tagged with version | 2025-02-09 22:42:51 +02:00 |
| Pijus Kamandulis | 1cf5ae92f4 | Shared library stability improvements | 2025-02-09 11:45:10 +02:00 |
| Pijus Kamandulis | 5d99b653cc | Generate more realistic resource ids | 2025-02-09 00:36:35 +02:00 |
| Pijus Kamandulis | 787cdb33cf | Fix OFFSET clause | 2025-02-08 15:28:06 +02:00 |
| Pijus Kamandulis | 5caa829ac1 | Implement 'Transactional batch operations' | 2025-02-04 20:35:15 +02:00 |
| Pijus Kamandulis | 887d456ad4 | Return error code if server fails to start | 2025-02-03 22:58:45 +02:00 |
| Pijus Kamandulis | da1566875b | Wait for server shutdown when stopping server | 2025-02-03 22:21:54 +02:00 |
| Pijus Kamandulis | 3fee3bc816 | Fix ARRAY_CONTAINS partial matches for nested objects | 2025-02-03 19:29:29 +02:00 |
| Pijus Kamandulis | 8657c48fc8 | Added support for table alias; Make AS keyword optional #9 | 2025-02-03 19:02:12 +02:00 |
110 changed files with 6660 additions and 3974 deletions

View File

@@ -12,10 +12,10 @@ jobs:
       uses: actions/checkout@v3
     - name: Cross-Compile with xgo
-      uses: crazy-max/ghaction-xgo@v3.1.0
+      uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
       with:
         xgo_version: latest
-        go_version: 1.22.0
+        go_version: 1.24.0
         dest: dist
         pkg: sharedlibrary
         prefix: cosmium

View File

@@ -21,13 +21,13 @@ jobs:
     - name: Set up Go
       uses: actions/setup-go@v5
       with:
-        go-version: 1.22.0
+        go-version: 1.24.0
     - name: Cross-Compile with xgo
-      uses: crazy-max/ghaction-xgo@v3.1.0
+      uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
       with:
         xgo_version: latest
-        go_version: 1.22.0
+        go_version: 1.24.0
         dest: sharedlibrary_dist
         pkg: sharedlibrary
         prefix: cosmium
@@ -44,7 +44,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
     - name: Run GoReleaser
-      uses: goreleaser/goreleaser-action@v5
+      uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5
       with:
         distribution: goreleaser
         version: ${{ env.GITHUB_REF_NAME }}

View File

@@ -135,6 +135,12 @@ docker_manifests:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64" - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64" - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8" - "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
- name_template: 'ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer'
skip_push: auto
image_templates:
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-amd64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64"
- "ghcr.io/pikami/{{ .ProjectName }}:{{ .Version }}-explorer-arm64v8"
checksum: checksum:
name_template: 'checksums.txt' name_template: 'checksums.txt'

View File

@@ -9,7 +9,7 @@ SERVER_LOCATION=./cmd/server
 SHARED_LIB_LOCATION=./sharedlibrary
 SHARED_LIB_OPT=-buildmode=c-shared
 XGO_TARGETS=linux/amd64,linux/arm64,windows/amd64,windows/arm64,darwin/amd64,darwin/arm64
-GOVERSION=1.22.0
+GOVERSION=1.24.0
 DIST_DIR=dist
@@ -51,6 +51,10 @@ build-sharedlib-linux-amd64:
 	@echo "Building shared library for Linux x64..."
 	@GOOS=linux GOARCH=amd64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so $(SHARED_LIB_LOCATION)
+
+build-sharedlib-darwin-arm64:
+	@echo "Building shared library for macOS ARM..."
+	@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64.so $(SHARED_LIB_LOCATION)
 build-sharedlib-tests: build-sharedlib-linux-amd64
 	@echo "Building shared library tests..."
 	@$(SHARED_LIB_TEST_CC) $(SHARED_LIB_TEST_CFLAGS) -o $(SHARED_LIB_TEST_TARGET) $(SHARED_LIB_TEST_SOURCES)

View File

@@ -86,6 +86,7 @@ To disable SSL and run Cosmium on HTTP instead, you can use the `-DisableTls` fl
 - **-Persist**: Saves data to the given path on application exit (When `-InitialData` argument is not supplied, it will try to load data from path supplied in `-Persist`)
 - **-Port**: Listen port (default 8081)
 - **-LogLevel**: Sets the logging level (one of: debug, info, error, silent) (default info)
+- **-DataStore**: Allows selecting [storage backend](#data-storage-backends) (default "json")
 
 These arguments allow you to configure various aspects of Cosmium's behavior according to your requirements.
@@ -99,6 +100,18 @@ All mentioned arguments can also be set using environment variables:
 - **COSMIUM_PORT** for `-Port`
 - **COSMIUM_LOGLEVEL** for `-LogLevel`
+
+### Data Storage Backends
+
+Cosmium supports multiple storage backends for saving, loading, and managing data at runtime.
+
+| Backend | Storage Location | Write Behavior | Memory Usage | Supports Initial JSON Load |
+|---------|------------------|----------------|--------------|----------------------------|
+| `json` (default) | JSON file on disk 📄 | On application exit ⏳ | 🛑 More than Badger | ✅ Yes |
+| `badger` | BadgerDB database on disk ⚡ | Immediately on write 🚀 | ✅ Less than JSON | ❌ No |
+
+The `badger` backend is generally recommended as it uses less memory and writes data to disk immediately. However, if you need to load initial data from a JSON file, use the `json` backend.
+
 # License
 
 This project is [MIT licensed](./LICENSE).
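Editorial note, not part of the changeset: the pieces introduced across this compare (the `-DataStore` flag, the `datastore.DataStore` interface, and the updated `NewApiServer`/`Start` signatures shown in the diffs below) fit together roughly as in the following sketch. The concrete JSON/Badger constructors are not visible in this excerpt, so `newDataStore` is a hypothetical stub, not the project's real wiring.

```go
// Hypothetical wiring sketch based only on signatures visible in this changeset.
package main

import (
	"github.com/pikami/cosmium/api"
	"github.com/pikami/cosmium/api/config"
	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/internal/logger"
)

// newDataStore is an assumed helper; the real constructors live elsewhere in the repo.
func newDataStore(cfg config.ServerConfig) datastore.DataStore {
	switch cfg.DataStore {
	case config.DataStoreBadger:
		// e.g. return badgerdatastore.NewBadgerDataStore(cfg.PersistDataFilePath) // assumed name
	case config.DataStoreJson:
		// e.g. return jsondatastore.NewJsonDataStore(cfg.PersistDataFilePath) // assumed name
	}
	return nil // placeholder for the sketch
}

func main() {
	cfg := config.ParseFlags() // registers -DataStore, -Persist, -LogLevel, ... (see README diff above)

	store := newDataStore(cfg)
	server := api.NewApiServer(store, &cfg)

	// Start now returns an error if the listener fails during the startup
	// grace window (see the router diff further down).
	if err := server.Start(); err != nil {
		logger.ErrorLn("failed to start server:", err)
		return
	}
	// In the real binary the process keeps serving until Stop() is called.
}
```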

api/api_models/models.go (new file, 24 lines)
View File

@@ -0,0 +1,24 @@
+package apimodels
+
+const (
+	BatchOperationTypeCreate  = "Create"
+	BatchOperationTypeDelete  = "Delete"
+	BatchOperationTypeReplace = "Replace"
+	BatchOperationTypeUpsert  = "Upsert"
+	BatchOperationTypeRead    = "Read"
+	BatchOperationTypePatch   = "Patch"
+)
+
+type BatchOperation struct {
+	OperationType string                 `json:"operationType"`
+	Id            string                 `json:"id"`
+	ResourceBody  map[string]interface{} `json:"resourceBody"`
+}
+
+type BatchOperationResult struct {
+	StatusCode    int                    `json:"statusCode"`
+	RequestCharge float64                `json:"requestCharge"`
+	ResourceBody  map[string]interface{} `json:"resourceBody"`
+	Etag          string                 `json:"etag"`
+	Message       string                 `json:"message"`
+}
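For orientation, a hedged sketch of how a client-side batch body maps onto these new types. The field names come from the struct tags above and the `x-ms-cosmos-is-batch-request` header appears in the documents handler diff below; the document contents here are made up.

```go
// Illustrative only: builds the JSON body that the batch endpoint binds into
// []apimodels.BatchOperation.
package main

import (
	"encoding/json"
	"fmt"

	apimodels "github.com/pikami/cosmium/api/api_models"
)

func main() {
	ops := []apimodels.BatchOperation{
		{
			OperationType: apimodels.BatchOperationTypeCreate,
			ResourceBody:  map[string]interface{}{"id": "doc1", "value": 42},
		},
		{
			OperationType: apimodels.BatchOperationTypeRead,
			Id:            "doc1",
		},
	}

	body, err := json.MarshalIndent(ops, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
	// The handler replies with a []BatchOperationResult in the same order,
	// carrying per-operation status codes (201 on create, 404, 501 for Patch, ...).
}
```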

View File

@@ -3,25 +3,28 @@ package api
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/pikami/cosmium/api/config"
-	"github.com/pikami/cosmium/internal/repositories"
+	"github.com/pikami/cosmium/internal/datastore"
 )
 
 type ApiServer struct {
 	stopServer       chan interface{}
-	isActive         bool
-	router           *gin.Engine
-	config           config.ServerConfig
+	onServerShutdown chan interface{}
+	isActive         bool
+	router           *gin.Engine
+	config           *config.ServerConfig
 }
 
-func NewApiServer(dataRepository *repositories.DataRepository, config config.ServerConfig) *ApiServer {
+func NewApiServer(dataStore datastore.DataStore, config *config.ServerConfig) *ApiServer {
 	stopChan := make(chan interface{})
+	onServerShutdownChan := make(chan interface{})
 
 	apiServer := &ApiServer{
 		stopServer:       stopChan,
-		config:           config,
+		onServerShutdown: onServerShutdownChan,
+		config:           config,
 	}
 
-	apiServer.CreateRouter(dataRepository)
+	apiServer.CreateRouter(dataStore)
 
 	return apiServer
 }
@@ -32,4 +35,5 @@ func (s *ApiServer) GetRouter() *gin.Engine {
 func (s *ApiServer) Stop() {
 	s.stopServer <- true
+	<-s.onServerShutdown
 }

View File

@@ -15,6 +15,11 @@ const (
 	ExplorerBaseUrlLocation = "/_explorer"
 )
 
+const (
+	DataStoreJson   = "json"
+	DataStoreBadger = "badger"
+)
+
 func ParseFlags() ServerConfig {
 	host := flag.String("Host", "localhost", "Hostname")
 	port := flag.Int("Port", 8081, "Listen port")
@@ -28,6 +33,8 @@ func ParseFlags() ServerConfig {
 	persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
 	logLevel := NewEnumValue("info", []string{"debug", "info", "error", "silent"})
 	flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))
+	dataStore := NewEnumValue("json", []string{DataStoreJson, DataStoreBadger})
+	flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %s", dataStore.AllowedValuesList()))
 
 	flag.Parse()
 	setFlagsFromEnvironment()
@@ -44,6 +51,7 @@ func ParseFlags() ServerConfig {
 	config.DisableTls = *disableTls
 	config.AccountKey = *accountKey
 	config.LogLevel = logLevel.value
+	config.DataStore = dataStore.value
 
 	config.PopulateCalculatedFields()
@@ -68,6 +76,29 @@ func (c *ServerConfig) PopulateCalculatedFields() {
 	default:
 		logger.SetLogLevel(logger.LogLevelInfo)
 	}
+
+	fileInfo, err := os.Stat(c.PersistDataFilePath)
+	if c.PersistDataFilePath != "" && !os.IsNotExist(err) {
+		if err != nil {
+			logger.ErrorLn("Failed to get file info for persist path:", err)
+			os.Exit(1)
+		}
+
+		if c.DataStore == DataStoreJson && fileInfo.IsDir() {
+			logger.ErrorLn("--Persist cannot be a directory when using json data store")
+			os.Exit(1)
+		}
+
+		if c.DataStore == DataStoreBadger && !fileInfo.IsDir() {
+			logger.ErrorLn("--Persist must be a directory when using Badger data store")
+			os.Exit(1)
+		}
+	}
+
+	if c.DataStore == DataStoreBadger && c.InitialDataFilePath != "" {
+		logger.ErrorLn("InitialData option is currently not supported with Badger data store")
+		os.Exit(1)
+	}
 }
 
 func (c *ServerConfig) ApplyDefaultsToEmptyFields() {

View File

@@ -17,4 +17,6 @@ type ServerConfig struct {
 	DisableTls              bool   `json:"disableTls"`
 	LogLevel                string `json:"logLevel"`
 	ExplorerBaseUrlLocation string `json:"explorerBaseUrlLocation"`
+	DataStore               string `json:"dataStore"`
 }

View File

@@ -5,15 +5,15 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllCollections(c *gin.Context) { func (h *Handlers) GetAllCollections(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collections, status := h.repository.GetAllCollections(databaseId) collections, status := h.dataStore.GetAllCollections(databaseId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
database, _ := h.repository.GetDatabase(databaseId) database, _ := h.dataStore.GetDatabase(databaseId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections)))
c.IndentedJSON(http.StatusOK, gin.H{ c.IndentedJSON(http.StatusOK, gin.H{
@@ -31,13 +31,13 @@ func (h *Handlers) GetCollection(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
id := c.Param("collId") id := c.Param("collId")
collection, status := h.repository.GetCollection(databaseId, id) collection, status := h.dataStore.GetCollection(databaseId, id)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, collection) c.IndentedJSON(http.StatusOK, collection)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -49,13 +49,13 @@ func (h *Handlers) DeleteCollection(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
id := c.Param("collId") id := c.Param("collId")
status := h.repository.DeleteCollection(databaseId, id) status := h.dataStore.DeleteCollection(databaseId, id)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -65,7 +65,7 @@ func (h *Handlers) DeleteCollection(c *gin.Context) {
func (h *Handlers) CreateCollection(c *gin.Context) { func (h *Handlers) CreateCollection(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
var newCollection repositorymodels.Collection var newCollection datastore.Collection
if err := c.BindJSON(&newCollection); err != nil { if err := c.BindJSON(&newCollection); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()}) c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -77,13 +77,13 @@ func (h *Handlers) CreateCollection(c *gin.Context) {
return return
} }
createdCollection, status := h.repository.CreateCollection(databaseId, newCollection) createdCollection, status := h.dataStore.CreateCollection(databaseId, newCollection)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdCollection) c.IndentedJSON(http.StatusCreated, createdCollection)
return return
} }

View File

@@ -7,11 +7,11 @@ import (
 )
 
 func (h *Handlers) CosmiumExport(c *gin.Context) {
-	repositoryState, err := h.repository.GetState()
+	dataStoreState, err := h.dataStore.DumpToJson()
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
 		return
 	}
 
-	c.Data(http.StatusOK, "application/json", []byte(repositoryState))
+	c.Data(http.StatusOK, "application/json", []byte(dataStoreState))
 }

View File

@@ -5,12 +5,12 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllDatabases(c *gin.Context) { func (h *Handlers) GetAllDatabases(c *gin.Context) {
databases, status := h.repository.GetAllDatabases() databases, status := h.dataStore.GetAllDatabases()
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases)))
c.IndentedJSON(http.StatusOK, gin.H{ c.IndentedJSON(http.StatusOK, gin.H{
"_rid": "", "_rid": "",
@@ -26,13 +26,13 @@ func (h *Handlers) GetAllDatabases(c *gin.Context) {
func (h *Handlers) GetDatabase(c *gin.Context) { func (h *Handlers) GetDatabase(c *gin.Context) {
id := c.Param("databaseId") id := c.Param("databaseId")
database, status := h.repository.GetDatabase(id) database, status := h.dataStore.GetDatabase(id)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, database) c.IndentedJSON(http.StatusOK, database)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -43,13 +43,13 @@ func (h *Handlers) GetDatabase(c *gin.Context) {
func (h *Handlers) DeleteDatabase(c *gin.Context) { func (h *Handlers) DeleteDatabase(c *gin.Context) {
id := c.Param("databaseId") id := c.Param("databaseId")
status := h.repository.DeleteDatabase(id) status := h.dataStore.DeleteDatabase(id)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -58,7 +58,7 @@ func (h *Handlers) DeleteDatabase(c *gin.Context) {
} }
func (h *Handlers) CreateDatabase(c *gin.Context) { func (h *Handlers) CreateDatabase(c *gin.Context) {
var newDatabase repositorymodels.Database var newDatabase datastore.Database
if err := c.BindJSON(&newDatabase); err != nil { if err := c.BindJSON(&newDatabase); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()}) c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -70,13 +70,13 @@ func (h *Handlers) CreateDatabase(c *gin.Context) {
return return
} }
createdDatabase, status := h.repository.CreateDatabase(newDatabase) createdDatabase, status := h.dataStore.CreateDatabase(newDatabase)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDatabase) c.IndentedJSON(http.StatusCreated, createdDatabase)
return return
} }

View File

@@ -8,18 +8,23 @@ import (
 	jsonpatch "github.com/cosmiumdev/json-patch/v5"
 	"github.com/gin-gonic/gin"
+	apimodels "github.com/pikami/cosmium/api/api_models"
 	"github.com/pikami/cosmium/internal/constants"
+	"github.com/pikami/cosmium/internal/converters"
+	"github.com/pikami/cosmium/internal/datastore"
 	"github.com/pikami/cosmium/internal/logger"
-	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/parsers"
+	"github.com/pikami/cosmium/parsers/nosql"
+	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
 )
 
 func (h *Handlers) GetAllDocuments(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
 
-	documents, status := h.repository.GetAllDocuments(databaseId, collectionId)
-	if status == repositorymodels.StatusOk {
-		collection, _ := h.repository.GetCollection(databaseId, collectionId)
+	documents, status := h.dataStore.GetAllDocuments(databaseId, collectionId)
+	if status == datastore.StatusOk {
+		collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
 		c.Header("x-ms-item-count", fmt.Sprintf("%d", len(documents)))
 		c.IndentedJSON(http.StatusOK, gin.H{
@@ -38,13 +43,13 @@ func (h *Handlers) GetDocument(c *gin.Context) {
 	collectionId := c.Param("collId")
 	documentId := c.Param("docId")
 
-	document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusOk {
+	document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusOK, document)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
+	if status == datastore.StatusNotFound {
 		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
 		return
 	}
@@ -57,13 +62,13 @@ func (h *Handlers) DeleteDocument(c *gin.Context) {
 	collectionId := c.Param("collId")
 	documentId := c.Param("docId")
 
-	status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusOk {
+	status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusOk {
 		c.Status(http.StatusNoContent)
 		return
 	}
 
-	if status == repositorymodels.StatusNotFound {
+	if status == datastore.StatusNotFound {
 		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
 		return
 	}
@@ -71,7 +76,7 @@ func (h *Handlers) DeleteDocument(c *gin.Context) {
 	c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
 }
 
-// TODO: Maybe move "replace" logic to repository
+// TODO: Maybe move "replace" logic to data store
 func (h *Handlers) ReplaceDocument(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
@@ -83,19 +88,19 @@ func (h *Handlers) ReplaceDocument(c *gin.Context) {
 		return
 	}
 
-	status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusNotFound {
+	status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusNotFound {
 		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
 		return
 	}
 
-	createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
-	if status == repositorymodels.Conflict {
+	createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
+	if status == datastore.Conflict {
 		c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
 		return
 	}
 
-	if status == repositorymodels.StatusOk {
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusCreated, createdDocument)
 		return
 	}
@@ -108,8 +113,8 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
 	collectionId := c.Param("collId")
 	documentId := c.Param("docId")
 
-	document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusNotFound {
+	document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusNotFound {
 		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
 		return
 	}
@@ -159,19 +164,19 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
 		return
 	}
 
-	status = h.repository.DeleteDocument(databaseId, collectionId, documentId)
-	if status == repositorymodels.StatusNotFound {
+	status = h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+	if status == datastore.StatusNotFound {
 		c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
 		return
 	}
 
-	createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, modifiedDocument)
-	if status == repositorymodels.Conflict {
+	createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, modifiedDocument)
+	if status == datastore.Conflict {
 		c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
 		return
 	}
 
-	if status == repositorymodels.StatusOk {
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusCreated, createdDocument)
 		return
 	}
@@ -183,6 +188,13 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
 	databaseId := c.Param("databaseId")
 	collectionId := c.Param("collId")
 
+	// Handle batch requests
+	isBatchRequest, _ := strconv.ParseBool(c.GetHeader("x-ms-cosmos-is-batch-request"))
+	if isBatchRequest {
+		h.handleBatchRequest(c)
+		return
+	}
+
 	var requestBody map[string]interface{}
 	if err := c.BindJSON(&requestBody); err != nil {
 		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
@@ -191,30 +203,7 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
 	query := requestBody["query"]
 	if query != nil {
-		if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
-			c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
-			return
-		}
-
-		var queryParameters map[string]interface{}
-		if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
-			queryParameters = parametersToMap(paramsArray)
-		}
-
-		docs, status := h.repository.ExecuteQueryDocuments(databaseId, collectionId, query.(string), queryParameters)
-		if status != repositorymodels.StatusOk {
-			// TODO: Currently we return everything if the query fails
-			h.GetAllDocuments(c)
-			return
-		}
-
-		collection, _ := h.repository.GetCollection(databaseId, collectionId)
-		c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
-		c.IndentedJSON(http.StatusOK, gin.H{
-			"_rid":      collection.ResourceID,
-			"Documents": docs,
-			"_count":    len(docs),
-		})
+		h.handleDocumentQuery(c, requestBody)
 		return
 	}
@@ -225,16 +214,16 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
 	isUpsert, _ := strconv.ParseBool(c.GetHeader("x-ms-documentdb-is-upsert"))
 	if isUpsert {
-		h.repository.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
+		h.dataStore.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
 	}
 
-	createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
-	if status == repositorymodels.Conflict {
+	createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
+	if status == datastore.Conflict {
 		c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
 		return
 	}
 
-	if status == repositorymodels.StatusOk {
+	if status == datastore.StatusOk {
 		c.IndentedJSON(http.StatusCreated, createdDocument)
 		return
 	}
@@ -253,3 +242,155 @@ func parametersToMap(pairs []interface{}) map[string]interface{} {
 	return result
 }
+
+func (h *Handlers) handleDocumentQuery(c *gin.Context, requestBody map[string]interface{}) {
+	databaseId := c.Param("databaseId")
+	collectionId := c.Param("collId")
+
+	if c.GetHeader("x-ms-cosmos-is-query-plan-request") != "" {
+		c.IndentedJSON(http.StatusOK, constants.QueryPlanResponse)
+		return
+	}
+
+	var queryParameters map[string]interface{}
+	if paramsArray, ok := requestBody["parameters"].([]interface{}); ok {
+		queryParameters = parametersToMap(paramsArray)
+	}
+
+	queryText := requestBody["query"].(string)
+	docs, status := h.executeQueryDocuments(databaseId, collectionId, queryText, queryParameters)
+	if status != datastore.StatusOk {
+		// TODO: Currently we return everything if the query fails
+		h.GetAllDocuments(c)
+		return
+	}
+
+	collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
+	c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
+	c.IndentedJSON(http.StatusOK, gin.H{
+		"_rid":      collection.ResourceID,
+		"Documents": docs,
+		"_count":    len(docs),
+	})
+}
+
+func (h *Handlers) handleBatchRequest(c *gin.Context) {
+	databaseId := c.Param("databaseId")
+	collectionId := c.Param("collId")
+
+	batchOperations := make([]apimodels.BatchOperation, 0)
+	if err := c.BindJSON(&batchOperations); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
+		return
+	}
+
+	batchOperationResults := make([]apimodels.BatchOperationResult, len(batchOperations))
+	for idx, operation := range batchOperations {
+		switch operation.OperationType {
+		case apimodels.BatchOperationTypeCreate:
+			createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
+			responseCode := dataStoreStatusToResponseCode(status)
+			if status == datastore.StatusOk {
+				responseCode = http.StatusCreated
+			}
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode:   responseCode,
+				ResourceBody: createdDocument,
+			}
+		case apimodels.BatchOperationTypeDelete:
+			status := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
+			responseCode := dataStoreStatusToResponseCode(status)
+			if status == datastore.StatusOk {
+				responseCode = http.StatusNoContent
+			}
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode: responseCode,
+			}
+		case apimodels.BatchOperationTypeReplace:
+			deleteStatus := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
+			if deleteStatus == datastore.StatusNotFound {
+				batchOperationResults[idx] = apimodels.BatchOperationResult{
+					StatusCode: http.StatusNotFound,
+				}
+				continue
+			}
+			createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
+			responseCode := dataStoreStatusToResponseCode(createStatus)
+			if createStatus == datastore.StatusOk {
+				responseCode = http.StatusCreated
+			}
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode:   responseCode,
+				ResourceBody: createdDocument,
+			}
+		case apimodels.BatchOperationTypeUpsert:
+			documentId := operation.ResourceBody["id"].(string)
+			h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
+			createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
+			responseCode := dataStoreStatusToResponseCode(createStatus)
+			if createStatus == datastore.StatusOk {
+				responseCode = http.StatusCreated
+			}
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode:   responseCode,
+				ResourceBody: createdDocument,
+			}
+		case apimodels.BatchOperationTypeRead:
+			document, status := h.dataStore.GetDocument(databaseId, collectionId, operation.Id)
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode:   dataStoreStatusToResponseCode(status),
+				ResourceBody: document,
+			}
+		case apimodels.BatchOperationTypePatch:
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode: http.StatusNotImplemented,
+				Message:    "Patch operation is not implemented",
+			}
+		default:
+			batchOperationResults[idx] = apimodels.BatchOperationResult{
+				StatusCode: http.StatusBadRequest,
+				Message:    "Unknown operation type",
+			}
+		}
+	}
+
+	c.JSON(http.StatusOK, batchOperationResults)
+}
+
+func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
+	switch status {
+	case datastore.StatusOk:
+		return http.StatusOK
+	case datastore.StatusNotFound:
+		return http.StatusNotFound
+	case datastore.Conflict:
+		return http.StatusConflict
+	case datastore.BadRequest:
+		return http.StatusBadRequest
+	default:
+		return http.StatusInternalServerError
+	}
+}
+
+func (h *Handlers) executeQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, datastore.DataStoreStatus) {
+	parsedQuery, err := nosql.Parse("", []byte(query))
+	if err != nil {
+		logger.Errorf("Failed to parse query: %s\nerr: %v", query, err)
+		return nil, datastore.BadRequest
+	}
+
+	allDocumentsIterator, status := h.dataStore.GetDocumentIterator(databaseId, collectionId)
+	if status != datastore.StatusOk {
+		return nil, status
+	}
+	defer allDocumentsIterator.Close()
+
+	rowsIterator := converters.NewDocumentToRowTypeIterator(allDocumentsIterator)
+
+	if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
+		typedQuery.Parameters = queryParameters
+		return memoryexecutor.ExecuteQuery(typedQuery, rowsIterator), datastore.StatusOk
+	}
+
+	return nil, datastore.BadRequest
+}

View File

@@ -2,17 +2,17 @@ package handlers
 import (
 	"github.com/pikami/cosmium/api/config"
-	"github.com/pikami/cosmium/internal/repositories"
+	"github.com/pikami/cosmium/internal/datastore"
 )
 
 type Handlers struct {
-	repository *repositories.DataRepository
-	config     config.ServerConfig
+	dataStore datastore.DataStore
+	config    *config.ServerConfig
 }
 
-func NewHandlers(dataRepository *repositories.DataRepository, config config.ServerConfig) *Handlers {
+func NewHandlers(dataStore datastore.DataStore, config *config.ServerConfig) *Handlers {
 	return &Handlers{
-		repository: dataRepository,
-		config:     config,
+		dataStore: dataStore,
+		config:    config,
 	}
 }

View File

@@ -10,7 +10,7 @@ import (
"github.com/pikami/cosmium/internal/logger" "github.com/pikami/cosmium/internal/logger"
) )
func Authentication(config config.ServerConfig) gin.HandlerFunc { func Authentication(config *config.ServerConfig) gin.HandlerFunc {
return func(c *gin.Context) { return func(c *gin.Context) {
requestUrl := c.Request.URL.String() requestUrl := c.Request.URL.String()
if config.DisableAuth || if config.DisableAuth ||
@@ -75,8 +75,7 @@ func requestToResourceId(c *gin.Context) string {
isFeed := c.Request.Header.Get("A-Im") == "Incremental Feed" isFeed := c.Request.Header.Get("A-Im") == "Incremental Feed"
if resourceType == "pkranges" && isFeed { if resourceType == "pkranges" && isFeed {
// CosmosSDK replaces '/' with '-' in resource id requests resourceId = collId
resourceId = strings.Replace(collId, "-", "/", -1)
} }
return resourceId return resourceId

View File

@@ -7,7 +7,7 @@ import (
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
) )
func StripTrailingSlashes(r *gin.Engine, config config.ServerConfig) gin.HandlerFunc { func StripTrailingSlashes(r *gin.Engine, config *config.ServerConfig) gin.HandlerFunc {
return func(c *gin.Context) { return func(c *gin.Context) {
path := c.Request.URL.Path path := c.Request.URL.Path
if len(path) > 1 && path[len(path)-1] == '/' && !strings.Contains(path, config.ExplorerBaseUrlLocation) { if len(path) > 1 && path[len(path)-1] == '/' && !strings.Contains(path, config.ExplorerBaseUrlLocation) {

View File

@@ -5,7 +5,8 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
) )
func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) { func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
@@ -17,8 +18,8 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
return return
} }
partitionKeyRanges, status := h.repository.GetPartitionKeyRanges(databaseId, collectionId) partitionKeyRanges, status := h.dataStore.GetPartitionKeyRanges(databaseId, collectionId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Header("etag", "\"420\"") c.Header("etag", "\"420\"")
c.Header("lsn", "420") c.Header("lsn", "420")
c.Header("x-ms-cosmos-llsn", "420") c.Header("x-ms-cosmos-llsn", "420")
@@ -26,20 +27,21 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
collectionRid := collectionId collectionRid := collectionId
collection, _ := h.repository.GetCollection(databaseId, collectionId) collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
if collection.ResourceID != "" { if collection.ResourceID != "" {
collectionRid = collection.ResourceID collectionRid = collection.ResourceID
} }
rid := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
c.IndentedJSON(http.StatusOK, gin.H{ c.IndentedJSON(http.StatusOK, gin.H{
"_rid": collectionRid, "_rid": rid,
"_count": len(partitionKeyRanges), "_count": len(partitionKeyRanges),
"PartitionKeyRanges": partitionKeyRanges, "PartitionKeyRanges": partitionKeyRanges,
}) })
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }

View File

@@ -27,7 +27,9 @@ func (h *Handlers) GetServerInfo(c *gin.Context) {
"databaseAccountEndpoint": h.config.DatabaseEndpoint, "databaseAccountEndpoint": h.config.DatabaseEndpoint,
}, },
}, },
"enableMultipleWriteLocations": false, "enableMultipleWriteLocations": false,
"continuousBackupEnabled": false,
"enableNRegionSynchronousCommit": false,
"userReplicationPolicy": map[string]interface{}{ "userReplicationPolicy": map[string]interface{}{
"asyncReplication": false, "asyncReplication": false,
"minReplicaSetSize": 1, "minReplicaSetSize": 1,

View File

@@ -5,16 +5,16 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllStoredProcedures(c *gin.Context) { func (h *Handlers) GetAllStoredProcedures(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
sps, status := h.repository.GetAllStoredProcedures(databaseId, collectionId) sps, status := h.dataStore.GetAllStoredProcedures(databaseId, collectionId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)}) c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
return return
@@ -28,14 +28,14 @@ func (h *Handlers) GetStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
spId := c.Param("spId") spId := c.Param("spId")
sp, status := h.repository.GetStoredProcedure(databaseId, collectionId, spId) sp, status := h.dataStore.GetStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, sp) c.IndentedJSON(http.StatusOK, sp)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -48,13 +48,13 @@ func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
spId := c.Param("spId") spId := c.Param("spId")
status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId) status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -67,25 +67,25 @@ func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
spId := c.Param("spId") spId := c.Param("spId")
var sp repositorymodels.StoredProcedure var sp datastore.StoredProcedure
if err := c.BindJSON(&sp); err != nil { if err := c.BindJSON(&sp); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"}) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId) status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp) createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdSP) c.IndentedJSON(http.StatusOK, createdSP)
return return
} }
@@ -97,19 +97,19 @@ func (h *Handlers) CreateStoredProcedure(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
var sp repositorymodels.StoredProcedure var sp datastore.StoredProcedure
if err := c.BindJSON(&sp); err != nil { if err := c.BindJSON(&sp); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"}) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp) createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdSP) c.IndentedJSON(http.StatusCreated, createdSP)
return return
} }

View File

@@ -5,16 +5,16 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllTriggers(c *gin.Context) { func (h *Handlers) GetAllTriggers(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggers, status := h.repository.GetAllTriggers(databaseId, collectionId) triggers, status := h.dataStore.GetAllTriggers(databaseId, collectionId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)}) c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
return return
@@ -28,14 +28,14 @@ func (h *Handlers) GetTrigger(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggerId := c.Param("triggerId") triggerId := c.Param("triggerId")
trigger, status := h.repository.GetTrigger(databaseId, collectionId, triggerId) trigger, status := h.dataStore.GetTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, trigger) c.IndentedJSON(http.StatusOK, trigger)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -48,13 +48,13 @@ func (h *Handlers) DeleteTrigger(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggerId := c.Param("triggerId") triggerId := c.Param("triggerId")
status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId) status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -67,25 +67,25 @@ func (h *Handlers) ReplaceTrigger(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
triggerId := c.Param("triggerId") triggerId := c.Param("triggerId")
var trigger repositorymodels.Trigger var trigger datastore.Trigger
if err := c.BindJSON(&trigger); err != nil { if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"}) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId) status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger) createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdTrigger) c.IndentedJSON(http.StatusOK, createdTrigger)
return return
} }
@@ -97,19 +97,19 @@ func (h *Handlers) CreateTrigger(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
var trigger repositorymodels.Trigger var trigger datastore.Trigger
if err := c.BindJSON(&trigger); err != nil { if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"}) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger) createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdTrigger) c.IndentedJSON(http.StatusCreated, createdTrigger)
return return
} }

View File

@@ -5,16 +5,16 @@ import (
"net/http" "net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
) )
func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) { func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfs, status := h.repository.GetAllUserDefinedFunctions(databaseId, collectionId) udfs, status := h.dataStore.GetAllUserDefinedFunctions(databaseId, collectionId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs))) c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)}) c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
return return
@@ -28,14 +28,14 @@ func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfId := c.Param("udfId") udfId := c.Param("udfId")
udf, status := h.repository.GetUserDefinedFunction(databaseId, collectionId, udfId) udf, status := h.dataStore.GetUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, udf) c.IndentedJSON(http.StatusOK, udf)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -48,13 +48,13 @@ func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfId := c.Param("udfId") udfId := c.Param("udfId")
status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId) status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.Status(http.StatusNoContent) c.Status(http.StatusNoContent)
return return
} }
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
@@ -67,25 +67,25 @@ func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId") collectionId := c.Param("collId")
udfId := c.Param("udfId") udfId := c.Param("udfId")
var udf repositorymodels.UserDefinedFunction var udf datastore.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil { if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"}) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId) status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusNotFound { if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"}) c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
return return
} }
createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf) createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdUdf) c.IndentedJSON(http.StatusOK, createdUdf)
return return
} }
@@ -97,19 +97,19 @@ func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) {
databaseId := c.Param("databaseId") databaseId := c.Param("databaseId")
collectionId := c.Param("collId") collectionId := c.Param("collId")
var udf repositorymodels.UserDefinedFunction var udf datastore.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil { if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"}) c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
return return
} }
createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf) createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == repositorymodels.Conflict { if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"}) c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
return return
} }
if status == repositorymodels.StatusOk { if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdUdf) c.IndentedJSON(http.StatusCreated, createdUdf)
return return
} }

View File

@@ -5,19 +5,20 @@ import (
"fmt" "fmt"
"net/http" "net/http"
"sync" "sync"
"time"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/handlers" "github.com/pikami/cosmium/api/handlers"
"github.com/pikami/cosmium/api/handlers/middleware" "github.com/pikami/cosmium/api/handlers/middleware"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger" "github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/repositories"
tlsprovider "github.com/pikami/cosmium/internal/tls_provider" tlsprovider "github.com/pikami/cosmium/internal/tls_provider"
) )
var ginMux sync.Mutex var ginMux sync.Mutex
func (s *ApiServer) CreateRouter(repository *repositories.DataRepository) { func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
routeHandlers := handlers.NewHandlers(repository, s.config) routeHandlers := handlers.NewHandlers(dataStore, s.config)
ginMux.Lock() ginMux.Lock()
gin.DefaultWriter = logger.InfoWriter() gin.DefaultWriter = logger.InfoWriter()
@@ -86,7 +87,7 @@ func (s *ApiServer) CreateRouter(repository *repositories.DataRepository) {
s.router = router s.router = router
} }
func (s *ApiServer) Start() { func (s *ApiServer) Start() error {
listenAddress := fmt.Sprintf(":%d", s.config.Port) listenAddress := fmt.Sprintf(":%d", s.config.Port)
s.isActive = true s.isActive = true
@@ -95,6 +96,8 @@ func (s *ApiServer) Start() {
Handler: s.router.Handler(), Handler: s.router.Handler(),
} }
errChan := make(chan error, 1)
go func() { go func() {
<-s.stopServer <-s.stopServer
logger.InfoLn("Shutting down server...") logger.InfoLn("Shutting down server...")
@@ -102,35 +105,40 @@ func (s *ApiServer) Start() {
if err != nil { if err != nil {
logger.ErrorLn("Failed to shutdown server:", err) logger.ErrorLn("Failed to shutdown server:", err)
} }
s.onServerShutdown <- true
}() }()
go func() { go func() {
var err error
if s.config.DisableTls { if s.config.DisableTls {
logger.Infof("Listening and serving HTTP on %s\n", server.Addr) logger.Infof("Listening and serving HTTP on %s\n", server.Addr)
err := server.ListenAndServe() err = server.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTP server:", err)
}
s.isActive = false
} else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" { } else if s.config.TLS_CertificatePath != "" && s.config.TLS_CertificateKey != "" {
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr) logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
err := server.ListenAndServeTLS( err = server.ListenAndServeTLS(
s.config.TLS_CertificatePath, s.config.TLS_CertificatePath,
s.config.TLS_CertificateKey) s.config.TLS_CertificateKey)
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTPS server:", err)
}
s.isActive = false
} else { } else {
tlsConfig := tlsprovider.GetDefaultTlsConfig() tlsConfig := tlsprovider.GetDefaultTlsConfig()
server.TLSConfig = tlsConfig server.TLSConfig = tlsConfig
logger.Infof("Listening and serving HTTPS on %s\n", server.Addr) logger.Infof("Listening and serving HTTPS on %s\n", server.Addr)
err := server.ListenAndServeTLS("", "") err = server.ListenAndServeTLS("", "")
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start HTTPS server:", err)
}
s.isActive = false
} }
if err != nil && err != http.ErrServerClosed {
logger.ErrorLn("Failed to start server:", err)
errChan <- err
} else {
errChan <- nil
}
s.isActive = false
}() }()
select {
case err := <-errChan:
return err
case <-time.After(50 * time.Millisecond):
return nil
}
} }
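The Start change above follows a start-with-grace pattern: the listener runs in a goroutine, reports its outcome on a buffered channel, and the caller only treats startup as failed if an error arrives within roughly 50 ms. A minimal standalone sketch of that pattern, assuming a generic serve function (startWithGrace is an illustrative name, not code from the repository):

package server

import (
	"net/http"
	"time"
)

// startWithGrace runs serve in a goroutine and returns its error only if it
// fails within the grace window; otherwise the server is assumed to be up.
func startWithGrace(serve func() error, grace time.Duration) error {
	errChan := make(chan error, 1)

	go func() {
		err := serve()
		if err != nil && err != http.ErrServerClosed {
			errChan <- err
			return
		}
		errChan <- nil
	}()

	select {
	case err := <-errChan:
		return err
	case <-time.After(grace):
		return nil
	}
}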

View File

@@ -2,13 +2,11 @@ package tests_test
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"testing" "testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -19,7 +17,7 @@ func Test_Authentication(t *testing.T) {
defer ts.Server.Close() defer ts.Server.Close()
t.Run("Should get 200 when correct account key is used", func(t *testing.T) { t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName) ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{}, &azcosmos.ClientOptions{},
@@ -35,7 +33,7 @@ func Test_Authentication(t *testing.T) {
}) })
t.Run("Should get 401 when wrong account key is used", func(t *testing.T) { t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName) ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
&azcosmos.ClientOptions{}, &azcosmos.ClientOptions{},
@@ -47,12 +45,7 @@ func Test_Authentication(t *testing.T) {
azcosmos.DatabaseProperties{ID: testDatabaseName}, azcosmos.DatabaseProperties{ID: testDatabaseName},
&azcosmos.CreateDatabaseOptions{}) &azcosmos.CreateDatabaseOptions{})
var respErr *azcore.ResponseError assert.Contains(t, err.Error(), "401 Unauthorized")
if errors.As(err, &respErr) {
assert.Equal(t, respErr.StatusCode, http.StatusUnauthorized)
} else {
panic(err)
}
}) })
t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) { t.Run("Should allow unauthorized requests to /_explorer", func(t *testing.T) {
@@ -68,7 +61,7 @@ func Test_Authentication(t *testing.T) {
} }
func Test_Authentication_Disabled(t *testing.T) { func Test_Authentication_Disabled(t *testing.T) {
ts := runTestServerCustomConfig(config.ServerConfig{ ts := runTestServerCustomConfig(&config.ServerConfig{
AccountKey: config.DefaultAccountKey, AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing", ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation, ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
@@ -77,7 +70,7 @@ func Test_Authentication_Disabled(t *testing.T) {
defer ts.Server.Close() defer ts.Server.Close()
t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) { t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName) ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
&azcosmos.ClientOptions{}, &azcosmos.ClientOptions{},

View File

@@ -3,32 +3,29 @@ package tests_test
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"net/http" "net/http"
"testing" "testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/internal/datastore"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func Test_Collections(t *testing.T) { func Test_Collections(t *testing.T) {
ts := runTestServer() presets := []testPreset{PresetJsonStore, PresetBadgerStore}
defer ts.Server.Close()
client, err := azcosmos.NewClientFromConnectionString( setUp := func(ts *TestServer, client *azcosmos.Client) *azcosmos.DatabaseClient {
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey), ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
&azcosmos.ClientOptions{}, databaseClient, err := client.NewDatabase(testDatabaseName)
) assert.Nil(t, err)
assert.Nil(t, err)
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName}) return databaseClient
databaseClient, err := client.NewDatabase(testDatabaseName) }
assert.Nil(t, err)
runTestsWithPresets(t, "Collection Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Collection Create", func(t *testing.T) {
t.Run("Should create collection", func(t *testing.T) { t.Run("Should create collection", func(t *testing.T) {
createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{ createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
ID: testCollectionName, ID: testCollectionName,
@@ -39,7 +36,7 @@ func Test_Collections(t *testing.T) {
}) })
t.Run("Should return conflict when collection exists", func(t *testing.T) { t.Run("Should return conflict when collection exists", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{ ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName, ID: testCollectionName,
}) })
@@ -57,9 +54,11 @@ func Test_Collections(t *testing.T) {
}) })
}) })
t.Run("Collection Read", func(t *testing.T) { runTestsWithPresets(t, "Collection Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Should read collection", func(t *testing.T) { t.Run("Should read collection", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{ ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName, ID: testCollectionName,
}) })
@@ -73,7 +72,7 @@ func Test_Collections(t *testing.T) {
}) })
t.Run("Should return not found when collection does not exist", func(t *testing.T) { t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.Repository.DeleteCollection(testDatabaseName, testCollectionName) ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
collectionResponse, err := databaseClient.NewContainer(testCollectionName) collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err) assert.Nil(t, err)
@@ -90,9 +89,11 @@ func Test_Collections(t *testing.T) {
}) })
}) })
t.Run("Collection Delete", func(t *testing.T) { runTestsWithPresets(t, "Collection Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)
t.Run("Should delete collection", func(t *testing.T) { t.Run("Should delete collection", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{ ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName, ID: testCollectionName,
}) })
@@ -105,7 +106,7 @@ func Test_Collections(t *testing.T) {
}) })
t.Run("Should return not found when collection does not exist", func(t *testing.T) { t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.Repository.DeleteCollection(testDatabaseName, testCollectionName) ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
collectionResponse, err := databaseClient.NewContainer(testCollectionName) collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err) assert.Nil(t, err)

View File

@@ -1,39 +1,62 @@
package tests_test package tests_test
import ( import (
"fmt"
"net/http/httptest" "net/http/httptest"
"testing"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api" "github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories" "github.com/pikami/cosmium/internal/datastore"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/stretchr/testify/assert"
) )
type TestServer struct { type TestServer struct {
Server *httptest.Server Server *httptest.Server
Repository *repositories.DataRepository DataStore datastore.DataStore
URL string URL string
} }
func runTestServerCustomConfig(config config.ServerConfig) *TestServer { func getDefaultTestServerConfig() *config.ServerConfig {
repository := repositories.NewDataRepository(repositories.RepositoryOptions{}) return &config.ServerConfig{
AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing",
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
DataStore: "json",
}
}
api := api.NewApiServer(repository, config) func runTestServerCustomConfig(configuration *config.ServerConfig) *TestServer {
var dataStore datastore.DataStore
switch configuration.DataStore {
case config.DataStoreBadger:
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{})
default:
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
}
api := api.NewApiServer(dataStore, configuration)
server := httptest.NewServer(api.GetRouter()) server := httptest.NewServer(api.GetRouter())
configuration.DatabaseEndpoint = server.URL
return &TestServer{ return &TestServer{
Server: server, Server: server,
Repository: repository, DataStore: dataStore,
URL: server.URL, URL: server.URL,
} }
} }
func runTestServer() *TestServer { func runTestServer() *TestServer {
config := config.ServerConfig{ config := getDefaultTestServerConfig()
AccountKey: config.DefaultAccountKey,
ExplorerPath: "/tmp/nothing", config.LogLevel = "debug"
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation, logger.SetLogLevel(logger.LogLevelDebug)
}
return runTestServerCustomConfig(config) return runTestServerCustomConfig(config)
} }
@@ -43,3 +66,47 @@ const (
testDatabaseName = "test-db" testDatabaseName = "test-db"
testCollectionName = "test-coll" testCollectionName = "test-coll"
) )
type testFunc func(t *testing.T, ts *TestServer, cosmosClient *azcosmos.Client)
type testPreset string
const (
PresetJsonStore testPreset = "JsonDS"
PresetBadgerStore testPreset = "BadgerDS"
)
func runTestsWithPreset(t *testing.T, name string, testPreset testPreset, f testFunc) {
serverConfig := getDefaultTestServerConfig()
serverConfig.LogLevel = "debug"
logger.SetLogLevel(logger.LogLevelDebug)
switch testPreset {
case PresetBadgerStore:
serverConfig.DataStore = config.DataStoreBadger
case PresetJsonStore:
serverConfig.DataStore = config.DataStoreJson
}
ts := runTestServerCustomConfig(serverConfig)
defer ts.Server.Close()
defer ts.DataStore.Close()
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
testName := fmt.Sprintf("%s_%s", testPreset, name)
t.Run(testName, func(t *testing.T) {
f(t, ts, client)
})
}
func runTestsWithPresets(t *testing.T, name string, testPresets []testPreset, f testFunc) {
for _, testPreset := range testPresets {
runTestsWithPreset(t, name, testPreset, f)
}
}
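For orientation, a hypothetical consumer of these preset helpers would live alongside them in the same test package and follow the shape the database, collection and document tests below are rewired to (Test_Example itself is illustrative and not part of the suite):

func Test_Example(t *testing.T) {
	presets := []testPreset{PresetJsonStore, PresetBadgerStore}

	runTestsWithPresets(t, "Example", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
		// ts.DataStore manipulates the selected backend directly,
		// while client exercises the emulator's HTTP API.
		ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})

		_, err := client.NewDatabase(testDatabaseName)
		assert.Nil(t, err)
	})
}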

View File

@@ -3,30 +3,21 @@ package tests_test
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"net/http" "net/http"
"testing" "testing"
"github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/internal/datastore"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func Test_Databases(t *testing.T) { func Test_Databases(t *testing.T) {
ts := runTestServer() presets := []testPreset{PresetJsonStore, PresetBadgerStore}
defer ts.Server.Close()
client, err := azcosmos.NewClientFromConnectionString( runTestsWithPresets(t, "Database Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
t.Run("Database Create", func(t *testing.T) {
t.Run("Should create database", func(t *testing.T) { t.Run("Should create database", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName) ts.DataStore.DeleteDatabase(testDatabaseName)
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{ createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
ID: testDatabaseName, ID: testDatabaseName,
@@ -37,7 +28,7 @@ func Test_Databases(t *testing.T) {
}) })
t.Run("Should return conflict when database exists", func(t *testing.T) { t.Run("Should return conflict when database exists", func(t *testing.T) {
ts.Repository.CreateDatabase(repositorymodels.Database{ ts.DataStore.CreateDatabase(datastore.Database{
ID: testDatabaseName, ID: testDatabaseName,
}) })
@@ -55,9 +46,9 @@ func Test_Databases(t *testing.T) {
}) })
}) })
t.Run("Database Read", func(t *testing.T) { runTestsWithPresets(t, "Database Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
t.Run("Should read database", func(t *testing.T) { t.Run("Should read database", func(t *testing.T) {
ts.Repository.CreateDatabase(repositorymodels.Database{ ts.DataStore.CreateDatabase(datastore.Database{
ID: testDatabaseName, ID: testDatabaseName,
}) })
@@ -71,7 +62,7 @@ func Test_Databases(t *testing.T) {
}) })
t.Run("Should return not found when database does not exist", func(t *testing.T) { t.Run("Should return not found when database does not exist", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName) ts.DataStore.DeleteDatabase(testDatabaseName)
databaseResponse, err := client.NewDatabase(testDatabaseName) databaseResponse, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err) assert.Nil(t, err)
@@ -88,9 +79,9 @@ func Test_Databases(t *testing.T) {
}) })
}) })
t.Run("Database Delete", func(t *testing.T) { runTestsWithPresets(t, "Database Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
t.Run("Should delete database", func(t *testing.T) { t.Run("Should delete database", func(t *testing.T) {
ts.Repository.CreateDatabase(repositorymodels.Database{ ts.DataStore.CreateDatabase(datastore.Database{
ID: testDatabaseName, ID: testDatabaseName,
}) })
@@ -103,7 +94,7 @@ func Test_Databases(t *testing.T) {
}) })
t.Run("Should return not found when database does not exist", func(t *testing.T) { t.Run("Should return not found when database does not exist", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName) ts.DataStore.DeleteDatabase(testDatabaseName)
databaseResponse, err := client.NewDatabase(testDatabaseName) databaseResponse, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err) assert.Nil(t, err)

View File

@@ -14,7 +14,7 @@ import (
"github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models" "github.com/pikami/cosmium/internal/datastore"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@@ -53,11 +53,9 @@ func testCosmosQuery(t *testing.T,
} }
} }
func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClient) { func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerClient {
ts := runTestServer() ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ID: testCollectionName, ID: testCollectionName,
PartitionKey: struct { PartitionKey: struct {
Paths []string "json:\"paths\"" Paths []string "json:\"paths\""
@@ -67,8 +65,8 @@ func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClien
Paths: []string{"/pk"}, Paths: []string{"/pk"},
}, },
}) })
ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}) ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}}) ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
client, err := azcosmos.NewClientFromConnectionString( client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey), fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
@@ -79,303 +77,439 @@ func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClien
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName) collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
assert.Nil(t, err) assert.Nil(t, err)
return ts, collectionClient return collectionClient
} }
func Test_Documents(t *testing.T) { func Test_Documents(t *testing.T) {
ts, collectionClient := documents_InitializeDb(t) presets := []testPreset{PresetJsonStore, PresetBadgerStore}
defer ts.Server.Close()
t.Run("Should query document", func(t *testing.T) { runTestsWithPresets(t, "Test_Documents", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
testCosmosQuery(t, collectionClient, collectionClient := documents_InitializeDb(t, ts)
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
nil, t.Run("Should query document", func(t *testing.T) {
[]interface{}{ testCosmosQuery(t, collectionClient,
map[string]interface{}{"id": "12345", "pk": "123"}, "SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
map[string]interface{}{"id": "67890", "pk": "456"}, nil,
}, []interface{}{
) map[string]interface{}{"id": "12345", "pk": "123"},
map[string]interface{}{"id": "67890", "pk": "456"},
},
)
})
t.Run("Should query VALUE array", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
nil,
[]interface{}{
[]interface{}{"12345", "123"},
[]interface{}{"67890", "456"},
},
)
})
t.Run("Should query VALUE object", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "_pk": "123"},
map[string]interface{}{"id": "67890", "_pk": "456"},
},
)
})
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c.isCool=true
ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query document with query parameters", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c.id=@param_id
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param_id", Value: "67890"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`select c.id
FROM c
WHERE c[@param]="67890"
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param", Value: "id"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query array accessor", func(t *testing.T) {
testCosmosQuery(t, collectionClient,
`SELECT c.id,
c["arr"][0] AS arr0,
c["arr"][1] AS arr1,
c["arr"][2] AS arr2,
c["arr"][3] AS arr3
FROM c ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
},
)
})
t.Run("Should handle parallel writes", func(t *testing.T) {
var wg sync.WaitGroup
rutineCount := 100
results := make(chan error, rutineCount)
createCall := func(i int) {
defer wg.Done()
item := map[string]interface{}{
"id": fmt.Sprintf("id-%d", i),
"pk": fmt.Sprintf("pk-%d", i),
"val": i,
}
bytes, err := json.Marshal(item)
if err != nil {
results <- err
return
}
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
_, err = collectionClient.CreateItem(
ctx,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
results <- err
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
}
for i := 0; i < rutineCount; i++ {
wg.Add(1)
go createCall(i)
}
wg.Wait()
close(results)
for err := range results {
if err != nil {
t.Errorf("Error creating item: %v", err)
}
}
})
}) })
t.Run("Should query VALUE array", func(t *testing.T) { runTestsWithPresets(t, "Test_Documents_Patch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
testCosmosQuery(t, collectionClient, collectionClient := documents_InitializeDb(t, ts)
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
nil,
[]interface{}{
[]interface{}{"12345", "123"},
[]interface{}{"67890", "456"},
},
)
})
t.Run("Should query VALUE object", func(t *testing.T) { t.Run("Should PATCH document", func(t *testing.T) {
testCosmosQuery(t, collectionClient, context := context.TODO()
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id", expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "_pk": "123"},
map[string]interface{}{"id": "67890", "_pk": "456"},
},
)
})
t.Run("Should query document with single WHERE condition", func(t *testing.T) { patch := azcosmos.PatchOperations{}
testCosmosQuery(t, collectionClient, patch.AppendAdd("/newField", "newValue")
`select c.id patch.AppendIncrement("/incr", 15)
FROM c patch.AppendRemove("/isCool")
WHERE c.isCool=true patch.AppendReplace("/pk", "666")
ORDER BY c.id`, patch.AppendSet("/setted", "isSet")
nil,
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query document with query parameters", func(t *testing.T) { itemResponse, err := collectionClient.PatchItem(
testCosmosQuery(t, collectionClient, context,
`select c.id azcosmos.PartitionKey{},
FROM c "67890",
WHERE c.id=@param_id patch,
ORDER BY c.id`, &azcosmos.ItemOptions{
[]azcosmos.QueryParameter{ EnableContentResponseOnWrite: false,
{Name: "@param_id", Value: "67890"}, },
}, )
[]interface{}{ assert.Nil(t, err)
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query document with query parameters as accessor", func(t *testing.T) { var itemResponseBody map[string]interface{}
testCosmosQuery(t, collectionClient, json.Unmarshal(itemResponse.Value, &itemResponseBody)
`select c.id
FROM c
WHERE c[@param]="67890"
ORDER BY c.id`,
[]azcosmos.QueryParameter{
{Name: "@param", Value: "id"},
},
[]interface{}{
map[string]interface{}{"id": "67890"},
},
)
})
t.Run("Should query array accessor", func(t *testing.T) { assert.Equal(t, expectedData["id"], itemResponseBody["id"])
testCosmosQuery(t, collectionClient, assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
`SELECT c.id, assert.Empty(t, itemResponseBody["isCool"])
c["arr"][0] AS arr0, assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
c["arr"][1] AS arr1, assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
c["arr"][2] AS arr2, assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
c["arr"][3] AS arr3 })
FROM c ORDER BY c.id`,
nil,
[]interface{}{
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
},
)
})
t.Run("Should handle parallel writes", func(t *testing.T) { t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
var wg sync.WaitGroup context := context.TODO()
rutineCount := 100
results := make(chan error, rutineCount) patch := azcosmos.PatchOperations{}
patch.AppendReplace("/id", "newValue")
_, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
} else {
panic(err)
}
})
t.Run("CreateItem", func(t *testing.T) {
context := context.TODO()
createCall := func(i int) {
defer wg.Done()
item := map[string]interface{}{ item := map[string]interface{}{
"id": fmt.Sprintf("id-%d", i), "Id": "6789011",
"pk": fmt.Sprintf("pk-%d", i), "pk": "456",
"val": i, "newField": "newValue2",
} }
bytes, err := json.Marshal(item) bytes, err := json.Marshal(item)
if err != nil { assert.Nil(t, err)
results <- err
return
}
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) r, err2 := collectionClient.CreateItem(
defer cancel() context,
_, err = collectionClient.CreateItem(
ctx,
azcosmos.PartitionKey{}, azcosmos.PartitionKey{},
bytes, bytes,
&azcosmos.ItemOptions{ &azcosmos.ItemOptions{
EnableContentResponseOnWrite: false, EnableContentResponseOnWrite: false,
}, },
) )
results <- err assert.NotNil(t, r)
assert.Nil(t, err2)
})
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil) t.Run("CreateItem that already exists", func(t *testing.T) {
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil) context := context.TODO()
}
for i := 0; i < rutineCount; i++ { item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
wg.Add(1) bytes, err := json.Marshal(item)
go createCall(i) assert.Nil(t, err)
}
wg.Wait() r, err := collectionClient.CreateItem(
close(results) context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.NotNil(t, err)
for err := range results { var respErr *azcore.ResponseError
if err != nil { if errors.As(err, &respErr) {
t.Errorf("Error creating item: %v", err) assert.Equal(t, http.StatusConflict, respErr.StatusCode)
} else {
panic(err)
} }
} })
t.Run("UpsertItem new", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
t.Run("UpsertItem that already exists", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
})
runTestsWithPresets(t, "Test_Documents_TransactionalBatch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
collectionClient := documents_InitializeDb(t, ts)
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "678901",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.CreateItem(bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
createdDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], createdDoc["id"])
})
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
batch.DeleteItem("12345", nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
_, status := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, "12345")
assert.Equal(t, datastore.StatusNotFound, int(status))
})
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "67890",
"pk": "666",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.ReplaceItem("67890", bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], updatedDoc["id"])
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
})
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
newItem := map[string]interface{}{
"id": "678901",
"pk": "666",
}
bytes, err := json.Marshal(newItem)
assert.Nil(t, err)
batch.UpsertItem(bytes, nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, newItem["id"], itemResponseBody["id"])
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
assert.Equal(t, newItem["id"], updatedDoc["id"])
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
})
t.Run("Should execute READ transactional batch", func(t *testing.T) {
context := context.TODO()
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
batch.ReadItem("67890", nil)
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
assert.Nil(t, err)
assert.True(t, response.Success)
assert.Equal(t, 1, len(response.OperationResults))
operationResponse := response.OperationResults[0]
assert.NotNil(t, operationResponse)
assert.NotNil(t, operationResponse.ResourceBody)
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
var itemResponseBody map[string]interface{}
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
assert.Equal(t, "67890", itemResponseBody["id"])
})
}) })
} }
func Test_Documents_Patch(t *testing.T) {
ts, collectionClient := documents_InitializeDb(t)
defer ts.Server.Close()
t.Run("Should PATCH document", func(t *testing.T) {
context := context.TODO()
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
patch := azcosmos.PatchOperations{}
patch.AppendAdd("/newField", "newValue")
patch.AppendIncrement("/incr", 15)
patch.AppendRemove("/isCool")
patch.AppendReplace("/pk", "666")
patch.AppendSet("/setted", "isSet")
itemResponse, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.Nil(t, err)
var itemResponseBody map[string]interface{}
json.Unmarshal(itemResponse.Value, &itemResponseBody)
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
assert.Empty(t, itemResponseBody["isCool"])
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
})
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
context := context.TODO()
patch := azcosmos.PatchOperations{}
patch.AppendReplace("/id", "newValue")
_, err := collectionClient.PatchItem(
context,
azcosmos.PartitionKey{},
"67890",
patch,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
} else {
panic(err)
}
})
t.Run("CreateItem", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{
"Id": "6789011",
"pk": "456",
"newField": "newValue2",
}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.CreateItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
t.Run("CreateItem that already exists", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err := collectionClient.CreateItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.NotNil(t, err)
var respErr *azcore.ResponseError
if errors.As(err, &respErr) {
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
} else {
panic(err)
}
})
t.Run("UpsertItem new", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
t.Run("UpsertItem that already exists", func(t *testing.T) {
context := context.TODO()
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
bytes, err := json.Marshal(item)
assert.Nil(t, err)
r, err2 := collectionClient.UpsertItem(
context,
azcosmos.PartitionKey{},
bytes,
&azcosmos.ItemOptions{
EnableContentResponseOnWrite: false,
},
)
assert.NotNil(t, r)
assert.Nil(t, err2)
})
}

View File

@@ -14,7 +14,8 @@ import (
// Request document with trailing slash like python cosmosdb client does. // Request document with trailing slash like python cosmosdb client does.
func Test_Documents_Read_Trailing_Slash(t *testing.T) { func Test_Documents_Read_Trailing_Slash(t *testing.T) {
ts, _ := documents_InitializeDb(t) ts := runTestServer()
documents_InitializeDb(t, ts)
defer ts.Server.Close() defer ts.Server.Close()
t.Run("Read doc with client that appends slash to path", func(t *testing.T) { t.Run("Read doc with client that appends slash to path", func(t *testing.T) {

View File

@@ -7,24 +7,40 @@ import (
"github.com/pikami/cosmium/api" "github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config" "github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories" "github.com/pikami/cosmium/internal/datastore"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
"github.com/pikami/cosmium/internal/logger"
) )
func main() { func main() {
configuration := config.ParseFlags() configuration := config.ParseFlags()
repository := repositories.NewDataRepository(repositories.RepositoryOptions{ var dataStore datastore.DataStore
InitialDataFilePath: configuration.InitialDataFilePath, switch configuration.DataStore {
PersistDataFilePath: configuration.PersistDataFilePath, case config.DataStoreBadger:
}) dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
PersistDataFilePath: configuration.PersistDataFilePath,
})
logger.InfoLn("Using Badger data store")
default:
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
InitialDataFilePath: configuration.InitialDataFilePath,
PersistDataFilePath: configuration.PersistDataFilePath,
})
logger.InfoLn("Using in-memory data store")
}
server := api.NewApiServer(repository, configuration) server := api.NewApiServer(dataStore, &configuration)
server.Start() err := server.Start()
if err != nil {
panic(err)
}
waitForExit(server, repository, configuration) waitForExit(server, dataStore)
} }
func waitForExit(server *api.ApiServer, repository *repositories.DataRepository, config config.ServerConfig) { func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
sigs := make(chan os.Signal, 1) sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
@@ -34,7 +50,5 @@ func waitForExit(server *api.ApiServer, repository *repositories.DataRepository,
// Stop the server // Stop the server
server.Stop() server.Stop()
if config.PersistDataFilePath != "" { dataStore.Close()
repository.SaveStateFS(config.PersistDataFilePath)
}
} }

View File

@@ -204,6 +204,19 @@ Cosmium strives to support the core features of Cosmos DB, including:
| IS_PRIMITIVE | Yes | | IS_PRIMITIVE | Yes |
| IS_STRING | Yes | | IS_STRING | Yes |
### Transactional batch operations
Note: There's actually no transaction here. Think of this as a 'bulk operation' that can partially succeed.
| Operation | Implemented |
| --------- | ----------- |
| Create | Yes |
| Delete | Yes |
| Replace | Yes |
| Upsert | Yes |
| Read | Yes |
| Patch | No |
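
Batch requests go through the regular Cosmos SDKs. A minimal sketch with the Go SDK (`github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos`), assuming a reachable Cosmium instance; the endpoint, account key, database and container names are placeholders:

```go
package main

import (
	"context"
	"encoding/json"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
)

func main() {
	client, err := azcosmos.NewClientFromConnectionString(
		"AccountEndpoint=https://localhost:8081/;AccountKey=<account-key>",
		&azcosmos.ClientOptions{},
	)
	if err != nil {
		log.Fatal(err)
	}

	container, err := client.NewContainer("test-db", "test-coll")
	if err != nil {
		log.Fatal(err)
	}

	// Queue a few operations against one partition key and send them together.
	batch := container.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
	doc, _ := json.Marshal(map[string]any{"id": "1", "pk": "pk"})
	batch.CreateItem(doc, nil)
	batch.ReadItem("1", nil)

	resp, err := container.ExecuteTransactionalBatch(context.TODO(), batch, &azcosmos.TransactionalBatchOptions{})
	if err != nil {
		log.Fatal(err)
	}

	// Each operation carries its own status code; the batch is not atomic here.
	for _, result := range resp.OperationResults {
		log.Printf("operation status: %d", result.StatusCode)
	}
}
```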
## Known Differences ## Known Differences
While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of: While Cosmium aims to replicate the behavior of Cosmos DB as closely as possible, there are certain differences and limitations to be aware of:

46
go.mod
View File

@@ -1,32 +1,41 @@
module github.com/pikami/cosmium module github.com/pikami/cosmium
go 1.22.0 go 1.24.0
require ( require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0 github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6 github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0
github.com/cosmiumdev/json-patch/v5 v5.9.3 github.com/cosmiumdev/json-patch/v5 v5.9.11
github.com/dgraph-io/badger/v4 v4.6.0
github.com/gin-gonic/gin v1.10.0 github.com/gin-gonic/gin v1.10.0
github.com/google/uuid v1.6.0 github.com/google/uuid v1.6.0
github.com/stretchr/testify v1.10.0 github.com/stretchr/testify v1.10.0
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 github.com/vmihailenco/msgpack/v5 v5.4.1
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394
) )
require ( require (
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/bytedance/sonic v1.12.7 // indirect github.com/bytedance/sonic v1.13.1 // indirect
github.com/bytedance/sonic/loader v0.2.3 // indirect github.com/bytedance/sonic/loader v0.2.4 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect github.com/cloudwego/base64x v0.1.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgraph-io/ristretto/v2 v2.1.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gin-contrib/sse v1.0.0 // indirect github.com/gin-contrib/sse v1.0.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.24.0 // indirect github.com/go-playground/validator/v10 v10.25.0 // indirect
github.com/goccy/go-json v0.10.4 // indirect github.com/goccy/go-json v0.10.5 // indirect
github.com/google/flatbuffers v25.2.10+incompatible // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.9 // indirect github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/leodido/go-urn v1.4.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@@ -36,11 +45,16 @@ require (
github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect github.com/ugorji/go/codec v1.2.12 // indirect
golang.org/x/arch v0.13.0 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
golang.org/x/crypto v0.32.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect
golang.org/x/net v0.34.0 // indirect go.opentelemetry.io/otel v1.35.0 // indirect
golang.org/x/sys v0.29.0 // indirect go.opentelemetry.io/otel/metric v1.35.0 // indirect
golang.org/x/text v0.21.0 // indirect go.opentelemetry.io/otel/trace v1.35.0 // indirect
google.golang.org/protobuf v1.36.4 // indirect golang.org/x/arch v0.15.0 // indirect
golang.org/x/crypto v0.36.0 // indirect
golang.org/x/net v0.37.0 // indirect
golang.org/x/sys v0.31.0 // indirect
golang.org/x/text v0.23.0 // indirect
google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
) )

108
go.sum
View File

@@ -1,57 +1,75 @@
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0 h1:1nGuui+4POelzDwI7RG56yfQJHCnKvwfMoU7VsEp+Zg= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.12.0/go.mod h1:99EvauvlcJ1U06amZiksfYz/3aFGyIhWGHVyiZXtBAI= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6 h1:oBqQLSI1pZwGOdXJAoJJSzmff9tlfD4KroVfjQQmd0g= github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0 h1:RGcdpSElvcXCwxydI0xzOBu1Gvp88OoiTGfbtO/z1m0=
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v0.3.6/go.mod h1:Beh5cHIXJ0oWEDWk9lNFtuklCojLLQ5hl+LqSNTTs0I= github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0/go.mod h1:YwUyrNUtcZcibA99JcfCP6UUp95VVQKO2MJfBzgJDwA=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/bytedance/sonic v1.12.7 h1:CQU8pxOy9HToxhndH0Kx/S1qU/CuS9GnKYrGioDcU1Q= github.com/bytedance/sonic v1.13.1 h1:Jyd5CIvdFnkOWuKXr+wm4Nyk2h0yAFsr8ucJgEasO3g=
github.com/bytedance/sonic v1.12.7/go.mod h1:tnbal4mxOMju17EGfknm2XyYcpyCnIROYOEYuemj13I= github.com/bytedance/sonic v1.13.1/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.3 h1:yctD0Q3v2NOGfSWPLPvG2ggA2kV6TS6s4wioyEqssH0= github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY=
github.com/bytedance/sonic/loader v0.2.3/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4= github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/cosmiumdev/json-patch/v5 v5.9.3 h1:l+Og3+5edqV2NHDo58sz72eS733lbXVYP61seYK43Do= github.com/cosmiumdev/json-patch/v5 v5.9.11 h1:WD2Wqaz/vO987z2FFdqgkj15HgYZ/Y5TpqE3I4T/iOQ=
github.com/cosmiumdev/json-patch/v5 v5.9.3/go.mod h1:WzSTCdia0WrlZtjnL19P4RiwWtfdyArm/E7stgEeP5g= github.com/cosmiumdev/json-patch/v5 v5.9.11/go.mod h1:YPZmckmv4ZY+oxKIOjgq3sIudHVB6VEMcicCS9LtVLM=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgraph-io/badger/v4 v4.6.0 h1:acOwfOOZ4p1dPRnYzvkVm7rUk2Y21TgPVepCy5dJdFQ=
github.com/dgraph-io/badger/v4 v4.6.0/go.mod h1:KSJ5VTuZNC3Sd+YhvVjk2nYua9UZnnTr/SkXvdtiPgI=
github.com/dgraph-io/ristretto/v2 v2.1.0 h1:59LjpOJLNDULHh8MC4UaegN52lC4JnO2dITsie/Pa8I=
github.com/dgraph-io/ristretto/v2 v2.1.0/go.mod h1:uejeqfYXpUomfse0+lO+13ATz4TypQYLJZzBSAemuB4=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg=
github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8=
github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -76,8 +94,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@@ -94,21 +112,33 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
golang.org/x/arch v0.13.0 h1:KCkqVVV1kGg0X87TFysjCJ8MxtZEIU4Ja/yXGeoECdA=
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
golang.org/x/arch v0.13.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
golang.org/x/arch v0.15.0 h1:QtOrQd0bTUnhNVNndMpLHNWrDmYzZ2KDqSrEymqInZw=
golang.org/x/arch v0.15.0/go.mod h1:JmwW7aLIoRUKgaTzhkiEFxvcEiQGyOg9BMonBJUS7EE=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=


@@ -0,0 +1,20 @@
package converters
import (
"github.com/pikami/cosmium/internal/datastore"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)
type DocumentToRowTypeIterator struct {
documents datastore.DocumentIterator
}
func NewDocumentToRowTypeIterator(documents datastore.DocumentIterator) *DocumentToRowTypeIterator {
return &DocumentToRowTypeIterator{
documents: documents,
}
}
func (di *DocumentToRowTypeIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
return di.documents.Next()
}
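This adapter lets the memory query executor stream rows straight from a datastore.DocumentIterator instead of a pre-built document slice. The sketch below shows how it might be wired up; the converters import path and the surrounding helper are assumptions for illustration, not part of this changeset.

package example

import (
	"github.com/pikami/cosmium/internal/converters" // import path assumed
	"github.com/pikami/cosmium/internal/datastore"
)

// streamRows sketches how a request handler could hand documents to the query
// executor one row at a time (the executor call itself is omitted here).
func streamRows(store datastore.DataStore, databaseId, collectionId string) {
	iter, status := store.GetDocumentIterator(databaseId, collectionId)
	if status != datastore.StatusOk {
		return
	}
	defer iter.Close()

	rowIterator := converters.NewDocumentToRowTypeIterator(iter)
	_ = rowIterator // passed to the memory executor in the real code path
}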


@@ -0,0 +1,41 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/logger"
)
type BadgerDataStore struct {
db *badger.DB
}
type BadgerDataStoreOptions struct {
PersistDataFilePath string
}
func NewBadgerDataStore(options BadgerDataStoreOptions) *BadgerDataStore {
badgerOpts := badger.DefaultOptions(options.PersistDataFilePath)
badgerOpts = badgerOpts.WithLogger(newBadgerLogger())
if options.PersistDataFilePath == "" {
badgerOpts = badgerOpts.WithInMemory(true)
}
db, err := badger.Open(badgerOpts)
if err != nil {
panic(err)
}
return &BadgerDataStore{
db: db,
}
}
func (r *BadgerDataStore) Close() {
r.db.Close()
r.db = nil
}
func (r *BadgerDataStore) DumpToJson() (string, error) {
logger.ErrorLn("Badger datastore does not support state export currently.")
return "{}", nil
}
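A minimal construction sketch based on the constructor above: an empty PersistDataFilePath switches Badger to in-memory mode, while a non-empty path keeps data on disk. The import path is an assumption for illustration.

package example

import badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore" // path assumed

func exampleStores() {
	// Ephemeral store: no path, so Badger runs fully in memory.
	ephemeral := badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{})
	defer ephemeral.Close()

	// Persistent store: Badger keeps its value log and SSTables under this directory.
	persistent := badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
		PersistDataFilePath: "./cosmium-data",
	})
	defer persistent.Close()
}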


@@ -0,0 +1,28 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/logger"
)
type badgerLogger struct{}
func newBadgerLogger() badger.Logger {
return &badgerLogger{}
}
func (l *badgerLogger) Errorf(format string, v ...interface{}) {
logger.Errorf(format, v...)
}
func (l *badgerLogger) Warningf(format string, v ...interface{}) {
logger.Infof(format, v...)
}
func (l *badgerLogger) Infof(format string, v ...interface{}) {
logger.Infof(format, v...)
}
func (l *badgerLogger) Debugf(format string, v ...interface{}) {
logger.Debugf(format, v...)
}


@@ -0,0 +1,103 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
)
func (r *BadgerDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
exists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil {
logger.ErrorLn("Error while checking if database exists:", err)
return nil, datastore.Unknown
}
if !exists {
return nil, datastore.StatusNotFound
}
colls, status := listByPrefix[datastore.Collection](r.db, generateKey(resourceid.ResourceTypeCollection, databaseId, "", ""))
if status == datastore.StatusOk {
return colls, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
collectionKey := generateCollectionKey(databaseId, collectionId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var collection datastore.Collection
status := getKey(txn, collectionKey, &collection)
return collection, status
}
func (r *BadgerDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
collectionKey := generateCollectionKey(databaseId, collectionId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
prefixes := []string{
generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""),
generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""),
collectionKey,
}
for _, prefix := range prefixes {
if err := deleteKeysByPrefix(txn, prefix); err != nil {
return datastore.Unknown
}
}
err := txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
collectionKey := generateCollectionKey(databaseId, newCollection.ID)
txn := r.db.NewTransaction(true)
defer txn.Discard()
collectionExists, err := keyExists(txn, collectionKey)
if err != nil || collectionExists {
return datastore.Collection{}, datastore.Conflict
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Collection{}, status
}
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
status = insertKey(txn, collectionKey, newCollection)
if status != datastore.StatusOk {
return datastore.Collection{}, status
}
return newCollection, datastore.StatusOk
}


@@ -0,0 +1,80 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
dbs, status := listByPrefix[datastore.Database](r.db, DatabaseKeyPrefix)
if status == datastore.StatusOk {
return dbs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
databaseKey := generateDatabaseKey(id)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, databaseKey, &database)
return database, status
}
func (r *BadgerDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
databaseKey := generateDatabaseKey(id)
txn := r.db.NewTransaction(true)
defer txn.Discard()
prefixes := []string{
generateKey(resourceid.ResourceTypeCollection, id, "", ""),
generateKey(resourceid.ResourceTypeDocument, id, "", ""),
generateKey(resourceid.ResourceTypeTrigger, id, "", ""),
generateKey(resourceid.ResourceTypeStoredProcedure, id, "", ""),
generateKey(resourceid.ResourceTypeUserDefinedFunction, id, "", ""),
databaseKey,
}
for _, prefix := range prefixes {
if err := deleteKeysByPrefix(txn, prefix); err != nil {
return datastore.Unknown
}
}
err := txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
databaseKey := generateDatabaseKey(newDatabase.ID)
txn := r.db.NewTransaction(true)
defer txn.Discard()
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
status := insertKey(txn, databaseKey, newDatabase)
if status != datastore.StatusOk {
return datastore.Database{}, status
}
return newDatabase, datastore.StatusOk
}


@@ -0,0 +1,204 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
"github.com/vmihailenco/msgpack/v5"
)
const (
DatabaseKeyPrefix = "DB:"
CollectionKeyPrefix = "COL:"
DocumentKeyPrefix = "DOC:"
TriggerKeyPrefix = "TRG:"
StoredProcedureKeyPrefix = "SP:"
UserDefinedFunctionKeyPrefix = "UDF:"
)
func generateKey(
resourceType resourceid.ResourceType,
databaseId string,
collectionId string,
resourceId string,
) string {
result := ""
switch resourceType {
case resourceid.ResourceTypeDatabase:
result += DatabaseKeyPrefix
case resourceid.ResourceTypeCollection:
result += CollectionKeyPrefix
case resourceid.ResourceTypeDocument:
result += DocumentKeyPrefix
case resourceid.ResourceTypeTrigger:
result += TriggerKeyPrefix
case resourceid.ResourceTypeStoredProcedure:
result += StoredProcedureKeyPrefix
case resourceid.ResourceTypeUserDefinedFunction:
result += UserDefinedFunctionKeyPrefix
}
if databaseId != "" {
result += databaseId
}
if collectionId != "" {
result += "/colls/" + collectionId
}
if resourceId != "" {
result += "/" + resourceId
}
return result
}
func generateDatabaseKey(databaseId string) string {
return generateKey(resourceid.ResourceTypeDatabase, databaseId, "", "")
}
func generateCollectionKey(databaseId string, collectionId string) string {
return generateKey(resourceid.ResourceTypeCollection, databaseId, collectionId, "")
}
func generateDocumentKey(databaseId string, collectionId string, documentId string) string {
return generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, documentId)
}
func generateTriggerKey(databaseId string, collectionId string, triggerId string) string {
return generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, triggerId)
}
func generateStoredProcedureKey(databaseId string, collectionId string, storedProcedureId string) string {
return generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, storedProcedureId)
}
func generateUserDefinedFunctionKey(databaseId string, collectionId string, udfId string) string {
return generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, udfId)
}
func insertKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
_, err := txn.Get([]byte(key))
if err == nil {
return datastore.Conflict
}
if err != badger.ErrKeyNotFound {
logger.ErrorLn("Error while checking if key exists:", err)
return datastore.Unknown
}
buf, err := msgpack.Marshal(value)
if err != nil {
logger.ErrorLn("Error while encoding value:", err)
return datastore.Unknown
}
err = txn.Set([]byte(key), buf)
if err != nil {
logger.ErrorLn("Error while setting key:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func getKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
item, err := txn.Get([]byte(key))
if err != nil {
if err == badger.ErrKeyNotFound {
return datastore.StatusNotFound
}
logger.ErrorLn("Error while getting key:", err)
return datastore.Unknown
}
val, err := item.ValueCopy(nil)
if err != nil {
logger.ErrorLn("Error while copying value:", err)
return datastore.Unknown
}
if value == nil {
logger.ErrorLn("getKey called with nil value")
return datastore.Unknown
}
err = msgpack.Unmarshal(val, &value)
if err != nil {
logger.ErrorLn("Error while decoding value:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func keyExists(txn *badger.Txn, key string) (bool, error) {
_, err := txn.Get([]byte(key))
if err == nil {
return true, nil
}
if err == badger.ErrKeyNotFound {
return false, nil
}
return false, err
}
func listByPrefix[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
results := make([]T, 0)
err := db.View(func(txn *badger.Txn) error {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
item := it.Item()
var entry T
status := getKey(txn, string(item.Key()), &entry)
if status != datastore.StatusOk {
logger.ErrorLn("Failed to retrieve entry:", string(item.Key()))
continue
}
results = append(results, entry)
}
return nil
})
if err != nil {
logger.ErrorLn("Error while listing entries:", err)
return nil, datastore.Unknown
}
return results, datastore.StatusOk
}
func deleteKeysByPrefix(txn *badger.Txn, prefix string) error {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
key := it.Item().KeyCopy(nil)
if err := txn.Delete(key); err != nil {
logger.ErrorLn("Failed to delete key:", string(key), "Error:", err)
return err
}
}
return nil
}
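For reference, the key scheme above produces flat, prefix-scannable strings; the ids below ("db1", "coll1", and so on) are made up purely for illustration.

// Illustrative keys generated by the helpers above (ids are examples only):
//
//	generateDatabaseKey("db1")                  // "DB:db1"
//	generateCollectionKey("db1", "coll1")       // "COL:db1/colls/coll1"
//	generateDocumentKey("db1", "coll1", "doc1") // "DOC:db1/colls/coll1/doc1"
//	generateTriggerKey("db1", "coll1", "trg1")  // "TRG:db1/colls/coll1/trg1"
//
// Every document in a collection shares the "DOC:<db>/colls/<coll>" prefix, which is
// what lets listByPrefix and deleteKeysByPrefix scan or clear a collection with a
// single Badger prefix iteration.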


@@ -0,0 +1,58 @@
package badgerdatastore
import (
"github.com/dgraph-io/badger/v4"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/vmihailenco/msgpack/v5"
)
type BadgerDocumentIterator struct {
txn *badger.Txn
it *badger.Iterator
prefix string
}
func NewBadgerDocumentIterator(txn *badger.Txn, prefix string) *BadgerDocumentIterator {
opts := badger.DefaultIteratorOptions
opts.Prefix = []byte(prefix)
it := txn.NewIterator(opts)
it.Rewind()
return &BadgerDocumentIterator{
txn: txn,
it: it,
prefix: prefix,
}
}
func (i *BadgerDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
if !i.it.Valid() {
i.it.Close()
return datastore.Document{}, datastore.IterEOF
}
item := i.it.Item()
val, err := item.ValueCopy(nil)
if err != nil {
logger.ErrorLn("Error while copying value:", err)
return datastore.Document{}, datastore.Unknown
}
current := &datastore.Document{}
err = msgpack.Unmarshal(val, &current)
if err != nil {
logger.ErrorLn("Error while decoding value:", err)
return datastore.Document{}, datastore.Unknown
}
i.it.Next()
return *current, datastore.StatusOk
}
func (i *BadgerDocumentIterator) Close() {
i.it.Close()
i.txn.Discard()
}


@@ -0,0 +1,127 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
docs, status := listByPrefix[datastore.Document](r.db, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return docs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
iter := NewBadgerDocumentIterator(txn, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
return iter, datastore.StatusOk
}
func (r *BadgerDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var document datastore.Document
status := getKey(txn, documentKey, &document)
return document, status
}
func (r *BadgerDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, documentKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(documentKey))
if err != nil {
logger.ErrorLn("Error while deleting document:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
var ok bool
var documentId string
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
status = insertKey(txn, generateDocumentKey(databaseId, collectionId, documentId), document)
if status != datastore.StatusOk {
return datastore.Document{}, status
}
return document, datastore.StatusOk
}


@@ -0,0 +1,53 @@
package badgerdatastore
import (
"fmt"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
)
// I have no idea what this is tbh
func (r *BadgerDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
databaseRid := databaseId
collectionRid := collectionId
var timestamp int64 = 0
txn := r.db.NewTransaction(false)
defer txn.Discard()
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status == datastore.StatusOk { // use the stored resource id when the database record exists
databaseRid = database.ResourceID
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status == datastore.StatusOk { // use the stored resource id and timestamp when the collection record exists
collectionRid = collection.ResourceID
timestamp = collection.TimeStamp
}
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
etag := fmt.Sprintf("\"%s\"", uuid.New())
return []datastore.PartitionKeyRange{
{
ResourceID: pkrResourceId,
ID: "0",
Etag: etag,
MinInclusive: "",
MaxExclusive: "FF",
RidPrefix: 0,
Self: pkrSelf,
ThroughputFraction: 1,
Status: "online",
Parents: []interface{}{},
TimeStamp: timestamp,
Lsn: 17,
},
}, datastore.StatusOk
}


@@ -0,0 +1,107 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
storedProcedures, status := listByPrefix[datastore.StoredProcedure](r.db, generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return storedProcedures, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var storedProcedure datastore.StoredProcedure
status := getKey(txn, storedProcedureKey, &storedProcedure)
return storedProcedure, status
}
func (r *BadgerDataStore) DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) datastore.DataStoreStatus {
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, storedProcedureKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(storedProcedureKey))
if err != nil {
logger.ErrorLn("Error while deleting stored procedure:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateStoredProcedure(databaseId string, collectionId string, storedProcedure datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if storedProcedure.ID == "" {
return datastore.StoredProcedure{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
storedProcedure.TimeStamp = time.Now().Unix()
storedProcedure.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
storedProcedure.ETag = fmt.Sprintf("\"%s\"", uuid.New())
storedProcedure.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, storedProcedure.ResourceID)
status = insertKey(txn, generateStoredProcedureKey(databaseId, collectionId, storedProcedure.ID), storedProcedure)
if status != datastore.StatusOk {
return datastore.StoredProcedure{}, status
}
return storedProcedure, datastore.StatusOk
}


@@ -0,0 +1,107 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
triggers, status := listByPrefix[datastore.Trigger](r.db, generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return triggers, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var trigger datastore.Trigger
status := getKey(txn, triggerKey, &trigger)
return trigger, status
}
func (r *BadgerDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, triggerKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(triggerKey))
if err != nil {
logger.ErrorLn("Error while deleting trigger:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if trigger.ID == "" {
return datastore.Trigger{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
status = insertKey(txn, generateTriggerKey(databaseId, collectionId, trigger.ID), trigger)
if status != datastore.StatusOk {
return datastore.Trigger{}, status
}
return trigger, datastore.StatusOk
}


@@ -0,0 +1,107 @@
package badgerdatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/resourceid"
)
func (r *BadgerDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(false)
defer txn.Discard()
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
if err != nil || !dbExists {
return nil, datastore.StatusNotFound
}
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
if err != nil || !collExists {
return nil, datastore.StatusNotFound
}
udfs, status := listByPrefix[datastore.UserDefinedFunction](r.db, generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""))
if status == datastore.StatusOk {
return udfs, datastore.StatusOk
}
return nil, status
}
func (r *BadgerDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
txn := r.db.NewTransaction(false)
defer txn.Discard()
var udf datastore.UserDefinedFunction
status := getKey(txn, udfKey, &udf)
return udf, status
}
func (r *BadgerDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
txn := r.db.NewTransaction(true)
defer txn.Discard()
exists, err := keyExists(txn, udfKey)
if err != nil {
return datastore.Unknown
}
if !exists {
return datastore.StatusNotFound
}
err = txn.Delete([]byte(udfKey))
if err != nil {
logger.ErrorLn("Error while deleting user defined function:", err)
return datastore.Unknown
}
err = txn.Commit()
if err != nil {
logger.ErrorLn("Error while committing transaction:", err)
return datastore.Unknown
}
return datastore.StatusOk
}
func (r *BadgerDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
txn := r.db.NewTransaction(true)
defer txn.Discard()
if udf.ID == "" {
return datastore.UserDefinedFunction{}, datastore.BadRequest
}
var database datastore.Database
status := getKey(txn, generateDatabaseKey(databaseId), &database)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
var collection datastore.Collection
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
status = insertKey(txn, generateUserDefinedFunctionKey(databaseId, collectionId, udf.ID), udf)
if status != datastore.StatusOk {
return datastore.UserDefinedFunction{}, status
}
return udf, datastore.StatusOk
}


@@ -0,0 +1,44 @@
package datastore
type DataStore interface {
GetAllDatabases() ([]Database, DataStoreStatus)
GetDatabase(databaseId string) (Database, DataStoreStatus)
DeleteDatabase(databaseId string) DataStoreStatus
CreateDatabase(newDatabase Database) (Database, DataStoreStatus)
GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
GetCollection(databaseId string, collectionId string) (Collection, DataStoreStatus)
DeleteCollection(databaseId string, collectionId string) DataStoreStatus
CreateCollection(databaseId string, newCollection Collection) (Collection, DataStoreStatus)
GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
GetDocumentIterator(databaseId string, collectionId string) (DocumentIterator, DataStoreStatus)
GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (Document, DataStoreStatus)
GetAllTriggers(databaseId string, collectionId string) ([]Trigger, DataStoreStatus)
GetTrigger(databaseId string, collectionId string, triggerId string) (Trigger, DataStoreStatus)
DeleteTrigger(databaseId string, collectionId string, triggerId string) DataStoreStatus
CreateTrigger(databaseId string, collectionId string, trigger Trigger) (Trigger, DataStoreStatus)
GetAllStoredProcedures(databaseId string, collectionId string) ([]StoredProcedure, DataStoreStatus)
GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (StoredProcedure, DataStoreStatus)
DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) DataStoreStatus
CreateStoredProcedure(databaseId string, collectionId string, storedProcedure StoredProcedure) (StoredProcedure, DataStoreStatus)
GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]UserDefinedFunction, DataStoreStatus)
GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (UserDefinedFunction, DataStoreStatus)
DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) DataStoreStatus
CreateUserDefinedFunction(databaseId string, collectionId string, udf UserDefinedFunction) (UserDefinedFunction, DataStoreStatus)
GetPartitionKeyRanges(databaseId string, collectionId string) ([]PartitionKeyRange, DataStoreStatus)
Close()
DumpToJson() (string, error)
}
type DocumentIterator interface {
Next() (Document, DataStoreStatus)
Close()
}
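A hedged consumer sketch for the interface above: it drains a DocumentIterator, treating IterEOF (the status the Badger iterator returns at the end of a scan) as normal termination and any other non-OK status as a failure.

package example

import "github.com/pikami/cosmium/internal/datastore"

// collectDocuments reads every document through the storage-agnostic interface,
// so the same code works against either backend implementation.
func collectDocuments(store datastore.DataStore, databaseId, collectionId string) ([]datastore.Document, bool) {
	iter, status := store.GetDocumentIterator(databaseId, collectionId)
	if status != datastore.StatusOk {
		return nil, false
	}
	defer iter.Close()

	var docs []datastore.Document
	for {
		doc, status := iter.Next()
		if status == datastore.IterEOF {
			return docs, true
		}
		if status != datastore.StatusOk {
			return nil, false
		}
		docs = append(docs, doc)
	}
}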


@@ -0,0 +1,21 @@
package jsondatastore
import "github.com/pikami/cosmium/internal/datastore"
type ArrayDocumentIterator struct {
documents []datastore.Document
index int
}
func (i *ArrayDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return datastore.Document{}, datastore.IterEOF // signal end of iteration, matching the Badger iterator
}
return i.documents[i.index], datastore.StatusOk
}
func (i *ArrayDocumentIterator) Close() {
i.documents = []datastore.Document{}
}


@@ -0,0 +1,89 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]datastore.Collection, 0), datastore.StatusNotFound
}
return maps.Values(r.storeState.Collections[databaseId]), datastore.StatusOk
}
func (r *JsonDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
return r.storeState.Collections[databaseId][collectionId], datastore.StatusOk
}
func (r *JsonDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Collections[databaseId], collectionId)
delete(r.storeState.Documents[databaseId], collectionId)
delete(r.storeState.Triggers[databaseId], collectionId)
delete(r.storeState.StoredProcedures[databaseId], collectionId)
delete(r.storeState.UserDefinedFunctions[databaseId], collectionId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Collection{}, datastore.StatusNotFound
}
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
return datastore.Collection{}, datastore.Conflict
}
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]datastore.Document)
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]datastore.Trigger)
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]datastore.StoredProcedure)
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]datastore.UserDefinedFunction)
return newCollection, datastore.StatusOk
}


@@ -0,0 +1,70 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Databases), datastore.StatusOk
}
func (r *JsonDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if database, ok := r.storeState.Databases[id]; ok {
return database, datastore.StatusOk
}
return datastore.Database{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[id]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Databases, id)
delete(r.storeState.Collections, id)
delete(r.storeState.Documents, id)
delete(r.storeState.Triggers, id)
delete(r.storeState.StoredProcedures, id)
delete(r.storeState.UserDefinedFunctions, id)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
return datastore.Database{}, datastore.Conflict
}
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
r.storeState.Databases[newDatabase.ID] = newDatabase
r.storeState.Collections[newDatabase.ID] = make(map[string]datastore.Collection)
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]datastore.Document)
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]datastore.Trigger)
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]datastore.StoredProcedure)
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]datastore.UserDefinedFunction)
return newDatabase, datastore.StatusOk
}


@@ -0,0 +1,113 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]datastore.Document, 0), datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return make([]datastore.Document, 0), datastore.StatusNotFound
}
return maps.Values(r.storeState.Documents[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
return r.storeState.Documents[databaseId][collectionId][documentId], datastore.StatusOk
}
func (r *JsonDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Documents[databaseId][collectionId], documentId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var documentId string
var database datastore.Database
var collection datastore.Collection
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Document{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
return datastore.Document{}, datastore.Conflict
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
r.storeState.Documents[databaseId][collectionId][documentId] = document
return document, datastore.StatusOk
}
func (r *JsonDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
documents, status := r.GetAllDocuments(databaseId, collectionId)
if status != datastore.StatusOk {
return nil, status
}
return &ArrayDocumentIterator{
documents: documents,
index: -1,
}, datastore.StatusOk
}


@@ -0,0 +1,34 @@
package jsondatastore
import "github.com/pikami/cosmium/internal/datastore"
type JsonDataStore struct {
storeState State
initialDataFilePath string
persistDataFilePath string
}
type JsonDataStoreOptions struct {
InitialDataFilePath string
PersistDataFilePath string
}
func NewJsonDataStore(options JsonDataStoreOptions) *JsonDataStore {
dataStore := &JsonDataStore{
storeState: State{
Databases: make(map[string]datastore.Database),
Collections: make(map[string]map[string]datastore.Collection),
Documents: make(map[string]map[string]map[string]datastore.Document),
Triggers: make(map[string]map[string]map[string]datastore.Trigger),
StoredProcedures: make(map[string]map[string]map[string]datastore.StoredProcedure),
UserDefinedFunctions: make(map[string]map[string]map[string]datastore.UserDefinedFunction),
},
initialDataFilePath: options.InitialDataFilePath,
persistDataFilePath: options.PersistDataFilePath,
}
dataStore.InitializeDataStore()
return dataStore
}
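Because both implementations satisfy datastore.DataStore, the server can pick a backend at startup and the rest of the code only ever sees the interface. A selection sketch follows; the import paths and the boolean switch are assumptions for illustration.

package example

import (
	"github.com/pikami/cosmium/internal/datastore"
	badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore" // path assumed
	jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"     // path assumed
)

// newDataStore returns whichever backend was requested; callers depend only on
// the DataStore interface, so the choice is invisible to the handlers.
func newDataStore(useBadger bool, persistPath string) datastore.DataStore {
	if useBadger {
		return badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
			PersistDataFilePath: persistPath,
		})
	}
	return jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
		PersistDataFilePath: persistPath,
	})
}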


@@ -1,15 +1,15 @@
-package repositories
+package jsondatastore
 import (
 	"fmt"
 	"github.com/google/uuid"
-	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/datastore"
 	"github.com/pikami/cosmium/internal/resourceid"
 )
 // I have no idea what this is tbh
-func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
+func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
 	r.storeState.RLock()
 	defer r.storeState.RUnlock()
@@ -26,11 +26,11 @@ func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId s
 		timestamp = collection.TimeStamp
 	}
-	pkrResourceId := resourceid.NewCombined(databaseRid, collectionRid, resourceid.New())
+	pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
 	pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
 	etag := fmt.Sprintf("\"%s\"", uuid.New())
-	return []repositorymodels.PartitionKeyRange{
+	return []datastore.PartitionKeyRange{
 		{
 			ResourceID: pkrResourceId,
 			ID: "0",
@@ -45,5 +45,5 @@ func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId s
 			TimeStamp: timestamp,
 			Lsn: 17,
 		},
-	}, repositorymodels.StatusOk
+	}, datastore.StatusOk
 }


@@ -1,16 +1,39 @@
package repositories package jsondatastore
import ( import (
"encoding/json" "encoding/json"
"log" "log"
"os" "os"
"reflect" "reflect"
"sync"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger" "github.com/pikami/cosmium/internal/logger"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
) )
func (r *DataRepository) InitializeRepository() { type State struct {
sync.RWMutex
// Map databaseId -> Database
Databases map[string]datastore.Database `json:"databases"`
// Map databaseId -> collectionId -> Collection
Collections map[string]map[string]datastore.Collection `json:"collections"`
// Map databaseId -> collectionId -> documentId -> Documents
Documents map[string]map[string]map[string]datastore.Document `json:"documents"`
// Map databaseId -> collectionId -> triggerId -> Trigger
Triggers map[string]map[string]map[string]datastore.Trigger `json:"triggers"`
// Map databaseId -> collectionId -> spId -> StoredProcedure
StoredProcedures map[string]map[string]map[string]datastore.StoredProcedure `json:"sprocs"`
// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
UserDefinedFunctions map[string]map[string]map[string]datastore.UserDefinedFunction `json:"udfs"`
}
func (r *JsonDataStore) InitializeDataStore() {
if r.initialDataFilePath != "" {
r.LoadStateFS(r.initialDataFilePath)
return
@@ -32,7 +55,7 @@ func (r *DataRepository) InitializeRepository() {
}
}
-func (r *DataRepository) LoadStateFS(filePath string) {
+func (r *JsonDataStore) LoadStateFS(filePath string) {
data, err := os.ReadFile(filePath)
if err != nil {
log.Fatalf("Error reading state JSON file: %v", err)
@@ -45,11 +68,11 @@ func (r *DataRepository) LoadStateFS(filePath string) {
}
}
-func (r *DataRepository) LoadStateJSON(jsonData string) error {
+func (r *JsonDataStore) LoadStateJSON(jsonData string) error {
r.storeState.Lock()
defer r.storeState.Unlock()
-var state repositorymodels.State
+var state State
if err := json.Unmarshal([]byte(jsonData), &state); err != nil {
return err
}
@@ -71,7 +94,7 @@ func (r *DataRepository) LoadStateJSON(jsonData string) error {
return nil
}
-func (r *DataRepository) SaveStateFS(filePath string) {
+func (r *JsonDataStore) SaveStateFS(filePath string) {
r.storeState.RLock()
defer r.storeState.RUnlock()
@@ -92,7 +115,7 @@ func (r *DataRepository) SaveStateFS(filePath string) {
logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
}
-func (r *DataRepository) GetState() (string, error) {
+func (r *JsonDataStore) DumpToJson() (string, error) {
r.storeState.RLock()
defer r.storeState.RUnlock()
@@ -103,16 +126,23 @@ func (r *DataRepository) GetState() (string, error) {
}
return string(data), nil
}
+func (r *JsonDataStore) Close() {
+if r.persistDataFilePath != "" {
+r.SaveStateFS(r.persistDataFilePath)
+}
+}
func getLength(v interface{}) int {
switch v.(type) {
-case repositorymodels.Database,
-repositorymodels.Collection,
-repositorymodels.Document,
-repositorymodels.Trigger,
-repositorymodels.StoredProcedure,
-repositorymodels.UserDefinedFunction:
+case datastore.Database,
+datastore.Collection,
+datastore.Document,
+datastore.Trigger,
+datastore.StoredProcedure,
+datastore.UserDefinedFunction:
return 1
}
@@ -133,55 +163,55 @@ func getLength(v interface{}) int {
return count
}
-func (r *DataRepository) ensureStoreStateNoNullReferences() {
+func (r *JsonDataStore) ensureStoreStateNoNullReferences() {
if r.storeState.Databases == nil {
-r.storeState.Databases = make(map[string]repositorymodels.Database)
+r.storeState.Databases = make(map[string]datastore.Database)
}
if r.storeState.Collections == nil {
-r.storeState.Collections = make(map[string]map[string]repositorymodels.Collection)
+r.storeState.Collections = make(map[string]map[string]datastore.Collection)
}
if r.storeState.Documents == nil {
-r.storeState.Documents = make(map[string]map[string]map[string]repositorymodels.Document)
+r.storeState.Documents = make(map[string]map[string]map[string]datastore.Document)
}
if r.storeState.Triggers == nil {
-r.storeState.Triggers = make(map[string]map[string]map[string]repositorymodels.Trigger)
+r.storeState.Triggers = make(map[string]map[string]map[string]datastore.Trigger)
}
if r.storeState.StoredProcedures == nil {
-r.storeState.StoredProcedures = make(map[string]map[string]map[string]repositorymodels.StoredProcedure)
+r.storeState.StoredProcedures = make(map[string]map[string]map[string]datastore.StoredProcedure)
}
if r.storeState.UserDefinedFunctions == nil {
-r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction)
+r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]datastore.UserDefinedFunction)
}
for database := range r.storeState.Databases {
if r.storeState.Collections[database] == nil {
-r.storeState.Collections[database] = make(map[string]repositorymodels.Collection)
+r.storeState.Collections[database] = make(map[string]datastore.Collection)
}
if r.storeState.Documents[database] == nil {
-r.storeState.Documents[database] = make(map[string]map[string]repositorymodels.Document)
+r.storeState.Documents[database] = make(map[string]map[string]datastore.Document)
}
if r.storeState.Triggers[database] == nil {
-r.storeState.Triggers[database] = make(map[string]map[string]repositorymodels.Trigger)
+r.storeState.Triggers[database] = make(map[string]map[string]datastore.Trigger)
}
if r.storeState.StoredProcedures[database] == nil {
-r.storeState.StoredProcedures[database] = make(map[string]map[string]repositorymodels.StoredProcedure)
+r.storeState.StoredProcedures[database] = make(map[string]map[string]datastore.StoredProcedure)
}
if r.storeState.UserDefinedFunctions[database] == nil {
-r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
+r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]datastore.UserDefinedFunction)
}
for collection := range r.storeState.Collections[database] {
if r.storeState.Documents[database][collection] == nil {
-r.storeState.Documents[database][collection] = make(map[string]repositorymodels.Document)
+r.storeState.Documents[database][collection] = make(map[string]datastore.Document)
}
for document := range r.storeState.Documents[database][collection] {
@@ -191,15 +221,15 @@ func (r *DataRepository) ensureStoreStateNoNullReferences() {
}
if r.storeState.Triggers[database][collection] == nil {
-r.storeState.Triggers[database][collection] = make(map[string]repositorymodels.Trigger)
+r.storeState.Triggers[database][collection] = make(map[string]datastore.Trigger)
}
if r.storeState.StoredProcedures[database][collection] == nil {
-r.storeState.StoredProcedures[database][collection] = make(map[string]repositorymodels.StoredProcedure)
+r.storeState.StoredProcedures[database][collection] = make(map[string]datastore.StoredProcedure)
}
if r.storeState.UserDefinedFunctions[database][collection] == nil {
-r.storeState.UserDefinedFunctions[database][collection] = make(map[string]repositorymodels.UserDefinedFunction)
+r.storeState.UserDefinedFunctions[database][collection] = make(map[string]datastore.UserDefinedFunction)
}
}
}
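Roughly how the pieces above are meant to be wired together; a minimal sketch, assuming a NewJsonDataStore constructor and a JsonDataStoreOptions struct (those names and the import path are assumptions — only LoadStateFS, LoadStateJSON, SaveStateFS, DumpToJson and Close come from the file itself):

package main

import (
	"fmt"

	"github.com/pikami/cosmium/internal/datastore/jsondatastore" // import path assumed
)

func main() {
	// NewJsonDataStore, JsonDataStoreOptions and its field names are assumed;
	// the methods called below are the ones defined in the file above.
	store := jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
		InitialDataFilePath: "./state.json", // read back on startup via LoadStateFS
		PersistDataFilePath: "./state.json", // written by Close via SaveStateFS
	})
	defer store.Close()

	snapshot, err := store.DumpToJson() // full in-memory state serialized as JSON
	if err != nil {
		panic(err)
	}
	fmt.Println(len(snapshot))
}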

View File

@@ -0,0 +1,91 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetStoredProcedure(databaseId string, collectionId string, spId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
return sp, datastore.StatusOk
}
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteStoredProcedure(databaseId string, collectionId string, spId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateStoredProcedure(databaseId string, collectionId string, sp datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if sp.ID == "" {
return datastore.StoredProcedure{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StoredProcedure{}, datastore.StatusNotFound
}
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
return datastore.StoredProcedure{}, datastore.Conflict
}
sp.TimeStamp = time.Now().Unix()
sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
return sp, datastore.StatusOk
}
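For context, a hedged sketch of how a caller might branch on the DataStoreStatus values returned by CreateStoredProcedure; the helper function, ids and import path are illustrative only:

package main

import (
	"fmt"

	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/internal/datastore/jsondatastore" // import path assumed
)

// createExampleSproc is illustrative; only CreateStoredProcedure and the
// status constants come from this diff.
func createExampleSproc(store *jsondatastore.JsonDataStore) {
	sp, status := store.CreateStoredProcedure("db1", "coll1", datastore.StoredProcedure{ID: "sp_hello"})
	switch status {
	case datastore.StatusOk:
		fmt.Println("created", sp.ResourceID) // ResourceID, ETag and Self are filled in by the store
	case datastore.StatusNotFound:
		fmt.Println("database or collection does not exist")
	case datastore.Conflict:
		fmt.Println("a stored procedure with this id already exists")
	case datastore.BadRequest:
		fmt.Println("the id was empty")
	}
}

func main() {}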

View File

@@ -0,0 +1,91 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
return trigger, datastore.StatusOk
}
return datastore.Trigger{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if trigger.ID == "" {
return datastore.Trigger{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.Trigger{}, datastore.StatusNotFound
}
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
return datastore.Trigger{}, datastore.Conflict
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
return trigger, datastore.StatusOk
}

View File

@@ -0,0 +1,91 @@
package jsondatastore
import (
"fmt"
"time"
"github.com/google/uuid"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *JsonDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), datastore.StatusOk
}
func (r *JsonDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
return udf, datastore.StatusOk
}
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
func (r *JsonDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
return datastore.StatusNotFound
}
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
return datastore.StatusOk
}
func (r *JsonDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database datastore.Database
var collection datastore.Collection
if udf.ID == "" {
return datastore.UserDefinedFunction{}, datastore.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
return datastore.UserDefinedFunction{}, datastore.Conflict
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
return udf, datastore.StatusOk
}

View File

@@ -1,6 +1,4 @@
-package repositorymodels
+package datastore
-import "sync"
type Database struct {
ID string `json:"id"`
@@ -10,13 +8,15 @@ type Database struct {
Self string `json:"_self"`
}
-type RepositoryStatus int
+type DataStoreStatus int
const (
StatusOk = 1
StatusNotFound = 2
Conflict = 3
BadRequest = 4
+IterEOF = 5
+Unknown = 6
)
type TriggerOperation string
@@ -117,25 +117,3 @@ type PartitionKeyRange struct {
TimeStamp int64 `json:"_ts"`
Lsn int `json:"lsn"`
}
type State struct {
sync.RWMutex
// Map databaseId -> Database
Databases map[string]Database `json:"databases"`
// Map databaseId -> collectionId -> Collection
Collections map[string]map[string]Collection `json:"collections"`
// Map databaseId -> collectionId -> documentId -> Documents
Documents map[string]map[string]map[string]Document `json:"documents"`
// Map databaseId -> collectionId -> triggerId -> Trigger
Triggers map[string]map[string]map[string]Trigger `json:"triggers"`
// Map databaseId -> collectionId -> spId -> StoredProcedure
StoredProcedures map[string]map[string]map[string]StoredProcedure `json:"sprocs"`
// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
UserDefinedFunctions map[string]map[string]map[string]UserDefinedFunction `json:"udfs"`
}

View File

@@ -1,85 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllCollections(databaseId string) ([]repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]repositorymodels.Collection, 0), repositorymodels.StatusNotFound
}
return maps.Values(r.storeState.Collections[databaseId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetCollection(databaseId string, collectionId string) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
return r.storeState.Collections[databaseId][collectionId], repositorymodels.StatusOk
}
func (r *DataRepository) DeleteCollection(databaseId string, collectionId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Collections[databaseId], collectionId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
return repositorymodels.Collection{}, repositorymodels.Conflict
}
newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
newCollection.TimeStamp = time.Now().Unix()
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New())
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]repositorymodels.Trigger)
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]repositorymodels.StoredProcedure)
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]repositorymodels.UserDefinedFunction)
return newCollection, repositorymodels.StatusOk
}

View File

@@ -1,65 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllDatabases() ([]repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Databases), repositorymodels.StatusOk
}
func (r *DataRepository) GetDatabase(id string) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if database, ok := r.storeState.Databases[id]; ok {
return database, repositorymodels.StatusOk
}
return repositorymodels.Database{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteDatabase(id string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[id]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Databases, id)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
return repositorymodels.Database{}, repositorymodels.Conflict
}
newDatabase.TimeStamp = time.Now().Unix()
newDatabase.ResourceID = resourceid.New()
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
r.storeState.Databases[newDatabase.ID] = newDatabase
r.storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]repositorymodels.Trigger)
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]repositorymodels.StoredProcedure)
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
return newDatabase, repositorymodels.StatusOk
}

View File

@@ -1,130 +0,0 @@
package repositories
import (
"fmt"
"log"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"github.com/pikami/cosmium/parsers"
"github.com/pikami/cosmium/parsers/nosql"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllDocuments(databaseId string, collectionId string) ([]repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
}
return maps.Values(r.storeState.Documents[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetDocument(databaseId string, collectionId string, documentId string) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
return r.storeState.Documents[databaseId][collectionId][documentId], repositorymodels.StatusOk
}
func (r *DataRepository) DeleteDocument(databaseId string, collectionId string, documentId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Documents[databaseId][collectionId], documentId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var documentId string
var database repositorymodels.Database
var collection repositorymodels.Collection
if documentId, ok = document["id"].(string); !ok || documentId == "" {
documentId = fmt.Sprint(uuid.New())
document["id"] = documentId
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Document{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
return repositorymodels.Document{}, repositorymodels.Conflict
}
document["_ts"] = time.Now().Unix()
document["_rid"] = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
r.storeState.Documents[databaseId][collectionId][documentId] = document
return document, repositorymodels.StatusOk
}
func (r *DataRepository) ExecuteQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, repositorymodels.RepositoryStatus) {
parsedQuery, err := nosql.Parse("", []byte(query))
if err != nil {
log.Printf("Failed to parse query: %s\nerr: %v", query, err)
return nil, repositorymodels.BadRequest
}
collectionDocuments, status := r.GetAllDocuments(databaseId, collectionId)
if status != repositorymodels.StatusOk {
return nil, status
}
covDocs := make([]memoryexecutor.RowType, 0)
for _, doc := range collectionDocuments {
covDocs = append(covDocs, map[string]interface{}(doc))
}
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
typedQuery.Parameters = queryParameters
return memoryexecutor.ExecuteQuery(typedQuery, covDocs), repositorymodels.StatusOk
}
return nil, repositorymodels.BadRequest
}

View File

@@ -1,34 +0,0 @@
package repositories
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
type DataRepository struct {
storeState repositorymodels.State
initialDataFilePath string
persistDataFilePath string
}
type RepositoryOptions struct {
InitialDataFilePath string
PersistDataFilePath string
}
func NewDataRepository(options RepositoryOptions) *DataRepository {
repository := &DataRepository{
storeState: repositorymodels.State{
Databases: make(map[string]repositorymodels.Database),
Collections: make(map[string]map[string]repositorymodels.Collection),
Documents: make(map[string]map[string]map[string]repositorymodels.Document),
Triggers: make(map[string]map[string]map[string]repositorymodels.Trigger),
StoredProcedures: make(map[string]map[string]map[string]repositorymodels.StoredProcedure),
UserDefinedFunctions: make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction),
},
initialDataFilePath: options.InitialDataFilePath,
persistDataFilePath: options.PersistDataFilePath,
}
repository.InitializeRepository()
return repository
}

View File

@@ -1,91 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllStoredProcedures(databaseId string, collectionId string) ([]repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetStoredProcedure(databaseId string, collectionId string, spId string) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
return sp, repositorymodels.StatusOk
}
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteStoredProcedure(databaseId string, collectionId string, spId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateStoredProcedure(databaseId string, collectionId string, sp repositorymodels.StoredProcedure) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if sp.ID == "" {
return repositorymodels.StoredProcedure{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
return repositorymodels.StoredProcedure{}, repositorymodels.Conflict
}
sp.TimeStamp = time.Now().Unix()
sp.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
return sp, repositorymodels.StatusOk
}

View File

@@ -1,91 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllTriggers(databaseId string, collectionId string) ([]repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetTrigger(databaseId string, collectionId string, triggerId string) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
return trigger, repositorymodels.StatusOk
}
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteTrigger(databaseId string, collectionId string, triggerId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateTrigger(databaseId string, collectionId string, trigger repositorymodels.Trigger) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if trigger.ID == "" {
return repositorymodels.Trigger{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
}
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
return repositorymodels.Trigger{}, repositorymodels.Conflict
}
trigger.TimeStamp = time.Now().Unix()
trigger.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
return trigger, repositorymodels.StatusOk
}

View File

@@ -1,91 +0,0 @@
package repositories
import (
"fmt"
"time"
"github.com/google/uuid"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/resourceid"
"golang.org/x/exp/maps"
)
func (r *DataRepository) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), repositorymodels.StatusOk
}
func (r *DataRepository) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.RLock()
defer r.storeState.RUnlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
return udf, repositorymodels.StatusOk
}
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
func (r *DataRepository) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) repositorymodels.RepositoryStatus {
r.storeState.Lock()
defer r.storeState.Unlock()
if _, ok := r.storeState.Databases[databaseId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
return repositorymodels.StatusNotFound
}
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
return repositorymodels.StatusOk
}
func (r *DataRepository) CreateUserDefinedFunction(databaseId string, collectionId string, udf repositorymodels.UserDefinedFunction) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
r.storeState.Lock()
defer r.storeState.Unlock()
var ok bool
var database repositorymodels.Database
var collection repositorymodels.Collection
if udf.ID == "" {
return repositorymodels.UserDefinedFunction{}, repositorymodels.BadRequest
}
if database, ok = r.storeState.Databases[databaseId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
}
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
return repositorymodels.UserDefinedFunction{}, repositorymodels.Conflict
}
udf.TimeStamp = time.Now().Unix()
udf.ResourceID = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
return udf, repositorymodels.StatusOk
}

View File

@@ -3,32 +3,76 @@ package resourceid
import (
"encoding/base64"
"math/rand"
+"strings"
"github.com/google/uuid"
)
-func New() string {
-id := uuid.New().ID()
-idBytes := uintToBytes(id)
-// first byte should be bigger than 0x80 for collection ids
-// clients classify this id as "user" otherwise
-if (idBytes[0] & 0x80) <= 0 {
-idBytes[0] = byte(rand.Intn(0x80) + 0x80)
+type ResourceType int
+const (
+ResourceTypeDatabase ResourceType = iota
+ResourceTypeCollection
+ResourceTypeDocument
+ResourceTypeStoredProcedure
+ResourceTypeTrigger
+ResourceTypeUserDefinedFunction
+ResourceTypeConflict
+ResourceTypePartitionKeyRange
+ResourceTypeSchema
+)
func New(resourceType ResourceType) string {
var idBytes []byte
switch resourceType {
case ResourceTypeDatabase:
idBytes = randomBytes(4)
case ResourceTypeCollection:
idBytes = randomBytes(4)
// first byte should be bigger than 0x80 for collection ids
// clients classify this id as "user" otherwise
if (idBytes[0] & 0x80) <= 0 {
idBytes[0] = byte(rand.Intn(0x80) + 0x80)
}
case ResourceTypeDocument:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) // Upper 4 bits = 0
case ResourceTypeStoredProcedure:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x08 // Upper 4 bits = 0x08
case ResourceTypeTrigger:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x07 // Upper 4 bits = 0x07
case ResourceTypeUserDefinedFunction:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x06 // Upper 4 bits = 0x06
case ResourceTypeConflict:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x04 // Upper 4 bits = 0x04
case ResourceTypePartitionKeyRange:
// we don't do partitions yet, so just use a fixed id
idBytes = []byte{0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x69, 0x50}
case ResourceTypeSchema:
idBytes = randomBytes(8)
idBytes[7] = byte(rand.Intn(0x10)) | 0x09 // Upper 4 bits = 0x09
default:
idBytes = randomBytes(4)
}
-return base64.StdEncoding.EncodeToString(idBytes)
+encoded := base64.StdEncoding.EncodeToString(idBytes)
+return strings.ReplaceAll(encoded, "/", "-")
}
func NewCombined(ids ...string) string {
combinedIdBytes := make([]byte, 0)
for _, id := range ids {
-idBytes, _ := base64.StdEncoding.DecodeString(id)
+idBytes, _ := base64.StdEncoding.DecodeString(strings.ReplaceAll(id, "-", "/"))
combinedIdBytes = append(combinedIdBytes, idBytes...)
}
-return base64.StdEncoding.EncodeToString(combinedIdBytes)
+encoded := base64.StdEncoding.EncodeToString(combinedIdBytes)
+return strings.ReplaceAll(encoded, "/", "-")
}
func uintToBytes(id uint32) []byte {
@@ -39,3 +83,13 @@ func uintToBytes(id uint32) []byte {
return buf
}
func randomBytes(count int) []byte {
buf := make([]byte, count)
for i := 0; i < count; i += 4 {
id := uuid.New().ID()
idBytes := uintToBytes(id)
copy(buf[i:], idBytes)
}
return buf
}
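A small usage sketch of the new resource id helpers, matching the API shown above (the printed ids are random):

package main

import (
	"fmt"

	"github.com/pikami/cosmium/internal/resourceid"
)

func main() {
	// Ids are generated per resource type and can be chained with NewCombined,
	// mirroring how the stores above build _rid values.
	dbRid := resourceid.New(resourceid.ResourceTypeDatabase)
	collRid := resourceid.NewCombined(dbRid, resourceid.New(resourceid.ResourceTypeCollection))
	fmt.Println(dbRid, collRid) // base64, with '/' replaced by '-'
}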

View File

@@ -1,21 +1,19 @@
package structhidrators
-import (
-repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-)
+import "github.com/pikami/cosmium/internal/datastore"
-var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
+var defaultCollection datastore.Collection = datastore.Collection{
-IndexingPolicy: repositorymodels.CollectionIndexingPolicy{
+IndexingPolicy: datastore.CollectionIndexingPolicy{
IndexingMode: "consistent",
Automatic: true,
-IncludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
+IncludedPaths: []datastore.CollectionIndexingPolicyPath{
{Path: "/*"},
},
-ExcludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
+ExcludedPaths: []datastore.CollectionIndexingPolicyPath{
{Path: "/\"_etag\"/?"},
},
},
-PartitionKey: repositorymodels.CollectionPartitionKey{
+PartitionKey: datastore.CollectionPartitionKey{
Paths: []string{"/_partitionKey"},
Kind: "Hash",
Version: 2,

View File

@@ -3,11 +3,11 @@ package structhidrators
import (
"reflect"
-repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+"github.com/pikami/cosmium/internal/datastore"
)
func Hidrate(input interface{}) interface{} {
-if reflect.TypeOf(input) == reflect.TypeOf(repositorymodels.Collection{}) {
+if reflect.TypeOf(input) == reflect.TypeOf(datastore.Collection{}) {
return hidrate(input, defaultCollection)
}
return input

View File

@@ -17,6 +17,7 @@ type SelectStmt struct {
type Table struct {
Value string
SelectItem SelectItem
+IsInSelect bool
}
type JoinItem struct {
@@ -41,6 +42,7 @@ type SelectItem struct {
SelectItems []SelectItem
Type SelectItemType
Value interface{}
+Invert bool
IsTopLevel bool
}
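To make the two new AST fields concrete, a sketch of the values the parser is expected to produce (the query strings are assumed; the field values mirror the updated parser tests elsewhere in this diff):

package main

import "github.com/pikami/cosmium/parsers"

func main() {
	// Approximate AST for `SELECT c.id FROM c IN c.tags`:
	table := parsers.Table{
		Value:      "c",
		SelectItem: parsers.SelectItem{Path: []string{"c", "tags"}},
		IsInSelect: true, // FROM ... IN ... iterates array elements instead of documents
	}

	// A NOT-wrapped filter sets Invert on the SelectItem it negates.
	filter := parsers.SelectItem{Path: []string{"c", "paid"}, Invert: true}

	_, _ = table, filter
}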

View File

@@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_AggregateFunctions(t *testing.T) { func Test_Parse_AggregateFunctions(t *testing.T) {
@@ -27,7 +28,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -51,7 +52,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -75,7 +76,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -99,7 +100,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -123,7 +124,7 @@ func Test_Parse_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })

View File

@@ -32,7 +32,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -58,7 +58,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -87,7 +87,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -116,7 +116,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -145,7 +145,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -169,7 +169,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -195,7 +195,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -223,7 +223,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
@@ -251,7 +251,7 @@ func Test_Parse_ArrayFunctions(t *testing.T) {
},
},
},
-Table: parsers.Table{Value: "c"},
+Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})

View File

@@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_Join(t *testing.T) { func Test_Parse_Join(t *testing.T) {
@@ -17,7 +18,7 @@ func Test_Parse_Join(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{
@@ -40,7 +41,7 @@ func Test_Parse_Join(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"cc"}, IsTopLevel: true}, {Path: []string{"cc"}, IsTopLevel: true},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{

View File

@@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_MathFunctions(t *testing.T) { func Test_Execute_MathFunctions(t *testing.T) {
@@ -644,7 +645,7 @@ func testMathFunctionParse(
}, },
}, },
}, },
Table: parsers.Table{Value: expectedTable}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path(expectedTable)},
}, },
) )
} }

View File

@@ -49,7 +49,7 @@ func Test_Parse(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
OrderExpressions: []parsers.OrderExpression{ OrderExpressions: []parsers.OrderExpression{
{ {
SelectItem: parsers.SelectItem{Path: []string{"c", "id"}}, SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
@@ -73,7 +73,7 @@ func Test_Parse(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
@@ -93,7 +93,7 @@ func Test_Parse(t *testing.T) {
Type: parsers.SelectItemTypeField, Type: parsers.SelectItemTypeField,
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -112,6 +112,38 @@ func Test_Parse(t *testing.T) {
) )
}) })
t.Run("Should parse IN function with function call", func(t *testing.T) {
testQueryParse(
t,
`Select c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
testutils.SelectItem_Path("c", "id"),
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallIn,
Arguments: []interface{}{
parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallToString,
Arguments: []interface{}{
testutils.SelectItem_Path("c", "id"),
},
},
},
testutils.SelectItem_Constant_String("123"),
testutils.SelectItem_Constant_String("456"),
},
},
},
},
)
})
t.Run("Should parse IN selector", func(t *testing.T) { t.Run("Should parse IN selector", func(t *testing.T) {
testQueryParse( testQueryParse(
t, t,
@@ -124,10 +156,9 @@ func Test_Parse(t *testing.T) {
}, },
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "c", Value: "c",
SelectItem: parsers.SelectItem{ SelectItem: testutils.SelectItem_Path("c", "tags"),
Path: []string{"c", "tags"}, IsInSelect: true,
},
}, },
}, },
) )

File diff suppressed because it is too large

View File

@@ -204,14 +204,22 @@ TopClause <- Top ws count:Integer {
return count, nil
}
-FromClause <- From ws table:TableName selectItem:(ws "IN"i ws column:SelectItem { return column, nil })? {
+FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItem { return column, nil }) {
tableTyped := table.(parsers.Table)
if selectItem != nil {
tableTyped.SelectItem = selectItem.(parsers.SelectItem)
+tableTyped.IsInSelect = true
}
return tableTyped, nil
+} / From ws column:SelectItem {
+tableSelectItem := column.(parsers.SelectItem)
+table := parsers.Table{
+Value: tableSelectItem.Alias,
+SelectItem: tableSelectItem,
+}
+return table, nil
} / From ws subQuery:SubQuerySelectItem {
subQueryTyped := subQuery.(parsers.SelectItem)
table := parsers.Table{
@@ -243,13 +251,13 @@ SubQuerySelectItem <- subQuery:SubQuery asClause:(ws alias:AsClause { return ali
return selectItem, nil
}
-JoinClause <- Join ws table:TableName ws "IN"i ws column:SelectItem {
+JoinClause <- Join ws table:TableName ws In ws column:SelectItem {
return makeJoin(table, column)
} / Join ws subQuery:SubQuerySelectItem {
return makeJoin(nil, subQuery)
}
-OffsetClause <- "OFFSET"i ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
+OffsetClause <- Offset ws offset:IntegerLiteral ws "LIMIT"i ws limit:IntegerLiteral {
return []interface{}{offset.(parsers.Constant).Value, limit.(parsers.Constant).Value}, nil
}
@@ -317,7 +325,11 @@ SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectAr
return itemResult, nil
}
-AsClause <- ws As ws alias:Identifier { return alias, nil }
+AsClause <- (ws As)? ws !ExcludedKeywords alias:Identifier {
+return alias, nil
+}
+ExcludedKeywords <- Select / Top / As / From / In / Join / Exists / Where / And / Or / Not / GroupBy / OrderBy / Offset
DotFieldAccess <- "." id:Identifier {
return id, nil
@@ -346,8 +358,14 @@ AndExpression <- ex1:ComparisonExpression ex2:(ws And ws ex:ComparisonExpression
ComparisonExpression <- "(" ws ex:OrExpression ws ")" { return ex, nil }
/ left:SelectItem ws op:ComparisonOperator ws right:SelectItem {
return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
+} / inv:(Not ws)? ex:SelectItem {
+if inv != nil {
+ex1 := ex.(parsers.SelectItem)
+ex1.Invert = true
+return ex1, nil
+}
+return ex, nil
} / ex:BooleanLiteral { return ex, nil }
-/ ex:SelectItem { return ex, nil }
OrderByClause <- OrderBy ws ex1:OrderExpression others:(ws "," ws ex:OrderExpression { return ex, nil })* {
return makeOrderByClause(ex1, others)
@@ -373,6 +391,8 @@ As <- "AS"i
From <- "FROM"i
+In <- "IN"i
Join <- "JOIN"i
Exists <- "EXISTS"i
@@ -383,11 +403,15 @@ And <- "AND"i
Or <- "OR"i wss
+Not <- "NOT"i
GroupBy <- "GROUP"i ws "BY"i
OrderBy <- "ORDER"i ws "BY"i
+Offset <- "OFFSET"i
-ComparisonOperator <- ("=" / "!=" / "<" / "<=" / ">" / ">=") {
+ComparisonOperator <- ("<=" / ">=" / "=" / "!=" / "<" / ">") {
return string(c.text), nil
}
@@ -700,7 +724,9 @@ MathNumberBinExpression <- "NumberBin"i ws "(" ws ex1:SelectItem others:(ws ","
MathPiExpression <- "PI"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathPi, []interface{}{}) }
MathRandExpression <- "RAND"i ws "(" ws ")" { return createFunctionCall(parsers.FunctionCallMathRand, []interface{}{}) }
-InFunction <- ex1:SelectProperty ws "IN"i ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
+InFunction <- ex1:SelectProperty ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" {
+return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
+} / "(" ws ex1:SelectItem ws In ws "(" ws ex2:SelectItem others:(ws "," ws ex:SelectItem { return ex, nil })* ws ")" ws ")" {
return createFunctionCall(parsers.FunctionCallIn, append([]interface{}{ex1, ex2}, others.([]interface{})...))
}
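A few query shapes the updated grammar rules are intended to accept, collected as a hedged Go snippet (the exact strings are illustrative, derived from the rules and tests in this diff):

package main

import "fmt"

func main() {
	// Illustrative queries only; each one exercises a rule changed above.
	queries := []string{
		`SELECT c.id AS aliasWithAs FROM c`,                           // alias with AS
		`SELECT c.id aliasWithoutAs FROM root c`,                      // alias without AS, plus a table alias
		`SELECT c.id FROM c WHERE NOT c.paid`,                         // NOT logical operator
		`SELECT c.id FROM c WHERE (ToString(c.id) IN ("123", "456"))`, // IN over a function call
		`SELECT c.id FROM c WHERE c.price <= 10 OFFSET 2 LIMIT 5`,     // '<=' now matched before '='
	}
	for _, q := range queries {
		fmt.Println(q)
	}
}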

View File

@@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_Select(t *testing.T) { func Test_Parse_Select(t *testing.T) {
@@ -17,7 +18,7 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "pk"}}, {Path: []string{"c", "pk"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -31,7 +32,7 @@ func Test_Parse_Select(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"c", "@param"}}, {Path: []string{"c", "@param"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -44,7 +45,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Distinct: true, Distinct: true,
}, },
) )
@@ -58,7 +59,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 1, Count: 1,
}, },
) )
@@ -72,7 +73,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Count: 5, Count: 5,
Offset: 3, Offset: 3,
}, },
@@ -87,7 +88,7 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}, IsTopLevel: true}, {Path: []string{"c", "id"}, IsTopLevel: true},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -100,7 +101,20 @@ func Test_Parse_Select(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: true}, {Path: []string{"c"}, IsTopLevel: true},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse SELECT c", func(t *testing.T) {
testQueryParse(
t,
`SELECT c FROM c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Path: []string{"c"}, IsTopLevel: false},
},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -120,7 +134,27 @@ func Test_Parse_Select(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
},
)
})
t.Run("Should parse SELECT with alias", func(t *testing.T) {
testQueryParse(
t,
`SELECT
c.id AS aliasWithAs,
c.pk aliasWithoutAs
FROM root c`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{
{Alias: "aliasWithAs", Path: []string{"c", "id"}},
{Alias: "aliasWithoutAs", Path: []string{"c", "pk"}},
},
Table: parsers.Table{
Value: "c",
SelectItem: parsers.SelectItem{Alias: "c", Path: []string{"root"}},
},
}, },
) )
}) })
@@ -140,7 +174,7 @@ func Test_Parse_Select(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })


@@ -30,7 +30,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -56,7 +56,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -85,7 +85,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -111,7 +111,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -137,7 +137,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -163,7 +163,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -189,7 +189,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -213,7 +213,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -237,7 +237,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -261,7 +261,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -286,7 +286,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -310,7 +310,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -334,7 +334,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -360,7 +360,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -385,7 +385,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -409,7 +409,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -434,7 +434,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -458,7 +458,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -484,7 +484,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })
@@ -508,7 +508,7 @@ func Test_Execute_StringFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
) )
}) })


@@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Parse_SubQuery(t *testing.T) { func Test_Parse_SubQuery(t *testing.T) {
@@ -22,7 +23,7 @@ func Test_Parse_SubQuery(t *testing.T) {
Alias: "c", Alias: "c",
Type: parsers.SelectItemTypeSubQuery, Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{ Value: parsers.SelectStmt{
Table: parsers.Table{Value: "cc"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("cc")},
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"cc", "info"}, IsTopLevel: true}, {Path: []string{"cc", "info"}, IsTopLevel: true},
}, },
@@ -42,9 +43,7 @@ func Test_Parse_SubQuery(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}}, {Path: []string{"cc", "name"}},
}, },
Table: parsers.Table{ Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Value: "c",
},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{
@@ -55,13 +54,12 @@ func Test_Parse_SubQuery(t *testing.T) {
Type: parsers.SelectItemTypeSubQuery, Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{ Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"tag", "name"}}, testutils.SelectItem_Path("tag", "name"),
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "tag", Value: "tag",
SelectItem: parsers.SelectItem{ SelectItem: testutils.SelectItem_Path("c", "tags"),
Path: []string{"c", "tags"}, IsInSelect: true,
},
}, },
}, },
}, },
@@ -82,10 +80,10 @@ func Test_Parse_SubQuery(t *testing.T) {
WHERE hasTags`, WHERE hasTags`,
parsers.SelectStmt{ parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, testutils.SelectItem_Path("c", "id"),
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "c", SelectItem: testutils.SelectItem_Path("c"),
}, },
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
@@ -100,13 +98,12 @@ func Test_Parse_SubQuery(t *testing.T) {
Type: parsers.SelectItemTypeSubQuery, Type: parsers.SelectItemTypeSubQuery,
Value: parsers.SelectStmt{ Value: parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"tag", "name"}}, testutils.SelectItem_Path("tag", "name"),
}, },
Table: parsers.Table{ Table: parsers.Table{
Value: "tag", Value: "tag",
SelectItem: parsers.SelectItem{ SelectItem: testutils.SelectItem_Path("c", "tags"),
Path: []string{"c", "tags"}, IsInSelect: true,
},
}, },
Exists: true, Exists: true,
}, },


@@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_TypeCheckingFunctions(t *testing.T) { func Test_Execute_TypeCheckingFunctions(t *testing.T) {
@@ -27,7 +28,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -63,7 +64,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -99,7 +100,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -135,7 +136,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -171,7 +172,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -207,7 +208,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -243,7 +244,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -279,7 +280,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -315,7 +316,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{
@@ -351,7 +352,7 @@ func Test_Execute_TypeCheckingFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{ Filters: parsers.SelectItem{
Type: parsers.SelectItemTypeFunctionCall, Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{ Value: parsers.FunctionCall{


@@ -19,7 +19,7 @@ func Test_Parse_Were(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.ComparisonExpression{ Filters: parsers.ComparisonExpression{
Operation: "=", Operation: "=",
Left: parsers.SelectItem{Path: []string{"c", "isCool"}}, Left: parsers.SelectItem{Path: []string{"c", "isCool"}},
@@ -42,7 +42,7 @@ func Test_Parse_Were(t *testing.T) {
{Path: []string{"c", "_rid"}}, {Path: []string{"c", "_rid"}},
{Path: []string{"c", "_ts"}}, {Path: []string{"c", "_ts"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{ Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeOr, Operation: parsers.LogicalExpressionTypeOr,
Expressions: []interface{}{ Expressions: []interface{}{
@@ -67,12 +67,12 @@ func Test_Parse_Were(t *testing.T) {
t, t,
`select c.id `select c.id
FROM c FROM c
WHERE c.isCool=true AND (c.id = "123" OR c.id = "456")`, WHERE c.isCool=true AND (c.id = "123" OR c.id <= "456")`,
parsers.SelectStmt{ parsers.SelectStmt{
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{ Filters: parsers.LogicalExpression{
Operation: parsers.LogicalExpressionTypeAnd, Operation: parsers.LogicalExpressionTypeAnd,
Expressions: []interface{}{ Expressions: []interface{}{
@@ -90,7 +90,7 @@ func Test_Parse_Were(t *testing.T) {
Right: testutils.SelectItem_Constant_String("123"), Right: testutils.SelectItem_Constant_String("123"),
}, },
parsers.ComparisonExpression{ parsers.ComparisonExpression{
Operation: "=", Operation: "<=",
Left: parsers.SelectItem{Path: []string{"c", "id"}}, Left: parsers.SelectItem{Path: []string{"c", "id"}},
Right: testutils.SelectItem_Constant_String("456"), Right: testutils.SelectItem_Constant_String("456"),
}, },
@@ -114,7 +114,7 @@ func Test_Parse_Were(t *testing.T) {
AND c.param=@param_id1`, AND c.param=@param_id1`,
parsers.SelectStmt{ parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}}, SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.LogicalExpression{ Filters: parsers.LogicalExpression{
Expressions: []interface{}{ Expressions: []interface{}{
parsers.ComparisonExpression{ parsers.ComparisonExpression{
@@ -148,4 +148,21 @@ func Test_Parse_Were(t *testing.T) {
}, },
) )
}) })
t.Run("Should correctly parse NOT conditions", func(t *testing.T) {
testQueryParse(
t,
`select c.id
FROM c
WHERE NOT c.boolean`,
parsers.SelectStmt{
SelectItems: []parsers.SelectItem{{Path: []string{"c", "id"}, Alias: ""}},
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
Filters: parsers.SelectItem{
Path: []string{"c", "boolean"},
Invert: true,
},
},
)
})
} }


@@ -5,6 +5,7 @@ import (
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor" memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_AggregateFunctions(t *testing.T) { func Test_Execute_AggregateFunctions(t *testing.T) {
@@ -38,7 +39,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -67,7 +68,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -99,7 +100,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -132,7 +133,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -165,7 +166,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -198,7 +199,7 @@ func Test_Execute_AggregateFunctions(t *testing.T) {
GroupBy: []parsers.SelectItem{ GroupBy: []parsers.SelectItem{
{Path: []string{"c", "key"}}, {Path: []string{"c", "key"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{


@@ -196,6 +196,10 @@ func (r rowContext) parseArray(argument interface{}) []interface{} {
ex := r.resolveSelectItem(exItem) ex := r.resolveSelectItem(exItem)
arrValue := reflect.ValueOf(ex) arrValue := reflect.ValueOf(ex)
if arrValue.Kind() == reflect.Invalid {
return nil
}
if arrValue.Kind() != reflect.Slice { if arrValue.Kind() != reflect.Slice {
logger.ErrorLn("parseArray got parameters of wrong type") logger.ErrorLn("parseArray got parameters of wrong type")
return nil return nil
@@ -220,7 +224,7 @@ func (r rowContext) partialMatch(item interface{}, exprToSearch interface{}) boo
} }
for _, key := range exprValue.MapKeys() { for _, key := range exprValue.MapKeys() {
if itemValue.MapIndex(key).Interface() != exprValue.MapIndex(key).Interface() { if !reflect.DeepEqual(itemValue.MapIndex(key).Interface(), exprValue.MapIndex(key).Interface()) {
return false return false
} }
} }


@@ -42,7 +42,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -59,10 +59,11 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
parsers.SelectStmt{ parsers.SelectStmt{
Parameters: map[string]interface{}{ Parameters: map[string]interface{}{
"@categories": []interface{}{"coats", "jackets", "sweatshirts"}, "@categories": []interface{}{"coats", "jackets", "sweatshirts"},
"@objectArray": []interface{}{map[string]interface{}{"category": "shirts", "color": "blue"}}, "@objectArray": []interface{}{map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}}},
"@fullMatchObject": map[string]interface{}{"category": "shirts", "color": "blue"}, "@fullMatchObject": map[string]interface{}{"category": "shirts", "color": "blue", "nestedObject": map[string]interface{}{"size": "M"}},
"@partialMatchObject": map[string]interface{}{"category": "shirts"}, "@partialMatchObject": map[string]interface{}{"category": "shirts"},
"@missingPartialMatchObject": map[string]interface{}{"category": "shorts", "color": "blue"}, "@missingPartialMatchObject": map[string]interface{}{"category": "shorts", "color": "blue"},
"@nestedPartialMatchObject": map[string]interface{}{"nestedObject": map[string]interface{}{"size": "M"}},
}, },
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{ {
@@ -133,17 +134,30 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
{
Alias: "ContainsNestedPartialMatchObject",
Type: parsers.SelectItemTypeFunctionCall,
Value: parsers.FunctionCall{
Type: parsers.FunctionCallArrayContains,
Arguments: []interface{}{
testutils.SelectItem_Constant_Parameter("@objectArray"),
testutils.SelectItem_Constant_Parameter("@nestedPartialMatchObject"),
testutils.SelectItem_Constant_Bool(true),
},
},
},
}, },
}, },
[]memoryexecutor.RowType{map[string]interface{}{"id": "123"}}, []memoryexecutor.RowType{map[string]interface{}{"id": "123"}},
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
map[string]interface{}{ map[string]interface{}{
"ContainsItem": true, "ContainsItem": true,
"MissingItem": false, "MissingItem": false,
"ContainsFullMatchObject": true, "ContainsFullMatchObject": true,
"MissingFullMatchObject": false, "MissingFullMatchObject": false,
"ContainsPartialMatchObject": true, "ContainsPartialMatchObject": true,
"MissingPartialMatchObject": false, "MissingPartialMatchObject": false,
"ContainsNestedPartialMatchObject": true,
}, },
}, },
) )
@@ -356,7 +370,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -392,7 +406,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -430,7 +444,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{
@@ -468,7 +482,7 @@ func Test_Execute_ArrayFunctions(t *testing.T) {
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
mockData, mockData,
[]memoryexecutor.RowType{ []memoryexecutor.RowType{


@@ -0,0 +1,27 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type rowArrayIterator struct {
documents []rowContext
index int
}
func NewRowArrayIterator(documents []rowContext) *rowArrayIterator {
return &rowArrayIterator{
documents: documents,
index: -1,
}
}
func (i *rowArrayIterator) Next() (rowContext, datastore.DataStoreStatus) {
i.index++
if i.index >= len(i.documents) {
return rowContext{}, datastore.IterEOF
}
row := i.documents[i.index]
i.documents[i.index] = rowContext{} // Help GC reclaim memory
return row, datastore.StatusOk
}
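
A minimal consumption sketch (not part of the diff; it assumes code inside the memoryexecutor package, and the table name "c" and document contents are invented): drain the iterator until datastore.IterEOF.

rows := []rowContext{
	{tables: map[string]RowType{"c": map[string]interface{}{"id": "1"}}},
	{tables: map[string]RowType{"c": map[string]interface{}{"id": "2"}}},
}
iter := NewRowArrayIterator(rows)
for {
	row, status := iter.Next()
	if status != datastore.StatusOk { // datastore.IterEOF once the slice is exhausted
		break
	}
	_ = row // each row is handed out once; its backing slot is zeroed to help GC
}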


@@ -0,0 +1,397 @@
package memoryexecutor
import (
"fmt"
"reflect"
"strconv"
"strings"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers"
)
type RowType interface{}
type rowContext struct {
tables map[string]RowType
parameters map[string]interface{}
grouppedRows []rowContext
}
type rowIterator interface {
Next() (rowContext, datastore.DataStoreStatus)
}
type rowTypeIterator interface {
Next() (RowType, datastore.DataStoreStatus)
}
func resolveDestinationColumnName(selectItem parsers.SelectItem, itemIndex int, queryParameters map[string]interface{}) string {
if selectItem.Alias != "" {
return selectItem.Alias
}
destinationName := fmt.Sprintf("$%d", itemIndex+1)
if len(selectItem.Path) > 0 {
destinationName = selectItem.Path[len(selectItem.Path)-1]
}
if destinationName[0] == '@' {
destinationName = queryParameters[destinationName].(string)
}
return destinationName
}
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
if selectItem.Type == parsers.SelectItemTypeArray {
return r.selectItem_SelectItemTypeArray(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeObject {
return r.selectItem_SelectItemTypeObject(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeConstant {
return r.selectItem_SelectItemTypeConstant(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeSubQuery {
return r.selectItem_SelectItemTypeSubQuery(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
return nil
}
return r.selectItem_SelectItemTypeField(selectItem)
}
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
arrayValue := make([]interface{}, 0)
for _, subSelectItem := range selectItem.SelectItems {
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
}
return arrayValue
}
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
objectValue := make(map[string]interface{})
for _, subSelectItem := range selectItem.SelectItems {
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
}
return objectValue
}
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
var typedValue parsers.Constant
var ok bool
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
// TODO: Handle error
logger.ErrorLn("parsers.Constant has incorrect Value type")
}
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
r.parameters != nil {
if key, ok := typedValue.Value.(string); ok {
return r.parameters[key]
}
}
return typedValue.Value
}
func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
subQuery := selectItem.Value.(parsers.SelectStmt)
subQueryResult := executeQuery(
subQuery,
NewRowArrayIterator([]rowContext{r}),
)
if subQuery.Exists {
_, status := subQueryResult.Next()
return status == datastore.StatusOk
}
allDocuments := make([]RowType, 0)
for {
row, status := subQueryResult.Next()
if status != datastore.StatusOk {
break
}
allDocuments = append(allDocuments, row)
}
return allDocuments
}
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
switch functionCall.Type {
case parsers.FunctionCallStringEquals:
return r.strings_StringEquals(functionCall.Arguments)
case parsers.FunctionCallContains:
return r.strings_Contains(functionCall.Arguments)
case parsers.FunctionCallEndsWith:
return r.strings_EndsWith(functionCall.Arguments)
case parsers.FunctionCallStartsWith:
return r.strings_StartsWith(functionCall.Arguments)
case parsers.FunctionCallConcat:
return r.strings_Concat(functionCall.Arguments)
case parsers.FunctionCallIndexOf:
return r.strings_IndexOf(functionCall.Arguments)
case parsers.FunctionCallToString:
return r.strings_ToString(functionCall.Arguments)
case parsers.FunctionCallUpper:
return r.strings_Upper(functionCall.Arguments)
case parsers.FunctionCallLower:
return r.strings_Lower(functionCall.Arguments)
case parsers.FunctionCallLeft:
return r.strings_Left(functionCall.Arguments)
case parsers.FunctionCallLength:
return r.strings_Length(functionCall.Arguments)
case parsers.FunctionCallLTrim:
return r.strings_LTrim(functionCall.Arguments)
case parsers.FunctionCallReplace:
return r.strings_Replace(functionCall.Arguments)
case parsers.FunctionCallReplicate:
return r.strings_Replicate(functionCall.Arguments)
case parsers.FunctionCallReverse:
return r.strings_Reverse(functionCall.Arguments)
case parsers.FunctionCallRight:
return r.strings_Right(functionCall.Arguments)
case parsers.FunctionCallRTrim:
return r.strings_RTrim(functionCall.Arguments)
case parsers.FunctionCallSubstring:
return r.strings_Substring(functionCall.Arguments)
case parsers.FunctionCallTrim:
return r.strings_Trim(functionCall.Arguments)
case parsers.FunctionCallIsDefined:
return r.typeChecking_IsDefined(functionCall.Arguments)
case parsers.FunctionCallIsArray:
return r.typeChecking_IsArray(functionCall.Arguments)
case parsers.FunctionCallIsBool:
return r.typeChecking_IsBool(functionCall.Arguments)
case parsers.FunctionCallIsFiniteNumber:
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
case parsers.FunctionCallIsInteger:
return r.typeChecking_IsInteger(functionCall.Arguments)
case parsers.FunctionCallIsNull:
return r.typeChecking_IsNull(functionCall.Arguments)
case parsers.FunctionCallIsNumber:
return r.typeChecking_IsNumber(functionCall.Arguments)
case parsers.FunctionCallIsObject:
return r.typeChecking_IsObject(functionCall.Arguments)
case parsers.FunctionCallIsPrimitive:
return r.typeChecking_IsPrimitive(functionCall.Arguments)
case parsers.FunctionCallIsString:
return r.typeChecking_IsString(functionCall.Arguments)
case parsers.FunctionCallArrayConcat:
return r.array_Concat(functionCall.Arguments)
case parsers.FunctionCallArrayContains:
return r.array_Contains(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAny:
return r.array_Contains_Any(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAll:
return r.array_Contains_All(functionCall.Arguments)
case parsers.FunctionCallArrayLength:
return r.array_Length(functionCall.Arguments)
case parsers.FunctionCallArraySlice:
return r.array_Slice(functionCall.Arguments)
case parsers.FunctionCallSetIntersect:
return r.set_Intersect(functionCall.Arguments)
case parsers.FunctionCallSetUnion:
return r.set_Union(functionCall.Arguments)
case parsers.FunctionCallMathAbs:
return r.math_Abs(functionCall.Arguments)
case parsers.FunctionCallMathAcos:
return r.math_Acos(functionCall.Arguments)
case parsers.FunctionCallMathAsin:
return r.math_Asin(functionCall.Arguments)
case parsers.FunctionCallMathAtan:
return r.math_Atan(functionCall.Arguments)
case parsers.FunctionCallMathCeiling:
return r.math_Ceiling(functionCall.Arguments)
case parsers.FunctionCallMathCos:
return r.math_Cos(functionCall.Arguments)
case parsers.FunctionCallMathCot:
return r.math_Cot(functionCall.Arguments)
case parsers.FunctionCallMathDegrees:
return r.math_Degrees(functionCall.Arguments)
case parsers.FunctionCallMathExp:
return r.math_Exp(functionCall.Arguments)
case parsers.FunctionCallMathFloor:
return r.math_Floor(functionCall.Arguments)
case parsers.FunctionCallMathIntBitNot:
return r.math_IntBitNot(functionCall.Arguments)
case parsers.FunctionCallMathLog10:
return r.math_Log10(functionCall.Arguments)
case parsers.FunctionCallMathRadians:
return r.math_Radians(functionCall.Arguments)
case parsers.FunctionCallMathRound:
return r.math_Round(functionCall.Arguments)
case parsers.FunctionCallMathSign:
return r.math_Sign(functionCall.Arguments)
case parsers.FunctionCallMathSin:
return r.math_Sin(functionCall.Arguments)
case parsers.FunctionCallMathSqrt:
return r.math_Sqrt(functionCall.Arguments)
case parsers.FunctionCallMathSquare:
return r.math_Square(functionCall.Arguments)
case parsers.FunctionCallMathTan:
return r.math_Tan(functionCall.Arguments)
case parsers.FunctionCallMathTrunc:
return r.math_Trunc(functionCall.Arguments)
case parsers.FunctionCallMathAtn2:
return r.math_Atn2(functionCall.Arguments)
case parsers.FunctionCallMathIntAdd:
return r.math_IntAdd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitAnd:
return r.math_IntBitAnd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitLeftShift:
return r.math_IntBitLeftShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitOr:
return r.math_IntBitOr(functionCall.Arguments)
case parsers.FunctionCallMathIntBitRightShift:
return r.math_IntBitRightShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitXor:
return r.math_IntBitXor(functionCall.Arguments)
case parsers.FunctionCallMathIntDiv:
return r.math_IntDiv(functionCall.Arguments)
case parsers.FunctionCallMathIntMod:
return r.math_IntMod(functionCall.Arguments)
case parsers.FunctionCallMathIntMul:
return r.math_IntMul(functionCall.Arguments)
case parsers.FunctionCallMathIntSub:
return r.math_IntSub(functionCall.Arguments)
case parsers.FunctionCallMathPower:
return r.math_Power(functionCall.Arguments)
case parsers.FunctionCallMathLog:
return r.math_Log(functionCall.Arguments)
case parsers.FunctionCallMathNumberBin:
return r.math_NumberBin(functionCall.Arguments)
case parsers.FunctionCallMathPi:
return r.math_Pi()
case parsers.FunctionCallMathRand:
return r.math_Rand()
case parsers.FunctionCallAggregateAvg:
return r.aggregate_Avg(functionCall.Arguments)
case parsers.FunctionCallAggregateCount:
return r.aggregate_Count(functionCall.Arguments)
case parsers.FunctionCallAggregateMax:
return r.aggregate_Max(functionCall.Arguments)
case parsers.FunctionCallAggregateMin:
return r.aggregate_Min(functionCall.Arguments)
case parsers.FunctionCallAggregateSum:
return r.aggregate_Sum(functionCall.Arguments)
case parsers.FunctionCallIn:
return r.misc_In(functionCall.Arguments)
}
logger.Errorf("Unknown function call type: %v", functionCall.Type)
return nil
}
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
value := r.tables[selectItem.Path[0]]
if len(selectItem.Path) > 1 {
for _, pathSegment := range selectItem.Path[1:] {
if pathSegment[0] == '@' {
pathSegment = r.parameters[pathSegment].(string)
}
switch nestedValue := value.(type) {
case map[string]interface{}:
value = nestedValue[pathSegment]
case map[string]RowType:
value = nestedValue[pathSegment]
case datastore.Document:
value = nestedValue[pathSegment]
case map[string]datastore.Document:
value = nestedValue[pathSegment]
case []int, []string, []interface{}:
slice := reflect.ValueOf(nestedValue)
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
value = slice.Index(arrayIndex).Interface()
} else {
return nil
}
default:
return nil
}
}
}
return value
}
func compareValues(val1, val2 interface{}) int {
if val1 == nil && val2 == nil {
return 0
} else if val1 == nil {
return -1
} else if val2 == nil {
return 1
}
if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
return 1
}
switch val1 := val1.(type) {
case int:
val2 := val2.(int)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case float64:
val2 := val2.(float64)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case string:
val2 := val2.(string)
return strings.Compare(val1, val2)
case bool:
val2 := val2.(bool)
if val1 == val2 {
return 0
} else if val1 {
return 1
} else {
return -1
}
// TODO: Add more types
default:
if reflect.DeepEqual(val1, val2) {
return 0
}
return 1
}
}
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
targetMap := make(map[string]T)
for k, v := range originalMap {
targetMap[k] = v
}
return targetMap
}
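
For orientation only (not part of the diff; these expression-level examples would sit inside a test in this package), the comparison rules above work out as follows: nil orders first, mismatched types fall through to 1, and non-primitive values are equal only under reflect.DeepEqual.

compareValues(nil, 1)       // -1: nil sorts before any value
compareValues(2, 10)        // -1: plain numeric comparison
compareValues("abc", "abd") // -1: strings use strings.Compare
compareValues(1, "1")       //  1: differing types are treated as not comparable
compareValues(              //  0: default case falls back to reflect.DeepEqual
	map[string]interface{}{"a": 1},
	map[string]interface{}{"a": 1},
)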


@@ -0,0 +1,36 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type distinctIterator struct {
documents rowTypeIterator
seenDocs []RowType
}
func (di *distinctIterator) Next() (RowType, datastore.DataStoreStatus) {
if di.documents == nil {
return rowContext{}, datastore.IterEOF
}
for {
row, status := di.documents.Next()
if status != datastore.StatusOk {
di.documents = nil
return rowContext{}, status
}
if !di.seen(row) {
di.seenDocs = append(di.seenDocs, row)
return row, status
}
}
}
func (di *distinctIterator) seen(row RowType) bool {
for _, seenRow := range di.seenDocs {
if compareValues(seenRow, row) == 0 {
return true
}
}
return false
}


@@ -0,0 +1,143 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers"
)
type filterIterator struct {
documents rowIterator
filters interface{}
}
func (fi *filterIterator) Next() (rowContext, datastore.DataStoreStatus) {
if fi.documents == nil {
return rowContext{}, datastore.IterEOF
}
for {
row, status := fi.documents.Next()
if status != datastore.StatusOk {
fi.documents = nil
return rowContext{}, status
}
if fi.evaluateFilters(row) {
return row, status
}
}
}
func (fi *filterIterator) evaluateFilters(row rowContext) bool {
if fi.filters == nil {
return true
}
switch typedFilters := fi.filters.(type) {
case parsers.ComparisonExpression:
return row.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return row.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := row.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
if typedFilters.Invert {
return !value
}
return value
}
}
return false
}
func (r rowContext) applyFilters(filters interface{}) bool {
if filters == nil {
return true
}
switch typedFilters := filters.(type) {
case parsers.ComparisonExpression:
return r.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return r.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := r.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
if typedFilters.Invert {
return !value
}
return value
}
}
return false
}
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
if !leftExpressionOk || !rightExpressionOk {
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
return false
}
leftValue := r.resolveSelectItem(leftExpression)
rightValue := r.resolveSelectItem(rightExpression)
cmp := compareValues(leftValue, rightValue)
switch expression.Operation {
case "=":
return cmp == 0
case "!=":
return cmp != 0
case "<":
return cmp < 0
case ">":
return cmp > 0
case "<=":
return cmp <= 0
case ">=":
return cmp >= 0
}
return false
}
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
var result bool
for i, subExpression := range expression.Expressions {
expressionResult := r.applyFilters(subExpression)
if i == 0 {
result = expressionResult
}
switch expression.Operation {
case parsers.LogicalExpressionTypeAnd:
result = result && expressionResult
if !result {
return false
}
case parsers.LogicalExpressionTypeOr:
result = result || expressionResult
if result {
return true
}
}
}
return result
}
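
A hedged sketch of how the Invert flag is honoured, mirroring the WHERE NOT c.boolean parse test earlier; the documents are fabricated and the code assumes the memoryexecutor package.

source := NewRowArrayIterator([]rowContext{
	{tables: map[string]RowType{"c": map[string]interface{}{"boolean": false}}},
	{tables: map[string]RowType{"c": map[string]interface{}{"boolean": true}}},
})
filtered := &filterIterator{
	documents: source,
	filters:   parsers.SelectItem{Path: []string{"c", "boolean"}, Invert: true},
}
row, status := filtered.Next() // the c.boolean == false document passes (NOT false)
_, _ = row, status
row, status = filtered.Next() // the true document is rejected; status is datastore.IterEOF
_, _ = row, status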


@@ -0,0 +1,73 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type fromIterator struct {
documents rowIterator
table parsers.Table
buffer []rowContext
bufferIndex int
}
func (fi *fromIterator) Next() (rowContext, datastore.DataStoreStatus) {
if fi.documents == nil {
return rowContext{}, datastore.IterEOF
}
// Return from buffer if available
if fi.bufferIndex < len(fi.buffer) {
result := fi.buffer[fi.bufferIndex]
fi.buffer[fi.bufferIndex] = rowContext{}
fi.bufferIndex++
return result, datastore.StatusOk
}
// Resolve next row from documents
row, status := fi.documents.Next()
if status != datastore.StatusOk {
fi.documents = nil
return row, status
}
if fi.table.SelectItem.Path != nil || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
destinationTableName := fi.table.SelectItem.Alias
if destinationTableName == "" {
destinationTableName = fi.table.Value
}
if destinationTableName == "" {
destinationTableName = resolveDestinationColumnName(fi.table.SelectItem, 0, row.parameters)
}
if fi.table.IsInSelect || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := row.parseArray(fi.table.SelectItem)
rowContexts := make([]rowContext, len(selectValue))
for i, newRowData := range selectValue {
rowContexts[i].parameters = row.parameters
rowContexts[i].tables = copyMap(row.tables)
rowContexts[i].tables[destinationTableName] = newRowData
}
fi.buffer = rowContexts
fi.bufferIndex = 0
return fi.Next()
}
if len(fi.table.SelectItem.Path) > 0 {
sourceTableName := fi.table.SelectItem.Path[0]
sourceTableData := row.tables[sourceTableName]
if sourceTableData == nil {
// When the source table is not found, assume it's the root document
row.tables[sourceTableName] = row.tables["$root"]
}
}
newRowData := row.resolveSelectItem(fi.table.SelectItem)
row.tables[destinationTableName] = newRowData
return row, status
}
return row, status
}
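
A hedged illustration of the $root fallback above, assuming the incoming rowContext carries the raw document under "$root" (that wiring is not shown in this diff); FROM c then binds "c" to that document.

doc := map[string]interface{}{"id": "123", "pk": "pk1"} // fabricated document
iter := &fromIterator{
	documents: NewRowArrayIterator([]rowContext{
		{tables: map[string]RowType{"$root": doc}},
	}),
	table: parsers.Table{SelectItem: parsers.SelectItem{Path: []string{"c"}}},
}
row, _ := iter.Next()
_ = row.tables["c"] // now the same document that was registered as "$root"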


@@ -0,0 +1,69 @@
package memoryexecutor
import (
"fmt"
"strings"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type groupByIterator struct {
documents rowIterator
groupBy []parsers.SelectItem
groupedRows []rowContext
}
func (gi *groupByIterator) Next() (rowContext, datastore.DataStoreStatus) {
if gi.groupedRows != nil {
if len(gi.groupedRows) == 0 {
return rowContext{}, datastore.IterEOF
}
row := gi.groupedRows[0]
gi.groupedRows = gi.groupedRows[1:]
return row, datastore.StatusOk
}
documents := make([]rowContext, 0)
for {
row, status := gi.documents.Next()
if status != datastore.StatusOk {
break
}
documents = append(documents, row)
}
gi.documents = nil
groupedRows := make(map[string][]rowContext)
groupedKeys := make([]string, 0)
for _, row := range documents {
key := row.generateGroupByKey(gi.groupBy)
if _, ok := groupedRows[key]; !ok {
groupedKeys = append(groupedKeys, key)
}
groupedRows[key] = append(groupedRows[key], row)
}
gi.groupedRows = make([]rowContext, 0)
for _, key := range groupedKeys {
gi.groupedRows = append(gi.groupedRows, rowContext{
tables: groupedRows[key][0].tables,
parameters: groupedRows[key][0].parameters,
grouppedRows: groupedRows[key],
})
}
return gi.Next()
}
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
var keyBuilder strings.Builder
for _, selectItem := range groupBy {
value := r.resolveSelectItem(selectItem)
keyBuilder.WriteString(fmt.Sprintf("%v", value))
keyBuilder.WriteString(":")
}
return keyBuilder.String()
}
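
For illustration only (the document is fabricated; GROUP BY c.key mirrors the aggregate tests above): generateGroupByKey concatenates each resolved value with a trailing colon.

r := rowContext{tables: map[string]RowType{
	"c": map[string]interface{}{"key": "a", "type": 1},
}}
key := r.generateGroupByKey([]parsers.SelectItem{
	{Path: []string{"c", "key"}},
	{Path: []string{"c", "type"}},
})
_ = key // "a:1:"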


@@ -0,0 +1,62 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type joinIterator struct {
documents rowIterator
query parsers.SelectStmt
buffer []rowContext
}
func (ji *joinIterator) Next() (rowContext, datastore.DataStoreStatus) {
if ji.documents == nil {
return rowContext{}, datastore.IterEOF
}
if len(ji.buffer) > 0 {
row := ji.buffer[0]
ji.buffer = ji.buffer[1:]
return row, datastore.StatusOk
}
doc, status := ji.documents.Next()
if status != datastore.StatusOk {
ji.documents = nil
return rowContext{}, status
}
ji.buffer = []rowContext{doc}
for _, joinItem := range ji.query.JoinItems {
nextDocuments := make([]rowContext, 0)
for _, row := range ji.buffer {
joinedItems := row.resolveJoinItemSelect(joinItem.SelectItem)
for _, joinedItem := range joinedItems {
tablesCopy := copyMap(row.tables)
tablesCopy[joinItem.Table.Value] = joinedItem
nextDocuments = append(nextDocuments, rowContext{
parameters: row.parameters,
tables: tablesCopy,
})
}
}
ji.buffer = nextDocuments
}
return ji.Next()
}
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := r.parseArray(selectItem)
documents := make([]RowType, len(selectValue))
for i, newRowData := range selectValue {
documents[i] = newRowData
}
return documents
}
return []RowType{}
}
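
A hedged sketch of the expansion (the tags array is invented; the JoinItem fields follow the parse tests above): JOIN cc IN c.tags turns one source row into one row per array element, each with tables["cc"] bound to that element.

source := NewRowArrayIterator([]rowContext{{
	tables: map[string]RowType{"c": map[string]interface{}{
		"id": "1",
		"tags": []interface{}{
			map[string]interface{}{"name": "red"},
			map[string]interface{}{"name": "blue"},
		},
	}},
}})
joined := &joinIterator{
	documents: source,
	query: parsers.SelectStmt{
		JoinItems: []parsers.JoinItem{{
			Table:      parsers.Table{Value: "cc"},
			SelectItem: parsers.SelectItem{Path: []string{"c", "tags"}},
		}},
	},
}
row, _ := joined.Next() // tables["cc"] is {"name": "red"}; the next row yields "blue"
_ = row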


@@ -5,6 +5,7 @@ import (
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor" memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_Joins(t *testing.T) { func Test_Execute_Joins(t *testing.T) {
@@ -33,7 +34,7 @@ func Test_Execute_Joins(t *testing.T) {
{Path: []string{"c", "id"}}, {Path: []string{"c", "id"}},
{Path: []string{"cc", "name"}}, {Path: []string{"cc", "name"}},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{
@@ -62,7 +63,7 @@ func Test_Execute_Joins(t *testing.T) {
SelectItems: []parsers.SelectItem{ SelectItems: []parsers.SelectItem{
{Path: []string{"cc"}, IsTopLevel: true}, {Path: []string{"cc"}, IsTopLevel: true},
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
JoinItems: []parsers.JoinItem{ JoinItems: []parsers.JoinItem{
{ {
Table: parsers.Table{ Table: parsers.Table{


@@ -0,0 +1,19 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type limitIterator struct {
documents rowTypeIterator
limit int
count int
}
func (li *limitIterator) Next() (RowType, datastore.DataStoreStatus) {
if li.count >= li.limit {
li.documents = nil
return rowContext{}, datastore.IterEOF
}
li.count++
return li.documents.Next()
}


@@ -6,6 +6,7 @@ import (
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor" memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
testutils "github.com/pikami/cosmium/test_utils"
) )
func Test_Execute_MathFunctions(t *testing.T) { func Test_Execute_MathFunctions(t *testing.T) {
@@ -261,7 +262,7 @@ func testMathFunctionExecute(
}, },
}, },
}, },
Table: parsers.Table{Value: "c"}, Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
}, },
data, data,
expectedData, expectedData,


@@ -1,702 +1,92 @@
package memoryexecutor package memoryexecutor
import ( import (
"fmt" "github.com/pikami/cosmium/internal/datastore"
"reflect"
"sort"
"strconv"
"strings"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/parsers" "github.com/pikami/cosmium/parsers"
"golang.org/x/exp/slices"
) )
type RowType interface{} func ExecuteQuery(query parsers.SelectStmt, documents rowTypeIterator) []RowType {
type rowContext struct { resultIter := executeQuery(query, &rowTypeToRowContextIterator{documents: documents, query: query})
tables map[string]RowType result := make([]RowType, 0)
parameters map[string]interface{} for {
grouppedRows []rowContext row, status := resultIter.Next()
if status != datastore.StatusOk {
break
}
result = append(result, row)
}
return result
} }
func ExecuteQuery(query parsers.SelectStmt, documents []RowType) []RowType { func executeQuery(query parsers.SelectStmt, documents rowIterator) rowTypeIterator {
currentDocuments := make([]rowContext, 0) // Resolve FROM
for _, doc := range documents { var iter rowIterator = &fromIterator{
currentDocuments = append(currentDocuments, resolveFrom(query, doc)...) documents: documents,
table: query.Table,
} }
// Handle JOINS // Apply JOIN
nextDocuments := make([]rowContext, 0) if len(query.JoinItems) > 0 {
for _, currentDocument := range currentDocuments { iter = &joinIterator{
rowContexts := currentDocument.handleJoin(query) documents: iter,
nextDocuments = append(nextDocuments, rowContexts...) query: query,
}
currentDocuments = nextDocuments
// Apply filters
nextDocuments = make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
if currentDocument.applyFilters(query.Filters) {
nextDocuments = append(nextDocuments, currentDocument)
} }
} }
currentDocuments = nextDocuments
// Apply order // Apply WHERE
if query.Filters != nil {
iter = &filterIterator{
documents: iter,
filters: query.Filters,
}
}
// Apply ORDER BY
if len(query.OrderExpressions) > 0 { if len(query.OrderExpressions) > 0 {
applyOrder(currentDocuments, query.OrderExpressions) iter = &orderIterator{
documents: iter,
orderExpressions: query.OrderExpressions,
}
} }
// Apply group by // Apply GROUP BY
if len(query.GroupBy) > 0 { if len(query.GroupBy) > 0 {
currentDocuments = applyGroupBy(currentDocuments, query.GroupBy) iter = &groupByIterator{
documents: iter,
groupBy: query.GroupBy,
}
} }
// Apply select // Apply SELECT
projectedDocuments := applyProjection(currentDocuments, query.SelectItems, query.GroupBy) var projectedIterator rowTypeIterator = &projectIterator{
documents: iter,
selectItems: query.SelectItems,
groupBy: query.GroupBy,
}
// Apply distinct // Apply DISTINCT
if query.Distinct { if query.Distinct {
projectedDocuments = deduplicate(projectedDocuments) projectedIterator = &distinctIterator{
documents: projectedIterator,
}
} }
// Apply result limit // Apply OFFSET
if query.Count > 0 && len(projectedDocuments) > query.Count { if query.Offset > 0 {
projectedDocuments = projectedDocuments[:query.Count] projectedIterator = &offsetIterator{
documents: projectedIterator,
offset: query.Offset,
}
} }
return projectedDocuments // Apply LIMIT
} if query.Count > 0 {
projectedIterator = &limitIterator{
func resolveFrom(query parsers.SelectStmt, doc RowType) []rowContext { documents: projectedIterator,
initialRow, gotParentContext := doc.(rowContext) limit: query.Count,
if !gotParentContext { }
var initialTableName string }
if query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
initialTableName = query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value return projectedIterator
}
if initialTableName == "" {
initialTableName = query.Table.Value
}
initialRow = rowContext{
parameters: query.Parameters,
tables: map[string]RowType{
initialTableName: doc,
},
}
}
if query.Table.SelectItem.Path != nil || query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
destinationTableName := query.Table.SelectItem.Alias
if destinationTableName == "" {
destinationTableName = query.Table.Value
}
selectValue := initialRow.parseArray(query.Table.SelectItem)
rowContexts := make([]rowContext, len(selectValue))
for i, newRowData := range selectValue {
rowContexts[i].parameters = initialRow.parameters
rowContexts[i].tables = copyMap(initialRow.tables)
rowContexts[i].tables[destinationTableName] = newRowData
}
return rowContexts
}
return []rowContext{initialRow}
}
func (r rowContext) handleJoin(query parsers.SelectStmt) []rowContext {
currentDocuments := []rowContext{r}
for _, joinItem := range query.JoinItems {
nextDocuments := make([]rowContext, 0)
for _, currentDocument := range currentDocuments {
joinedItems := currentDocument.resolveJoinItemSelect(joinItem.SelectItem)
for _, joinedItem := range joinedItems {
tablesCopy := copyMap(currentDocument.tables)
tablesCopy[joinItem.Table.Value] = joinedItem
nextDocuments = append(nextDocuments, rowContext{
parameters: currentDocument.parameters,
tables: tablesCopy,
})
}
}
currentDocuments = nextDocuments
}
return currentDocuments
}
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
selectValue := r.parseArray(selectItem)
documents := make([]RowType, len(selectValue))
for i, newRowData := range selectValue {
documents[i] = newRowData
}
return documents
}
return []RowType{}
}
func (r rowContext) applyFilters(filters interface{}) bool {
if filters == nil {
return true
}
switch typedFilters := filters.(type) {
case parsers.ComparisonExpression:
return r.filters_ComparisonExpression(typedFilters)
case parsers.LogicalExpression:
return r.filters_LogicalExpression(typedFilters)
case parsers.Constant:
if value, ok := typedFilters.Value.(bool); ok {
return value
}
return false
case parsers.SelectItem:
resolvedValue := r.resolveSelectItem(typedFilters)
if value, ok := resolvedValue.(bool); ok {
return value
}
}
return false
}
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
if !leftExpressionOk || !rightExpressionOk {
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
return false
}
leftValue := r.resolveSelectItem(leftExpression)
rightValue := r.resolveSelectItem(rightExpression)
cmp := compareValues(leftValue, rightValue)
switch expression.Operation {
case "=":
return cmp == 0
case "!=":
return cmp != 0
case "<":
return cmp < 0
case ">":
return cmp > 0
case "<=":
return cmp <= 0
case ">=":
return cmp >= 0
}
return false
}
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
var result bool
for i, subExpression := range expression.Expressions {
expressionResult := r.applyFilters(subExpression)
if i == 0 {
result = expressionResult
}
switch expression.Operation {
case parsers.LogicalExpressionTypeAnd:
result = result && expressionResult
if !result {
return false
}
case parsers.LogicalExpressionTypeOr:
result = result || expressionResult
if result {
return true
}
}
}
return result
}
func applyOrder(documents []rowContext, orderExpressions []parsers.OrderExpression) {
less := func(i, j int) bool {
for _, order := range orderExpressions {
val1 := documents[i].resolveSelectItem(order.SelectItem)
val2 := documents[j].resolveSelectItem(order.SelectItem)
cmp := compareValues(val1, val2)
if cmp != 0 {
if order.Direction == parsers.OrderDirectionDesc {
return cmp > 0
}
return cmp < 0
}
}
return i < j
}
sort.SliceStable(documents, less)
}
func applyGroupBy(documents []rowContext, groupBy []parsers.SelectItem) []rowContext {
groupedRows := make(map[string][]rowContext)
groupedKeys := make([]string, 0)
for _, row := range documents {
key := row.generateGroupByKey(groupBy)
if _, ok := groupedRows[key]; !ok {
groupedKeys = append(groupedKeys, key)
}
groupedRows[key] = append(groupedRows[key], row)
}
grouppedRows := make([]rowContext, 0)
for _, key := range groupedKeys {
grouppedRowContext := rowContext{
tables: groupedRows[key][0].tables,
parameters: groupedRows[key][0].parameters,
grouppedRows: groupedRows[key],
}
grouppedRows = append(grouppedRows, grouppedRowContext)
}
return grouppedRows
}
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
var keyBuilder strings.Builder
for _, selectItem := range groupBy {
value := r.resolveSelectItem(selectItem)
keyBuilder.WriteString(fmt.Sprintf("%v", value))
keyBuilder.WriteString(":")
}
return keyBuilder.String()
}
func applyProjection(documents []rowContext, selectItems []parsers.SelectItem, groupBy []parsers.SelectItem) []RowType {
if len(documents) == 0 {
return []RowType{}
}
if hasAggregateFunctions(selectItems) && len(groupBy) == 0 {
// Since we can have aggregate functions without a GROUP BY clause,
// we should aggregate all rows in that case
rowContext := rowContext{
tables: documents[0].tables,
parameters: documents[0].parameters,
grouppedRows: documents,
}
return []RowType{rowContext.applyProjection(selectItems)}
}
projectedDocuments := make([]RowType, len(documents))
for index, row := range documents {
projectedDocuments[index] = row.applyProjection(selectItems)
}
return projectedDocuments
}
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
// When the first value is top level, select it instead
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
return r.resolveSelectItem(selectItems[0])
}
// Construct a new row based on the selected columns
row := make(map[string]interface{})
for index, selectItem := range selectItems {
destinationName := selectItem.Alias
if destinationName == "" {
if len(selectItem.Path) > 0 {
destinationName = selectItem.Path[len(selectItem.Path)-1]
} else {
destinationName = fmt.Sprintf("$%d", index+1)
}
if destinationName[0] == '@' {
destinationName = r.parameters[destinationName].(string)
}
}
row[destinationName] = r.resolveSelectItem(selectItem)
}
return row
}
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
if selectItem.Type == parsers.SelectItemTypeArray {
return r.selectItem_SelectItemTypeArray(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeObject {
return r.selectItem_SelectItemTypeObject(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeConstant {
return r.selectItem_SelectItemTypeConstant(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeSubQuery {
return r.selectItem_SelectItemTypeSubQuery(selectItem)
}
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
}
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
return nil
}
return r.selectItem_SelectItemTypeField(selectItem)
}
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
arrayValue := make([]interface{}, 0)
for _, subSelectItem := range selectItem.SelectItems {
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
}
return arrayValue
}
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
objectValue := make(map[string]interface{})
for _, subSelectItem := range selectItem.SelectItems {
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
}
return objectValue
}
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
var typedValue parsers.Constant
var ok bool
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
// TODO: Handle error
logger.ErrorLn("parsers.Constant has incorrect Value type")
}
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
r.parameters != nil {
if key, ok := typedValue.Value.(string); ok {
return r.parameters[key]
}
}
return typedValue.Value
}
func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
subQuery := selectItem.Value.(parsers.SelectStmt)
subQueryResult := ExecuteQuery(
subQuery,
[]RowType{r},
)
if subQuery.Exists {
return len(subQueryResult) > 0
}
return subQueryResult
}
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
switch functionCall.Type {
case parsers.FunctionCallStringEquals:
return r.strings_StringEquals(functionCall.Arguments)
case parsers.FunctionCallContains:
return r.strings_Contains(functionCall.Arguments)
case parsers.FunctionCallEndsWith:
return r.strings_EndsWith(functionCall.Arguments)
case parsers.FunctionCallStartsWith:
return r.strings_StartsWith(functionCall.Arguments)
case parsers.FunctionCallConcat:
return r.strings_Concat(functionCall.Arguments)
case parsers.FunctionCallIndexOf:
return r.strings_IndexOf(functionCall.Arguments)
case parsers.FunctionCallToString:
return r.strings_ToString(functionCall.Arguments)
case parsers.FunctionCallUpper:
return r.strings_Upper(functionCall.Arguments)
case parsers.FunctionCallLower:
return r.strings_Lower(functionCall.Arguments)
case parsers.FunctionCallLeft:
return r.strings_Left(functionCall.Arguments)
case parsers.FunctionCallLength:
return r.strings_Length(functionCall.Arguments)
case parsers.FunctionCallLTrim:
return r.strings_LTrim(functionCall.Arguments)
case parsers.FunctionCallReplace:
return r.strings_Replace(functionCall.Arguments)
case parsers.FunctionCallReplicate:
return r.strings_Replicate(functionCall.Arguments)
case parsers.FunctionCallReverse:
return r.strings_Reverse(functionCall.Arguments)
case parsers.FunctionCallRight:
return r.strings_Right(functionCall.Arguments)
case parsers.FunctionCallRTrim:
return r.strings_RTrim(functionCall.Arguments)
case parsers.FunctionCallSubstring:
return r.strings_Substring(functionCall.Arguments)
case parsers.FunctionCallTrim:
return r.strings_Trim(functionCall.Arguments)
case parsers.FunctionCallIsDefined:
return r.typeChecking_IsDefined(functionCall.Arguments)
case parsers.FunctionCallIsArray:
return r.typeChecking_IsArray(functionCall.Arguments)
case parsers.FunctionCallIsBool:
return r.typeChecking_IsBool(functionCall.Arguments)
case parsers.FunctionCallIsFiniteNumber:
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
case parsers.FunctionCallIsInteger:
return r.typeChecking_IsInteger(functionCall.Arguments)
case parsers.FunctionCallIsNull:
return r.typeChecking_IsNull(functionCall.Arguments)
case parsers.FunctionCallIsNumber:
return r.typeChecking_IsNumber(functionCall.Arguments)
case parsers.FunctionCallIsObject:
return r.typeChecking_IsObject(functionCall.Arguments)
case parsers.FunctionCallIsPrimitive:
return r.typeChecking_IsPrimitive(functionCall.Arguments)
case parsers.FunctionCallIsString:
return r.typeChecking_IsString(functionCall.Arguments)
case parsers.FunctionCallArrayConcat:
return r.array_Concat(functionCall.Arguments)
case parsers.FunctionCallArrayContains:
return r.array_Contains(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAny:
return r.array_Contains_Any(functionCall.Arguments)
case parsers.FunctionCallArrayContainsAll:
return r.array_Contains_All(functionCall.Arguments)
case parsers.FunctionCallArrayLength:
return r.array_Length(functionCall.Arguments)
case parsers.FunctionCallArraySlice:
return r.array_Slice(functionCall.Arguments)
case parsers.FunctionCallSetIntersect:
return r.set_Intersect(functionCall.Arguments)
case parsers.FunctionCallSetUnion:
return r.set_Union(functionCall.Arguments)
case parsers.FunctionCallMathAbs:
return r.math_Abs(functionCall.Arguments)
case parsers.FunctionCallMathAcos:
return r.math_Acos(functionCall.Arguments)
case parsers.FunctionCallMathAsin:
return r.math_Asin(functionCall.Arguments)
case parsers.FunctionCallMathAtan:
return r.math_Atan(functionCall.Arguments)
case parsers.FunctionCallMathCeiling:
return r.math_Ceiling(functionCall.Arguments)
case parsers.FunctionCallMathCos:
return r.math_Cos(functionCall.Arguments)
case parsers.FunctionCallMathCot:
return r.math_Cot(functionCall.Arguments)
case parsers.FunctionCallMathDegrees:
return r.math_Degrees(functionCall.Arguments)
case parsers.FunctionCallMathExp:
return r.math_Exp(functionCall.Arguments)
case parsers.FunctionCallMathFloor:
return r.math_Floor(functionCall.Arguments)
case parsers.FunctionCallMathIntBitNot:
return r.math_IntBitNot(functionCall.Arguments)
case parsers.FunctionCallMathLog10:
return r.math_Log10(functionCall.Arguments)
case parsers.FunctionCallMathRadians:
return r.math_Radians(functionCall.Arguments)
case parsers.FunctionCallMathRound:
return r.math_Round(functionCall.Arguments)
case parsers.FunctionCallMathSign:
return r.math_Sign(functionCall.Arguments)
case parsers.FunctionCallMathSin:
return r.math_Sin(functionCall.Arguments)
case parsers.FunctionCallMathSqrt:
return r.math_Sqrt(functionCall.Arguments)
case parsers.FunctionCallMathSquare:
return r.math_Square(functionCall.Arguments)
case parsers.FunctionCallMathTan:
return r.math_Tan(functionCall.Arguments)
case parsers.FunctionCallMathTrunc:
return r.math_Trunc(functionCall.Arguments)
case parsers.FunctionCallMathAtn2:
return r.math_Atn2(functionCall.Arguments)
case parsers.FunctionCallMathIntAdd:
return r.math_IntAdd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitAnd:
return r.math_IntBitAnd(functionCall.Arguments)
case parsers.FunctionCallMathIntBitLeftShift:
return r.math_IntBitLeftShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitOr:
return r.math_IntBitOr(functionCall.Arguments)
case parsers.FunctionCallMathIntBitRightShift:
return r.math_IntBitRightShift(functionCall.Arguments)
case parsers.FunctionCallMathIntBitXor:
return r.math_IntBitXor(functionCall.Arguments)
case parsers.FunctionCallMathIntDiv:
return r.math_IntDiv(functionCall.Arguments)
case parsers.FunctionCallMathIntMod:
return r.math_IntMod(functionCall.Arguments)
case parsers.FunctionCallMathIntMul:
return r.math_IntMul(functionCall.Arguments)
case parsers.FunctionCallMathIntSub:
return r.math_IntSub(functionCall.Arguments)
case parsers.FunctionCallMathPower:
return r.math_Power(functionCall.Arguments)
case parsers.FunctionCallMathLog:
return r.math_Log(functionCall.Arguments)
case parsers.FunctionCallMathNumberBin:
return r.math_NumberBin(functionCall.Arguments)
case parsers.FunctionCallMathPi:
return r.math_Pi()
case parsers.FunctionCallMathRand:
return r.math_Rand()
case parsers.FunctionCallAggregateAvg:
return r.aggregate_Avg(functionCall.Arguments)
case parsers.FunctionCallAggregateCount:
return r.aggregate_Count(functionCall.Arguments)
case parsers.FunctionCallAggregateMax:
return r.aggregate_Max(functionCall.Arguments)
case parsers.FunctionCallAggregateMin:
return r.aggregate_Min(functionCall.Arguments)
case parsers.FunctionCallAggregateSum:
return r.aggregate_Sum(functionCall.Arguments)
case parsers.FunctionCallIn:
return r.misc_In(functionCall.Arguments)
}
logger.Errorf("Unknown function call type: %v", functionCall.Type)
return nil
}
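For context, each case above maps one Cosmos DB built-in function to a helper on rowContext. A parsed call such as UPPER(c.name) reaches this dispatcher roughly as the literal below; the exact shape is inferred from the dispatcher and the tests in this changeset, so treat it as an illustration rather than the parser's verbatim output:

package main

import (
    "fmt"

    "github.com/pikami/cosmium/parsers"
)

func main() {
    // Roughly how UPPER(c.name) looks once parsed; the dispatcher above routes
    // it to strings_Upper via the FunctionCallUpper case, and the argument is a
    // SelectItem that resolveSelectItem evaluates against the current row.
    upperCall := parsers.FunctionCall{
        Type: parsers.FunctionCallUpper,
        Arguments: []interface{}{
            parsers.SelectItem{Path: []string{"c", "name"}},
        },
    }
    fmt.Printf("%+v\n", upperCall)
}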
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
value := r.tables[selectItem.Path[0]]
if len(selectItem.Path) > 1 {
for _, pathSegment := range selectItem.Path[1:] {
if pathSegment[0] == '@' {
pathSegment = r.parameters[pathSegment].(string)
}
switch nestedValue := value.(type) {
case map[string]interface{}:
value = nestedValue[pathSegment]
case map[string]RowType:
value = nestedValue[pathSegment]
case []int, []string, []interface{}:
slice := reflect.ValueOf(nestedValue)
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
value = slice.Index(arrayIndex).Interface()
} else {
return nil
}
default:
return nil
}
}
}
return value
}
func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
if selectItems == nil {
return false
}
for _, selectItem := range selectItems {
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
return true
}
}
if hasAggregateFunctions(selectItem.SelectItems) {
return true
}
}
return false
}
func compareValues(val1, val2 interface{}) int {
if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
return 1
}
switch val1 := val1.(type) {
case int:
val2 := val2.(int)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case float64:
val2 := val2.(float64)
if val1 < val2 {
return -1
} else if val1 > val2 {
return 1
}
return 0
case string:
val2 := val2.(string)
return strings.Compare(val1, val2)
case bool:
val2 := val2.(bool)
if val1 == val2 {
return 0
} else if val1 {
return 1
} else {
return -1
}
// TODO: Add more types
default:
if reflect.DeepEqual(val1, val2) {
return 0
}
return 1
}
}
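Note that compareValues only orders ints, float64s, strings and booleans; any type mismatch (or unhandled type that is not deeply equal) compares as 1. A small illustrative test of those semantics, assuming it sits next to compareValues inside the memoryexecutor package:

package memoryexecutor

import "testing"

// Sketch of the comparison semantics above: equal scalars compare as 0, ordered
// scalars as -1/1, and values of different types always compare as 1.
func Test_compareValues_semantics(t *testing.T) {
    if got := compareValues(1, 2); got != -1 {
        t.Errorf("expected -1 for 1 < 2, got %d", got)
    }
    if got := compareValues("a", "a"); got != 0 {
        t.Errorf("expected 0 for equal strings, got %d", got)
    }
    if got := compareValues(true, false); got != 1 {
        t.Errorf("expected 1 for true > false, got %d", got)
    }
    if got := compareValues(1, "1"); got != 1 {
        t.Errorf("expected 1 for mismatched types, got %d", got)
    }
}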
func deduplicate[T RowType | interface{}](slice []T) []T {
var result []T
result = make([]T, 0)
for i := 0; i < len(slice); i++ {
unique := true
for j := 0; j < len(result); j++ {
if compareValues(slice[i], result[j]) == 0 {
unique = false
break
}
}
if unique {
result = append(result, slice[i])
}
}
return result
}
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
targetMap := make(map[string]T)
for k, v := range originalMap {
targetMap[k] = v
}
return targetMap
}

View File

@@ -4,18 +4,41 @@ import (
 	"reflect"
 	"testing"
 
+	"github.com/pikami/cosmium/internal/datastore"
 	"github.com/pikami/cosmium/parsers"
 	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
 	testutils "github.com/pikami/cosmium/test_utils"
 )
 
+type TestDocumentIterator struct {
+	documents []memoryexecutor.RowType
+	index     int
+}
+
+func NewTestDocumentIterator(documents []memoryexecutor.RowType) *TestDocumentIterator {
+	return &TestDocumentIterator{
+		documents: documents,
+		index:     -1,
+	}
+}
+
+func (i *TestDocumentIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
+	i.index++
+	if i.index >= len(i.documents) {
+		return nil, datastore.IterEOF
+	}
+
+	return i.documents[i.index], datastore.StatusOk
+}
+
 func testQueryExecute(
 	t *testing.T,
 	query parsers.SelectStmt,
 	data []memoryexecutor.RowType,
 	expectedData []memoryexecutor.RowType,
 ) {
-	result := memoryexecutor.ExecuteQuery(query, data)
+	iter := NewTestDocumentIterator(data)
+	result := memoryexecutor.ExecuteQuery(query, iter)
 
 	if !reflect.DeepEqual(result, expectedData) {
 		t.Errorf("execution result does not match expected data.\nExpected: %+v\nGot: %+v", expectedData, result)
@@ -50,7 +73,7 @@ func Test_Execute(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			OrderExpressions: []parsers.OrderExpression{
 				{
 					SelectItem: parsers.SelectItem{Path: []string{"c", "pk"}},
@@ -79,7 +102,7 @@ func Test_Execute(t *testing.T) {
 			SelectItems: []parsers.SelectItem{
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			GroupBy: []parsers.SelectItem{
 				{Path: []string{"c", "pk"}},
 			},
@@ -102,7 +125,7 @@ func Test_Execute(t *testing.T) {
 					Type: parsers.SelectItemTypeField,
 				},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			Filters: parsers.SelectItem{
 				Type: parsers.SelectItemTypeFunctionCall,
 				Value: parsers.FunctionCall{
@@ -126,6 +149,42 @@ func Test_Execute(t *testing.T) {
 		)
 	})
 
+	t.Run("Should execute IN function with function call", func(t *testing.T) {
+		testQueryExecute(
+			t,
+			parsers.SelectStmt{
+				SelectItems: []parsers.SelectItem{
+					testutils.SelectItem_Path("c", "id"),
+				},
+				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+				Filters: parsers.SelectItem{
+					Type: parsers.SelectItemTypeFunctionCall,
+					Value: parsers.FunctionCall{
+						Type: parsers.FunctionCallIn,
+						Arguments: []interface{}{
+							parsers.SelectItem{
+								Type: parsers.SelectItemTypeFunctionCall,
+								Value: parsers.FunctionCall{
+									Type: parsers.FunctionCallToString,
+									Arguments: []interface{}{
+										testutils.SelectItem_Path("c", "id"),
+									},
+								},
+							},
+							testutils.SelectItem_Constant_String("123"),
+							testutils.SelectItem_Constant_String("456"),
+						},
+					},
+				},
+			},
+			mockData,
+			[]memoryexecutor.RowType{
+				map[string]interface{}{"id": "456"},
+				map[string]interface{}{"id": "123"},
+			},
+		)
+	})
+
 	t.Run("Should execute IN selector", func(t *testing.T) {
 		testQueryExecute(
 			t,
@@ -137,10 +196,9 @@ func Test_Execute(t *testing.T) {
 				},
 			},
 			Table: parsers.Table{
 				Value: "c",
-				SelectItem: parsers.SelectItem{
-					Path: []string{"c", "tags"},
-				},
+				SelectItem: testutils.SelectItem_Path("c", "tags"),
+				IsInSelect: true,
 			},
 		},
 		mockData,
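In short, after this change the executor consumes documents through an iterator instead of a plain slice, and the tests wrap their fixtures in TestDocumentIterator. A minimal additional test in the same style (assuming the same imports as the file above) might look like:

func Test_Execute_withIterator_sketch(t *testing.T) {
    rows := []memoryexecutor.RowType{
        map[string]interface{}{"id": "1", "pk": 1},
    }

    // Wrap the fixture rows and hand the iterator to ExecuteQuery, mirroring
    // testQueryExecute above.
    iter := NewTestDocumentIterator(rows)
    result := memoryexecutor.ExecuteQuery(
        parsers.SelectStmt{
            SelectItems: []parsers.SelectItem{testutils.SelectItem_Path("c", "id")},
            Table:       parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
        },
        iter,
    )

    if len(result) != 1 {
        t.Errorf("expected a single projected row, got %d", len(result))
    }
}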

View File

@@ -0,0 +1,22 @@
package memoryexecutor
import "github.com/pikami/cosmium/internal/datastore"
type offsetIterator struct {
documents rowTypeIterator
offset int
skipped bool
}
func (oi *offsetIterator) Next() (RowType, datastore.DataStoreStatus) {
if oi.skipped {
return oi.documents.Next()
}
for i := 0; i < oi.offset; i++ {
oi.documents.Next()
}
oi.skipped = true
return oi.Next()
}
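The offset iterator above skips lazily: nothing is consumed until the first Next call, which discards the first offset rows and then delegates to the wrapped iterator. A self-contained sketch of that behaviour (sliceRowIterator is a hypothetical helper for this illustration, and it assumes rowTypeIterator is the interface with this Next signature):

package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

// sliceRowIterator is illustrative only and not part of the package.
type sliceRowIterator struct {
    rows  []RowType
    index int
}

func (s *sliceRowIterator) Next() (RowType, datastore.DataStoreStatus) {
    if s.index >= len(s.rows) {
        return nil, datastore.IterEOF
    }
    row := s.rows[s.index]
    s.index++
    return row, datastore.StatusOk
}

// drainWithOffset shows that an offset of 2 over three rows yields only the last row.
func drainWithOffset() []RowType {
    oi := &offsetIterator{
        documents: &sliceRowIterator{rows: []RowType{
            map[string]interface{}{"id": 1},
            map[string]interface{}{"id": 2},
            map[string]interface{}{"id": 3},
        }},
        offset: 2,
    }

    var out []RowType
    for {
        row, status := oi.Next()
        if status != datastore.StatusOk {
            return out // only the row with id 3 remains
        }
        out = append(out, row)
    }
}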

View File

@@ -0,0 +1,63 @@
package memoryexecutor
import (
"sort"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type orderIterator struct {
documents rowIterator
orderExpressions []parsers.OrderExpression
orderedDocs []rowContext
docsIndex int
}
func (oi *orderIterator) Next() (rowContext, datastore.DataStoreStatus) {
if oi.orderedDocs != nil {
if oi.docsIndex >= len(oi.orderedDocs) {
return rowContext{}, datastore.IterEOF
}
row := oi.orderedDocs[oi.docsIndex]
oi.orderedDocs[oi.docsIndex] = rowContext{}
oi.docsIndex++
return row, datastore.StatusOk
}
oi.orderedDocs = make([]rowContext, 0)
for {
row, status := oi.documents.Next()
if status != datastore.StatusOk {
break
}
oi.orderedDocs = append(oi.orderedDocs, row)
}
oi.documents = nil
less := func(i, j int) bool {
for _, order := range oi.orderExpressions {
val1 := oi.orderedDocs[i].resolveSelectItem(order.SelectItem)
val2 := oi.orderedDocs[j].resolveSelectItem(order.SelectItem)
cmp := compareValues(val1, val2)
if cmp != 0 {
if order.Direction == parsers.OrderDirectionDesc {
return cmp > 0
}
return cmp < 0
}
}
return i < j
}
sort.SliceStable(oi.orderedDocs, less)
if len(oi.orderedDocs) == 0 {
return rowContext{}, datastore.IterEOF
}
oi.docsIndex = 1
return oi.orderedDocs[0], datastore.StatusOk
}

View File

@@ -0,0 +1,90 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
"golang.org/x/exp/slices"
)
type projectIterator struct {
documents rowIterator
selectItems []parsers.SelectItem
groupBy []parsers.SelectItem
}
func (pi *projectIterator) Next() (RowType, datastore.DataStoreStatus) {
if pi.documents == nil {
return rowContext{}, datastore.IterEOF
}
row, status := pi.documents.Next()
if status != datastore.StatusOk {
pi.documents = nil
return rowContext{}, status
}
if hasAggregateFunctions(pi.selectItems) && len(pi.groupBy) == 0 {
// When aggregate functions are used without a GROUP BY clause,
// we should aggregate all rows into a single result row.
allDocuments := []rowContext{row}
for {
row, status := pi.documents.Next()
if status != datastore.StatusOk {
break
}
allDocuments = append(allDocuments, row)
}
if len(allDocuments) == 0 {
return rowContext{}, datastore.IterEOF
}
aggRow := rowContext{
tables: row.tables,
parameters: row.parameters,
grouppedRows: allDocuments,
}
return aggRow.applyProjection(pi.selectItems), datastore.StatusOk
}
return row.applyProjection(pi.selectItems), datastore.StatusOk
}
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
// When the first value is top level, select it instead
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
return r.resolveSelectItem(selectItems[0])
}
// Construct a new row based on the selected columns
row := make(map[string]interface{})
for index, selectItem := range selectItems {
destinationName := resolveDestinationColumnName(selectItem, index, r.parameters)
row[destinationName] = r.resolveSelectItem(selectItem)
}
return row
}
func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
if selectItems == nil {
return false
}
for _, selectItem := range selectItems {
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
return true
}
}
if hasAggregateFunctions(selectItem.SelectItems) {
return true
}
}
return false
}

View File

@@ -0,0 +1,44 @@
package memoryexecutor
import (
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/parsers"
)
type rowTypeToRowContextIterator struct {
documents rowTypeIterator
query parsers.SelectStmt
}
func (di *rowTypeToRowContextIterator) Next() (rowContext, datastore.DataStoreStatus) {
if di.documents == nil {
return rowContext{}, datastore.IterEOF
}
doc, status := di.documents.Next()
if status != datastore.StatusOk {
di.documents = nil
return rowContext{}, status
}
var initialTableName string
if di.query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
initialTableName = di.query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value
}
if initialTableName == "" {
initialTableName = di.query.Table.Value
}
if initialTableName == "" {
initialTableName = resolveDestinationColumnName(di.query.Table.SelectItem, 0, di.query.Parameters)
}
return rowContext{
parameters: di.query.Parameters,
tables: map[string]RowType{
initialTableName: doc,
"$root": doc,
},
}, status
}
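ExecuteQuery itself is not shown in this diff, but the iterator types above suggest a pipeline that converts raw documents to row contexts, orders them, projects them back to plain rows, and finally applies the offset. A rough, hypothetical wiring inside the memoryexecutor package, given a source iterator documents and a parsed query (field names taken from the structs above; filtering, DISTINCT and the row limit are omitted, and the real implementation may differ):

// Illustrative wiring only; not the actual ExecuteQuery.
rows := &rowTypeToRowContextIterator{documents: documents, query: query} // RowType -> rowContext
ordered := &orderIterator{documents: rows, orderExpressions: query.OrderExpressions}
projected := &projectIterator{documents: ordered, selectItems: query.SelectItems, groupBy: query.GroupBy}
paged := &offsetIterator{documents: projected, offset: query.Offset}

result := make([]RowType, 0)
for {
    row, status := paged.Next()
    if status != datastore.StatusOk {
        break
    }
    result = append(result, row)
}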

View File

@@ -5,14 +5,15 @@ import (
 	"github.com/pikami/cosmium/parsers"
 	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
+	testutils "github.com/pikami/cosmium/test_utils"
 )
 
 func Test_Execute_Select(t *testing.T) {
 	mockData := []memoryexecutor.RowType{
-		map[string]interface{}{"id": "12345", "pk": 123, "_self": "self1", "_rid": "rid1", "_ts": 123456, "isCool": false},
-		map[string]interface{}{"id": "67890", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true},
-		map[string]interface{}{"id": "456", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true},
-		map[string]interface{}{"id": "123", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true},
+		map[string]interface{}{"id": "12345", "pk": 123, "_self": "self1", "_rid": "rid1", "_ts": 123456, "isCool": false, "order": nil},
+		map[string]interface{}{"id": "67890", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true, "order": 1},
+		map[string]interface{}{"id": "456", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true, "order": 2},
+		map[string]interface{}{"id": "123", "pk": 456, "_self": "self2", "_rid": "rid2", "_ts": 789012, "isCool": true, "order": 3},
 	}
 
 	t.Run("Should execute simple SELECT", func(t *testing.T) {
@@ -23,7 +24,7 @@ func Test_Execute_Select(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 		},
 		mockData,
 		[]memoryexecutor.RowType{
@@ -43,7 +44,7 @@ func Test_Execute_Select(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "@param"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			Parameters: map[string]interface{}{
 				"@param": "pk",
 			},
@@ -65,7 +66,7 @@ func Test_Execute_Select(t *testing.T) {
 			SelectItems: []parsers.SelectItem{
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			Distinct: true,
 		},
 		mockData,
@@ -84,7 +85,7 @@ func Test_Execute_Select(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			Count: 1,
 		},
 		mockData,
@@ -102,20 +103,20 @@ func Test_Execute_Select(t *testing.T) {
 				{Path: []string{"c", "id"}},
 				{Path: []string{"c", "pk"}},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 			Count: 2,
 			Offset: 1,
 			OrderExpressions: []parsers.OrderExpression{
 				{
-					SelectItem: parsers.SelectItem{Path: []string{"c", "id"}},
+					SelectItem: parsers.SelectItem{Path: []string{"c", "order"}},
 					Direction: parsers.OrderDirectionDesc,
 				},
 			},
 		},
 		mockData,
 		[]memoryexecutor.RowType{
-			map[string]interface{}{"id": "67890", "pk": 456},
 			map[string]interface{}{"id": "456", "pk": 456},
+			map[string]interface{}{"id": "67890", "pk": 456},
 		},
 	)
 })
@@ -127,7 +128,7 @@ func Test_Execute_Select(t *testing.T) {
 			SelectItems: []parsers.SelectItem{
 				{Path: []string{"c", "id"}, IsTopLevel: true},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 		},
 		mockData,
 		[]memoryexecutor.RowType{
@@ -146,7 +147,7 @@ func Test_Execute_Select(t *testing.T) {
 			SelectItems: []parsers.SelectItem{
 				{Path: []string{"c"}, IsTopLevel: true},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 		},
 		mockData,
 		mockData,
@@ -167,7 +168,7 @@ func Test_Execute_Select(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 		},
 		mockData,
 		[]memoryexecutor.RowType{
@@ -193,7 +194,7 @@ func Test_Execute_Select(t *testing.T) {
 					},
 				},
 			},
-			Table: parsers.Table{Value: "c"},
+			Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
 		},
 		mockData,
 		[]memoryexecutor.RowType{

View File

@@ -9,10 +9,14 @@ import (
 )
 
 func (r rowContext) strings_StringEquals(arguments []interface{}) bool {
-	str1 := r.parseString(arguments[0])
-	str2 := r.parseString(arguments[1])
+	str1, str1ok := r.parseString(arguments[0])
+	str2, str2ok := r.parseString(arguments[1])
 	ignoreCase := r.getBoolFlag(arguments)
 
+	if !str1ok || !str2ok {
+		return false
+	}
+
 	if ignoreCase {
 		return strings.EqualFold(str1, str2)
 	}
@@ -21,10 +25,14 @@ func (r rowContext) strings_StringEquals(arguments []interface{}) bool {
 }
 
 func (r rowContext) strings_Contains(arguments []interface{}) bool {
-	str1 := r.parseString(arguments[0])
-	str2 := r.parseString(arguments[1])
+	str1, str1ok := r.parseString(arguments[0])
+	str2, str2ok := r.parseString(arguments[1])
 	ignoreCase := r.getBoolFlag(arguments)
 
+	if !str1ok || !str2ok {
+		return false
+	}
+
 	if ignoreCase {
 		str1 = strings.ToLower(str1)
 		str2 = strings.ToLower(str2)
@@ -34,10 +42,14 @@ func (r rowContext) strings_Contains(arguments []interface{}) bool {
 }
 
 func (r rowContext) strings_EndsWith(arguments []interface{}) bool {
-	str1 := r.parseString(arguments[0])
-	str2 := r.parseString(arguments[1])
+	str1, str1ok := r.parseString(arguments[0])
+	str2, str2ok := r.parseString(arguments[1])
 	ignoreCase := r.getBoolFlag(arguments)
 
+	if !str1ok || !str2ok {
+		return false
+	}
+
 	if ignoreCase {
 		str1 = strings.ToLower(str1)
 		str2 = strings.ToLower(str2)
@@ -47,10 +59,14 @@ func (r rowContext) strings_EndsWith(arguments []interface{}) bool {
 }
 
 func (r rowContext) strings_StartsWith(arguments []interface{}) bool {
-	str1 := r.parseString(arguments[0])
-	str2 := r.parseString(arguments[1])
+	str1, str1ok := r.parseString(arguments[0])
+	str2, str2ok := r.parseString(arguments[1])
 	ignoreCase := r.getBoolFlag(arguments)
 
+	if !str1ok || !str2ok {
+		return false
+	}
+
 	if ignoreCase {
 		str1 = strings.ToLower(str1)
 		str2 = strings.ToLower(str2)
@@ -73,8 +89,12 @@ func (r rowContext) strings_Concat(arguments []interface{}) string {
 }
 
 func (r rowContext) strings_IndexOf(arguments []interface{}) int {
-	str1 := r.parseString(arguments[0])
-	str2 := r.parseString(arguments[1])
+	str1, str1ok := r.parseString(arguments[0])
+	str2, str2ok := r.parseString(arguments[1])
+
+	if !str1ok || !str2ok {
+		return -1
+	}
 
 	start := 0
 	if len(arguments) > 2 && arguments[2] != nil {
@@ -115,9 +135,13 @@ func (r rowContext) strings_Lower(arguments []interface{}) string {
 func (r rowContext) strings_Left(arguments []interface{}) string {
 	var ok bool
 	var length int
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
 	lengthEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
 
+	if !strOk {
+		return ""
+	}
+
 	if length, ok = lengthEx.(int); !ok {
 		logger.ErrorLn("strings_Left - got parameters of wrong type")
 		return ""
@@ -135,28 +159,45 @@ func (r rowContext) strings_Left(arguments []interface{}) string {
 }
 
 func (r rowContext) strings_Length(arguments []interface{}) int {
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
+
+	if !strOk {
+		return 0
+	}
 
 	return len(str)
 }
 
 func (r rowContext) strings_LTrim(arguments []interface{}) string {
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
+
+	if !strOk {
+		return ""
+	}
 
 	return strings.TrimLeft(str, " ")
 }
 
 func (r rowContext) strings_Replace(arguments []interface{}) string {
-	str := r.parseString(arguments[0])
-	oldStr := r.parseString(arguments[1])
-	newStr := r.parseString(arguments[2])
+	str, strOk := r.parseString(arguments[0])
+	oldStr, oldStrOk := r.parseString(arguments[1])
+	newStr, newStrOk := r.parseString(arguments[2])
+
+	if !strOk || !oldStrOk || !newStrOk {
+		return ""
+	}
 
 	return strings.Replace(str, oldStr, newStr, -1)
 }
 
 func (r rowContext) strings_Replicate(arguments []interface{}) string {
 	var ok bool
 	var times int
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
 	timesEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
 
+	if !strOk {
+		return ""
+	}
+
 	if times, ok = timesEx.(int); !ok {
 		logger.ErrorLn("strings_Replicate - got parameters of wrong type")
 		return ""
@@ -174,9 +215,13 @@ func (r rowContext) strings_Replicate(arguments []interface{}) string {
 }
 
 func (r rowContext) strings_Reverse(arguments []interface{}) string {
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
 	runes := []rune(str)
 
+	if !strOk {
+		return ""
+	}
+
 	for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
 		runes[i], runes[j] = runes[j], runes[i]
 	}
@@ -187,9 +232,13 @@ func (r rowContext) strings_Reverse(arguments []interface{}) string {
 func (r rowContext) strings_Right(arguments []interface{}) string {
 	var ok bool
 	var length int
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
 	lengthEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
 
+	if !strOk {
+		return ""
+	}
+
 	if length, ok = lengthEx.(int); !ok {
 		logger.ErrorLn("strings_Right - got parameters of wrong type")
 		return ""
@@ -207,7 +256,11 @@ func (r rowContext) strings_Right(arguments []interface{}) string {
 }
 
 func (r rowContext) strings_RTrim(arguments []interface{}) string {
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
+
+	if !strOk {
+		return ""
+	}
 
 	return strings.TrimRight(str, " ")
 }
@@ -215,10 +268,14 @@ func (r rowContext) strings_Substring(arguments []interface{}) string {
 	var ok bool
 	var startPos int
 	var length int
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
 	startPosEx := r.resolveSelectItem(arguments[1].(parsers.SelectItem))
 	lengthEx := r.resolveSelectItem(arguments[2].(parsers.SelectItem))
 
+	if !strOk {
+		return ""
+	}
+
 	if startPos, ok = startPosEx.(int); !ok {
 		logger.ErrorLn("strings_Substring - got start parameters of wrong type")
 		return ""
@@ -241,7 +298,11 @@ func (r rowContext) strings_Substring(arguments []interface{}) string {
 }
 
 func (r rowContext) strings_Trim(arguments []interface{}) string {
-	str := r.parseString(arguments[0])
+	str, strOk := r.parseString(arguments[0])
+
+	if !strOk {
+		return ""
+	}
 
 	return strings.TrimSpace(str)
 }
@@ -257,15 +318,15 @@ func (r rowContext) getBoolFlag(arguments []interface{}) bool {
 	return ignoreCase
 }
 
-func (r rowContext) parseString(argument interface{}) string {
+func (r rowContext) parseString(argument interface{}) (value string, ok bool) {
 	exItem := argument.(parsers.SelectItem)
 	ex := r.resolveSelectItem(exItem)
 	if str1, ok := ex.(string); ok {
-		return str1
+		return str1, true
 	}
 
 	logger.ErrorLn("StringEquals got parameters of wrong type")
-	return ""
+	return "", false
 }
 
 func convertToString(value interface{}) string {
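The net effect of the parseString change is that a null or non-string argument now short-circuits the string builtins instead of silently comparing empty strings. An illustrative internal test (assumed to live inside the memoryexecutor package, with field paths standing in for real query arguments):

package memoryexecutor

import (
    "testing"

    "github.com/pikami/cosmium/parsers"
)

// Sketch: a null first argument makes STARTSWITH evaluate to false rather than
// being treated as an empty string.
func Test_strings_StartsWith_nullArgument_sketch(t *testing.T) {
    r := rowContext{
        tables: map[string]RowType{
            "c": map[string]interface{}{"name": nil, "prefix": "ab"},
        },
    }

    args := []interface{}{
        parsers.SelectItem{Path: []string{"c", "name"}},   // resolves to nil, not a string
        parsers.SelectItem{Path: []string{"c", "prefix"}}, // resolves to "ab"
    }

    if r.strings_StartsWith(args) {
        t.Error("expected STARTSWITH with a null first argument to evaluate to false")
    }
}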

Some files were not shown because too many files have changed in this diff.