Mirror of https://github.com/pikami/cosmium.git (synced 2025-06-07 16:10:23 +01:00)

Compare commits (16 commits):

fba9b3df5f
b743e23ff9
11851297f5
560ea5296d
e20a6ca7cd
7e0c10479b
30195fae96
598f2837af
28e3c0c3d8
97eea30c97
5fe60d831a
d309d99906
b2516eda9f
813b9faeaa
e526b2269e
221f029a1d
@@ -12,7 +12,7 @@ jobs:
uses: actions/checkout@v3

- name: Cross-Compile with xgo
uses: crazy-max/ghaction-xgo@v3.1.0
uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
with:
xgo_version: latest
go_version: 1.24.0
.github/workflows/release.yml (vendored, 4 changes)

@@ -24,7 +24,7 @@ jobs:
go-version: 1.24.0

- name: Cross-Compile with xgo
uses: crazy-max/ghaction-xgo@v3.1.0
uses: crazy-max/ghaction-xgo@e22d3c8b089adba750d5a74738b8e95d96f0c991 # v3.1.0
with:
xgo_version: latest
go_version: 1.24.0

@@ -44,7 +44,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v5
uses: goreleaser/goreleaser-action@5742e2a039330cbb23ebf35f046f814d4c6ff811 # v5
with:
distribution: goreleaser
version: ${{ env.GITHUB_REF_NAME }}
Makefile (4 changes)

@@ -51,6 +51,10 @@ build-sharedlib-linux-amd64:
@echo "Building shared library for Linux x64..."
@GOOS=linux GOARCH=amd64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-linux-amd64.so $(SHARED_LIB_LOCATION)

build-sharedlib-darwin-arm64:
@echo "Building shared library for macOS ARM..."
@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(SHARED_LIB_OPT) -o $(DIST_DIR)/$(BINARY_NAME)-darwin-arm64.so $(SHARED_LIB_LOCATION)

build-sharedlib-tests: build-sharedlib-linux-amd64
@echo "Building shared library tests..."
@$(SHARED_LIB_TEST_CC) $(SHARED_LIB_TEST_CFLAGS) -o $(SHARED_LIB_TEST_TARGET) $(SHARED_LIB_TEST_SOURCES)
README.md (13 changes)

@@ -86,6 +86,7 @@ To disable SSL and run Cosmium on HTTP instead, you can use the `-DisableTls` fl
- **-Persist**: Saves data to the given path on application exit (When `-InitialData` argument is not supplied, it will try to load data from path supplied in `-Persist`)
- **-Port**: Listen port (default 8081)
- **-LogLevel**: Sets the logging level (one of: debug, info, error, silent) (default info)
- **-DataStore**: Allows selecting [storage backend](#data-storage-backends) (default "json")

These arguments allow you to configure various aspects of Cosmium's behavior according to your requirements.

@@ -99,6 +100,18 @@ All mentioned arguments can also be set using environment variables:
- **COSMIUM_PORT** for `-Port`
- **COSMIUM_LOGLEVEL** for `-LogLevel`

### Data Storage Backends

Cosmium supports multiple storage backends for saving, loading, and managing data at runtime.

| Backend | Storage Location | Write Behavior | Memory Usage | Supports Initial JSON Load |
|----------|--------------------------|--------------------------|----------------------|----------------------------|
| `json` (default) | JSON file on disk 📄 | On application exit ⏳ | 🛑 More than Badger | ✅ Yes |
| `badger` | BadgerDB database on disk ⚡ | Immediately on write 🚀 | ✅ Less than JSON | ❌ No |

The `badger` backend is generally recommended as it uses less memory and writes data to disk immediately. However, if you need to load initial data from a JSON file, use the `json` backend.

# License

This project is [MIT licensed](./LICENSE).
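As a quick check of the configuration options documented in the README change above, a regular Cosmos DB SDK client can be pointed at a locally running Cosmium instance. The sketch below uses the same azcosmos SDK that this repository's own tests use; the endpoint and key are placeholders and assume the emulator was started with `-DisableTls` on the default port (adjust them to match your `-Port` and `-AccountKey` settings):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
)

func main() {
	// Placeholder connection string; assumes Cosmium runs with -DisableTls on port 8081.
	connStr := "AccountEndpoint=http://localhost:8081;AccountKey=<your-account-key>"

	client, err := azcosmos.NewClientFromConnectionString(connStr, &azcosmos.ClientOptions{})
	if err != nil {
		log.Fatal(err)
	}

	// Create a database in the emulator.
	if _, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{ID: "testdb"}, nil); err != nil {
		log.Fatal(err)
	}

	// Create a container inside it.
	database, err := client.NewDatabase("testdb")
	if err != nil {
		log.Fatal(err)
	}
	_, err = database.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
		ID:                     "testcoll",
		PartitionKeyDefinition: azcosmos.PartitionKeyDefinition{Paths: []string{"/id"}},
	}, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("created testdb/testcoll")
}
```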
@@ -3,7 +3,7 @@ package api
import (
"github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
)

type ApiServer struct {

@@ -14,7 +14,7 @@ type ApiServer struct {
config *config.ServerConfig
}

func NewApiServer(dataRepository *repositories.DataRepository, config *config.ServerConfig) *ApiServer {
func NewApiServer(dataStore datastore.DataStore, config *config.ServerConfig) *ApiServer {
stopChan := make(chan interface{})
onServerShutdownChan := make(chan interface{})

@@ -24,7 +24,7 @@ func NewApiServer(dataRepository *repositories.DataRepository, config *config.Se
config: config,
}

apiServer.CreateRouter(dataRepository)
apiServer.CreateRouter(dataStore)

return apiServer
}
@@ -15,6 +15,11 @@ const (
ExplorerBaseUrlLocation = "/_explorer"
)

const (
DataStoreJson = "json"
DataStoreBadger = "badger"
)

func ParseFlags() ServerConfig {
host := flag.String("Host", "localhost", "Hostname")
port := flag.Int("Port", 8081, "Listen port")

@@ -28,6 +33,8 @@ func ParseFlags() ServerConfig {
persistDataPath := flag.String("Persist", "", "Saves data to given path on application exit")
logLevel := NewEnumValue("info", []string{"debug", "info", "error", "silent"})
flag.Var(logLevel, "LogLevel", fmt.Sprintf("Sets the logging level %s", logLevel.AllowedValuesList()))
dataStore := NewEnumValue("json", []string{DataStoreJson, DataStoreBadger})
flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %s", dataStore.AllowedValuesList()))

flag.Parse()
setFlagsFromEnvironment()

@@ -44,6 +51,7 @@ func ParseFlags() ServerConfig {
config.DisableTls = *disableTls
config.AccountKey = *accountKey
config.LogLevel = logLevel.value
config.DataStore = dataStore.value

config.PopulateCalculatedFields()

@@ -68,6 +76,29 @@ func (c *ServerConfig) PopulateCalculatedFields() {
default:
logger.SetLogLevel(logger.LogLevelInfo)
}

fileInfo, err := os.Stat(c.PersistDataFilePath)
if c.PersistDataFilePath != "" && !os.IsNotExist(err) {
if err != nil {
logger.ErrorLn("Failed to get file info for persist path:", err)
os.Exit(1)
}

if c.DataStore == DataStoreJson && fileInfo.IsDir() {
logger.ErrorLn("--Persist cannot be a directory when using json data store")
os.Exit(1)
}

if c.DataStore == DataStoreBadger && !fileInfo.IsDir() {
logger.ErrorLn("--Persist must be a directory when using Badger data store")
os.Exit(1)
}
}

if c.DataStore == DataStoreBadger && c.InitialDataFilePath != "" {
logger.ErrorLn("InitialData option is currently not supported with Badger data store")
os.Exit(1)
}
}

func (c *ServerConfig) ApplyDefaultsToEmptyFields() {
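The new `-DataStore` option above is registered through the same enum-style flag helper already used for `-LogLevel`. For readers unfamiliar with that pattern, here is a minimal, self-contained sketch of an enum flag; the `enumFlag` type is illustrative only and is not Cosmium's actual `NewEnumValue` implementation (the `slices` package requires Go 1.21+, which the Go 1.24 toolchain in the workflows satisfies):

```go
package main

import (
	"flag"
	"fmt"
	"slices"
)

// enumFlag accepts only one of a fixed set of string values.
type enumFlag struct {
	value   string
	allowed []string
}

func (e *enumFlag) String() string { return e.value }

// Set validates the supplied value against the allowed list.
func (e *enumFlag) Set(v string) error {
	if !slices.Contains(e.allowed, v) {
		return fmt.Errorf("must be one of %v", e.allowed)
	}
	e.value = v
	return nil
}

func main() {
	dataStore := &enumFlag{value: "json", allowed: []string{"json", "badger"}}
	flag.Var(dataStore, "DataStore", fmt.Sprintf("Sets the data store %v", dataStore.allowed))
	flag.Parse()
	fmt.Println("selected data store:", dataStore.value)
}
```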
@@ -17,4 +17,6 @@ type ServerConfig struct {
DisableTls bool `json:"disableTls"`
LogLevel string `json:"logLevel"`
ExplorerBaseUrlLocation string `json:"explorerBaseUrlLocation"`

DataStore string `json:"dataStore"`
}
@@ -5,15 +5,16 @@ import (
"net/http"

"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/datastore"
)

func (h *Handlers) GetAllCollections(c *gin.Context) {
databaseId := c.Param("databaseId")

collections, status := h.repository.GetAllCollections(databaseId)
if status == repositorymodels.StatusOk {
database, _ := h.repository.GetDatabase(databaseId)
collections, status := h.dataStore.GetAllCollections(databaseId)
if status == datastore.StatusOk {
database, _ := h.dataStore.GetDatabase(databaseId)

c.Header("x-ms-item-count", fmt.Sprintf("%d", len(collections)))
c.IndentedJSON(http.StatusOK, gin.H{

@@ -24,48 +25,48 @@ func (h *Handlers) GetAllCollections(c *gin.Context) {
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) GetCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
id := c.Param("collId")

collection, status := h.repository.GetCollection(databaseId, id)
if status == repositorymodels.StatusOk {
collection, status := h.dataStore.GetCollection(databaseId, id)
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, collection)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DeleteCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
id := c.Param("collId")

status := h.repository.DeleteCollection(databaseId, id)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteCollection(databaseId, id)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) CreateCollection(c *gin.Context) {
databaseId := c.Param("databaseId")
var newCollection repositorymodels.Collection
var newCollection datastore.Collection

if err := c.BindJSON(&newCollection); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})

@@ -73,20 +74,20 @@ func (h *Handlers) CreateCollection(c *gin.Context) {
}

if newCollection.ID == "" {
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

createdCollection, status := h.repository.CreateCollection(databaseId, newCollection)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdCollection, status := h.dataStore.CreateCollection(databaseId, newCollection)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdCollection)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}
@@ -7,11 +7,11 @@ import (
)

func (h *Handlers) CosmiumExport(c *gin.Context) {
repositoryState, err := h.repository.GetState()
dataStoreState, err := h.dataStore.DumpToJson()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}

c.Data(http.StatusOK, "application/json", []byte(repositoryState))
c.Data(http.StatusOK, "application/json", []byte(dataStoreState))
}
@@ -5,12 +5,13 @@ import (
"net/http"

"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/datastore"
)

func (h *Handlers) GetAllDatabases(c *gin.Context) {
databases, status := h.repository.GetAllDatabases()
if status == repositorymodels.StatusOk {
databases, status := h.dataStore.GetAllDatabases()
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(databases)))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": "",

@@ -20,45 +21,45 @@ func (h *Handlers) GetAllDatabases(c *gin.Context) {
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) GetDatabase(c *gin.Context) {
id := c.Param("databaseId")

database, status := h.repository.GetDatabase(id)
if status == repositorymodels.StatusOk {
database, status := h.dataStore.GetDatabase(id)
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, database)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DeleteDatabase(c *gin.Context) {
id := c.Param("databaseId")

status := h.repository.DeleteDatabase(id)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteDatabase(id)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) CreateDatabase(c *gin.Context) {
var newDatabase repositorymodels.Database
var newDatabase datastore.Database

if err := c.BindJSON(&newDatabase); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})

@@ -66,20 +67,20 @@ func (h *Handlers) CreateDatabase(c *gin.Context) {
}

if newDatabase.ID == "" {
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

createdDatabase, status := h.repository.CreateDatabase(newDatabase)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdDatabase, status := h.dataStore.CreateDatabase(newDatabase)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDatabase)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}
@@ -10,17 +10,21 @@ import (
"github.com/gin-gonic/gin"
apimodels "github.com/pikami/cosmium/api/api_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/converters"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/parsers"
"github.com/pikami/cosmium/parsers/nosql"
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
)

func (h *Handlers) GetAllDocuments(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

documents, status := h.repository.GetAllDocuments(databaseId, collectionId)
if status == repositorymodels.StatusOk {
collection, _ := h.repository.GetCollection(databaseId, collectionId)
documents, status := h.dataStore.GetAllDocuments(databaseId, collectionId)
if status == datastore.StatusOk {
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)

c.Header("x-ms-item-count", fmt.Sprintf("%d", len(documents)))
c.IndentedJSON(http.StatusOK, gin.H{

@@ -31,7 +35,7 @@ func (h *Handlers) GetAllDocuments(c *gin.Context) {
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) GetDocument(c *gin.Context) {

@@ -39,18 +43,18 @@ func (h *Handlers) GetDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")

document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusOk {
document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, document)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DeleteDocument(c *gin.Context) {

@@ -58,21 +62,21 @@ func (h *Handlers) DeleteDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")

status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

// TODO: Maybe move "replace" logic to repository
// TODO: Maybe move "replace" logic to data store
func (h *Handlers) ReplaceDocument(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

@@ -84,24 +88,24 @@ func (h *Handlers) ReplaceDocument(c *gin.Context) {
return
}

status := h.repository.DeleteDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
status := h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) PatchDocument(c *gin.Context) {

@@ -109,9 +113,9 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
collectionId := c.Param("collId")
documentId := c.Param("docId")

document, status := h.repository.GetDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
document, status := h.dataStore.GetDocument(databaseId, collectionId, documentId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

@@ -160,24 +164,24 @@ func (h *Handlers) PatchDocument(c *gin.Context) {
return
}

status = h.repository.DeleteDocument(databaseId, collectionId, documentId)
if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
status = h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, modifiedDocument)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, modifiedDocument)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DocumentsPost(c *gin.Context) {

@@ -204,27 +208,27 @@ func (h *Handlers) DocumentsPost(c *gin.Context) {
}

if requestBody["id"] == "" {
c.JSON(http.StatusBadRequest, gin.H{"message": "BadRequest"})
c.JSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

isUpsert, _ := strconv.ParseBool(c.GetHeader("x-ms-documentdb-is-upsert"))
if isUpsert {
h.repository.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
h.dataStore.DeleteDocument(databaseId, collectionId, requestBody["id"].(string))
}

createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, requestBody)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, requestBody)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdDocument)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func parametersToMap(pairs []interface{}) map[string]interface{} {

@@ -253,14 +257,15 @@ func (h *Handlers) handleDocumentQuery(c *gin.Context, requestBody map[string]in
queryParameters = parametersToMap(paramsArray)
}

docs, status := h.repository.ExecuteQueryDocuments(databaseId, collectionId, requestBody["query"].(string), queryParameters)
if status != repositorymodels.StatusOk {
queryText := requestBody["query"].(string)
docs, status := h.executeQueryDocuments(databaseId, collectionId, queryText, queryParameters)
if status != datastore.StatusOk {
// TODO: Currently we return everything if the query fails
h.GetAllDocuments(c)
return
}

collection, _ := h.repository.GetCollection(databaseId, collectionId)
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(docs)))
c.IndentedJSON(http.StatusOK, gin.H{
"_rid": collection.ResourceID,

@@ -283,9 +288,9 @@ func (h *Handlers) handleBatchRequest(c *gin.Context) {
for idx, operation := range batchOperations {
switch operation.OperationType {
case apimodels.BatchOperationTypeCreate:
createdDocument, status := h.repository.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := repositoryStatusToResponseCode(status)
if status == repositorymodels.StatusOk {
createdDocument, status := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := dataStoreStatusToResponseCode(status)
if status == datastore.StatusOk {
responseCode = http.StatusCreated
}
batchOperationResults[idx] = apimodels.BatchOperationResult{

@@ -293,25 +298,25 @@ func (h *Handlers) handleBatchRequest(c *gin.Context) {
ResourceBody: createdDocument,
}
case apimodels.BatchOperationTypeDelete:
status := h.repository.DeleteDocument(databaseId, collectionId, operation.Id)
responseCode := repositoryStatusToResponseCode(status)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
responseCode := dataStoreStatusToResponseCode(status)
if status == datastore.StatusOk {
responseCode = http.StatusNoContent
}
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: responseCode,
}
case apimodels.BatchOperationTypeReplace:
deleteStatus := h.repository.DeleteDocument(databaseId, collectionId, operation.Id)
if deleteStatus == repositorymodels.StatusNotFound {
deleteStatus := h.dataStore.DeleteDocument(databaseId, collectionId, operation.Id)
if deleteStatus == datastore.StatusNotFound {
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: http.StatusNotFound,
}
continue
}
createdDocument, createStatus := h.repository.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := repositoryStatusToResponseCode(createStatus)
if createStatus == repositorymodels.StatusOk {
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := dataStoreStatusToResponseCode(createStatus)
if createStatus == datastore.StatusOk {
responseCode = http.StatusCreated
}
batchOperationResults[idx] = apimodels.BatchOperationResult{

@@ -320,10 +325,10 @@ func (h *Handlers) handleBatchRequest(c *gin.Context) {
}
case apimodels.BatchOperationTypeUpsert:
documentId := operation.ResourceBody["id"].(string)
h.repository.DeleteDocument(databaseId, collectionId, documentId)
createdDocument, createStatus := h.repository.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := repositoryStatusToResponseCode(createStatus)
if createStatus == repositorymodels.StatusOk {
h.dataStore.DeleteDocument(databaseId, collectionId, documentId)
createdDocument, createStatus := h.dataStore.CreateDocument(databaseId, collectionId, operation.ResourceBody)
responseCode := dataStoreStatusToResponseCode(createStatus)
if createStatus == datastore.StatusOk {
responseCode = http.StatusCreated
}
batchOperationResults[idx] = apimodels.BatchOperationResult{

@@ -331,9 +336,9 @@ func (h *Handlers) handleBatchRequest(c *gin.Context) {
ResourceBody: createdDocument,
}
case apimodels.BatchOperationTypeRead:
document, status := h.repository.GetDocument(databaseId, collectionId, operation.Id)
document, status := h.dataStore.GetDocument(databaseId, collectionId, operation.Id)
batchOperationResults[idx] = apimodels.BatchOperationResult{
StatusCode: repositoryStatusToResponseCode(status),
StatusCode: dataStoreStatusToResponseCode(status),
ResourceBody: document,
}
case apimodels.BatchOperationTypePatch:

@@ -352,17 +357,40 @@ func (h *Handlers) handleBatchRequest(c *gin.Context) {
c.JSON(http.StatusOK, batchOperationResults)
}

func repositoryStatusToResponseCode(status repositorymodels.RepositoryStatus) int {
func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
switch status {
case repositorymodels.StatusOk:
case datastore.StatusOk:
return http.StatusOK
case repositorymodels.StatusNotFound:
case datastore.StatusNotFound:
return http.StatusNotFound
case repositorymodels.Conflict:
case datastore.Conflict:
return http.StatusConflict
case repositorymodels.BadRequest:
case datastore.BadRequest:
return http.StatusBadRequest
default:
return http.StatusInternalServerError
}
}

func (h *Handlers) executeQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, datastore.DataStoreStatus) {
parsedQuery, err := nosql.Parse("", []byte(query))
if err != nil {
logger.Errorf("Failed to parse query: %s\nerr: %v", query, err)
return nil, datastore.BadRequest
}

allDocumentsIterator, status := h.dataStore.GetDocumentIterator(databaseId, collectionId)
if status != datastore.StatusOk {
return nil, status
}
defer allDocumentsIterator.Close()

rowsIterator := converters.NewDocumentToRowTypeIterator(allDocumentsIterator)

if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
typedQuery.Parameters = queryParameters
return memoryexecutor.ExecuteQuery(typedQuery, rowsIterator), datastore.StatusOk
}

return nil, datastore.BadRequest
}
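The renamed `dataStoreStatusToResponseCode` helper above centralizes the status-to-HTTP mapping used by the batch operations. A small table-driven test sketch for it, assuming it lives in the same `handlers` package as in this diff and using only the statuses and codes visible in the change:

```go
package handlers

import (
	"net/http"
	"testing"

	"github.com/pikami/cosmium/internal/datastore"
)

// Verifies the mapping from data store statuses to HTTP status codes.
func Test_dataStoreStatusToResponseCode(t *testing.T) {
	cases := []struct {
		status datastore.DataStoreStatus
		want   int
	}{
		{datastore.StatusOk, http.StatusOK},
		{datastore.StatusNotFound, http.StatusNotFound},
		{datastore.Conflict, http.StatusConflict},
		{datastore.BadRequest, http.StatusBadRequest},
	}

	for _, c := range cases {
		if got := dataStoreStatusToResponseCode(c.status); got != c.want {
			t.Errorf("dataStoreStatusToResponseCode(%v) = %d, want %d", c.status, got, c.want)
		}
	}
}
```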
@@ -2,17 +2,17 @@ package handlers

import (
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
)

type Handlers struct {
repository *repositories.DataRepository
config *config.ServerConfig
dataStore datastore.DataStore
config *config.ServerConfig
}

func NewHandlers(dataRepository *repositories.DataRepository, config *config.ServerConfig) *Handlers {
func NewHandlers(dataStore datastore.DataStore, config *config.ServerConfig) *Handlers {
return &Handlers{
repository: dataRepository,
config: config,
dataStore: dataStore,
config: config,
}
}
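For orientation, the following is a partial, inferred excerpt of the `datastore.DataStore` contract as it can be reconstructed from the handler calls in this change set. It is an approximation for readers of the diff; the real interface lives in `internal/datastore` and may differ in names and return types:

```go
// Inferred from usage in the handlers above; signatures are approximations,
// not the actual definition from internal/datastore.
type DataStore interface {
	GetAllDatabases() ([]Database, DataStoreStatus)
	GetDatabase(id string) (Database, DataStoreStatus)
	CreateDatabase(db Database) (Database, DataStoreStatus)
	DeleteDatabase(id string) DataStoreStatus

	GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
	GetCollection(databaseId string, id string) (Collection, DataStoreStatus)
	CreateCollection(databaseId string, coll Collection) (Collection, DataStoreStatus)
	DeleteCollection(databaseId string, id string) DataStoreStatus

	GetDocument(databaseId, collectionId, documentId string) (map[string]interface{}, DataStoreStatus)
	CreateDocument(databaseId, collectionId string, document map[string]interface{}) (map[string]interface{}, DataStoreStatus)
	DeleteDocument(databaseId, collectionId, documentId string) DataStoreStatus

	DumpToJson() (string, error)

	// ...plus stored procedure, trigger, user-defined function,
	// partition key range, and document iterator methods used elsewhere
	// in this change set.
}
```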
@@ -5,7 +5,8 @@ import (
"net/http"

"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/resourceid"
)

@@ -18,8 +19,8 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
return
}

partitionKeyRanges, status := h.repository.GetPartitionKeyRanges(databaseId, collectionId)
if status == repositorymodels.StatusOk {
partitionKeyRanges, status := h.dataStore.GetPartitionKeyRanges(databaseId, collectionId)
if status == datastore.StatusOk {
c.Header("etag", "\"420\"")
c.Header("lsn", "420")
c.Header("x-ms-cosmos-llsn", "420")

@@ -27,7 +28,7 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))

collectionRid := collectionId
collection, _ := h.repository.GetCollection(databaseId, collectionId)
collection, _ := h.dataStore.GetCollection(databaseId, collectionId)
if collection.ResourceID != "" {
collectionRid = collection.ResourceID
}

@@ -41,10 +42,10 @@ func (h *Handlers) GetPartitionKeyRanges(c *gin.Context) {
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}
@@ -5,22 +5,23 @@ import (
"net/http"

"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/datastore"
)

func (h *Handlers) GetAllStoredProcedures(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

sps, status := h.repository.GetAllStoredProcedures(databaseId, collectionId)
sps, status := h.dataStore.GetAllStoredProcedures(databaseId, collectionId)

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(sps)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "StoredProcedures": sps, "_count": len(sps)})
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) GetStoredProcedure(c *gin.Context) {

@@ -28,19 +29,19 @@ func (h *Handlers) GetStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")

sp, status := h.repository.GetStoredProcedure(databaseId, collectionId, spId)
sp, status := h.dataStore.GetStoredProcedure(databaseId, collectionId, spId)

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, sp)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {

@@ -48,18 +49,18 @@ func (h *Handlers) DeleteStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")

status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {

@@ -67,52 +68,52 @@ func (h *Handlers) ReplaceStoredProcedure(c *gin.Context) {
collectionId := c.Param("collId")
spId := c.Param("spId")

var sp repositorymodels.StoredProcedure
var sp datastore.StoredProcedure
if err := c.BindJSON(&sp); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

status := h.repository.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
status := h.dataStore.DeleteStoredProcedure(databaseId, collectionId, spId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdSP)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) CreateStoredProcedure(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

var sp repositorymodels.StoredProcedure
var sp datastore.StoredProcedure
if err := c.BindJSON(&sp); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

createdSP, status := h.repository.CreateStoredProcedure(databaseId, collectionId, sp)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdSP, status := h.dataStore.CreateStoredProcedure(databaseId, collectionId, sp)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdSP)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}
@@ -5,22 +5,23 @@ import (
"net/http"

"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/datastore"
)

func (h *Handlers) GetAllTriggers(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

triggers, status := h.repository.GetAllTriggers(databaseId, collectionId)
triggers, status := h.dataStore.GetAllTriggers(databaseId, collectionId)

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(triggers)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Triggers": triggers, "_count": len(triggers)})
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) GetTrigger(c *gin.Context) {

@@ -28,19 +29,19 @@ func (h *Handlers) GetTrigger(c *gin.Context) {
collectionId := c.Param("collId")
triggerId := c.Param("triggerId")

trigger, status := h.repository.GetTrigger(databaseId, collectionId, triggerId)
trigger, status := h.dataStore.GetTrigger(databaseId, collectionId, triggerId)

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, trigger)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DeleteTrigger(c *gin.Context) {

@@ -48,18 +49,18 @@ func (h *Handlers) DeleteTrigger(c *gin.Context) {
collectionId := c.Param("collId")
triggerId := c.Param("triggerId")

status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) ReplaceTrigger(c *gin.Context) {

@@ -67,52 +68,52 @@ func (h *Handlers) ReplaceTrigger(c *gin.Context) {
collectionId := c.Param("collId")
triggerId := c.Param("triggerId")

var trigger repositorymodels.Trigger
var trigger datastore.Trigger
if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

status := h.repository.DeleteTrigger(databaseId, collectionId, triggerId)
if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
status := h.dataStore.DeleteTrigger(databaseId, collectionId, triggerId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdTrigger)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) CreateTrigger(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

var trigger repositorymodels.Trigger
var trigger datastore.Trigger
if err := c.BindJSON(&trigger); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

createdTrigger, status := h.repository.CreateTrigger(databaseId, collectionId, trigger)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdTrigger, status := h.dataStore.CreateTrigger(databaseId, collectionId, trigger)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdTrigger)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}
@@ -5,22 +5,23 @@ import (
"net/http"

"github.com/gin-gonic/gin"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/constants"
"github.com/pikami/cosmium/internal/datastore"
)

func (h *Handlers) GetAllUserDefinedFunctions(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

udfs, status := h.repository.GetAllUserDefinedFunctions(databaseId, collectionId)
udfs, status := h.dataStore.GetAllUserDefinedFunctions(databaseId, collectionId)

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.Header("x-ms-item-count", fmt.Sprintf("%d", len(udfs)))
c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "UserDefinedFunctions": udfs, "_count": len(udfs)})
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {

@@ -28,19 +29,19 @@ func (h *Handlers) GetUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId")
udfId := c.Param("udfId")

udf, status := h.repository.GetUserDefinedFunction(databaseId, collectionId, udfId)
udf, status := h.dataStore.GetUserDefinedFunction(databaseId, collectionId, udfId)

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, udf)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {

@@ -48,18 +49,18 @@ func (h *Handlers) DeleteUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId")
udfId := c.Param("udfId")

status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusOk {
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusOk {
c.Status(http.StatusNoContent)
return
}

if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {

@@ -67,52 +68,52 @@ func (h *Handlers) ReplaceUserDefinedFunction(c *gin.Context) {
collectionId := c.Param("collId")
udfId := c.Param("udfId")

var udf repositorymodels.UserDefinedFunction
var udf datastore.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

status := h.repository.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == repositorymodels.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, gin.H{"message": "NotFound"})
status := h.dataStore.DeleteUserDefinedFunction(databaseId, collectionId, udfId)
if status == datastore.StatusNotFound {
c.IndentedJSON(http.StatusNotFound, constants.NotFoundResponse)
return
}

createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusOK, createdUdf)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}

func (h *Handlers) CreateUserDefinedFunction(c *gin.Context) {
databaseId := c.Param("databaseId")
collectionId := c.Param("collId")

var udf repositorymodels.UserDefinedFunction
var udf datastore.UserDefinedFunction
if err := c.BindJSON(&udf); err != nil {
c.IndentedJSON(http.StatusBadRequest, gin.H{"message": "Invalid body"})
c.IndentedJSON(http.StatusBadRequest, constants.BadRequestResponse)
return
}

createdUdf, status := h.repository.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == repositorymodels.Conflict {
c.IndentedJSON(http.StatusConflict, gin.H{"message": "Conflict"})
createdUdf, status := h.dataStore.CreateUserDefinedFunction(databaseId, collectionId, udf)
if status == datastore.Conflict {
c.IndentedJSON(http.StatusConflict, constants.ConflictResponse)
return
}

if status == repositorymodels.StatusOk {
if status == datastore.StatusOk {
c.IndentedJSON(http.StatusCreated, createdUdf)
return
}

c.IndentedJSON(http.StatusInternalServerError, gin.H{"message": "Unknown error"})
c.IndentedJSON(http.StatusInternalServerError, constants.UnknownErrorResponse)
}
@@ -10,15 +10,15 @@ import (
"github.com/gin-gonic/gin"
"github.com/pikami/cosmium/api/handlers"
"github.com/pikami/cosmium/api/handlers/middleware"
"github.com/pikami/cosmium/internal/datastore"
"github.com/pikami/cosmium/internal/logger"
"github.com/pikami/cosmium/internal/repositories"
tlsprovider "github.com/pikami/cosmium/internal/tls_provider"
)

var ginMux sync.Mutex

func (s *ApiServer) CreateRouter(repository *repositories.DataRepository) {
routeHandlers := handlers.NewHandlers(repository, s.config)
func (s *ApiServer) CreateRouter(dataStore datastore.DataStore) {
routeHandlers := handlers.NewHandlers(dataStore, s.config)

ginMux.Lock()
gin.DefaultWriter = logger.InfoWriter()
@@ -17,9 +17,9 @@ func Test_Authentication(t *testing.T) {
defer ts.Server.Close()

t.Run("Should get 200 when correct account key is used", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
formatConnectionString(ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)

@@ -33,9 +33,9 @@ func Test_Authentication(t *testing.T) {
})

t.Run("Should get 401 when wrong account key is used", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
formatConnectionString(ts.URL, "AAAA"),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)

@@ -70,9 +70,9 @@ func Test_Authentication_Disabled(t *testing.T) {
defer ts.Server.Close()

t.Run("Should get 200 when wrong account key is used, but authentication is dissabled", func(t *testing.T) {
ts.Repository.DeleteDatabase(testDatabaseName)
ts.DataStore.DeleteDatabase(testDatabaseName)
client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, "AAAA"),
formatConnectionString(ts.URL, "AAAA"),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)

@@ -85,3 +85,7 @@ func Test_Authentication_Disabled(t *testing.T) {
assert.Equal(t, createResponse.DatabaseProperties.ID, testDatabaseName)
})
}

func formatConnectionString(endpoint, key string) string {
return fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", endpoint, key)
}
@@ -3,32 +3,29 @@ package tests_test
import (
"context"
"errors"
"fmt"
"net/http"
"testing"

"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
"github.com/pikami/cosmium/api/config"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
"github.com/stretchr/testify/assert"
)

func Test_Collections(t *testing.T) {
ts := runTestServer()
defer ts.Server.Close()
presets := []testPreset{PresetJsonStore, PresetBadgerStore}

client, err := azcosmos.NewClientFromConnectionString(
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
&azcosmos.ClientOptions{},
)
assert.Nil(t, err)
setUp := func(ts *TestServer, client *azcosmos.Client) *azcosmos.DatabaseClient {
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
databaseClient, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err)

ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
databaseClient, err := client.NewDatabase(testDatabaseName)
assert.Nil(t, err)
return databaseClient
}

runTestsWithPresets(t, "Collection Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)

t.Run("Collection Create", func(t *testing.T) {
t.Run("Should create collection", func(t *testing.T) {
createResponse, err := databaseClient.CreateContainer(context.TODO(), azcosmos.ContainerProperties{
ID: testCollectionName,

@@ -39,7 +36,7 @@ func Test_Collections(t *testing.T) {
})

t.Run("Should return conflict when collection exists", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
})

@@ -57,9 +54,11 @@ func Test_Collections(t *testing.T) {
})
})

t.Run("Collection Read", func(t *testing.T) {
runTestsWithPresets(t, "Collection Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)

t.Run("Should read collection", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
})

@@ -73,7 +72,7 @@ func Test_Collections(t *testing.T) {
})

t.Run("Should return not found when collection does not exist", func(t *testing.T) {
ts.Repository.DeleteCollection(testDatabaseName, testCollectionName)
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)

collectionResponse, err := databaseClient.NewContainer(testCollectionName)
assert.Nil(t, err)

@@ -90,9 +89,11 @@ func Test_Collections(t *testing.T) {
})
})

t.Run("Collection Delete", func(t *testing.T) {
runTestsWithPresets(t, "Collection Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
databaseClient := setUp(ts, client)

t.Run("Should delete collection", func(t *testing.T) {
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
ID: testCollectionName,
})

@@ -105,7 +106,7 @@ func Test_Collections(t *testing.T) {
})

t.Run("Should return not found when collection does not exist", func(t *testing.T) {
|
||||
ts.Repository.DeleteCollection(testDatabaseName, testCollectionName)
|
||||
ts.DataStore.DeleteCollection(testDatabaseName, testCollectionName)
|
||||
|
||||
collectionResponse, err := databaseClient.NewContainer(testCollectionName)
|
||||
assert.Nil(t, err)
|
||||
|
@ -1,42 +1,59 @@
|
||||
package tests_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||
"github.com/pikami/cosmium/api"
|
||||
"github.com/pikami/cosmium/api/config"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
|
||||
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/repositories"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
type TestServer struct {
|
||||
Server *httptest.Server
|
||||
Repository *repositories.DataRepository
|
||||
URL string
|
||||
Server *httptest.Server
|
||||
DataStore datastore.DataStore
|
||||
URL string
|
||||
}
|
||||
|
||||
func runTestServerCustomConfig(config *config.ServerConfig) *TestServer {
|
||||
repository := repositories.NewDataRepository(repositories.RepositoryOptions{})
|
||||
func getDefaultTestServerConfig() *config.ServerConfig {
|
||||
return &config.ServerConfig{
|
||||
AccountKey: config.DefaultAccountKey,
|
||||
ExplorerPath: "/tmp/nothing",
|
||||
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
|
||||
DataStore: "json",
|
||||
}
|
||||
}
|
||||
|
||||
api := api.NewApiServer(repository, config)
|
||||
func runTestServerCustomConfig(configuration *config.ServerConfig) *TestServer {
|
||||
var dataStore datastore.DataStore
|
||||
switch configuration.DataStore {
|
||||
case config.DataStoreBadger:
|
||||
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{})
|
||||
default:
|
||||
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{})
|
||||
}
|
||||
|
||||
api := api.NewApiServer(dataStore, configuration)
|
||||
|
||||
server := httptest.NewServer(api.GetRouter())
|
||||
|
||||
config.DatabaseEndpoint = server.URL
|
||||
configuration.DatabaseEndpoint = server.URL
|
||||
|
||||
return &TestServer{
|
||||
Server: server,
|
||||
Repository: repository,
|
||||
URL: server.URL,
|
||||
Server: server,
|
||||
DataStore: dataStore,
|
||||
URL: server.URL,
|
||||
}
|
||||
}
|
||||
|
||||
func runTestServer() *TestServer {
|
||||
config := &config.ServerConfig{
|
||||
AccountKey: config.DefaultAccountKey,
|
||||
ExplorerPath: "/tmp/nothing",
|
||||
ExplorerBaseUrlLocation: config.ExplorerBaseUrlLocation,
|
||||
}
|
||||
config := getDefaultTestServerConfig()
|
||||
|
||||
config.LogLevel = "debug"
|
||||
logger.SetLogLevel(logger.LogLevelDebug)
|
||||
@ -49,3 +66,47 @@ const (
|
||||
testDatabaseName = "test-db"
|
||||
testCollectionName = "test-coll"
|
||||
)
|
||||
|
||||
type testFunc func(t *testing.T, ts *TestServer, cosmosClient *azcosmos.Client)
|
||||
type testPreset string
|
||||
|
||||
const (
|
||||
PresetJsonStore testPreset = "JsonDS"
|
||||
PresetBadgerStore testPreset = "BadgerDS"
|
||||
)
|
||||
|
||||
func runTestsWithPreset(t *testing.T, name string, testPreset testPreset, f testFunc) {
|
||||
serverConfig := getDefaultTestServerConfig()
|
||||
|
||||
serverConfig.LogLevel = "debug"
|
||||
logger.SetLogLevel(logger.LogLevelDebug)
|
||||
|
||||
switch testPreset {
|
||||
case PresetBadgerStore:
|
||||
serverConfig.DataStore = config.DataStoreBadger
|
||||
case PresetJsonStore:
|
||||
serverConfig.DataStore = config.DataStoreJson
|
||||
}
|
||||
|
||||
ts := runTestServerCustomConfig(serverConfig)
|
||||
defer ts.Server.Close()
|
||||
defer ts.DataStore.Close()
|
||||
|
||||
client, err := azcosmos.NewClientFromConnectionString(
|
||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
||||
&azcosmos.ClientOptions{},
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
|
||||
testName := fmt.Sprintf("%s_%s", testPreset, name)
|
||||
|
||||
t.Run(testName, func(t *testing.T) {
|
||||
f(t, ts, client)
|
||||
})
|
||||
}
|
||||
|
||||
func runTestsWithPresets(t *testing.T, name string, testPresets []testPreset, f testFunc) {
|
||||
for _, testPreset := range testPresets {
|
||||
runTestsWithPreset(t, name, testPreset, f)
|
||||
}
|
||||
}
|
||||
|
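The preset helpers above run the same test body once per storage backend. As a minimal usage sketch (the test name and body here are illustrative assumptions, not lines from this diff):

```go
// Hypothetical sketch: drive one test body against both backends via the new helpers.
presets := []testPreset{PresetJsonStore, PresetBadgerStore}

runTestsWithPresets(t, "Example", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
	// Each invocation gets a fresh TestServer wired to the selected data store.
	ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
	// ... exercise the Cosmos SDK client against ts.URL here
})
```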
@ -3,30 +3,21 @@ package tests_test
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||
"github.com/pikami/cosmium/api/config"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_Databases(t *testing.T) {
|
||||
ts := runTestServer()
|
||||
defer ts.Server.Close()
|
||||
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
||||
|
||||
client, err := azcosmos.NewClientFromConnectionString(
|
||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
||||
&azcosmos.ClientOptions{},
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
|
||||
t.Run("Database Create", func(t *testing.T) {
|
||||
runTestsWithPresets(t, "Database Create", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||
t.Run("Should create database", func(t *testing.T) {
|
||||
ts.Repository.DeleteDatabase(testDatabaseName)
|
||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
||||
|
||||
createResponse, err := client.CreateDatabase(context.TODO(), azcosmos.DatabaseProperties{
|
||||
ID: testDatabaseName,
|
||||
@ -37,7 +28,7 @@ func Test_Databases(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Should return conflict when database exists", func(t *testing.T) {
|
||||
ts.Repository.CreateDatabase(repositorymodels.Database{
|
||||
ts.DataStore.CreateDatabase(datastore.Database{
|
||||
ID: testDatabaseName,
|
||||
})
|
||||
|
||||
@ -55,9 +46,9 @@ func Test_Databases(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("Database Read", func(t *testing.T) {
|
||||
runTestsWithPresets(t, "Database Read", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||
t.Run("Should read database", func(t *testing.T) {
|
||||
ts.Repository.CreateDatabase(repositorymodels.Database{
|
||||
ts.DataStore.CreateDatabase(datastore.Database{
|
||||
ID: testDatabaseName,
|
||||
})
|
||||
|
||||
@ -71,7 +62,7 @@ func Test_Databases(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
||||
ts.Repository.DeleteDatabase(testDatabaseName)
|
||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
||||
|
||||
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
||||
assert.Nil(t, err)
|
||||
@ -88,9 +79,9 @@ func Test_Databases(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("Database Delete", func(t *testing.T) {
|
||||
runTestsWithPresets(t, "Database Delete", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||
t.Run("Should delete database", func(t *testing.T) {
|
||||
ts.Repository.CreateDatabase(repositorymodels.Database{
|
||||
ts.DataStore.CreateDatabase(datastore.Database{
|
||||
ID: testDatabaseName,
|
||||
})
|
||||
|
||||
@ -103,7 +94,7 @@ func Test_Databases(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Should return not found when database does not exist", func(t *testing.T) {
|
||||
ts.Repository.DeleteDatabase(testDatabaseName)
|
||||
ts.DataStore.DeleteDatabase(testDatabaseName)
|
||||
|
||||
databaseResponse, err := client.NewDatabase(testDatabaseName)
|
||||
assert.Nil(t, err)
|
||||
|
@ -14,7 +14,7 @@ import (
|
||||
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
|
||||
"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
|
||||
"github.com/pikami/cosmium/api/config"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
@ -53,11 +53,9 @@ func testCosmosQuery(t *testing.T,
|
||||
}
|
||||
}
|
||||
|
||||
func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClient) {
|
||||
ts := runTestServer()
|
||||
|
||||
ts.Repository.CreateDatabase(repositorymodels.Database{ID: testDatabaseName})
|
||||
ts.Repository.CreateCollection(testDatabaseName, repositorymodels.Collection{
|
||||
func documents_InitializeDb(t *testing.T, ts *TestServer) *azcosmos.ContainerClient {
|
||||
ts.DataStore.CreateDatabase(datastore.Database{ID: testDatabaseName})
|
||||
ts.DataStore.CreateCollection(testDatabaseName, datastore.Collection{
|
||||
ID: testCollectionName,
|
||||
PartitionKey: struct {
|
||||
Paths []string "json:\"paths\""
|
||||
@ -67,8 +65,8 @@ func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClien
|
||||
Paths: []string{"/pk"},
|
||||
},
|
||||
})
|
||||
ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
|
||||
ts.Repository.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
|
||||
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}})
|
||||
ts.DataStore.CreateDocument(testDatabaseName, testCollectionName, map[string]interface{}{"id": "67890", "pk": "456", "isCool": true, "arr": []int{6, 7, 8}})
|
||||
|
||||
client, err := azcosmos.NewClientFromConnectionString(
|
||||
fmt.Sprintf("AccountEndpoint=%s;AccountKey=%s", ts.URL, config.DefaultAccountKey),
|
||||
@ -79,438 +77,439 @@ func documents_InitializeDb(t *testing.T) (*TestServer, *azcosmos.ContainerClien
|
||||
collectionClient, err := client.NewContainer(testDatabaseName, testCollectionName)
|
||||
assert.Nil(t, err)
|
||||
|
||||
return ts, collectionClient
|
||||
return collectionClient
|
||||
}
|
||||
|
||||
func Test_Documents(t *testing.T) {
|
||||
ts, collectionClient := documents_InitializeDb(t)
|
||||
defer ts.Server.Close()
|
||||
presets := []testPreset{PresetJsonStore, PresetBadgerStore}
|
||||
|
||||
t.Run("Should query document", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "12345", "pk": "123"},
|
||||
map[string]interface{}{"id": "67890", "pk": "456"},
|
||||
},
|
||||
)
|
||||
runTestsWithPresets(t, "Test_Documents", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||
collectionClient := documents_InitializeDb(t, ts)
|
||||
|
||||
t.Run("Should query document", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
"SELECT c.id, c[\"pk\"] FROM c ORDER BY c.id",
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "12345", "pk": "123"},
|
||||
map[string]interface{}{"id": "67890", "pk": "456"},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should query VALUE array", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
|
||||
nil,
|
||||
[]interface{}{
|
||||
[]interface{}{"12345", "123"},
|
||||
[]interface{}{"67890", "456"},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should query VALUE object", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "12345", "_pk": "123"},
|
||||
map[string]interface{}{"id": "67890", "_pk": "456"},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`select c.id
|
||||
FROM c
|
||||
WHERE c.isCool=true
|
||||
ORDER BY c.id`,
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "67890"},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should query document with query parameters", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`select c.id
|
||||
FROM c
|
||||
WHERE c.id=@param_id
|
||||
ORDER BY c.id`,
|
||||
[]azcosmos.QueryParameter{
|
||||
{Name: "@param_id", Value: "67890"},
|
||||
},
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "67890"},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`select c.id
|
||||
FROM c
|
||||
WHERE c[@param]="67890"
|
||||
ORDER BY c.id`,
|
||||
[]azcosmos.QueryParameter{
|
||||
{Name: "@param", Value: "id"},
|
||||
},
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "67890"},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should query array accessor", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`SELECT c.id,
|
||||
c["arr"][0] AS arr0,
|
||||
c["arr"][1] AS arr1,
|
||||
c["arr"][2] AS arr2,
|
||||
c["arr"][3] AS arr3
|
||||
FROM c ORDER BY c.id`,
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
|
||||
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should handle parallel writes", func(t *testing.T) {
|
||||
var wg sync.WaitGroup
|
||||
rutineCount := 100
|
||||
results := make(chan error, rutineCount)
|
||||
|
||||
createCall := func(i int) {
|
||||
defer wg.Done()
|
||||
item := map[string]interface{}{
|
||||
"id": fmt.Sprintf("id-%d", i),
|
||||
"pk": fmt.Sprintf("pk-%d", i),
|
||||
"val": i,
|
||||
}
|
||||
bytes, err := json.Marshal(item)
|
||||
if err != nil {
|
||||
results <- err
|
||||
return
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
|
||||
_, err = collectionClient.CreateItem(
|
||||
ctx,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
results <- err
|
||||
|
||||
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
||||
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
||||
}
|
||||
|
||||
for i := 0; i < rutineCount; i++ {
|
||||
wg.Add(1)
|
||||
go createCall(i)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
close(results)
|
||||
|
||||
for err := range results {
|
||||
if err != nil {
|
||||
t.Errorf("Error creating item: %v", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("Should query VALUE array", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
"SELECT VALUE [c.id, c[\"pk\"]] FROM c ORDER BY c.id",
|
||||
nil,
|
||||
[]interface{}{
|
||||
[]interface{}{"12345", "123"},
|
||||
[]interface{}{"67890", "456"},
|
||||
},
|
||||
)
|
||||
})
|
||||
runTestsWithPresets(t, "Test_Documents_Patch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||
collectionClient := documents_InitializeDb(t, ts)
|
||||
|
||||
t.Run("Should query VALUE object", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
"SELECT VALUE { id: c.id, _pk: c.pk } FROM c ORDER BY c.id",
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "12345", "_pk": "123"},
|
||||
map[string]interface{}{"id": "67890", "_pk": "456"},
|
||||
},
|
||||
)
|
||||
})
|
||||
t.Run("Should PATCH document", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
|
||||
|
||||
t.Run("Should query document with single WHERE condition", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`select c.id
|
||||
FROM c
|
||||
WHERE c.isCool=true
|
||||
ORDER BY c.id`,
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "67890"},
|
||||
},
|
||||
)
|
||||
})
|
||||
patch := azcosmos.PatchOperations{}
|
||||
patch.AppendAdd("/newField", "newValue")
|
||||
patch.AppendIncrement("/incr", 15)
|
||||
patch.AppendRemove("/isCool")
|
||||
patch.AppendReplace("/pk", "666")
|
||||
patch.AppendSet("/setted", "isSet")
|
||||
|
||||
t.Run("Should query document with query parameters", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`select c.id
|
||||
FROM c
|
||||
WHERE c.id=@param_id
|
||||
ORDER BY c.id`,
|
||||
[]azcosmos.QueryParameter{
|
||||
{Name: "@param_id", Value: "67890"},
|
||||
},
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "67890"},
|
||||
},
|
||||
)
|
||||
})
|
||||
itemResponse, err := collectionClient.PatchItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
"67890",
|
||||
patch,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
|
||||
t.Run("Should query document with query parameters as accessor", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`select c.id
|
||||
FROM c
|
||||
WHERE c[@param]="67890"
|
||||
ORDER BY c.id`,
|
||||
[]azcosmos.QueryParameter{
|
||||
{Name: "@param", Value: "id"},
|
||||
},
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "67890"},
|
||||
},
|
||||
)
|
||||
})
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(itemResponse.Value, &itemResponseBody)
|
||||
|
||||
t.Run("Should query array accessor", func(t *testing.T) {
|
||||
testCosmosQuery(t, collectionClient,
|
||||
`SELECT c.id,
|
||||
c["arr"][0] AS arr0,
|
||||
c["arr"][1] AS arr1,
|
||||
c["arr"][2] AS arr2,
|
||||
c["arr"][3] AS arr3
|
||||
FROM c ORDER BY c.id`,
|
||||
nil,
|
||||
[]interface{}{
|
||||
map[string]interface{}{"id": "12345", "arr0": 1.0, "arr1": 2.0, "arr2": 3.0, "arr3": nil},
|
||||
map[string]interface{}{"id": "67890", "arr0": 6.0, "arr1": 7.0, "arr2": 8.0, "arr3": nil},
|
||||
},
|
||||
)
|
||||
})
|
||||
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
|
||||
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
|
||||
assert.Empty(t, itemResponseBody["isCool"])
|
||||
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
|
||||
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
|
||||
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
|
||||
})
|
||||
|
||||
t.Run("Should handle parallel writes", func(t *testing.T) {
|
||||
var wg sync.WaitGroup
|
||||
rutineCount := 100
|
||||
results := make(chan error, rutineCount)
|
||||
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
patch := azcosmos.PatchOperations{}
|
||||
patch.AppendReplace("/id", "newValue")
|
||||
|
||||
_, err := collectionClient.PatchItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
"67890",
|
||||
patch,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
var respErr *azcore.ResponseError
|
||||
if errors.As(err, &respErr) {
|
||||
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
|
||||
} else {
|
||||
panic(err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("CreateItem", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
createCall := func(i int) {
|
||||
defer wg.Done()
|
||||
item := map[string]interface{}{
|
||||
"id": fmt.Sprintf("id-%d", i),
|
||||
"pk": fmt.Sprintf("pk-%d", i),
|
||||
"val": i,
|
||||
"Id": "6789011",
|
||||
"pk": "456",
|
||||
"newField": "newValue2",
|
||||
}
|
||||
bytes, err := json.Marshal(item)
|
||||
if err != nil {
|
||||
results <- err
|
||||
return
|
||||
}
|
||||
assert.Nil(t, err)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
|
||||
_, err = collectionClient.CreateItem(
|
||||
ctx,
|
||||
r, err2 := collectionClient.CreateItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
results <- err
|
||||
assert.NotNil(t, r)
|
||||
assert.Nil(t, err2)
|
||||
})
|
||||
|
||||
collectionClient.ReadItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
||||
collectionClient.DeleteItem(ctx, azcosmos.PartitionKey{}, fmt.Sprintf("id-%d", i), nil)
|
||||
}
|
||||
t.Run("CreateItem that already exists", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
for i := 0; i < rutineCount; i++ {
|
||||
wg.Add(1)
|
||||
go createCall(i)
|
||||
}
|
||||
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
wg.Wait()
|
||||
close(results)
|
||||
r, err := collectionClient.CreateItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
for err := range results {
|
||||
if err != nil {
|
||||
t.Errorf("Error creating item: %v", err)
|
||||
var respErr *azcore.ResponseError
|
||||
if errors.As(err, &respErr) {
|
||||
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
|
||||
} else {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func Test_Documents_Patch(t *testing.T) {
|
||||
ts, collectionClient := documents_InitializeDb(t)
|
||||
defer ts.Server.Close()
|
||||
|
||||
t.Run("Should PATCH document", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
expectedData := map[string]interface{}{"id": "67890", "pk": "666", "newField": "newValue", "incr": 15., "setted": "isSet"}
|
||||
|
||||
patch := azcosmos.PatchOperations{}
|
||||
patch.AppendAdd("/newField", "newValue")
|
||||
patch.AppendIncrement("/incr", 15)
|
||||
patch.AppendRemove("/isCool")
|
||||
patch.AppendReplace("/pk", "666")
|
||||
patch.AppendSet("/setted", "isSet")
|
||||
|
||||
itemResponse, err := collectionClient.PatchItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
"67890",
|
||||
patch,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(itemResponse.Value, &itemResponseBody)
|
||||
|
||||
assert.Equal(t, expectedData["id"], itemResponseBody["id"])
|
||||
assert.Equal(t, expectedData["pk"], itemResponseBody["pk"])
|
||||
assert.Empty(t, itemResponseBody["isCool"])
|
||||
assert.Equal(t, expectedData["newField"], itemResponseBody["newField"])
|
||||
assert.Equal(t, expectedData["incr"], itemResponseBody["incr"])
|
||||
assert.Equal(t, expectedData["setted"], itemResponseBody["setted"])
|
||||
})
|
||||
|
||||
t.Run("Should not allow to PATCH document ID", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
patch := azcosmos.PatchOperations{}
|
||||
patch.AppendReplace("/id", "newValue")
|
||||
|
||||
_, err := collectionClient.PatchItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
"67890",
|
||||
patch,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
var respErr *azcore.ResponseError
|
||||
if errors.As(err, &respErr) {
|
||||
assert.Equal(t, http.StatusUnprocessableEntity, respErr.StatusCode)
|
||||
} else {
|
||||
panic(err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("CreateItem", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
item := map[string]interface{}{
|
||||
"Id": "6789011",
|
||||
"pk": "456",
|
||||
"newField": "newValue2",
|
||||
}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
r, err2 := collectionClient.CreateItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.Nil(t, err2)
|
||||
})
|
||||
|
||||
t.Run("CreateItem that already exists", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3}}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
r, err := collectionClient.CreateItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
var respErr *azcore.ResponseError
|
||||
if errors.As(err, &respErr) {
|
||||
assert.Equal(t, http.StatusConflict, respErr.StatusCode)
|
||||
} else {
|
||||
panic(err)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("UpsertItem new", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
r, err2 := collectionClient.UpsertItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.Nil(t, err2)
|
||||
})
|
||||
|
||||
t.Run("UpsertItem that already exists", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
r, err2 := collectionClient.UpsertItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.Nil(t, err2)
|
||||
})
|
||||
}
|
||||
|
||||
func Test_Documents_TransactionalBatch(t *testing.T) {
|
||||
ts, collectionClient := documents_InitializeDb(t)
|
||||
defer ts.Server.Close()
|
||||
|
||||
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
newItem := map[string]interface{}{
|
||||
"id": "678901",
|
||||
}
|
||||
bytes, err := json.Marshal(newItem)
|
||||
assert.Nil(t, err)
|
||||
|
||||
batch.CreateItem(bytes, nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||
|
||||
createdDoc, _ := ts.Repository.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||
assert.Equal(t, newItem["id"], createdDoc["id"])
|
||||
})
|
||||
|
||||
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
batch.DeleteItem("12345", nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
|
||||
|
||||
_, status := ts.Repository.GetDocument(testDatabaseName, testCollectionName, "12345")
|
||||
assert.Equal(t, repositorymodels.StatusNotFound, int(status))
|
||||
})
|
||||
|
||||
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
newItem := map[string]interface{}{
|
||||
"id": "67890",
|
||||
"pk": "666",
|
||||
}
|
||||
bytes, err := json.Marshal(newItem)
|
||||
assert.Nil(t, err)
|
||||
|
||||
batch.ReplaceItem("67890", bytes, nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
||||
|
||||
updatedDoc, _ := ts.Repository.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
||||
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
||||
})
|
||||
|
||||
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
newItem := map[string]interface{}{
|
||||
"id": "678901",
|
||||
"pk": "666",
|
||||
}
|
||||
bytes, err := json.Marshal(newItem)
|
||||
assert.Nil(t, err)
|
||||
|
||||
batch.UpsertItem(bytes, nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
||||
|
||||
updatedDoc, _ := ts.Repository.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
||||
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
||||
})
|
||||
|
||||
t.Run("Should execute READ transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
batch.ReadItem("67890", nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, "67890", itemResponseBody["id"])
|
||||
})
|
||||
|
||||
t.Run("UpsertItem new", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
item := map[string]interface{}{"id": "123456", "pk": "1234", "isCool": false, "arr": []int{1, 2, 3}}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
r, err2 := collectionClient.UpsertItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.Nil(t, err2)
|
||||
})
|
||||
|
||||
t.Run("UpsertItem that already exists", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
|
||||
item := map[string]interface{}{"id": "12345", "pk": "123", "isCool": false, "arr": []int{1, 2, 3, 4}}
|
||||
bytes, err := json.Marshal(item)
|
||||
assert.Nil(t, err)
|
||||
|
||||
r, err2 := collectionClient.UpsertItem(
|
||||
context,
|
||||
azcosmos.PartitionKey{},
|
||||
bytes,
|
||||
&azcosmos.ItemOptions{
|
||||
EnableContentResponseOnWrite: false,
|
||||
},
|
||||
)
|
||||
assert.NotNil(t, r)
|
||||
assert.Nil(t, err2)
|
||||
})
|
||||
})
|
||||
|
||||
runTestsWithPresets(t, "Test_Documents_TransactionalBatch", presets, func(t *testing.T, ts *TestServer, client *azcosmos.Client) {
|
||||
collectionClient := documents_InitializeDb(t, ts)
|
||||
|
||||
t.Run("Should execute CREATE transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
newItem := map[string]interface{}{
|
||||
"id": "678901",
|
||||
}
|
||||
bytes, err := json.Marshal(newItem)
|
||||
assert.Nil(t, err)
|
||||
|
||||
batch.CreateItem(bytes, nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||
|
||||
createdDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||
assert.Equal(t, newItem["id"], createdDoc["id"])
|
||||
})
|
||||
|
||||
t.Run("Should execute DELETE transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
batch.DeleteItem("12345", nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.Equal(t, int32(http.StatusNoContent), operationResponse.StatusCode)
|
||||
|
||||
_, status := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, "12345")
|
||||
assert.Equal(t, datastore.StatusNotFound, int(status))
|
||||
})
|
||||
|
||||
t.Run("Should execute REPLACE transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
newItem := map[string]interface{}{
|
||||
"id": "67890",
|
||||
"pk": "666",
|
||||
}
|
||||
bytes, err := json.Marshal(newItem)
|
||||
assert.Nil(t, err)
|
||||
|
||||
batch.ReplaceItem("67890", bytes, nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
||||
|
||||
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
||||
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
||||
})
|
||||
|
||||
t.Run("Should execute UPSERT transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
newItem := map[string]interface{}{
|
||||
"id": "678901",
|
||||
"pk": "666",
|
||||
}
|
||||
bytes, err := json.Marshal(newItem)
|
||||
assert.Nil(t, err)
|
||||
|
||||
batch.UpsertItem(bytes, nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusCreated), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, newItem["id"], itemResponseBody["id"])
|
||||
assert.Equal(t, newItem["pk"], itemResponseBody["pk"])
|
||||
|
||||
updatedDoc, _ := ts.DataStore.GetDocument(testDatabaseName, testCollectionName, newItem["id"].(string))
|
||||
assert.Equal(t, newItem["id"], updatedDoc["id"])
|
||||
assert.Equal(t, newItem["pk"], updatedDoc["pk"])
|
||||
})
|
||||
|
||||
t.Run("Should execute READ transactional batch", func(t *testing.T) {
|
||||
context := context.TODO()
|
||||
batch := collectionClient.NewTransactionalBatch(azcosmos.NewPartitionKeyString("pk"))
|
||||
|
||||
batch.ReadItem("67890", nil)
|
||||
response, err := collectionClient.ExecuteTransactionalBatch(context, batch, &azcosmos.TransactionalBatchOptions{})
|
||||
assert.Nil(t, err)
|
||||
assert.True(t, response.Success)
|
||||
assert.Equal(t, 1, len(response.OperationResults))
|
||||
|
||||
operationResponse := response.OperationResults[0]
|
||||
assert.NotNil(t, operationResponse)
|
||||
assert.NotNil(t, operationResponse.ResourceBody)
|
||||
assert.Equal(t, int32(http.StatusOK), operationResponse.StatusCode)
|
||||
|
||||
var itemResponseBody map[string]interface{}
|
||||
json.Unmarshal(operationResponse.ResourceBody, &itemResponseBody)
|
||||
assert.Equal(t, "67890", itemResponseBody["id"])
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@ -14,7 +14,8 @@ import (
// Request document with trailing slash like python cosmosdb client does.
func Test_Documents_Read_Trailing_Slash(t *testing.T) {
	ts, _ := documents_InitializeDb(t)
	ts := runTestServer()
	documents_InitializeDb(t, ts)
	defer ts.Server.Close()

	t.Run("Read doc with client that appends slash to path", func(t *testing.T) {
@ -7,27 +7,40 @@ import (
	"github.com/pikami/cosmium/api"
	"github.com/pikami/cosmium/api/config"
	"github.com/pikami/cosmium/internal/repositories"
	"github.com/pikami/cosmium/internal/datastore"
	badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
	jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
	"github.com/pikami/cosmium/internal/logger"
)

func main() {
	configuration := config.ParseFlags()

	repository := repositories.NewDataRepository(repositories.RepositoryOptions{
		InitialDataFilePath: configuration.InitialDataFilePath,
		PersistDataFilePath: configuration.PersistDataFilePath,
	})
	var dataStore datastore.DataStore
	switch configuration.DataStore {
	case config.DataStoreBadger:
		dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
			PersistDataFilePath: configuration.PersistDataFilePath,
		})
		logger.InfoLn("Using Badger data store")
	default:
		dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
			InitialDataFilePath: configuration.InitialDataFilePath,
			PersistDataFilePath: configuration.PersistDataFilePath,
		})
		logger.InfoLn("Using in-memory data store")
	}

	server := api.NewApiServer(repository, &configuration)
	server := api.NewApiServer(dataStore, &configuration)
	err := server.Start()
	if err != nil {
		panic(err)
	}

	waitForExit(server, repository, configuration)
	waitForExit(server, dataStore)
}

func waitForExit(server *api.ApiServer, repository *repositories.DataRepository, config config.ServerConfig) {
func waitForExit(server *api.ApiServer, dataStore datastore.DataStore) {
	sigs := make(chan os.Signal, 1)
	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)

@ -37,7 +50,5 @@ func waitForExit(server *api.ApiServer, repository *repositories.DataRepository,
	// Stop the server
	server.Stop()

	if config.PersistDataFilePath != "" {
		repository.SaveStateFS(config.PersistDataFilePath)
	}
	dataStore.Close()
}

@ -79,7 +79,7 @@ Cosmium strives to support the core features of Cosmos DB, including:
| Function | Implemented |
| -------- | ----------- |
| IIF | No |
| IIF | Yes |

### Date and time Functions

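With IIF now marked as implemented, a query along these lines should be expressible. This is a hedged sketch written in the style of the existing query tests; the field names and expected rows are assumptions based on the seed documents used elsewhere in this diff, not output captured from Cosmium:

```go
t.Run("Should evaluate IIF", func(t *testing.T) {
	testCosmosQuery(t, collectionClient,
		`SELECT c.id, IIF(c.isCool, "cool", "not cool") AS coolness FROM c ORDER BY c.id`,
		nil,
		[]interface{}{
			map[string]interface{}{"id": "12345", "coolness": "not cool"},
			map[string]interface{}{"id": "67890", "coolness": "cool"},
		},
	)
})
```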
48
go.mod
@ -3,44 +3,58 @@ module github.com/pikami/cosmium
|
||||
go 1.24.0
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0
|
||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0
|
||||
github.com/cosmiumdev/json-patch/v5 v5.9.3
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0
|
||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0
|
||||
github.com/cosmiumdev/json-patch/v5 v5.9.11
|
||||
github.com/dgraph-io/badger/v4 v4.7.0
|
||||
github.com/gin-gonic/gin v1.10.0
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/stretchr/testify v1.10.0
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
||||
github.com/bytedance/sonic v1.12.9 // indirect
|
||||
github.com/bytedance/sonic/loader v0.2.3 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/bytedance/sonic v1.13.2 // indirect
|
||||
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||
github.com/gin-contrib/sse v1.0.0 // indirect
|
||||
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||
github.com/gin-contrib/sse v1.1.0 // indirect
|
||||
github.com/go-logr/logr v1.4.2 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.25.0 // indirect
|
||||
github.com/go-playground/validator/v10 v10.26.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/google/flatbuffers v25.2.10+incompatible // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||
golang.org/x/arch v0.14.0 // indirect
|
||||
golang.org/x/crypto v0.35.0 // indirect
|
||||
golang.org/x/net v0.35.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/text v0.22.0 // indirect
|
||||
google.golang.org/protobuf v1.36.5 // indirect
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/otel v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.35.0 // indirect
|
||||
golang.org/x/arch v0.17.0 // indirect
|
||||
golang.org/x/crypto v0.38.0 // indirect
|
||||
golang.org/x/net v0.40.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.25.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
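The new badger/v4 requirement above backs the Badger data store. As a rough, self-contained sketch of how that dependency persists writes as they happen (the path and key below are illustrative assumptions, not taken from Cosmium's code):

```go
package main

import (
	"log"

	badger "github.com/dgraph-io/badger/v4"
)

func main() {
	// Open (or create) an on-disk Badger database; writes land on disk as
	// transactions commit, rather than on application exit.
	db, err := badger.Open(badger.DefaultOptions("/tmp/cosmium-badger"))
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	err = db.Update(func(txn *badger.Txn) error {
		return txn.Set([]byte("dbs/test-db"), []byte(`{"id":"test-db"}`))
	})
	if err != nil {
		log.Fatal(err)
	}
}
```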
113
go.sum
@ -2,33 +2,67 @@ github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0
|
||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0 h1:RGcdpSElvcXCwxydI0xzOBu1Gvp88OoiTGfbtO/z1m0=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.3.0/go.mod h1:YwUyrNUtcZcibA99JcfCP6UUp95VVQKO2MJfBzgJDwA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0 h1:TSaH6Lj0m8bDr4vX1+LC1KLQTnLzZb3tOxrx/PLqw+c=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos v1.4.0/go.mod h1:Krtog/7tz27z75TwM5cIS8bxEH4dcBUezcq+kGVeZEo=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 h1:kYRSnvJju5gYVyhkij+RTJ/VR6QIUaCfWeaFm2ycsjQ=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/bytedance/sonic v1.12.9 h1:Od1BvK55NnewtGaJsTDeAOSnLVO2BTSLOe0+ooKokmQ=
|
||||
github.com/bytedance/sonic v1.12.9/go.mod h1:uVvFidNmlt9+wa31S1urfwwthTWteBgG0hWuoKAXTx8=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/bytedance/sonic v1.13.1 h1:Jyd5CIvdFnkOWuKXr+wm4Nyk2h0yAFsr8ucJgEasO3g=
|
||||
github.com/bytedance/sonic v1.13.1/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
||||
github.com/bytedance/sonic v1.13.2 h1:8/H1FempDZqC4VqjptGo14QQlJx8VdZJegxs6wwfqpQ=
|
||||
github.com/bytedance/sonic v1.13.2/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
||||
github.com/bytedance/sonic/loader v0.2.3 h1:yctD0Q3v2NOGfSWPLPvG2ggA2kV6TS6s4wioyEqssH0=
|
||||
github.com/bytedance/sonic/loader v0.2.3/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
||||
github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY=
|
||||
github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
|
||||
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
||||
github.com/cosmiumdev/json-patch/v5 v5.9.3 h1:l+Og3+5edqV2NHDo58sz72eS733lbXVYP61seYK43Do=
|
||||
github.com/cosmiumdev/json-patch/v5 v5.9.3/go.mod h1:WzSTCdia0WrlZtjnL19P4RiwWtfdyArm/E7stgEeP5g=
|
||||
github.com/cosmiumdev/json-patch/v5 v5.9.11 h1:WD2Wqaz/vO987z2FFdqgkj15HgYZ/Y5TpqE3I4T/iOQ=
|
||||
github.com/cosmiumdev/json-patch/v5 v5.9.11/go.mod h1:YPZmckmv4ZY+oxKIOjgq3sIudHVB6VEMcicCS9LtVLM=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgraph-io/badger/v4 v4.6.0 h1:acOwfOOZ4p1dPRnYzvkVm7rUk2Y21TgPVepCy5dJdFQ=
|
||||
github.com/dgraph-io/badger/v4 v4.6.0/go.mod h1:KSJ5VTuZNC3Sd+YhvVjk2nYua9UZnnTr/SkXvdtiPgI=
|
||||
github.com/dgraph-io/badger/v4 v4.7.0 h1:Q+J8HApYAY7UMpL8d9owqiB+odzEc0zn/aqOD9jhc6Y=
|
||||
github.com/dgraph-io/badger/v4 v4.7.0/go.mod h1:He7TzG3YBy3j4f5baj5B7Zl2XyfNe5bl4Udl0aPemVA=
|
||||
github.com/dgraph-io/ristretto/v2 v2.1.0 h1:59LjpOJLNDULHh8MC4UaegN52lC4JnO2dITsie/Pa8I=
|
||||
github.com/dgraph-io/ristretto/v2 v2.1.0/go.mod h1:uejeqfYXpUomfse0+lO+13ATz4TypQYLJZzBSAemuB4=
|
||||
github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM=
|
||||
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
|
||||
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
|
||||
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
|
||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
|
||||
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
|
||||
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
|
||||
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
|
||||
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
|
||||
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
|
||||
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
|
||||
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||
@ -37,17 +71,24 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.25.0 h1:5Dh7cjvzR7BRZadnsVOzPhWsrwUr0nmsZJxEAnFLNO8=
|
||||
github.com/go-playground/validator/v10 v10.25.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus=
|
||||
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
|
||||
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
|
||||
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
@ -69,14 +110,16 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
|
||||
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
|
||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
@ -93,21 +136,47 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
golang.org/x/arch v0.14.0 h1:z9JUEZWr8x4rR0OU6c4/4t6E6jOZ8/QBS2bBYBm4tx4=
|
||||
golang.org/x/arch v0.14.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
|
||||
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
||||
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
|
||||
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
|
||||
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
|
||||
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
||||
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
|
||||
golang.org/x/arch v0.15.0 h1:QtOrQd0bTUnhNVNndMpLHNWrDmYzZ2KDqSrEymqInZw=
|
||||
golang.org/x/arch v0.15.0/go.mod h1:JmwW7aLIoRUKgaTzhkiEFxvcEiQGyOg9BMonBJUS7EE=
|
||||
golang.org/x/arch v0.17.0 h1:4O3dfLzd+lQewptAHqjewQZQDyEdejz3VwgeYwkZneU=
|
||||
golang.org/x/arch v0.17.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
|
||||
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
|
||||
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
|
||||
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
|
||||
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
|
||||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
|
||||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
|
||||
golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
|
||||
golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
|
@ -30,3 +30,8 @@ var QueryPlanResponse = gin.H{
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
var UnknownErrorResponse = gin.H{"message": "Unknown error"}
|
||||
var NotFoundResponse = gin.H{"message": "NotFound"}
|
||||
var ConflictResponse = gin.H{"message": "Conflict"}
|
||||
var BadRequestResponse = gin.H{"message": "BadRequest"}
|
||||
|
20
internal/converters/document_to_rowtype.go
Normal file
20
internal/converters/document_to_rowtype.go
Normal file
@ -0,0 +1,20 @@
|
||||
package converters
|
||||
|
||||
import (
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
|
||||
)
|
||||
|
||||
type DocumentToRowTypeIterator struct {
|
||||
documents datastore.DocumentIterator
|
||||
}
|
||||
|
||||
func NewDocumentToRowTypeIterator(documents datastore.DocumentIterator) *DocumentToRowTypeIterator {
|
||||
return &DocumentToRowTypeIterator{
|
||||
documents: documents,
|
||||
}
|
||||
}
|
||||
|
||||
func (di *DocumentToRowTypeIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
|
||||
return di.documents.Next()
|
||||
}
|
66
internal/datastore/badger_datastore/badger_datastore.go
Normal file
66
internal/datastore/badger_datastore/badger_datastore.go
Normal file
@ -0,0 +1,66 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
)
|
||||
|
||||
type BadgerDataStore struct {
|
||||
db *badger.DB
|
||||
gcTicker *time.Ticker
|
||||
}
|
||||
|
||||
type BadgerDataStoreOptions struct {
|
||||
PersistDataFilePath string
|
||||
}
|
||||
|
||||
func NewBadgerDataStore(options BadgerDataStoreOptions) *BadgerDataStore {
|
||||
badgerOpts := badger.DefaultOptions(options.PersistDataFilePath)
|
||||
badgerOpts = badgerOpts.WithLogger(newBadgerLogger())
|
||||
if options.PersistDataFilePath == "" {
|
||||
badgerOpts = badgerOpts.WithInMemory(true)
|
||||
}
|
||||
|
||||
db, err := badger.Open(badgerOpts)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
gcTicker := time.NewTicker(5 * time.Minute)
|
||||
|
||||
ds := &BadgerDataStore{
|
||||
db: db,
|
||||
gcTicker: gcTicker,
|
||||
}
|
||||
|
||||
go ds.runGarbageCollector()
|
||||
|
||||
return ds
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) Close() {
|
||||
if r.gcTicker != nil {
|
||||
r.gcTicker.Stop()
|
||||
r.gcTicker = nil
|
||||
}
|
||||
|
||||
r.db.Close()
|
||||
r.db = nil
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DumpToJson() (string, error) {
|
||||
logger.ErrorLn("Badger datastore does not support state export currently.")
|
||||
return "{}", nil
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) runGarbageCollector() {
|
||||
for range r.gcTicker.C {
|
||||
again:
|
||||
err := r.db.RunValueLogGC(0.7)
|
||||
if err == nil {
|
||||
goto again
|
||||
}
|
||||
}
|
||||
}
|
28
internal/datastore/badger_datastore/badger_logger.go
Normal file
28
internal/datastore/badger_datastore/badger_logger.go
Normal file
@ -0,0 +1,28 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
)
|
||||
|
||||
type badgerLogger struct{}
|
||||
|
||||
func newBadgerLogger() badger.Logger {
|
||||
return &badgerLogger{}
|
||||
}
|
||||
|
||||
func (l *badgerLogger) Errorf(format string, v ...interface{}) {
|
||||
logger.Errorf(format, v...)
|
||||
}
|
||||
|
||||
func (l *badgerLogger) Warningf(format string, v ...interface{}) {
|
||||
logger.Infof(format, v...)
|
||||
}
|
||||
|
||||
func (l *badgerLogger) Infof(format string, v ...interface{}) {
|
||||
logger.Infof(format, v...)
|
||||
}
|
||||
|
||||
func (l *badgerLogger) Debugf(format string, v ...interface{}) {
|
||||
logger.Debugf(format, v...)
|
||||
}
|
103
internal/datastore/badger_datastore/collections.go
Normal file
103
internal/datastore/badger_datastore/collections.go
Normal file
@ -0,0 +1,103 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
|
||||
)
|
||||
|
||||
func (r *BadgerDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
|
||||
exists, err := keyExists(r.db.NewTransaction(false), generateDatabaseKey(databaseId))
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while checking if database exists:", err)
|
||||
return nil, datastore.Unknown
|
||||
}
|
||||
|
||||
if !exists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
colls, status := listByPrefix[datastore.Collection](r.db, generateKey(resourceid.ResourceTypeCollection, databaseId, "", ""))
|
||||
if status == datastore.StatusOk {
|
||||
return colls, datastore.StatusOk
|
||||
}
|
||||
|
||||
return nil, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
|
||||
collectionKey := generateCollectionKey(databaseId, collectionId)
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var collection datastore.Collection
|
||||
status := getKey(txn, collectionKey, &collection)
|
||||
|
||||
return collection, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
|
||||
collectionKey := generateCollectionKey(databaseId, collectionId)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
prefixes := []string{
|
||||
generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""),
|
||||
generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""),
|
||||
generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""),
|
||||
generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""),
|
||||
collectionKey,
|
||||
}
|
||||
for _, prefix := range prefixes {
|
||||
if err := deleteKeysByPrefix(txn, prefix); err != nil {
|
||||
return datastore.Unknown
|
||||
}
|
||||
}
|
||||
|
||||
err := txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
|
||||
collectionKey := generateCollectionKey(databaseId, newCollection.ID)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
collectionExists, err := keyExists(txn, collectionKey)
|
||||
if err != nil || collectionExists {
|
||||
return datastore.Collection{}, datastore.Conflict
|
||||
}
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Collection{}, status
|
||||
}
|
||||
|
||||
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
|
||||
|
||||
newCollection.TimeStamp = time.Now().Unix()
|
||||
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
|
||||
newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
|
||||
|
||||
status = insertKey(txn, collectionKey, newCollection)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Collection{}, status
|
||||
}
|
||||
|
||||
return newCollection, datastore.StatusOk
|
||||
}
|
80
internal/datastore/badger_datastore/databases.go
Normal file
80
internal/datastore/badger_datastore/databases.go
Normal file
@ -0,0 +1,80 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
func (r *BadgerDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
|
||||
dbs, status := listByPrefix[datastore.Database](r.db, DatabaseKeyPrefix)
|
||||
if status == datastore.StatusOk {
|
||||
return dbs, datastore.StatusOk
|
||||
}
|
||||
|
||||
return nil, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
|
||||
databaseKey := generateDatabaseKey(id)
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, databaseKey, &database)
|
||||
|
||||
return database, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
|
||||
databaseKey := generateDatabaseKey(id)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
prefixes := []string{
|
||||
generateKey(resourceid.ResourceTypeCollection, id, "", ""),
|
||||
generateKey(resourceid.ResourceTypeDocument, id, "", ""),
|
||||
generateKey(resourceid.ResourceTypeTrigger, id, "", ""),
|
||||
generateKey(resourceid.ResourceTypeStoredProcedure, id, "", ""),
|
||||
generateKey(resourceid.ResourceTypeUserDefinedFunction, id, "", ""),
|
||||
databaseKey,
|
||||
}
|
||||
for _, prefix := range prefixes {
|
||||
if err := deleteKeysByPrefix(txn, prefix); err != nil {
|
||||
return datastore.Unknown
|
||||
}
|
||||
}
|
||||
|
||||
err := txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
|
||||
databaseKey := generateDatabaseKey(newDatabase.ID)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
newDatabase.TimeStamp = time.Now().Unix()
|
||||
newDatabase.ResourceID = resourceid.New(resourceid.ResourceTypeDatabase)
|
||||
newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
|
||||
|
||||
status := insertKey(txn, databaseKey, newDatabase)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Database{}, status
|
||||
}
|
||||
|
||||
return newDatabase, datastore.StatusOk
|
||||
}
|
204
internal/datastore/badger_datastore/db_abstractions.go
Normal file
204
internal/datastore/badger_datastore/db_abstractions.go
Normal file
@ -0,0 +1,204 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"github.com/vmihailenco/msgpack/v5"
|
||||
)
|
||||
|
||||
const (
|
||||
DatabaseKeyPrefix = "DB:"
|
||||
CollectionKeyPrefix = "COL:"
|
||||
DocumentKeyPrefix = "DOC:"
|
||||
TriggerKeyPrefix = "TRG:"
|
||||
StoredProcedureKeyPrefix = "SP:"
|
||||
UserDefinedFunctionKeyPrefix = "UDF:"
|
||||
)
|
||||
|
||||
func generateKey(
|
||||
resourceType resourceid.ResourceType,
|
||||
databaseId string,
|
||||
collectionId string,
|
||||
resourceId string,
|
||||
) string {
|
||||
result := ""
|
||||
|
||||
switch resourceType {
|
||||
case resourceid.ResourceTypeDatabase:
|
||||
result += DatabaseKeyPrefix
|
||||
case resourceid.ResourceTypeCollection:
|
||||
result += CollectionKeyPrefix
|
||||
case resourceid.ResourceTypeDocument:
|
||||
result += DocumentKeyPrefix
|
||||
case resourceid.ResourceTypeTrigger:
|
||||
result += TriggerKeyPrefix
|
||||
case resourceid.ResourceTypeStoredProcedure:
|
||||
result += StoredProcedureKeyPrefix
|
||||
case resourceid.ResourceTypeUserDefinedFunction:
|
||||
result += UserDefinedFunctionKeyPrefix
|
||||
}
|
||||
|
||||
if databaseId != "" {
|
||||
result += databaseId
|
||||
}
|
||||
|
||||
if collectionId != "" {
|
||||
result += "/colls/" + collectionId
|
||||
}
|
||||
|
||||
if resourceId != "" {
|
||||
result += "/" + resourceId
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func generateDatabaseKey(databaseId string) string {
|
||||
return generateKey(resourceid.ResourceTypeDatabase, databaseId, "", "")
|
||||
}
|
||||
|
||||
func generateCollectionKey(databaseId string, collectionId string) string {
|
||||
return generateKey(resourceid.ResourceTypeCollection, databaseId, collectionId, "")
|
||||
}
|
||||
|
||||
func generateDocumentKey(databaseId string, collectionId string, documentId string) string {
|
||||
return generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, documentId)
|
||||
}
|
||||
|
||||
func generateTriggerKey(databaseId string, collectionId string, triggerId string) string {
|
||||
return generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, triggerId)
|
||||
}
|
||||
|
||||
func generateStoredProcedureKey(databaseId string, collectionId string, storedProcedureId string) string {
|
||||
return generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, storedProcedureId)
|
||||
}
|
||||
|
||||
func generateUserDefinedFunctionKey(databaseId string, collectionId string, udfId string) string {
|
||||
return generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, udfId)
|
||||
}
|
||||
|
||||
func insertKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
|
||||
_, err := txn.Get([]byte(key))
|
||||
if err == nil {
|
||||
return datastore.Conflict
|
||||
}
|
||||
|
||||
if err != badger.ErrKeyNotFound {
|
||||
logger.ErrorLn("Error while checking if key exists:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
buf, err := msgpack.Marshal(value)
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while encoding value:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = txn.Set([]byte(key), buf)
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while setting key:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func getKey(txn *badger.Txn, key string, value interface{}) datastore.DataStoreStatus {
|
||||
item, err := txn.Get([]byte(key))
|
||||
if err != nil {
|
||||
if err == badger.ErrKeyNotFound {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
logger.ErrorLn("Error while getting key:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
val, err := item.ValueCopy(nil)
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while copying value:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
if value == nil {
|
||||
logger.ErrorLn("getKey called with nil value")
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = msgpack.Unmarshal(val, &value)
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while decoding value:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func keyExists(txn *badger.Txn, key string) (bool, error) {
|
||||
_, err := txn.Get([]byte(key))
|
||||
if err == nil {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
if err == badger.ErrKeyNotFound {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return false, err
|
||||
}
|
||||
|
||||
func listByPrefix[T any](db *badger.DB, prefix string) ([]T, datastore.DataStoreStatus) {
|
||||
results := make([]T, 0)
|
||||
|
||||
err := db.View(func(txn *badger.Txn) error {
|
||||
opts := badger.DefaultIteratorOptions
|
||||
opts.Prefix = []byte(prefix)
|
||||
it := txn.NewIterator(opts)
|
||||
defer it.Close()
|
||||
|
||||
for it.Rewind(); it.Valid(); it.Next() {
|
||||
item := it.Item()
|
||||
var entry T
|
||||
|
||||
status := getKey(txn, string(item.Key()), &entry)
|
||||
if status != datastore.StatusOk {
|
||||
logger.ErrorLn("Failed to retrieve entry:", string(item.Key()))
|
||||
continue
|
||||
}
|
||||
|
||||
results = append(results, entry)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while listing entries:", err)
|
||||
return nil, datastore.Unknown
|
||||
}
|
||||
|
||||
return results, datastore.StatusOk
|
||||
}
|
||||
|
||||
func deleteKeysByPrefix(txn *badger.Txn, prefix string) error {
|
||||
opts := badger.DefaultIteratorOptions
|
||||
opts.Prefix = []byte(prefix)
|
||||
it := txn.NewIterator(opts)
|
||||
defer it.Close()
|
||||
|
||||
for it.Rewind(); it.Valid(); it.Next() {
|
||||
key := it.Item().KeyCopy(nil)
|
||||
if err := txn.Delete(key); err != nil {
|
||||
logger.ErrorLn("Failed to delete key:", string(key), "Error:", err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
58
internal/datastore/badger_datastore/document_iterator.go
Normal file
58
internal/datastore/badger_datastore/document_iterator.go
Normal file
@ -0,0 +1,58 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/vmihailenco/msgpack/v5"
|
||||
)
|
||||
|
||||
type BadgerDocumentIterator struct {
|
||||
txn *badger.Txn
|
||||
it *badger.Iterator
|
||||
prefix string
|
||||
}
|
||||
|
||||
func NewBadgerDocumentIterator(txn *badger.Txn, prefix string) *BadgerDocumentIterator {
|
||||
opts := badger.DefaultIteratorOptions
|
||||
opts.Prefix = []byte(prefix)
|
||||
|
||||
it := txn.NewIterator(opts)
|
||||
it.Rewind()
|
||||
|
||||
return &BadgerDocumentIterator{
|
||||
txn: txn,
|
||||
it: it,
|
||||
prefix: prefix,
|
||||
}
|
||||
}
|
||||
|
||||
func (i *BadgerDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
|
||||
if !i.it.Valid() {
|
||||
i.it.Close()
|
||||
return datastore.Document{}, datastore.IterEOF
|
||||
}
|
||||
|
||||
item := i.it.Item()
|
||||
val, err := item.ValueCopy(nil)
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while copying value:", err)
|
||||
return datastore.Document{}, datastore.Unknown
|
||||
}
|
||||
|
||||
current := &datastore.Document{}
|
||||
err = msgpack.Unmarshal(val, ¤t)
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while decoding value:", err)
|
||||
return datastore.Document{}, datastore.Unknown
|
||||
}
|
||||
|
||||
i.it.Next()
|
||||
|
||||
return *current, datastore.StatusOk
|
||||
}
|
||||
|
||||
func (i *BadgerDocumentIterator) Close() {
|
||||
i.it.Close()
|
||||
i.txn.Discard()
|
||||
}
|
127
internal/datastore/badger_datastore/documents.go
Normal file
127
internal/datastore/badger_datastore/documents.go
Normal file
@ -0,0 +1,127 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
func (r *BadgerDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
||||
if err != nil || !dbExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
||||
if err != nil || !collExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
docs, status := listByPrefix[datastore.Document](r.db, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
|
||||
if status == datastore.StatusOk {
|
||||
return docs, datastore.StatusOk
|
||||
}
|
||||
|
||||
return nil, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(false)
|
||||
|
||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
||||
if err != nil || !dbExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
||||
if err != nil || !collExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
iter := NewBadgerDocumentIterator(txn, generateKey(resourceid.ResourceTypeDocument, databaseId, collectionId, ""))
|
||||
return iter, datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
|
||||
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var document datastore.Document
|
||||
status := getKey(txn, documentKey, &document)
|
||||
|
||||
return document, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
|
||||
documentKey := generateDocumentKey(databaseId, collectionId, documentId)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
exists, err := keyExists(txn, documentKey)
|
||||
if err != nil {
|
||||
return datastore.Unknown
|
||||
}
|
||||
if !exists {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
err = txn.Delete([]byte(documentKey))
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while deleting document:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Document{}, status
|
||||
}
|
||||
|
||||
var collection datastore.Collection
|
||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Document{}, status
|
||||
}
|
||||
|
||||
var ok bool
|
||||
var documentId string
|
||||
if documentId, ok = document["id"].(string); !ok || documentId == "" {
|
||||
documentId = fmt.Sprint(uuid.New())
|
||||
document["id"] = documentId
|
||||
}
|
||||
|
||||
document["_ts"] = time.Now().Unix()
|
||||
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
|
||||
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
|
||||
|
||||
status = insertKey(txn, generateDocumentKey(databaseId, collectionId, documentId), document)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Document{}, status
|
||||
}
|
||||
|
||||
return document, datastore.StatusOk
|
||||
}
|
53
internal/datastore/badger_datastore/partition_key_ranges.go
Normal file
53
internal/datastore/badger_datastore/partition_key_ranges.go
Normal file
@ -0,0 +1,53 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
// I have no idea what this is tbh
|
||||
func (r *BadgerDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
|
||||
databaseRid := databaseId
|
||||
collectionRid := collectionId
|
||||
var timestamp int64 = 0
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
||||
if status != datastore.StatusOk {
|
||||
databaseRid = database.ResourceID
|
||||
}
|
||||
|
||||
var collection datastore.Collection
|
||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
||||
if status != datastore.StatusOk {
|
||||
collectionRid = collection.ResourceID
|
||||
timestamp = collection.TimeStamp
|
||||
}
|
||||
|
||||
pkrResourceId := resourceid.NewCombined(collectionRid, resourceid.New(resourceid.ResourceTypePartitionKeyRange))
|
||||
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
|
||||
etag := fmt.Sprintf("\"%s\"", uuid.New())
|
||||
|
||||
return []datastore.PartitionKeyRange{
|
||||
{
|
||||
ResourceID: pkrResourceId,
|
||||
ID: "0",
|
||||
Etag: etag,
|
||||
MinInclusive: "",
|
||||
MaxExclusive: "FF",
|
||||
RidPrefix: 0,
|
||||
Self: pkrSelf,
|
||||
ThroughputFraction: 1,
|
||||
Status: "online",
|
||||
Parents: []interface{}{},
|
||||
TimeStamp: timestamp,
|
||||
Lsn: 17,
|
||||
},
|
||||
}, datastore.StatusOk
|
||||
}
|
107
internal/datastore/badger_datastore/stored_procedures.go
Normal file
107
internal/datastore/badger_datastore/stored_procedures.go
Normal file
@ -0,0 +1,107 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
func (r *BadgerDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
||||
if err != nil || !dbExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
||||
if err != nil || !collExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
storedProcedures, status := listByPrefix[datastore.StoredProcedure](r.db, generateKey(resourceid.ResourceTypeStoredProcedure, databaseId, collectionId, ""))
|
||||
if status == datastore.StatusOk {
|
||||
return storedProcedures, datastore.StatusOk
|
||||
}
|
||||
|
||||
return nil, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
||||
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var storedProcedure datastore.StoredProcedure
|
||||
status := getKey(txn, storedProcedureKey, &storedProcedure)
|
||||
|
||||
return storedProcedure, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) datastore.DataStoreStatus {
|
||||
storedProcedureKey := generateStoredProcedureKey(databaseId, collectionId, storedProcedureId)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
exists, err := keyExists(txn, storedProcedureKey)
|
||||
if err != nil {
|
||||
return datastore.Unknown
|
||||
}
|
||||
if !exists {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
err = txn.Delete([]byte(storedProcedureKey))
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while deleting stored procedure:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) CreateStoredProcedure(databaseId string, collectionId string, storedProcedure datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
if storedProcedure.ID == "" {
|
||||
return datastore.StoredProcedure{}, datastore.BadRequest
|
||||
}
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.StoredProcedure{}, status
|
||||
}
|
||||
|
||||
var collection datastore.Collection
|
||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.StoredProcedure{}, status
|
||||
}
|
||||
|
||||
storedProcedure.TimeStamp = time.Now().Unix()
|
||||
storedProcedure.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
|
||||
storedProcedure.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
storedProcedure.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, storedProcedure.ResourceID)
|
||||
|
||||
status = insertKey(txn, generateStoredProcedureKey(databaseId, collectionId, storedProcedure.ID), storedProcedure)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.StoredProcedure{}, status
|
||||
}
|
||||
|
||||
return storedProcedure, datastore.StatusOk
|
||||
}
|
107
internal/datastore/badger_datastore/triggers.go
Normal file
107
internal/datastore/badger_datastore/triggers.go
Normal file
@ -0,0 +1,107 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
func (r *BadgerDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
||||
if err != nil || !dbExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
||||
if err != nil || !collExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
triggers, status := listByPrefix[datastore.Trigger](r.db, generateKey(resourceid.ResourceTypeTrigger, databaseId, collectionId, ""))
|
||||
if status == datastore.StatusOk {
|
||||
return triggers, datastore.StatusOk
|
||||
}
|
||||
|
||||
return nil, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
|
||||
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var trigger datastore.Trigger
|
||||
status := getKey(txn, triggerKey, &trigger)
|
||||
|
||||
return trigger, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
|
||||
triggerKey := generateTriggerKey(databaseId, collectionId, triggerId)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
exists, err := keyExists(txn, triggerKey)
|
||||
if err != nil {
|
||||
return datastore.Unknown
|
||||
}
|
||||
if !exists {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
err = txn.Delete([]byte(triggerKey))
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while deleting trigger:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
if trigger.ID == "" {
|
||||
return datastore.Trigger{}, datastore.BadRequest
|
||||
}
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Trigger{}, status
|
||||
}
|
||||
|
||||
var collection datastore.Collection
|
||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Trigger{}, status
|
||||
}
|
||||
|
||||
trigger.TimeStamp = time.Now().Unix()
|
||||
trigger.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeTrigger))
|
||||
trigger.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
trigger.Self = fmt.Sprintf("dbs/%s/colls/%s/triggers/%s/", database.ResourceID, collection.ResourceID, trigger.ResourceID)
|
||||
|
||||
status = insertKey(txn, generateTriggerKey(databaseId, collectionId, trigger.ID), trigger)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.Trigger{}, status
|
||||
}
|
||||
|
||||
return trigger, datastore.StatusOk
|
||||
}
|
107
internal/datastore/badger_datastore/user_defined_functions.go
Normal file
107
internal/datastore/badger_datastore/user_defined_functions.go
Normal file
@ -0,0 +1,107 @@
|
||||
package badgerdatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
func (r *BadgerDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
dbExists, err := keyExists(txn, generateDatabaseKey(databaseId))
|
||||
if err != nil || !dbExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
collExists, err := keyExists(txn, generateCollectionKey(databaseId, collectionId))
|
||||
if err != nil || !collExists {
|
||||
return nil, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
udfs, status := listByPrefix[datastore.UserDefinedFunction](r.db, generateKey(resourceid.ResourceTypeUserDefinedFunction, databaseId, collectionId, ""))
|
||||
if status == datastore.StatusOk {
|
||||
return udfs, datastore.StatusOk
|
||||
}
|
||||
|
||||
return nil, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
||||
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
|
||||
|
||||
txn := r.db.NewTransaction(false)
|
||||
defer txn.Discard()
|
||||
|
||||
var udf datastore.UserDefinedFunction
|
||||
status := getKey(txn, udfKey, &udf)
|
||||
|
||||
return udf, status
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
|
||||
udfKey := generateUserDefinedFunctionKey(databaseId, collectionId, udfId)
|
||||
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
exists, err := keyExists(txn, udfKey)
|
||||
if err != nil {
|
||||
return datastore.Unknown
|
||||
}
|
||||
if !exists {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
err = txn.Delete([]byte(udfKey))
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while deleting user defined function:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
err = txn.Commit()
|
||||
if err != nil {
|
||||
logger.ErrorLn("Error while committing transaction:", err)
|
||||
return datastore.Unknown
|
||||
}
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *BadgerDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
||||
txn := r.db.NewTransaction(true)
|
||||
defer txn.Discard()
|
||||
|
||||
if udf.ID == "" {
|
||||
return datastore.UserDefinedFunction{}, datastore.BadRequest
|
||||
}
|
||||
|
||||
var database datastore.Database
|
||||
status := getKey(txn, generateDatabaseKey(databaseId), &database)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.UserDefinedFunction{}, status
|
||||
}
|
||||
|
||||
var collection datastore.Collection
|
||||
status = getKey(txn, generateCollectionKey(databaseId, collectionId), &collection)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.UserDefinedFunction{}, status
|
||||
}
|
||||
|
||||
udf.TimeStamp = time.Now().Unix()
|
||||
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
|
||||
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
|
||||
|
||||
status = insertKey(txn, generateUserDefinedFunctionKey(databaseId, collectionId, udf.ID), udf)
|
||||
if status != datastore.StatusOk {
|
||||
return datastore.UserDefinedFunction{}, status
|
||||
}
|
||||
|
||||
return udf, datastore.StatusOk
|
||||
}
|
44
internal/datastore/datastore.go
Normal file
44
internal/datastore/datastore.go
Normal file
@ -0,0 +1,44 @@
|
||||
package datastore
|
||||
|
||||
type DataStore interface {
|
||||
GetAllDatabases() ([]Database, DataStoreStatus)
|
||||
GetDatabase(databaseId string) (Database, DataStoreStatus)
|
||||
DeleteDatabase(databaseId string) DataStoreStatus
|
||||
CreateDatabase(newDatabase Database) (Database, DataStoreStatus)
|
||||
|
||||
GetAllCollections(databaseId string) ([]Collection, DataStoreStatus)
|
||||
GetCollection(databaseId string, collectionId string) (Collection, DataStoreStatus)
|
||||
DeleteCollection(databaseId string, collectionId string) DataStoreStatus
|
||||
CreateCollection(databaseId string, newCollection Collection) (Collection, DataStoreStatus)
|
||||
|
||||
GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
|
||||
GetDocumentIterator(databaseId string, collectionId string) (DocumentIterator, DataStoreStatus)
|
||||
GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
|
||||
DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
|
||||
CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (Document, DataStoreStatus)
|
||||
|
||||
GetAllTriggers(databaseId string, collectionId string) ([]Trigger, DataStoreStatus)
|
||||
GetTrigger(databaseId string, collectionId string, triggerId string) (Trigger, DataStoreStatus)
|
||||
DeleteTrigger(databaseId string, collectionId string, triggerId string) DataStoreStatus
|
||||
CreateTrigger(databaseId string, collectionId string, trigger Trigger) (Trigger, DataStoreStatus)
|
||||
|
||||
GetAllStoredProcedures(databaseId string, collectionId string) ([]StoredProcedure, DataStoreStatus)
|
||||
GetStoredProcedure(databaseId string, collectionId string, storedProcedureId string) (StoredProcedure, DataStoreStatus)
|
||||
DeleteStoredProcedure(databaseId string, collectionId string, storedProcedureId string) DataStoreStatus
|
||||
CreateStoredProcedure(databaseId string, collectionId string, storedProcedure StoredProcedure) (StoredProcedure, DataStoreStatus)
|
||||
|
||||
GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]UserDefinedFunction, DataStoreStatus)
|
||||
GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (UserDefinedFunction, DataStoreStatus)
|
||||
DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) DataStoreStatus
|
||||
CreateUserDefinedFunction(databaseId string, collectionId string, udf UserDefinedFunction) (UserDefinedFunction, DataStoreStatus)
|
||||
|
||||
GetPartitionKeyRanges(databaseId string, collectionId string) ([]PartitionKeyRange, DataStoreStatus)
|
||||
|
||||
Close()
|
||||
DumpToJson() (string, error)
|
||||
}
|
||||
|
||||
type DocumentIterator interface {
|
||||
Next() (Document, DataStoreStatus)
|
||||
Close()
|
||||
}
|
21
internal/datastore/json_datastore/array_document_iterator.go
Normal file
21
internal/datastore/json_datastore/array_document_iterator.go
Normal file
@ -0,0 +1,21 @@
|
||||
package jsondatastore
|
||||
|
||||
import "github.com/pikami/cosmium/internal/datastore"
|
||||
|
||||
type ArrayDocumentIterator struct {
|
||||
documents []datastore.Document
|
||||
index int
|
||||
}
|
||||
|
||||
func (i *ArrayDocumentIterator) Next() (datastore.Document, datastore.DataStoreStatus) {
|
||||
i.index++
|
||||
if i.index >= len(i.documents) {
|
||||
return datastore.Document{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
return i.documents[i.index], datastore.StatusOk
|
||||
}
|
||||
|
||||
func (i *ArrayDocumentIterator) Close() {
|
||||
i.documents = []datastore.Document{}
|
||||
}
|
@ -1,52 +1,52 @@
|
||||
package repositories
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *DataRepository) GetAllCollections(databaseId string) ([]repositorymodels.Collection, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetAllCollections(databaseId string) ([]datastore.Collection, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return make([]repositorymodels.Collection, 0), repositorymodels.StatusNotFound
|
||||
return make([]datastore.Collection, 0), datastore.StatusNotFound
|
||||
}
|
||||
|
||||
return maps.Values(r.storeState.Collections[databaseId]), repositorymodels.StatusOk
|
||||
return maps.Values(r.storeState.Collections[databaseId]), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetCollection(databaseId string, collectionId string) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetCollection(databaseId string, collectionId string) (datastore.Collection, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
|
||||
return datastore.Collection{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
|
||||
return datastore.Collection{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
return r.storeState.Collections[databaseId][collectionId], repositorymodels.StatusOk
|
||||
return r.storeState.Collections[databaseId][collectionId], datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) DeleteCollection(databaseId string, collectionId string) repositorymodels.RepositoryStatus {
|
||||
func (r *JsonDataStore) DeleteCollection(databaseId string, collectionId string) datastore.DataStoreStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.Collections[databaseId], collectionId)
|
||||
@ -55,24 +55,24 @@ func (r *DataRepository) DeleteCollection(databaseId string, collectionId string
|
||||
delete(r.storeState.StoredProcedures[databaseId], collectionId)
|
||||
delete(r.storeState.UserDefinedFunctions[databaseId], collectionId)
|
||||
|
||||
return repositorymodels.StatusOk
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) CreateCollection(databaseId string, newCollection datastore.Collection) (datastore.Collection, datastore.DataStoreStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var database repositorymodels.Database
|
||||
var database datastore.Database
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.Collection{}, repositorymodels.StatusNotFound
|
||||
return datastore.Collection{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok = r.storeState.Collections[databaseId][newCollection.ID]; ok {
|
||||
return repositorymodels.Collection{}, repositorymodels.Conflict
|
||||
return datastore.Collection{}, datastore.Conflict
|
||||
}
|
||||
|
||||
newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
|
||||
newCollection = structhidrators.Hidrate(newCollection).(datastore.Collection)
|
||||
|
||||
newCollection.TimeStamp = time.Now().Unix()
|
||||
newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New(resourceid.ResourceTypeCollection))
|
||||
@ -80,10 +80,10 @@ func (r *DataRepository) CreateCollection(databaseId string, newCollection repos
|
||||
newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
|
||||
|
||||
r.storeState.Collections[databaseId][newCollection.ID] = newCollection
|
||||
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
|
||||
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]repositorymodels.Trigger)
|
||||
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]repositorymodels.StoredProcedure)
|
||||
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]repositorymodels.UserDefinedFunction)
|
||||
r.storeState.Documents[databaseId][newCollection.ID] = make(map[string]datastore.Document)
|
||||
r.storeState.Triggers[databaseId][newCollection.ID] = make(map[string]datastore.Trigger)
|
||||
r.storeState.StoredProcedures[databaseId][newCollection.ID] = make(map[string]datastore.StoredProcedure)
|
||||
r.storeState.UserDefinedFunctions[databaseId][newCollection.ID] = make(map[string]datastore.UserDefinedFunction)
|
||||
|
||||
return newCollection, repositorymodels.StatusOk
|
||||
return newCollection, datastore.StatusOk
|
||||
}
|
@ -1,39 +1,39 @@
|
||||
package repositories
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *DataRepository) GetAllDatabases() ([]repositorymodels.Database, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetAllDatabases() ([]datastore.Database, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
return maps.Values(r.storeState.Databases), repositorymodels.StatusOk
|
||||
return maps.Values(r.storeState.Databases), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetDatabase(id string) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetDatabase(id string) (datastore.Database, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if database, ok := r.storeState.Databases[id]; ok {
|
||||
return database, repositorymodels.StatusOk
|
||||
return database, datastore.StatusOk
|
||||
}
|
||||
|
||||
return repositorymodels.Database{}, repositorymodels.StatusNotFound
|
||||
return datastore.Database{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
func (r *DataRepository) DeleteDatabase(id string) repositorymodels.RepositoryStatus {
|
||||
func (r *JsonDataStore) DeleteDatabase(id string) datastore.DataStoreStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[id]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.Databases, id)
|
||||
@ -43,15 +43,15 @@ func (r *DataRepository) DeleteDatabase(id string) repositorymodels.RepositorySt
|
||||
delete(r.storeState.StoredProcedures, id)
|
||||
delete(r.storeState.UserDefinedFunctions, id)
|
||||
|
||||
return repositorymodels.StatusOk
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Database, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) CreateDatabase(newDatabase datastore.Database) (datastore.Database, datastore.DataStoreStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[newDatabase.ID]; ok {
|
||||
return repositorymodels.Database{}, repositorymodels.Conflict
|
||||
return datastore.Database{}, datastore.Conflict
|
||||
}
|
||||
|
||||
newDatabase.TimeStamp = time.Now().Unix()
|
||||
@ -60,11 +60,11 @@ func (r *DataRepository) CreateDatabase(newDatabase repositorymodels.Database) (
|
||||
newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
|
||||
|
||||
r.storeState.Databases[newDatabase.ID] = newDatabase
|
||||
r.storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
|
||||
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
|
||||
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]repositorymodels.Trigger)
|
||||
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]repositorymodels.StoredProcedure)
|
||||
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
|
||||
r.storeState.Collections[newDatabase.ID] = make(map[string]datastore.Collection)
|
||||
r.storeState.Documents[newDatabase.ID] = make(map[string]map[string]datastore.Document)
|
||||
r.storeState.Triggers[newDatabase.ID] = make(map[string]map[string]datastore.Trigger)
|
||||
r.storeState.StoredProcedures[newDatabase.ID] = make(map[string]map[string]datastore.StoredProcedure)
|
||||
r.storeState.UserDefinedFunctions[newDatabase.ID] = make(map[string]map[string]datastore.UserDefinedFunction)
|
||||
|
||||
return newDatabase, repositorymodels.StatusOk
|
||||
return newDatabase, datastore.StatusOk
|
||||
}
|
113
internal/datastore/json_datastore/documents.go
Normal file
113
internal/datastore/json_datastore/documents.go
Normal file
@ -0,0 +1,113 @@
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *JsonDataStore) GetAllDocuments(databaseId string, collectionId string) ([]datastore.Document, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return make([]datastore.Document, 0), datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return make([]datastore.Document, 0), datastore.StatusNotFound
|
||||
}
|
||||
|
||||
return maps.Values(r.storeState.Documents[databaseId][collectionId]), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) GetDocument(databaseId string, collectionId string, documentId string) (datastore.Document, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.Document{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.Document{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
||||
return datastore.Document{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
return r.storeState.Documents[databaseId][collectionId][documentId], datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) DeleteDocument(databaseId string, collectionId string, documentId string) datastore.DataStoreStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.Documents[databaseId][collectionId], documentId)
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (datastore.Document, datastore.DataStoreStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var documentId string
|
||||
var database datastore.Database
|
||||
var collection datastore.Collection
|
||||
if documentId, ok = document["id"].(string); !ok || documentId == "" {
|
||||
documentId = fmt.Sprint(uuid.New())
|
||||
document["id"] = documentId
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.Document{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.Document{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
|
||||
return datastore.Document{}, datastore.Conflict
|
||||
}
|
||||
|
||||
document["_ts"] = time.Now().Unix()
|
||||
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
|
||||
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
|
||||
|
||||
r.storeState.Documents[databaseId][collectionId][documentId] = document
|
||||
|
||||
return document, datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) GetDocumentIterator(databaseId string, collectionId string) (datastore.DocumentIterator, datastore.DataStoreStatus) {
|
||||
documents, status := r.GetAllDocuments(databaseId, collectionId)
|
||||
if status != datastore.StatusOk {
|
||||
return nil, status
|
||||
}
|
||||
|
||||
return &ArrayDocumentIterator{
|
||||
documents: documents,
|
||||
index: -1,
|
||||
}, datastore.StatusOk
|
||||
}
|
34
internal/datastore/json_datastore/map_datastore.go
Normal file
34
internal/datastore/json_datastore/map_datastore.go
Normal file
@ -0,0 +1,34 @@
|
||||
package jsondatastore
|
||||
|
||||
import "github.com/pikami/cosmium/internal/datastore"
|
||||
|
||||
type JsonDataStore struct {
|
||||
storeState State
|
||||
|
||||
initialDataFilePath string
|
||||
persistDataFilePath string
|
||||
}
|
||||
|
||||
type JsonDataStoreOptions struct {
|
||||
InitialDataFilePath string
|
||||
PersistDataFilePath string
|
||||
}
|
||||
|
||||
func NewJsonDataStore(options JsonDataStoreOptions) *JsonDataStore {
|
||||
dataStore := &JsonDataStore{
|
||||
storeState: State{
|
||||
Databases: make(map[string]datastore.Database),
|
||||
Collections: make(map[string]map[string]datastore.Collection),
|
||||
Documents: make(map[string]map[string]map[string]datastore.Document),
|
||||
Triggers: make(map[string]map[string]map[string]datastore.Trigger),
|
||||
StoredProcedures: make(map[string]map[string]map[string]datastore.StoredProcedure),
|
||||
UserDefinedFunctions: make(map[string]map[string]map[string]datastore.UserDefinedFunction),
|
||||
},
|
||||
initialDataFilePath: options.InitialDataFilePath,
|
||||
persistDataFilePath: options.PersistDataFilePath,
|
||||
}
|
||||
|
||||
dataStore.InitializeDataStore()
|
||||
|
||||
return dataStore
|
||||
}
|
@ -1,15 +1,15 @@
|
||||
package repositories
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
)
|
||||
|
||||
// I have no idea what this is tbh
|
||||
func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetPartitionKeyRanges(databaseId string, collectionId string) ([]datastore.PartitionKeyRange, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
@ -30,7 +30,7 @@ func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId s
|
||||
pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", databaseRid, collectionRid, pkrResourceId)
|
||||
etag := fmt.Sprintf("\"%s\"", uuid.New())
|
||||
|
||||
return []repositorymodels.PartitionKeyRange{
|
||||
return []datastore.PartitionKeyRange{
|
||||
{
|
||||
ResourceID: pkrResourceId,
|
||||
ID: "0",
|
||||
@ -45,5 +45,5 @@ func (r *DataRepository) GetPartitionKeyRanges(databaseId string, collectionId s
|
||||
TimeStamp: timestamp,
|
||||
Lsn: 17,
|
||||
},
|
||||
}, repositorymodels.StatusOk
|
||||
}, datastore.StatusOk
|
||||
}
|
@ -1,16 +1,39 @@
|
||||
package repositories
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"os"
|
||||
"reflect"
|
||||
"sync"
|
||||
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
)
|
||||
|
||||
func (r *DataRepository) InitializeRepository() {
|
||||
type State struct {
|
||||
sync.RWMutex
|
||||
|
||||
// Map databaseId -> Database
|
||||
Databases map[string]datastore.Database `json:"databases"`
|
||||
|
||||
// Map databaseId -> collectionId -> Collection
|
||||
Collections map[string]map[string]datastore.Collection `json:"collections"`
|
||||
|
||||
// Map databaseId -> collectionId -> documentId -> Documents
|
||||
Documents map[string]map[string]map[string]datastore.Document `json:"documents"`
|
||||
|
||||
// Map databaseId -> collectionId -> triggerId -> Trigger
|
||||
Triggers map[string]map[string]map[string]datastore.Trigger `json:"triggers"`
|
||||
|
||||
// Map databaseId -> collectionId -> spId -> StoredProcedure
|
||||
StoredProcedures map[string]map[string]map[string]datastore.StoredProcedure `json:"sprocs"`
|
||||
|
||||
// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
|
||||
UserDefinedFunctions map[string]map[string]map[string]datastore.UserDefinedFunction `json:"udfs"`
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) InitializeDataStore() {
|
||||
if r.initialDataFilePath != "" {
|
||||
r.LoadStateFS(r.initialDataFilePath)
|
||||
return
|
||||
@ -32,7 +55,7 @@ func (r *DataRepository) InitializeRepository() {
|
||||
}
|
||||
}
|
||||
|
||||
func (r *DataRepository) LoadStateFS(filePath string) {
|
||||
func (r *JsonDataStore) LoadStateFS(filePath string) {
|
||||
data, err := os.ReadFile(filePath)
|
||||
if err != nil {
|
||||
log.Fatalf("Error reading state JSON file: %v", err)
|
||||
@ -45,11 +68,11 @@ func (r *DataRepository) LoadStateFS(filePath string) {
|
||||
}
|
||||
}
|
||||
|
||||
func (r *DataRepository) LoadStateJSON(jsonData string) error {
|
||||
func (r *JsonDataStore) LoadStateJSON(jsonData string) error {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var state repositorymodels.State
|
||||
var state State
|
||||
if err := json.Unmarshal([]byte(jsonData), &state); err != nil {
|
||||
return err
|
||||
}
|
||||
@ -71,7 +94,7 @@ func (r *DataRepository) LoadStateJSON(jsonData string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *DataRepository) SaveStateFS(filePath string) {
|
||||
func (r *JsonDataStore) SaveStateFS(filePath string) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
@ -92,7 +115,7 @@ func (r *DataRepository) SaveStateFS(filePath string) {
|
||||
logger.Infof("User defined functions: %d\n", getLength(r.storeState.UserDefinedFunctions))
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetState() (string, error) {
|
||||
func (r *JsonDataStore) DumpToJson() (string, error) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
@ -103,16 +126,23 @@ func (r *DataRepository) GetState() (string, error) {
|
||||
}
|
||||
|
||||
return string(data), nil
|
||||
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) Close() {
|
||||
if r.persistDataFilePath != "" {
|
||||
r.SaveStateFS(r.persistDataFilePath)
|
||||
}
|
||||
}
|
||||
|
||||
func getLength(v interface{}) int {
|
||||
switch v.(type) {
|
||||
case repositorymodels.Database,
|
||||
repositorymodels.Collection,
|
||||
repositorymodels.Document,
|
||||
repositorymodels.Trigger,
|
||||
repositorymodels.StoredProcedure,
|
||||
repositorymodels.UserDefinedFunction:
|
||||
case datastore.Database,
|
||||
datastore.Collection,
|
||||
datastore.Document,
|
||||
datastore.Trigger,
|
||||
datastore.StoredProcedure,
|
||||
datastore.UserDefinedFunction:
|
||||
return 1
|
||||
}
|
||||
|
||||
@ -133,55 +163,55 @@ func getLength(v interface{}) int {
|
||||
return count
|
||||
}
|
||||
|
||||
func (r *DataRepository) ensureStoreStateNoNullReferences() {
|
||||
func (r *JsonDataStore) ensureStoreStateNoNullReferences() {
|
||||
if r.storeState.Databases == nil {
|
||||
r.storeState.Databases = make(map[string]repositorymodels.Database)
|
||||
r.storeState.Databases = make(map[string]datastore.Database)
|
||||
}
|
||||
|
||||
if r.storeState.Collections == nil {
|
||||
r.storeState.Collections = make(map[string]map[string]repositorymodels.Collection)
|
||||
r.storeState.Collections = make(map[string]map[string]datastore.Collection)
|
||||
}
|
||||
|
||||
if r.storeState.Documents == nil {
|
||||
r.storeState.Documents = make(map[string]map[string]map[string]repositorymodels.Document)
|
||||
r.storeState.Documents = make(map[string]map[string]map[string]datastore.Document)
|
||||
}
|
||||
|
||||
if r.storeState.Triggers == nil {
|
||||
r.storeState.Triggers = make(map[string]map[string]map[string]repositorymodels.Trigger)
|
||||
r.storeState.Triggers = make(map[string]map[string]map[string]datastore.Trigger)
|
||||
}
|
||||
|
||||
if r.storeState.StoredProcedures == nil {
|
||||
r.storeState.StoredProcedures = make(map[string]map[string]map[string]repositorymodels.StoredProcedure)
|
||||
r.storeState.StoredProcedures = make(map[string]map[string]map[string]datastore.StoredProcedure)
|
||||
}
|
||||
|
||||
if r.storeState.UserDefinedFunctions == nil {
|
||||
r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction)
|
||||
r.storeState.UserDefinedFunctions = make(map[string]map[string]map[string]datastore.UserDefinedFunction)
|
||||
}
|
||||
|
||||
for database := range r.storeState.Databases {
|
||||
if r.storeState.Collections[database] == nil {
|
||||
r.storeState.Collections[database] = make(map[string]repositorymodels.Collection)
|
||||
r.storeState.Collections[database] = make(map[string]datastore.Collection)
|
||||
}
|
||||
|
||||
if r.storeState.Documents[database] == nil {
|
||||
r.storeState.Documents[database] = make(map[string]map[string]repositorymodels.Document)
|
||||
r.storeState.Documents[database] = make(map[string]map[string]datastore.Document)
|
||||
}
|
||||
|
||||
if r.storeState.Triggers[database] == nil {
|
||||
r.storeState.Triggers[database] = make(map[string]map[string]repositorymodels.Trigger)
|
||||
r.storeState.Triggers[database] = make(map[string]map[string]datastore.Trigger)
|
||||
}
|
||||
|
||||
if r.storeState.StoredProcedures[database] == nil {
|
||||
r.storeState.StoredProcedures[database] = make(map[string]map[string]repositorymodels.StoredProcedure)
|
||||
r.storeState.StoredProcedures[database] = make(map[string]map[string]datastore.StoredProcedure)
|
||||
}
|
||||
|
||||
if r.storeState.UserDefinedFunctions[database] == nil {
|
||||
r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]repositorymodels.UserDefinedFunction)
|
||||
r.storeState.UserDefinedFunctions[database] = make(map[string]map[string]datastore.UserDefinedFunction)
|
||||
}
|
||||
|
||||
for collection := range r.storeState.Collections[database] {
|
||||
if r.storeState.Documents[database][collection] == nil {
|
||||
r.storeState.Documents[database][collection] = make(map[string]repositorymodels.Document)
|
||||
r.storeState.Documents[database][collection] = make(map[string]datastore.Document)
|
||||
}
|
||||
|
||||
for document := range r.storeState.Documents[database][collection] {
|
||||
@ -191,15 +221,15 @@ func (r *DataRepository) ensureStoreStateNoNullReferences() {
|
||||
}
|
||||
|
||||
if r.storeState.Triggers[database][collection] == nil {
|
||||
r.storeState.Triggers[database][collection] = make(map[string]repositorymodels.Trigger)
|
||||
r.storeState.Triggers[database][collection] = make(map[string]datastore.Trigger)
|
||||
}
|
||||
|
||||
if r.storeState.StoredProcedures[database][collection] == nil {
|
||||
r.storeState.StoredProcedures[database][collection] = make(map[string]repositorymodels.StoredProcedure)
|
||||
r.storeState.StoredProcedures[database][collection] = make(map[string]datastore.StoredProcedure)
|
||||
}
|
||||
|
||||
if r.storeState.UserDefinedFunctions[database][collection] == nil {
|
||||
r.storeState.UserDefinedFunctions[database][collection] = make(map[string]repositorymodels.UserDefinedFunction)
|
||||
r.storeState.UserDefinedFunctions[database][collection] = make(map[string]datastore.UserDefinedFunction)
|
||||
}
|
||||
}
|
||||
}
|
91
internal/datastore/json_datastore/stored_procedures.go
Normal file
91
internal/datastore/json_datastore/stored_procedures.go
Normal file
@ -0,0 +1,91 @@
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *JsonDataStore) GetAllStoredProcedures(databaseId string, collectionId string) ([]datastore.StoredProcedure, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) GetStoredProcedure(databaseId string, collectionId string, spId string) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
|
||||
return sp, datastore.StatusOk
|
||||
}
|
||||
|
||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) DeleteStoredProcedure(databaseId string, collectionId string, spId string) datastore.DataStoreStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) CreateStoredProcedure(databaseId string, collectionId string, sp datastore.StoredProcedure) (datastore.StoredProcedure, datastore.DataStoreStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var database datastore.Database
|
||||
var collection datastore.Collection
|
||||
if sp.ID == "" {
|
||||
return datastore.StoredProcedure{}, datastore.BadRequest
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.StoredProcedure{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
|
||||
return datastore.StoredProcedure{}, datastore.Conflict
|
||||
}
|
||||
|
||||
sp.TimeStamp = time.Now().Unix()
|
||||
sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
|
||||
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
|
||||
|
||||
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
|
||||
|
||||
return sp, datastore.StatusOk
|
||||
}
|
@ -1,83 +1,83 @@
|
||||
package repositories
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *DataRepository) GetAllTriggers(databaseId string, collectionId string) ([]repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetAllTriggers(databaseId string, collectionId string) ([]datastore.Trigger, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), repositorymodels.StatusOk
|
||||
return maps.Values(r.storeState.Triggers[databaseId][collectionId]), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetTrigger(databaseId string, collectionId string, triggerId string) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) GetTrigger(databaseId string, collectionId string, triggerId string) (datastore.Trigger, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
|
||||
return datastore.Trigger{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
|
||||
return datastore.Trigger{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if trigger, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; ok {
|
||||
return trigger, repositorymodels.StatusOk
|
||||
return trigger, datastore.StatusOk
|
||||
}
|
||||
|
||||
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
|
||||
return datastore.Trigger{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
func (r *DataRepository) DeleteTrigger(databaseId string, collectionId string, triggerId string) repositorymodels.RepositoryStatus {
|
||||
func (r *JsonDataStore) DeleteTrigger(databaseId string, collectionId string, triggerId string) datastore.DataStoreStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Triggers[databaseId][collectionId][triggerId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.Triggers[databaseId][collectionId], triggerId)
|
||||
|
||||
return repositorymodels.StatusOk
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) CreateTrigger(databaseId string, collectionId string, trigger repositorymodels.Trigger) (repositorymodels.Trigger, repositorymodels.RepositoryStatus) {
|
||||
func (r *JsonDataStore) CreateTrigger(databaseId string, collectionId string, trigger datastore.Trigger) (datastore.Trigger, datastore.DataStoreStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var database repositorymodels.Database
|
||||
var collection repositorymodels.Collection
|
||||
var database datastore.Database
|
||||
var collection datastore.Collection
|
||||
if trigger.ID == "" {
|
||||
return repositorymodels.Trigger{}, repositorymodels.BadRequest
|
||||
return datastore.Trigger{}, datastore.BadRequest
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
|
||||
return datastore.Trigger{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.Trigger{}, repositorymodels.StatusNotFound
|
||||
return datastore.Trigger{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok = r.storeState.Triggers[databaseId][collectionId][trigger.ID]; ok {
|
||||
return repositorymodels.Trigger{}, repositorymodels.Conflict
|
||||
return datastore.Trigger{}, datastore.Conflict
|
||||
}
|
||||
|
||||
trigger.TimeStamp = time.Now().Unix()
|
||||
@ -87,5 +87,5 @@ func (r *DataRepository) CreateTrigger(databaseId string, collectionId string, t
|
||||
|
||||
r.storeState.Triggers[databaseId][collectionId][trigger.ID] = trigger
|
||||
|
||||
return trigger, repositorymodels.StatusOk
|
||||
return trigger, datastore.StatusOk
|
||||
}
|
91
internal/datastore/json_datastore/user_defined_functions.go
Normal file
91
internal/datastore/json_datastore/user_defined_functions.go
Normal file
@ -0,0 +1,91 @@
|
||||
package jsondatastore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *JsonDataStore) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
|
||||
return udf, datastore.StatusOk
|
||||
}
|
||||
|
||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) datastore.DataStoreStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
|
||||
return datastore.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
|
||||
|
||||
return datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r *JsonDataStore) CreateUserDefinedFunction(databaseId string, collectionId string, udf datastore.UserDefinedFunction) (datastore.UserDefinedFunction, datastore.DataStoreStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var database datastore.Database
|
||||
var collection datastore.Collection
|
||||
if udf.ID == "" {
|
||||
return datastore.UserDefinedFunction{}, datastore.BadRequest
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return datastore.UserDefinedFunction{}, datastore.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
|
||||
return datastore.UserDefinedFunction{}, datastore.Conflict
|
||||
}
|
||||
|
||||
udf.TimeStamp = time.Now().Unix()
|
||||
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
|
||||
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
|
||||
|
||||
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
|
||||
|
||||
return udf, datastore.StatusOk
|
||||
}
|
@ -1,6 +1,4 @@
|
||||
package repositorymodels
|
||||
|
||||
import "sync"
|
||||
package datastore
|
||||
|
||||
type Database struct {
|
||||
ID string `json:"id"`
|
||||
@ -10,13 +8,15 @@ type Database struct {
|
||||
Self string `json:"_self"`
|
||||
}
|
||||
|
||||
type RepositoryStatus int
|
||||
type DataStoreStatus int
|
||||
|
||||
const (
|
||||
StatusOk = 1
|
||||
StatusNotFound = 2
|
||||
Conflict = 3
|
||||
BadRequest = 4
|
||||
IterEOF = 5
|
||||
Unknown = 6
|
||||
)
|
||||
|
||||
type TriggerOperation string
|
||||
@ -117,25 +117,3 @@ type PartitionKeyRange struct {
|
||||
TimeStamp int64 `json:"_ts"`
|
||||
Lsn int `json:"lsn"`
|
||||
}
|
||||
|
||||
type State struct {
|
||||
sync.RWMutex
|
||||
|
||||
// Map databaseId -> Database
|
||||
Databases map[string]Database `json:"databases"`
|
||||
|
||||
// Map databaseId -> collectionId -> Collection
|
||||
Collections map[string]map[string]Collection `json:"collections"`
|
||||
|
||||
// Map databaseId -> collectionId -> documentId -> Documents
|
||||
Documents map[string]map[string]map[string]Document `json:"documents"`
|
||||
|
||||
// Map databaseId -> collectionId -> triggerId -> Trigger
|
||||
Triggers map[string]map[string]map[string]Trigger `json:"triggers"`
|
||||
|
||||
// Map databaseId -> collectionId -> spId -> StoredProcedure
|
||||
StoredProcedures map[string]map[string]map[string]StoredProcedure `json:"sprocs"`
|
||||
|
||||
// Map databaseId -> collectionId -> udfId -> UserDefinedFunction
|
||||
UserDefinedFunctions map[string]map[string]map[string]UserDefinedFunction `json:"udfs"`
|
||||
}
|
@ -1,130 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
"github.com/pikami/cosmium/parsers/nosql"
|
||||
memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *DataRepository) GetAllDocuments(databaseId string, collectionId string) ([]repositorymodels.Document, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return make([]repositorymodels.Document, 0), repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
return maps.Values(r.storeState.Documents[databaseId][collectionId]), repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetDocument(databaseId string, collectionId string, documentId string) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
||||
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
return r.storeState.Documents[databaseId][collectionId][documentId], repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) DeleteDocument(databaseId string, collectionId string, documentId string) repositorymodels.RepositoryStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.Documents[databaseId][collectionId], documentId)
|
||||
|
||||
return repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var documentId string
|
||||
var database repositorymodels.Database
|
||||
var collection repositorymodels.Collection
|
||||
if documentId, ok = document["id"].(string); !ok || documentId == "" {
|
||||
documentId = fmt.Sprint(uuid.New())
|
||||
document["id"] = documentId
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.Document{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Documents[databaseId][collectionId][documentId]; ok {
|
||||
return repositorymodels.Document{}, repositorymodels.Conflict
|
||||
}
|
||||
|
||||
document["_ts"] = time.Now().Unix()
|
||||
document["_rid"] = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeDocument))
|
||||
document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
|
||||
|
||||
r.storeState.Documents[databaseId][collectionId][documentId] = document
|
||||
|
||||
return document, repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) ExecuteQueryDocuments(databaseId string, collectionId string, query string, queryParameters map[string]interface{}) ([]memoryexecutor.RowType, repositorymodels.RepositoryStatus) {
|
||||
parsedQuery, err := nosql.Parse("", []byte(query))
|
||||
if err != nil {
|
||||
log.Printf("Failed to parse query: %s\nerr: %v", query, err)
|
||||
return nil, repositorymodels.BadRequest
|
||||
}
|
||||
|
||||
collectionDocuments, status := r.GetAllDocuments(databaseId, collectionId)
|
||||
if status != repositorymodels.StatusOk {
|
||||
return nil, status
|
||||
}
|
||||
|
||||
covDocs := make([]memoryexecutor.RowType, 0)
|
||||
for _, doc := range collectionDocuments {
|
||||
covDocs = append(covDocs, map[string]interface{}(doc))
|
||||
}
|
||||
|
||||
if typedQuery, ok := parsedQuery.(parsers.SelectStmt); ok {
|
||||
typedQuery.Parameters = queryParameters
|
||||
return memoryexecutor.ExecuteQuery(typedQuery, covDocs), repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
return nil, repositorymodels.BadRequest
|
||||
}
|
@ -1,34 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
|
||||
type DataRepository struct {
|
||||
storeState repositorymodels.State
|
||||
|
||||
initialDataFilePath string
|
||||
persistDataFilePath string
|
||||
}
|
||||
|
||||
type RepositoryOptions struct {
|
||||
InitialDataFilePath string
|
||||
PersistDataFilePath string
|
||||
}
|
||||
|
||||
func NewDataRepository(options RepositoryOptions) *DataRepository {
|
||||
repository := &DataRepository{
|
||||
storeState: repositorymodels.State{
|
||||
Databases: make(map[string]repositorymodels.Database),
|
||||
Collections: make(map[string]map[string]repositorymodels.Collection),
|
||||
Documents: make(map[string]map[string]map[string]repositorymodels.Document),
|
||||
Triggers: make(map[string]map[string]map[string]repositorymodels.Trigger),
|
||||
StoredProcedures: make(map[string]map[string]map[string]repositorymodels.StoredProcedure),
|
||||
UserDefinedFunctions: make(map[string]map[string]map[string]repositorymodels.UserDefinedFunction),
|
||||
},
|
||||
initialDataFilePath: options.InitialDataFilePath,
|
||||
persistDataFilePath: options.PersistDataFilePath,
|
||||
}
|
||||
|
||||
repository.InitializeRepository()
|
||||
|
||||
return repository
|
||||
}
|
@ -1,91 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *DataRepository) GetAllStoredProcedures(databaseId string, collectionId string) ([]repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
return maps.Values(r.storeState.StoredProcedures[databaseId][collectionId]), repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetStoredProcedure(databaseId string, collectionId string, spId string) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if sp, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; ok {
|
||||
return sp, repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
func (r *DataRepository) DeleteStoredProcedure(databaseId string, collectionId string, spId string) repositorymodels.RepositoryStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.StoredProcedures[databaseId][collectionId][spId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.StoredProcedures[databaseId][collectionId], spId)
|
||||
|
||||
return repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) CreateStoredProcedure(databaseId string, collectionId string, sp repositorymodels.StoredProcedure) (repositorymodels.StoredProcedure, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var database repositorymodels.Database
|
||||
var collection repositorymodels.Collection
|
||||
if sp.ID == "" {
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.BadRequest
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok = r.storeState.StoredProcedures[databaseId][collectionId][sp.ID]; ok {
|
||||
return repositorymodels.StoredProcedure{}, repositorymodels.Conflict
|
||||
}
|
||||
|
||||
sp.TimeStamp = time.Now().Unix()
|
||||
sp.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeStoredProcedure))
|
||||
sp.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
sp.Self = fmt.Sprintf("dbs/%s/colls/%s/sprocs/%s/", database.ResourceID, collection.ResourceID, sp.ResourceID)
|
||||
|
||||
r.storeState.StoredProcedures[databaseId][collectionId][sp.ID] = sp
|
||||
|
||||
return sp, repositorymodels.StatusOk
|
||||
}
|
@ -1,91 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/resourceid"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
|
||||
func (r *DataRepository) GetAllUserDefinedFunctions(databaseId string, collectionId string) ([]repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
return maps.Values(r.storeState.UserDefinedFunctions[databaseId][collectionId]), repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) GetUserDefinedFunction(databaseId string, collectionId string, udfId string) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.RLock()
|
||||
defer r.storeState.RUnlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if udf, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; ok {
|
||||
return udf, repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
func (r *DataRepository) DeleteUserDefinedFunction(databaseId string, collectionId string, udfId string) repositorymodels.RepositoryStatus {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
if _, ok := r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udfId]; !ok {
|
||||
return repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
delete(r.storeState.UserDefinedFunctions[databaseId][collectionId], udfId)
|
||||
|
||||
return repositorymodels.StatusOk
|
||||
}
|
||||
|
||||
func (r *DataRepository) CreateUserDefinedFunction(databaseId string, collectionId string, udf repositorymodels.UserDefinedFunction) (repositorymodels.UserDefinedFunction, repositorymodels.RepositoryStatus) {
|
||||
r.storeState.Lock()
|
||||
defer r.storeState.Unlock()
|
||||
|
||||
var ok bool
|
||||
var database repositorymodels.Database
|
||||
var collection repositorymodels.Collection
|
||||
if udf.ID == "" {
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.BadRequest
|
||||
}
|
||||
|
||||
if database, ok = r.storeState.Databases[databaseId]; !ok {
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if collection, ok = r.storeState.Collections[databaseId][collectionId]; !ok {
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.StatusNotFound
|
||||
}
|
||||
|
||||
if _, ok := r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID]; ok {
|
||||
return repositorymodels.UserDefinedFunction{}, repositorymodels.Conflict
|
||||
}
|
||||
|
||||
udf.TimeStamp = time.Now().Unix()
|
||||
udf.ResourceID = resourceid.NewCombined(collection.ResourceID, resourceid.New(resourceid.ResourceTypeUserDefinedFunction))
|
||||
udf.ETag = fmt.Sprintf("\"%s\"", uuid.New())
|
||||
udf.Self = fmt.Sprintf("dbs/%s/colls/%s/udfs/%s/", database.ResourceID, collection.ResourceID, udf.ResourceID)
|
||||
|
||||
r.storeState.UserDefinedFunctions[databaseId][collectionId][udf.ID] = udf
|
||||
|
||||
return udf, repositorymodels.StatusOk
|
||||
}
|
@ -1,21 +1,19 @@
|
||||
package structhidrators
|
||||
|
||||
import (
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
)
|
||||
import "github.com/pikami/cosmium/internal/datastore"
|
||||
|
||||
var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
|
||||
IndexingPolicy: repositorymodels.CollectionIndexingPolicy{
|
||||
var defaultCollection datastore.Collection = datastore.Collection{
|
||||
IndexingPolicy: datastore.CollectionIndexingPolicy{
|
||||
IndexingMode: "consistent",
|
||||
Automatic: true,
|
||||
IncludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
|
||||
IncludedPaths: []datastore.CollectionIndexingPolicyPath{
|
||||
{Path: "/*"},
|
||||
},
|
||||
ExcludedPaths: []repositorymodels.CollectionIndexingPolicyPath{
|
||||
ExcludedPaths: []datastore.CollectionIndexingPolicyPath{
|
||||
{Path: "/\"_etag\"/?"},
|
||||
},
|
||||
},
|
||||
PartitionKey: repositorymodels.CollectionPartitionKey{
|
||||
PartitionKey: datastore.CollectionPartitionKey{
|
||||
Paths: []string{"/_partitionKey"},
|
||||
Kind: "Hash",
|
||||
Version: 2,
|
||||
|
@ -3,11 +3,11 @@ package structhidrators
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
)
|
||||
|
||||
func Hidrate(input interface{}) interface{} {
|
||||
if reflect.TypeOf(input) == reflect.TypeOf(repositorymodels.Collection{}) {
|
||||
if reflect.TypeOf(input) == reflect.TypeOf(datastore.Collection{}) {
|
||||
return hidrate(input, defaultCollection)
|
||||
}
|
||||
return input
|
||||
|
@ -34,6 +34,8 @@ const (
|
||||
SelectItemTypeConstant
|
||||
SelectItemTypeFunctionCall
|
||||
SelectItemTypeSubQuery
|
||||
SelectItemTypeExpression
|
||||
SelectItemTypeBinaryExpression
|
||||
)
|
||||
|
||||
type SelectItem struct {
|
||||
@ -64,6 +66,12 @@ type ComparisonExpression struct {
|
||||
Operation string
|
||||
}
|
||||
|
||||
type BinaryExpression struct {
|
||||
Left interface{}
|
||||
Right interface{}
|
||||
Operation string
|
||||
}
|
||||
|
||||
type ConstantType int
|
||||
|
||||
const (
|
||||
@ -134,6 +142,8 @@ const (
|
||||
FunctionCallSetIntersect FunctionCallType = "SetIntersect"
|
||||
FunctionCallSetUnion FunctionCallType = "SetUnion"
|
||||
|
||||
FunctionCallIif FunctionCallType = "Iif"
|
||||
|
||||
FunctionCallMathAbs FunctionCallType = "MathAbs"
|
||||
FunctionCallMathAcos FunctionCallType = "MathAcos"
|
||||
FunctionCallMathAsin FunctionCallType = "MathAsin"
|
||||
|
366
parsers/nosql/arithmetics_test.go
Normal file
366
parsers/nosql/arithmetics_test.go
Normal file
@ -0,0 +1,366 @@
|
||||
package nosql_test
|
||||
import (
	"testing"

	"github.com/pikami/cosmium/parsers"
	testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Parse_Arithmetics(t *testing.T) {
	t.Run("Should parse multiplication before addition", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.a + c.b * c.c FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "+",
							Left:      testutils.SelectItem_Path("c", "a"),
							Right: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "*",
									Left:      testutils.SelectItem_Path("c", "b"),
									Right:     testutils.SelectItem_Path("c", "c"),
								},
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should parse division before subtraction", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.x - c.y / c.z FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "-",
							Left:      testutils.SelectItem_Path("c", "x"),
							Right: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "/",
									Left:      testutils.SelectItem_Path("c", "y"),
									Right:     testutils.SelectItem_Path("c", "z"),
								},
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle complex mixed operations", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.a + c.b * c.c - c.d / c.e FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "-",
							Left: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "+",
									Left:      testutils.SelectItem_Path("c", "a"),
									Right: parsers.SelectItem{
										Type: parsers.SelectItemTypeBinaryExpression,
										Value: parsers.BinaryExpression{
											Operation: "*",
											Left:      testutils.SelectItem_Path("c", "b"),
											Right:     testutils.SelectItem_Path("c", "c"),
										},
									},
								},
							},
							Right: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "/",
									Left:      testutils.SelectItem_Path("c", "d"),
									Right:     testutils.SelectItem_Path("c", "e"),
								},
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should respect parentheses overriding precedence", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT (c.a + c.b) * c.c FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "*",
							Left: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "+",
									Left:      testutils.SelectItem_Path("c", "a"),
									Right:     testutils.SelectItem_Path("c", "b"),
								},
							},
							Right: testutils.SelectItem_Path("c", "c"),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle nested parentheses", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT ((c.a + c.b) * c.c) - c.d FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "-",
							Left: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "*",
									Left: parsers.SelectItem{
										Type: parsers.SelectItemTypeBinaryExpression,
										Value: parsers.BinaryExpression{
											Operation: "+",
											Left:      testutils.SelectItem_Path("c", "a"),
											Right:     testutils.SelectItem_Path("c", "b"),
										},
									},
									Right: testutils.SelectItem_Path("c", "c"),
								},
							},
							Right: testutils.SelectItem_Path("c", "d"),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should be left associative for same precedence operators", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.a - c.b - c.c FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "-",
							Left: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "-",
									Left:      testutils.SelectItem_Path("c", "a"),
									Right:     testutils.SelectItem_Path("c", "b"),
								},
							},
							Right: testutils.SelectItem_Path("c", "c"),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should be left associative with multiplication and division", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.a * c.b / c.c FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "/",
							Left: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "*",
									Left:      testutils.SelectItem_Path("c", "a"),
									Right:     testutils.SelectItem_Path("c", "b"),
								},
							},
							Right: testutils.SelectItem_Path("c", "c"),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle math with constants", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT 10 + 20 * 5 FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "+",
							Left:      testutils.SelectItem_Constant_Int(10),
							Right: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "*",
									Left:      testutils.SelectItem_Constant_Int(20),
									Right:     testutils.SelectItem_Constant_Int(5),
								},
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle math with floating point numbers", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.price * 1.08 FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "*",
							Left:      testutils.SelectItem_Path("c", "price"),
							Right:     testutils.SelectItem_Constant_Float(1.08),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle parentheses around single value", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT (c.value) FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					testutils.SelectItem_Path("c", "value"),
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle function calls in math expressions", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT LENGTH(c.name) * 2 + 10 FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "+",
							Left: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "*",
									Left: parsers.SelectItem{
										Type: parsers.SelectItemTypeFunctionCall,
										Value: parsers.FunctionCall{
											Type:      parsers.FunctionCallLength,
											Arguments: []interface{}{testutils.SelectItem_Path("c", "name")},
										},
									},
									Right: testutils.SelectItem_Constant_Int(2),
								},
							},
							Right: testutils.SelectItem_Constant_Int(10),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle multiple select items with math", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.a + c.b, c.x * c.y FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "+",
							Left:      testutils.SelectItem_Path("c", "a"),
							Right:     testutils.SelectItem_Path("c", "b"),
						},
					},
					{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "*",
							Left:      testutils.SelectItem_Path("c", "x"),
							Right:     testutils.SelectItem_Path("c", "y"),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should handle math in WHERE clause", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c.id FROM c WHERE c.price * 1.08 > 100`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					testutils.SelectItem_Path("c", "id"),
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
				Filters: parsers.ComparisonExpression{
					Operation: ">",
					Left: parsers.SelectItem{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "*",
							Left:      testutils.SelectItem_Path("c", "price"),
							Right:     testutils.SelectItem_Constant_Float(1.08),
						},
					},
					Right: testutils.SelectItem_Constant_Int(100),
				},
			},
		)
	})
}

@ -163,4 +163,27 @@ func Test_Parse(t *testing.T) {
			},
		)
	})

	t.Run("Should parse IIF function", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT IIF(true, c.pk, c.id) FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeFunctionCall,
						Value: parsers.FunctionCall{
							Type: parsers.FunctionCallIif,
							Arguments: []interface{}{
								testutils.SelectItem_Constant_Bool(true),
								testutils.SelectItem_Path("c", "pk"),
								testutils.SelectItem_Path("c", "id"),
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})
}
File diff suppressed because it is too large
@ -4,137 +4,137 @@ package nosql

import "github.com/pikami/cosmium/parsers"

func makeSelectStmt(
	columns, fromClause, joinItems,
	whereClause interface{}, distinctClause interface{},
	count interface{}, groupByClause interface{}, orderList interface{},
	offsetClause interface{},
) (parsers.SelectStmt, error) {
	selectStmt := parsers.SelectStmt{
		SelectItems: columns.([]parsers.SelectItem),
	}

	if fromTable, ok := fromClause.(parsers.Table); ok {
		selectStmt.Table = fromTable
	}

	if joinItemsArray, ok := joinItems.([]interface{}); ok && len(joinItemsArray) > 0 {
		selectStmt.JoinItems = make([]parsers.JoinItem, len(joinItemsArray))
		for i, joinItem := range joinItemsArray {
			selectStmt.JoinItems[i] = joinItem.(parsers.JoinItem)
		}
	}

	switch v := whereClause.(type) {
	case parsers.ComparisonExpression, parsers.LogicalExpression, parsers.Constant, parsers.SelectItem:
		selectStmt.Filters = v
	}

	if distinctClause != nil {
		selectStmt.Distinct = true
	}

	if n, ok := count.(int); ok {
		selectStmt.Count = n
	}

	if offsetArr, ok := offsetClause.([]interface{}); ok && len(offsetArr) == 2 {
		if n, ok := offsetArr[0].(int); ok {
			selectStmt.Offset = n
		}

		if n, ok := offsetArr[1].(int); ok {
			selectStmt.Count = n
		}
	}

	if orderExpressions, ok := orderList.([]parsers.OrderExpression); ok {
		selectStmt.OrderExpressions = orderExpressions
	}

	if groupByClause != nil {
		selectStmt.GroupBy = groupByClause.([]parsers.SelectItem)
	}

	return selectStmt, nil
}

func makeJoin(table interface{}, column interface{}) (parsers.JoinItem, error) {
	joinItem := parsers.JoinItem{}

	if selectItem, isSelectItem := column.(parsers.SelectItem); isSelectItem {
		joinItem.SelectItem = selectItem
		joinItem.Table.Value = selectItem.Alias
	}

	if tableTyped, isTable := table.(parsers.Table); isTable {
		joinItem.Table = tableTyped
	}

	return joinItem, nil
}

func makeSelectItem(name interface{}, path interface{}, selectItemType parsers.SelectItemType) (parsers.SelectItem, error) {
	ps := path.([]interface{})

	paths := make([]string, 1)
	paths[0] = name.(string)
	for _, p := range ps {
		paths = append(paths, p.(string))
	}

	return parsers.SelectItem{Path: paths, Type: selectItemType}, nil
}

func makeColumnList(column interface{}, other_columns interface{}) ([]parsers.SelectItem, error) {
	collsAsArray := other_columns.([]interface{})
	columnList := make([]parsers.SelectItem, len(collsAsArray)+1)
	columnList[0] = column.(parsers.SelectItem)

	for i, v := range collsAsArray {
		if col, ok := v.(parsers.SelectItem); ok {
			columnList[i+1] = col
		}
	}

	return columnList, nil
}

func makeSelectArray(columns interface{}) (parsers.SelectItem, error) {
	return parsers.SelectItem{
		SelectItems: columns.([]parsers.SelectItem),
		Type:        parsers.SelectItemTypeArray,
	}, nil
}

func makeSelectObject(field interface{}, other_fields interface{}) (parsers.SelectItem, error) {
	fieldsAsArray := other_fields.([]interface{})
	fieldsList := make([]parsers.SelectItem, len(fieldsAsArray)+1)
	fieldsList[0] = field.(parsers.SelectItem)

	for i, v := range fieldsAsArray {
		if col, ok := v.(parsers.SelectItem); ok {
			fieldsList[i+1] = col
		}
	}

	return parsers.SelectItem{
		SelectItems: fieldsList,
		Type:        parsers.SelectItemTypeObject,
	}, nil
}

func makeOrderByClause(ex1 interface{}, others interface{}) ([]parsers.OrderExpression, error) {
	othersArray := others.([]interface{})
	orderList := make([]parsers.OrderExpression, len(othersArray)+1)
	orderList[0] = ex1.(parsers.OrderExpression)

	for i, v := range othersArray {
		if col, ok := v.(parsers.OrderExpression); ok {
			orderList[i+1] = col
		}
	}

	return orderList, nil
}

func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExpression, error) {

@ -144,8 +144,8 @@ func makeOrderExpression(field interface{}, order interface{}) (parsers.OrderExp
	}

	if orderValue, ok := order.(parsers.OrderDirection); ok {
		value.Direction = orderValue
	}

	return value, nil
}

@ -169,13 +169,39 @@ func joinStrings(array []interface{}) string {

func combineExpressions(ex1 interface{}, exs interface{}, operation parsers.LogicalExpressionType) (interface{}, error) {
	if exs == nil || len(exs.([]interface{})) < 1 {
		return ex1, nil
	}

	return parsers.LogicalExpression{
		Expressions: append([]interface{}{ex1}, exs.([]interface{})...),
		Operation:   operation,
	}, nil
}

func makeMathExpression(left interface{}, operations interface{}) (interface{}, error) {
	if operations == nil || len(operations.([]interface{})) == 0 {
		return left, nil
	}

	result := left.(parsers.SelectItem)
	ops := operations.([]interface{})

	for _, op := range ops {
		opData := op.([]interface{})
		operation := opData[0].(string)
		right := opData[1].(parsers.SelectItem)

		result = parsers.SelectItem{
			Type: parsers.SelectItemTypeBinaryExpression,
			Value: parsers.BinaryExpression{
				Left:      result,
				Right:     right,
				Operation: operation,
			},
		}
	}

	return result, nil
}
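Illustration only (not part of the grammar file): makeMathExpression folds the []interface{}{op, right} pairs returned by the math grammar actions left-to-right, so same-precedence chains nest on the left. A minimal sketch, assuming only the parsers types used above; the function name and the c.a / c.b / c.c fields are hypothetical:

// Sketch: fold operation pairs the way AddSubExpression returns them.
func exampleMakeMathExpression() {
	a := parsers.SelectItem{Path: []string{"c", "a"}, Type: parsers.SelectItemTypeField}
	b := parsers.SelectItem{Path: []string{"c", "b"}, Type: parsers.SelectItemTypeField}
	cc := parsers.SelectItem{Path: []string{"c", "c"}, Type: parsers.SelectItemTypeField}

	result, _ := makeMathExpression(a, []interface{}{
		[]interface{}{"-", b},
		[]interface{}{"-", cc},
	})

	// result is ((c.a - c.b) - c.c): the outer BinaryExpression's Left is itself
	// a BinaryExpression, which is what the arithmetic parser tests assert.
	_ = result
}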
@ -204,16 +230,16 @@ TopClause <- Top ws count:Integer {
	return count, nil
}

FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItem { return column, nil }) {
FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItemWithAlias { return column, nil }) {
	tableTyped := table.(parsers.Table)

	if selectItem != nil {
		tableTyped.SelectItem = selectItem.(parsers.SelectItem)
		tableTyped.IsInSelect = true
	}

	return tableTyped, nil
} / From ws column:SelectItem {
} / From ws column:SelectItemWithAlias {
	tableSelectItem := column.(parsers.SelectItem)
	table := parsers.Table{
		Value: tableSelectItem.Alias,

@ -222,11 +248,11 @@ FromClause <- From ws table:TableName selectItem:(ws In ws column:SelectItem { r
	return table, nil
} / From ws subQuery:SubQuerySelectItem {
	subQueryTyped := subQuery.(parsers.SelectItem)
	table := parsers.Table{
		Value:      subQueryTyped.Alias,
		SelectItem: subQueryTyped,
	}
	return table, nil
}

SubQuery <- exists:(exists:Exists ws { return exists, nil })? "(" ws selectStmt:SelectStmt ws ")" {

@ -251,7 +277,7 @@ SubQuerySelectItem <- subQuery:SubQuery asClause:(ws alias:AsClause { return ali
	return selectItem, nil
}

JoinClause <- Join ws table:TableName ws In ws column:SelectItem {
JoinClause <- Join ws table:TableName ws In ws column:SelectItemWithAlias {
	return makeJoin(table, column)
} / Join ws subQuery:SubQuerySelectItem {
	return makeJoin(nil, subQuery)

@ -265,17 +291,40 @@ Selection <- SelectValueSpec / ColumnList / SelectAsterisk

SelectAsterisk <- "*" {
	selectItem, _ := makeSelectItem("c", make([]interface{}, 0), parsers.SelectItemTypeField)
	selectItem.IsTopLevel = true
	return makeColumnList(selectItem, make([]interface{}, 0))
}

ColumnList <- column:SelectItem other_columns:(ws "," ws coll:SelectItem {return coll, nil })* {
ColumnList <- column:ExpressionOrSelectItem other_columns:(ws "," ws coll:ExpressionOrSelectItem {return coll, nil })* {
	return makeColumnList(column, other_columns)
}

ExpressionOrSelectItem <- expression:OrExpression asClause:AsClause? {
	switch typedValue := expression.(type) {
	case parsers.ComparisonExpression, parsers.LogicalExpression:
		selectItem := parsers.SelectItem{
			Type:  parsers.SelectItemTypeExpression,
			Value: typedValue,
		}

		if aliasValue, ok := asClause.(string); ok {
			selectItem.Alias = aliasValue
		}

		return selectItem, nil
	case parsers.SelectItem:
		if aliasValue, ok := asClause.(string); ok {
			typedValue.Alias = aliasValue
		}
		return typedValue, nil
	default:
		return typedValue, nil
	}
} / item:SelectItemWithAlias { return item, nil }

SelectValueSpec <- "VALUE"i ws column:SelectItem {
SelectValueSpec <- "VALUE"i ws column:SelectItemWithAlias {
	selectItem := column.(parsers.SelectItem)
	selectItem.IsTopLevel = true
	return makeColumnList(selectItem, make([]interface{}, 0))
}

@ -289,19 +338,32 @@ SelectArray <- "[" ws columns:ColumnList ws "]" {

SelectObject <- "{" ws field:SelectObjectField ws other_fields:(ws "," ws coll:SelectObjectField {return coll, nil })* ws "}" {
	return makeSelectObject(field, other_fields)
} / "{" ws "}" {
	return parsers.SelectItem{
		SelectItems: []parsers.SelectItem{},
		Type:        parsers.SelectItemTypeObject,
	}, nil
}

SelectObjectField <- name:(Identifier / "\"" key:Identifier "\"" { return key, nil }) ws ":" ws selectItem:SelectItem {
	item := selectItem.(parsers.SelectItem)
	item.Alias = name.(string)
	return item, nil
}

SelectProperty <- name:Identifier path:(DotFieldAccess / ArrayFieldAccess)* {
	return makeSelectItem(name, path, parsers.SelectItemTypeField)
}

SelectItemWithAlias <- selectItem:SelectItem asClause:AsClause? {
	item := selectItem.(parsers.SelectItem)
	if aliasValue, ok := asClause.(string); ok {
		item.Alias = aliasValue
	}
	return item, nil
}

SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) asClause:AsClause? {
SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectArray / SelectObject / SelectProperty) {
	var itemResult parsers.SelectItem
	switch typedValue := selectItem.(type) {
	case parsers.SelectItem:

@ -318,11 +380,7 @@ SelectItem <- selectItem:(SubQuerySelectItem / Literal / FunctionCall / SelectAr
		}
	}

	if aliasValue, ok := asClause.(string); ok {
		itemResult.Alias = aliasValue
	}

	return itemResult, nil
}

AsClause <- (ws As)? ws !ExcludedKeywords alias:Identifier {

@ -355,15 +413,25 @@ AndExpression <- ex1:ComparisonExpression ex2:(ws And ws ex:ComparisonExpression
	return combineExpressions(ex1, ex2, parsers.LogicalExpressionTypeAnd)
}

ComparisonExpression <- "(" ws ex:OrExpression ws ")" { return ex, nil }
	/ left:SelectItem ws op:ComparisonOperator ws right:SelectItem {
ComparisonExpression <- left:AddSubExpression ws op:ComparisonOperator ws right:AddSubExpression {
	return parsers.ComparisonExpression{Left:left,Right:right,Operation:op.(string)}, nil
} / inv:(Not ws)? ex:SelectItem {
} / ex:AddSubExpression { return ex, nil }

AddSubExpression <- left:MulDivExpression operations:(ws op:AddOrSubtractOperation ws right:MulDivExpression { return []interface{}{op, right}, nil })* {
	return makeMathExpression(left, operations)
}

MulDivExpression <- left:SelectItemWithParentheses operations:(ws op:MultiplyOrDivideOperation ws right:SelectItemWithParentheses { return []interface{}{op, right}, nil })* {
	return makeMathExpression(left, operations)
}

SelectItemWithParentheses <- "(" ws ex:OrExpression ws ")" { return ex, nil }
	/ inv:(Not ws)? ex:SelectItem {
	if inv != nil {
		ex1 := ex.(parsers.SelectItem)
		ex1.Invert = true
		return ex1, nil
	}
	return ex, nil
} / ex:BooleanLiteral { return ex, nil }

@ -377,10 +445,10 @@ OrderExpression <- field:SelectProperty ws order:OrderDirection? {

OrderDirection <- ("ASC"i / "DESC"i) {
	if strings.EqualFold(string(c.text), "DESC") {
		return parsers.OrderDirectionDesc, nil
	}

	return parsers.OrderDirectionAsc, nil
}

Select <- "SELECT"i

@ -415,6 +483,10 @@ ComparisonOperator <- ("<=" / ">=" / "=" / "!=" / "<" / ">") {
	return string(c.text), nil
}

AddOrSubtractOperation <- ("+" / "-") { return string(c.text), nil }

MultiplyOrDivideOperation <- ("*" / "/") { return string(c.text), nil }

Literal <- FloatLiteral / IntegerLiteral / StringLiteral / BooleanLiteral / ParameterConstant / NullConstant

ParameterConstant <- "@" Identifier {

@ -442,6 +514,7 @@ BooleanLiteral <- ("true"i / "false"i) {
FunctionCall <- StringFunctions
	/ TypeCheckingFunctions
	/ ArrayFunctions
	/ ConditionalFunctions
	/ InFunction
	/ AggregateFunctions
	/ MathFunctions

@ -489,6 +562,8 @@ ArrayFunctions <- ArrayConcatExpression
	/ SetIntersectExpression
	/ SetUnionExpression

ConditionalFunctions <- IifExpression

MathFunctions <- MathAbsExpression
	/ MathAcosExpression
	/ MathAsinExpression

@ -681,6 +756,10 @@ SetUnionExpression <- "SetUnion"i ws "(" ws set1:SelectItem ws "," ws set2:Selec
	return createFunctionCall(parsers.FunctionCallSetUnion, []interface{}{set1, set2})
}

IifExpression <- "IIF"i ws "(" ws condition:SelectItem ws "," ws trueValue:SelectItem ws "," ws falseValue:SelectItem ws ")" {
	return createFunctionCall(parsers.FunctionCallIif, []interface{}{condition, trueValue, falseValue})
}

MathAbsExpression <- "ABS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAbs, []interface{}{ex}) }
MathAcosExpression <- "ACOS"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAcos, []interface{}{ex}) }
MathAsinExpression <- "ASIN"i ws "(" ws ex:SelectItem ws ")" { return createFunctionCall(parsers.FunctionCallMathAsin, []interface{}{ex}) }
@ -178,4 +178,90 @@ func Test_Parse_Select(t *testing.T) {
			},
		)
	})

	t.Run("Should parse SELECT empty object", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT {} AS obj FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Alias:       "obj",
						Type:        parsers.SelectItemTypeObject,
						SelectItems: []parsers.SelectItem{},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should parse comparison expressions in SELECT", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c["id"] = "123", c["pk"] > 456 FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeExpression,
						Value: parsers.ComparisonExpression{
							Operation: "=",
							Left:      testutils.SelectItem_Path("c", "id"),
							Right:     testutils.SelectItem_Constant_String("123"),
						},
					},
					{
						Type: parsers.SelectItemTypeExpression,
						Value: parsers.ComparisonExpression{
							Operation: ">",
							Left:      testutils.SelectItem_Path("c", "pk"),
							Right:     testutils.SelectItem_Constant_Int(456),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})

	t.Run("Should parse logical expressions in SELECT", func(t *testing.T) {
		testQueryParse(
			t,
			`SELECT c["id"] = "123" OR c["pk"] > 456, c["isCool"] AND c["hasRizz"] AS isRizzler FROM c`,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Type: parsers.SelectItemTypeExpression,
						Value: parsers.LogicalExpression{
							Operation: parsers.LogicalExpressionTypeOr,
							Expressions: []interface{}{
								parsers.ComparisonExpression{
									Operation: "=",
									Left:      testutils.SelectItem_Path("c", "id"),
									Right:     testutils.SelectItem_Constant_String("123"),
								},
								parsers.ComparisonExpression{
									Operation: ">",
									Left:      testutils.SelectItem_Path("c", "pk"),
									Right:     testutils.SelectItem_Constant_Int(456),
								},
							},
						},
					},
					{
						Type:  parsers.SelectItemTypeExpression,
						Alias: "isRizzler",
						Value: parsers.LogicalExpression{
							Operation: parsers.LogicalExpressionTypeAnd,
							Expressions: []interface{}{
								testutils.SelectItem_Path("c", "isCool"),
								testutils.SelectItem_Path("c", "hasRizz"),
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
		)
	})
}
query_executors/memory_executor/arithmetics_test.go (new file, 91 lines)
@ -0,0 +1,91 @@
package memoryexecutor_test

import (
	"testing"

	"github.com/pikami/cosmium/parsers"
	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
	testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_Arithmetics(t *testing.T) {
	mockData := []memoryexecutor.RowType{
		map[string]interface{}{"id": 1, "a": 420},
		map[string]interface{}{"id": 2, "a": 6.9},
		map[string]interface{}{"id": 3},
	}

	t.Run("Should execute simple arithmetics", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Path: []string{"c", "id"},
						Type: parsers.SelectItemTypeField,
					},
					{
						Type:  parsers.SelectItemTypeBinaryExpression,
						Alias: "result",
						Value: parsers.BinaryExpression{
							Operation: "+",
							Left:      testutils.SelectItem_Path("c", "a"),
							Right: parsers.SelectItem{
								Type: parsers.SelectItemTypeBinaryExpression,
								Value: parsers.BinaryExpression{
									Operation: "*",
									Left:      testutils.SelectItem_Constant_Float(2.0),
									Right:     testutils.SelectItem_Constant_Int(3),
								},
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
			mockData,
			[]memoryexecutor.RowType{
				map[string]interface{}{"id": 1, "result": 426.0},
				map[string]interface{}{"id": 2, "result": 12.9},
				map[string]interface{}{"id": 3, "result": nil},
			},
		)
	})

	t.Run("Should execute arithmetics in WHERE clause", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					testutils.SelectItem_Path("c", "id"),
					{
						Alias: "result",
						Type:  parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "*",
							Left:      testutils.SelectItem_Path("c", "a"),
							Right:     testutils.SelectItem_Constant_Int(2),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
				Filters: parsers.ComparisonExpression{
					Operation: ">",
					Left: parsers.SelectItem{
						Type: parsers.SelectItemTypeBinaryExpression,
						Value: parsers.BinaryExpression{
							Operation: "*",
							Left:      testutils.SelectItem_Path("c", "a"),
							Right:     testutils.SelectItem_Constant_Int(2),
						},
					},
					Right: testutils.SelectItem_Constant_Int(500),
				},
			},
			mockData,
			[]memoryexecutor.RowType{
				map[string]interface{}{"id": 1, "result": 840.0},
			},
		)
	})
}
@ -196,6 +196,10 @@ func (r rowContext) parseArray(argument interface{}) []interface{} {
	ex := r.resolveSelectItem(exItem)

	arrValue := reflect.ValueOf(ex)
	if arrValue.Kind() == reflect.Invalid {
		return nil
	}

	if arrValue.Kind() != reflect.Slice {
		logger.ErrorLn("parseArray got parameters of wrong type")
		return nil
query_executors/memory_executor/array_iterator.go (new file, 27 lines)
@ -0,0 +1,27 @@
package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

type rowArrayIterator struct {
	documents []rowContext
	index     int
}

func NewRowArrayIterator(documents []rowContext) *rowArrayIterator {
	return &rowArrayIterator{
		documents: documents,
		index:     -1,
	}
}

func (i *rowArrayIterator) Next() (rowContext, datastore.DataStoreStatus) {
	i.index++
	if i.index >= len(i.documents) {
		return rowContext{}, datastore.IterEOF
	}

	row := i.documents[i.index]
	i.documents[i.index] = rowContext{} // Help GC reclaim memory

	return row, datastore.StatusOk
}
query_executors/memory_executor/common.go (new file, 459 lines)
@ -0,0 +1,459 @@
package memoryexecutor

import (
	"fmt"
	"reflect"
	"strconv"
	"strings"

	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/internal/logger"
	"github.com/pikami/cosmium/parsers"
)

type RowType interface{}
type rowContext struct {
	tables       map[string]RowType
	parameters   map[string]interface{}
	grouppedRows []rowContext
}

type rowIterator interface {
	Next() (rowContext, datastore.DataStoreStatus)
}

type rowTypeIterator interface {
	Next() (RowType, datastore.DataStoreStatus)
}

func resolveDestinationColumnName(selectItem parsers.SelectItem, itemIndex int, queryParameters map[string]interface{}) string {
	if selectItem.Alias != "" {
		return selectItem.Alias
	}

	destinationName := fmt.Sprintf("$%d", itemIndex+1)
	if len(selectItem.Path) > 0 {
		destinationName = selectItem.Path[len(selectItem.Path)-1]
	}

	if destinationName[0] == '@' {
		destinationName = queryParameters[destinationName].(string)
	}

	return destinationName
}

func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
	if selectItem.Type == parsers.SelectItemTypeArray {
		return r.selectItem_SelectItemTypeArray(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeObject {
		return r.selectItem_SelectItemTypeObject(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeConstant {
		return r.selectItem_SelectItemTypeConstant(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeSubQuery {
		return r.selectItem_SelectItemTypeSubQuery(selectItem)
	}

	if selectItem.Type == parsers.SelectItemTypeFunctionCall {
		if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
			return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
		}

		logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
		return nil
	}

	if selectItem.Type == parsers.SelectItemTypeExpression {
		if typedExpression, ok := selectItem.Value.(parsers.ComparisonExpression); ok {
			return r.filters_ComparisonExpression(typedExpression)
		}

		if typedExpression, ok := selectItem.Value.(parsers.LogicalExpression); ok {
			return r.filters_LogicalExpression(typedExpression)
		}

		logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.ComparisonExpression)")
		return nil
	}

	if selectItem.Type == parsers.SelectItemTypeBinaryExpression {
		if typedSelectItem, ok := selectItem.Value.(parsers.BinaryExpression); ok {
			return r.selectItem_SelectItemTypeBinaryExpression(typedSelectItem)
		}

		logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.BinaryExpression)")
		return nil
	}

	return r.selectItem_SelectItemTypeField(selectItem)
}

func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
	arrayValue := make([]interface{}, 0)
	for _, subSelectItem := range selectItem.SelectItems {
		arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
	}
	return arrayValue
}

func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
	objectValue := make(map[string]interface{})
	for _, subSelectItem := range selectItem.SelectItems {
		objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
	}
	return objectValue
}

func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
	var typedValue parsers.Constant
	var ok bool
	if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
		// TODO: Handle error
		logger.ErrorLn("parsers.Constant has incorrect Value type")
	}

	if typedValue.Type == parsers.ConstantTypeParameterConstant &&
		r.parameters != nil {
		if key, ok := typedValue.Value.(string); ok {
			return r.parameters[key]
		}
	}

	return typedValue.Value
}

func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
	subQuery := selectItem.Value.(parsers.SelectStmt)
	subQueryResult := executeQuery(
		subQuery,
		NewRowArrayIterator([]rowContext{r}),
	)

	if subQuery.Exists {
		_, status := subQueryResult.Next()
		return status == datastore.StatusOk
	}

	allDocuments := make([]RowType, 0)
	for {
		row, status := subQueryResult.Next()
		if status != datastore.StatusOk {
			break
		}
		allDocuments = append(allDocuments, row)
	}

	return allDocuments
}

func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
	switch functionCall.Type {
	case parsers.FunctionCallStringEquals:
		return r.strings_StringEquals(functionCall.Arguments)
	case parsers.FunctionCallContains:
		return r.strings_Contains(functionCall.Arguments)
	case parsers.FunctionCallEndsWith:
		return r.strings_EndsWith(functionCall.Arguments)
	case parsers.FunctionCallStartsWith:
		return r.strings_StartsWith(functionCall.Arguments)
	case parsers.FunctionCallConcat:
		return r.strings_Concat(functionCall.Arguments)
	case parsers.FunctionCallIndexOf:
		return r.strings_IndexOf(functionCall.Arguments)
	case parsers.FunctionCallToString:
		return r.strings_ToString(functionCall.Arguments)
	case parsers.FunctionCallUpper:
		return r.strings_Upper(functionCall.Arguments)
	case parsers.FunctionCallLower:
		return r.strings_Lower(functionCall.Arguments)
	case parsers.FunctionCallLeft:
		return r.strings_Left(functionCall.Arguments)
	case parsers.FunctionCallLength:
		return r.strings_Length(functionCall.Arguments)
	case parsers.FunctionCallLTrim:
		return r.strings_LTrim(functionCall.Arguments)
	case parsers.FunctionCallReplace:
		return r.strings_Replace(functionCall.Arguments)
	case parsers.FunctionCallReplicate:
		return r.strings_Replicate(functionCall.Arguments)
	case parsers.FunctionCallReverse:
		return r.strings_Reverse(functionCall.Arguments)
	case parsers.FunctionCallRight:
		return r.strings_Right(functionCall.Arguments)
	case parsers.FunctionCallRTrim:
		return r.strings_RTrim(functionCall.Arguments)
	case parsers.FunctionCallSubstring:
		return r.strings_Substring(functionCall.Arguments)
	case parsers.FunctionCallTrim:
		return r.strings_Trim(functionCall.Arguments)

	case parsers.FunctionCallIsDefined:
		return r.typeChecking_IsDefined(functionCall.Arguments)
	case parsers.FunctionCallIsArray:
		return r.typeChecking_IsArray(functionCall.Arguments)
	case parsers.FunctionCallIsBool:
		return r.typeChecking_IsBool(functionCall.Arguments)
	case parsers.FunctionCallIsFiniteNumber:
		return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
	case parsers.FunctionCallIsInteger:
		return r.typeChecking_IsInteger(functionCall.Arguments)
	case parsers.FunctionCallIsNull:
		return r.typeChecking_IsNull(functionCall.Arguments)
	case parsers.FunctionCallIsNumber:
		return r.typeChecking_IsNumber(functionCall.Arguments)
	case parsers.FunctionCallIsObject:
		return r.typeChecking_IsObject(functionCall.Arguments)
	case parsers.FunctionCallIsPrimitive:
		return r.typeChecking_IsPrimitive(functionCall.Arguments)
	case parsers.FunctionCallIsString:
		return r.typeChecking_IsString(functionCall.Arguments)

	case parsers.FunctionCallArrayConcat:
		return r.array_Concat(functionCall.Arguments)
	case parsers.FunctionCallArrayContains:
		return r.array_Contains(functionCall.Arguments)
	case parsers.FunctionCallArrayContainsAny:
		return r.array_Contains_Any(functionCall.Arguments)
	case parsers.FunctionCallArrayContainsAll:
		return r.array_Contains_All(functionCall.Arguments)
	case parsers.FunctionCallArrayLength:
		return r.array_Length(functionCall.Arguments)
	case parsers.FunctionCallArraySlice:
		return r.array_Slice(functionCall.Arguments)
	case parsers.FunctionCallSetIntersect:
		return r.set_Intersect(functionCall.Arguments)
	case parsers.FunctionCallSetUnion:
		return r.set_Union(functionCall.Arguments)

	case parsers.FunctionCallIif:
		return r.misc_Iif(functionCall.Arguments)

	case parsers.FunctionCallMathAbs:
		return r.math_Abs(functionCall.Arguments)
	case parsers.FunctionCallMathAcos:
		return r.math_Acos(functionCall.Arguments)
	case parsers.FunctionCallMathAsin:
		return r.math_Asin(functionCall.Arguments)
	case parsers.FunctionCallMathAtan:
		return r.math_Atan(functionCall.Arguments)
	case parsers.FunctionCallMathCeiling:
		return r.math_Ceiling(functionCall.Arguments)
	case parsers.FunctionCallMathCos:
		return r.math_Cos(functionCall.Arguments)
	case parsers.FunctionCallMathCot:
		return r.math_Cot(functionCall.Arguments)
	case parsers.FunctionCallMathDegrees:
		return r.math_Degrees(functionCall.Arguments)
	case parsers.FunctionCallMathExp:
		return r.math_Exp(functionCall.Arguments)
	case parsers.FunctionCallMathFloor:
		return r.math_Floor(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitNot:
		return r.math_IntBitNot(functionCall.Arguments)
	case parsers.FunctionCallMathLog10:
		return r.math_Log10(functionCall.Arguments)
	case parsers.FunctionCallMathRadians:
		return r.math_Radians(functionCall.Arguments)
	case parsers.FunctionCallMathRound:
		return r.math_Round(functionCall.Arguments)
	case parsers.FunctionCallMathSign:
		return r.math_Sign(functionCall.Arguments)
	case parsers.FunctionCallMathSin:
		return r.math_Sin(functionCall.Arguments)
	case parsers.FunctionCallMathSqrt:
		return r.math_Sqrt(functionCall.Arguments)
	case parsers.FunctionCallMathSquare:
		return r.math_Square(functionCall.Arguments)
	case parsers.FunctionCallMathTan:
		return r.math_Tan(functionCall.Arguments)
	case parsers.FunctionCallMathTrunc:
		return r.math_Trunc(functionCall.Arguments)
	case parsers.FunctionCallMathAtn2:
		return r.math_Atn2(functionCall.Arguments)
	case parsers.FunctionCallMathIntAdd:
		return r.math_IntAdd(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitAnd:
		return r.math_IntBitAnd(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitLeftShift:
		return r.math_IntBitLeftShift(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitOr:
		return r.math_IntBitOr(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitRightShift:
		return r.math_IntBitRightShift(functionCall.Arguments)
	case parsers.FunctionCallMathIntBitXor:
		return r.math_IntBitXor(functionCall.Arguments)
	case parsers.FunctionCallMathIntDiv:
		return r.math_IntDiv(functionCall.Arguments)
	case parsers.FunctionCallMathIntMod:
		return r.math_IntMod(functionCall.Arguments)
	case parsers.FunctionCallMathIntMul:
		return r.math_IntMul(functionCall.Arguments)
	case parsers.FunctionCallMathIntSub:
		return r.math_IntSub(functionCall.Arguments)
	case parsers.FunctionCallMathPower:
		return r.math_Power(functionCall.Arguments)
	case parsers.FunctionCallMathLog:
		return r.math_Log(functionCall.Arguments)
	case parsers.FunctionCallMathNumberBin:
		return r.math_NumberBin(functionCall.Arguments)
	case parsers.FunctionCallMathPi:
		return r.math_Pi()
	case parsers.FunctionCallMathRand:
		return r.math_Rand()

	case parsers.FunctionCallAggregateAvg:
		return r.aggregate_Avg(functionCall.Arguments)
	case parsers.FunctionCallAggregateCount:
		return r.aggregate_Count(functionCall.Arguments)
	case parsers.FunctionCallAggregateMax:
		return r.aggregate_Max(functionCall.Arguments)
	case parsers.FunctionCallAggregateMin:
		return r.aggregate_Min(functionCall.Arguments)
	case parsers.FunctionCallAggregateSum:
		return r.aggregate_Sum(functionCall.Arguments)

	case parsers.FunctionCallIn:
		return r.misc_In(functionCall.Arguments)
	}

	logger.Errorf("Unknown function call type: %v", functionCall.Type)
	return nil
}

func (r rowContext) selectItem_SelectItemTypeBinaryExpression(binaryExpression parsers.BinaryExpression) interface{} {
	if binaryExpression.Left == nil || binaryExpression.Right == nil {
		logger.Debug("parsers.BinaryExpression has nil Left or Right value")
		return nil
	}

	leftValue := r.resolveSelectItem(binaryExpression.Left.(parsers.SelectItem))
	rightValue := r.resolveSelectItem(binaryExpression.Right.(parsers.SelectItem))

	if leftValue == nil || rightValue == nil {
		return nil
	}

	leftNumber, leftIsNumber := numToFloat64(leftValue)
	rightNumber, rightIsNumber := numToFloat64(rightValue)

	if !leftIsNumber || !rightIsNumber {
		logger.Debug("Binary expression operands are not numbers, returning nil")
		return nil
	}

	switch binaryExpression.Operation {
	case "+":
		return leftNumber + rightNumber
	case "-":
		return leftNumber - rightNumber
	case "*":
		return leftNumber * rightNumber
	case "/":
		if rightNumber == 0 {
			logger.Debug("Division by zero in binary expression")
			return nil
		}
		return leftNumber / rightNumber
	default:
		return nil
	}
}

func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
	value := r.tables[selectItem.Path[0]]

	if len(selectItem.Path) > 1 {
		for _, pathSegment := range selectItem.Path[1:] {
			if pathSegment[0] == '@' {
				pathSegment = r.parameters[pathSegment].(string)
			}

			switch nestedValue := value.(type) {
			case map[string]interface{}:
				value = nestedValue[pathSegment]
			case map[string]RowType:
				value = nestedValue[pathSegment]
			case datastore.Document:
				value = nestedValue[pathSegment]
			case map[string]datastore.Document:
				value = nestedValue[pathSegment]
			case []int, []string, []interface{}:
				slice := reflect.ValueOf(nestedValue)
				if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
					value = slice.Index(arrayIndex).Interface()
				} else {
					return nil
				}
			default:
				return nil
			}
		}
	}

	return value
}

func compareValues(val1, val2 interface{}) int {
	// Handle nil values
	if val1 == nil && val2 == nil {
		return 0
	} else if val1 == nil {
		return -1
	} else if val2 == nil {
		return 1
	}

	// Handle number values
	val1Number, val1IsNumber := numToFloat64(val1)
	val2Number, val2IsNumber := numToFloat64(val2)
	if val1IsNumber && val2IsNumber {
		if val1Number < val2Number {
			return -1
		} else if val1Number > val2Number {
			return 1
		}
		return 0
	}

	// Handle different types
	if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
		return 1
	}

	switch val1 := val1.(type) {
	case string:
		val2 := val2.(string)
		return strings.Compare(val1, val2)
	case bool:
		val2 := val2.(bool)
		if val1 == val2 {
			return 0
		} else if val1 {
			return 1
		} else {
			return -1
		}
	// TODO: Add more types
	default:
		if reflect.DeepEqual(val1, val2) {
			return 0
		}
		return 1
	}
}

func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
	targetMap := make(map[string]T)

	for k, v := range originalMap {
		targetMap[k] = v
	}

	return targetMap
}
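Illustration only (not taken from the repository): a minimal sketch of evaluating a BinaryExpression directly against a rowContext, using only the types defined in the file above; the function name and sample document are hypothetical.

// Sketch: operand resolution and float64 coercion in binary expressions.
func exampleBinaryExpression() {
	// Hypothetical row: one document aliased as "c" with two numeric fields.
	r := rowContext{tables: map[string]RowType{
		"c": map[string]interface{}{"a": 420, "b": 6},
	}}

	sum := r.selectItem_SelectItemTypeBinaryExpression(parsers.BinaryExpression{
		Operation: "+",
		Left:      parsers.SelectItem{Path: []string{"c", "a"}, Type: parsers.SelectItemTypeField},
		Right:     parsers.SelectItem{Path: []string{"c", "b"}, Type: parsers.SelectItemTypeField},
	})

	// sum is 426.0 (operands are coerced to float64); a missing field,
	// a non-numeric operand, or division by zero yields nil instead.
	_ = sum
}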
@ -0,0 +1,92 @@
package memoryexecutor_test

import (
	"testing"

	"github.com/pikami/cosmium/parsers"
	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
	testutils "github.com/pikami/cosmium/test_utils"
)

func Test_Execute_Expressions(t *testing.T) {
	mockData := []memoryexecutor.RowType{
		map[string]interface{}{"id": "123", "age": 10, "isCool": true},
		map[string]interface{}{"id": "456", "age": 20, "isCool": false},
		map[string]interface{}{"id": "789", "age": 30, "isCool": true},
	}

	t.Run("Should execute comparison expressions in SELECT", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Path: []string{"c", "id"},
						Type: parsers.SelectItemTypeField,
					},
					{
						Alias: "isAdult",
						Type:  parsers.SelectItemTypeExpression,
						Value: parsers.ComparisonExpression{
							Operation: ">=",
							Left:      testutils.SelectItem_Path("c", "age"),
							Right:     testutils.SelectItem_Constant_Int(18),
						},
					},
					{
						Alias: "isNotCool",
						Type:  parsers.SelectItemTypeExpression,
						Value: parsers.ComparisonExpression{
							Operation: "!=",
							Left:      testutils.SelectItem_Path("c", "isCool"),
							Right:     testutils.SelectItem_Constant_Bool(true),
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
			mockData,
			[]memoryexecutor.RowType{
				map[string]interface{}{"id": "123", "isAdult": false, "isNotCool": false},
				map[string]interface{}{"id": "456", "isAdult": true, "isNotCool": true},
				map[string]interface{}{"id": "789", "isAdult": true, "isNotCool": false},
			},
		)
	})

	t.Run("Should execute logical expressions in SELECT", func(t *testing.T) {
		testQueryExecute(
			t,
			parsers.SelectStmt{
				SelectItems: []parsers.SelectItem{
					{
						Path: []string{"c", "id"},
						Type: parsers.SelectItemTypeField,
					},
					{
						Alias: "isCoolAndAdult",
						Type:  parsers.SelectItemTypeExpression,
						Value: parsers.LogicalExpression{
							Operation: parsers.LogicalExpressionTypeAnd,
							Expressions: []interface{}{
								testutils.SelectItem_Path("c", "isCool"),
								parsers.ComparisonExpression{
									Operation: ">=",
									Left:      testutils.SelectItem_Path("c", "age"),
									Right:     testutils.SelectItem_Constant_Int(18),
								},
							},
						},
					},
				},
				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
			},
			mockData,
			[]memoryexecutor.RowType{
				map[string]interface{}{"id": "123", "isCoolAndAdult": false},
				map[string]interface{}{"id": "456", "isCoolAndAdult": false},
				map[string]interface{}{"id": "789", "isCoolAndAdult": true},
			},
		)
	})
}
query_executors/memory_executor/distinct_iterator.go (new file, 36 lines)
@ -0,0 +1,36 @@
package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

type distinctIterator struct {
	documents rowTypeIterator
	seenDocs  []RowType
}

func (di *distinctIterator) Next() (RowType, datastore.DataStoreStatus) {
	if di.documents == nil {
		return rowContext{}, datastore.IterEOF
	}

	for {
		row, status := di.documents.Next()
		if status != datastore.StatusOk {
			di.documents = nil
			return rowContext{}, status
		}

		if !di.seen(row) {
			di.seenDocs = append(di.seenDocs, row)
			return row, status
		}
	}
}

func (di *distinctIterator) seen(row RowType) bool {
	for _, seenRow := range di.seenDocs {
		if compareValues(seenRow, row) == 0 {
			return true
		}
	}
	return false
}
query_executors/memory_executor/filter_iterator.go (new file, 143 lines)
@ -0,0 +1,143 @@
package memoryexecutor

import (
	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/internal/logger"
	"github.com/pikami/cosmium/parsers"
)

type filterIterator struct {
	documents rowIterator
	filters   interface{}
}

func (fi *filterIterator) Next() (rowContext, datastore.DataStoreStatus) {
	if fi.documents == nil {
		return rowContext{}, datastore.IterEOF
	}

	for {
		row, status := fi.documents.Next()
		if status != datastore.StatusOk {
			fi.documents = nil
			return rowContext{}, status
		}

		if fi.evaluateFilters(row) {
			return row, status
		}
	}
}

func (fi *filterIterator) evaluateFilters(row rowContext) bool {
	if fi.filters == nil {
		return true
	}

	switch typedFilters := fi.filters.(type) {
	case parsers.ComparisonExpression:
		return row.filters_ComparisonExpression(typedFilters)
	case parsers.LogicalExpression:
		return row.filters_LogicalExpression(typedFilters)
	case parsers.Constant:
		if value, ok := typedFilters.Value.(bool); ok {
			return value
		}
		return false
	case parsers.SelectItem:
		resolvedValue := row.resolveSelectItem(typedFilters)
		if value, ok := resolvedValue.(bool); ok {
			if typedFilters.Invert {
				return !value
			}

			return value
		}
	}

	return false
}

func (r rowContext) applyFilters(filters interface{}) bool {
	if filters == nil {
		return true
	}

	switch typedFilters := filters.(type) {
	case parsers.ComparisonExpression:
		return r.filters_ComparisonExpression(typedFilters)
	case parsers.LogicalExpression:
		return r.filters_LogicalExpression(typedFilters)
	case parsers.Constant:
		if value, ok := typedFilters.Value.(bool); ok {
			return value
		}
		return false
	case parsers.SelectItem:
		resolvedValue := r.resolveSelectItem(typedFilters)
		if value, ok := resolvedValue.(bool); ok {
			if typedFilters.Invert {
				return !value
			}

			return value
		}
	}

	return false
}

func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
	leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
	rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)

	if !leftExpressionOk || !rightExpressionOk {
		logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
		return false
	}

	leftValue := r.resolveSelectItem(leftExpression)
	rightValue := r.resolveSelectItem(rightExpression)

	cmp := compareValues(leftValue, rightValue)
	switch expression.Operation {
	case "=":
		return cmp == 0
	case "!=":
		return cmp != 0
	case "<":
		return cmp < 0
	case ">":
		return cmp > 0
	case "<=":
		return cmp <= 0
	case ">=":
		return cmp >= 0
	}

	return false
}

func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
	var result bool
	for i, subExpression := range expression.Expressions {
		expressionResult := r.applyFilters(subExpression)
		if i == 0 {
			result = expressionResult
		}

		switch expression.Operation {
		case parsers.LogicalExpressionTypeAnd:
			result = result && expressionResult
			if !result {
				return false
			}
		case parsers.LogicalExpressionTypeOr:
			result = result || expressionResult
			if result {
				return true
			}
		}
	}
	return result
}
73
query_executors/memory_executor/from_iterator.go
Normal file
73
query_executors/memory_executor/from_iterator.go
Normal file
@ -0,0 +1,73 @@
|
||||
package memoryexecutor
|
||||
|
||||
import (
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
)
|
||||
|
||||
type fromIterator struct {
|
||||
documents rowIterator
|
||||
table parsers.Table
|
||||
buffer []rowContext
|
||||
bufferIndex int
|
||||
}
|
||||
|
||||
func (fi *fromIterator) Next() (rowContext, datastore.DataStoreStatus) {
|
||||
if fi.documents == nil {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
|
||||
// Return from buffer if available
|
||||
if fi.bufferIndex < len(fi.buffer) {
|
||||
result := fi.buffer[fi.bufferIndex]
|
||||
fi.buffer[fi.bufferIndex] = rowContext{}
|
||||
fi.bufferIndex++
|
||||
return result, datastore.StatusOk
|
||||
}
|
||||
|
||||
// Resolve next row from documents
|
||||
row, status := fi.documents.Next()
|
||||
if status != datastore.StatusOk {
|
||||
fi.documents = nil
|
||||
return row, status
|
||||
}
|
||||
|
||||
if fi.table.SelectItem.Path != nil || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
destinationTableName := fi.table.SelectItem.Alias
|
||||
if destinationTableName == "" {
|
||||
destinationTableName = fi.table.Value
|
||||
}
|
||||
if destinationTableName == "" {
|
||||
destinationTableName = resolveDestinationColumnName(fi.table.SelectItem, 0, row.parameters)
|
||||
}
|
||||
|
||||
if fi.table.IsInSelect || fi.table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
selectValue := row.parseArray(fi.table.SelectItem)
|
||||
rowContexts := make([]rowContext, len(selectValue))
|
||||
for i, newRowData := range selectValue {
|
||||
rowContexts[i].parameters = row.parameters
|
||||
rowContexts[i].tables = copyMap(row.tables)
|
||||
rowContexts[i].tables[destinationTableName] = newRowData
|
||||
}
|
||||
|
||||
fi.buffer = rowContexts
|
||||
fi.bufferIndex = 0
|
||||
return fi.Next()
|
||||
}
|
||||
|
||||
if len(fi.table.SelectItem.Path) > 0 {
|
||||
sourceTableName := fi.table.SelectItem.Path[0]
|
||||
sourceTableData := row.tables[sourceTableName]
|
||||
if sourceTableData == nil {
|
||||
// When source table is not found, assume it's root document
|
||||
row.tables[sourceTableName] = row.tables["$root"]
|
||||
}
|
||||
}
|
||||
|
||||
newRowData := row.resolveSelectItem(fi.table.SelectItem)
|
||||
row.tables[destinationTableName] = newRowData
|
||||
return row, status
|
||||
}
|
||||
|
||||
return row, status
|
||||
}
|
69
query_executors/memory_executor/groupBy_iterator.go
Normal file
69
query_executors/memory_executor/groupBy_iterator.go
Normal file
@ -0,0 +1,69 @@
|
||||
package memoryexecutor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
)
|
||||
|
||||
type groupByIterator struct {
|
||||
documents rowIterator
|
||||
groupBy []parsers.SelectItem
|
||||
groupedRows []rowContext
|
||||
}
|
||||
|
||||
func (gi *groupByIterator) Next() (rowContext, datastore.DataStoreStatus) {
|
||||
if gi.groupedRows != nil {
|
||||
if len(gi.groupedRows) == 0 {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
row := gi.groupedRows[0]
|
||||
gi.groupedRows = gi.groupedRows[1:]
|
||||
return row, datastore.StatusOk
|
||||
}
|
||||
|
||||
documents := make([]rowContext, 0)
|
||||
for {
|
||||
row, status := gi.documents.Next()
|
||||
if status != datastore.StatusOk {
|
||||
break
|
||||
}
|
||||
|
||||
documents = append(documents, row)
|
||||
}
|
||||
gi.documents = nil
|
||||
|
||||
groupedRows := make(map[string][]rowContext)
|
||||
groupedKeys := make([]string, 0)
|
||||
|
||||
for _, row := range documents {
|
||||
key := row.generateGroupByKey(gi.groupBy)
|
||||
if _, ok := groupedRows[key]; !ok {
|
||||
groupedKeys = append(groupedKeys, key)
|
||||
}
|
||||
groupedRows[key] = append(groupedRows[key], row)
|
||||
}
|
||||
|
||||
gi.groupedRows = make([]rowContext, 0)
|
||||
for _, key := range groupedKeys {
|
||||
gi.groupedRows = append(gi.groupedRows, rowContext{
|
||||
tables: groupedRows[key][0].tables,
|
||||
parameters: groupedRows[key][0].parameters,
|
||||
grouppedRows: groupedRows[key],
|
||||
})
|
||||
}
|
||||
|
||||
return gi.Next()
|
||||
}
|
||||
|
||||
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
|
||||
var keyBuilder strings.Builder
|
||||
for _, selectItem := range groupBy {
|
||||
value := r.resolveSelectItem(selectItem)
|
||||
keyBuilder.WriteString(fmt.Sprintf("%v", value))
|
||||
keyBuilder.WriteString(":")
|
||||
}
|
||||
return keyBuilder.String()
|
||||
}
|
62
query_executors/memory_executor/join_iterator.go
Normal file
62
query_executors/memory_executor/join_iterator.go
Normal file
@ -0,0 +1,62 @@
|
||||
package memoryexecutor
|
||||
|
||||
import (
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
)
|
||||
|
||||
type joinIterator struct {
|
||||
documents rowIterator
|
||||
query parsers.SelectStmt
|
||||
buffer []rowContext
|
||||
}
|
||||
|
||||
func (ji *joinIterator) Next() (rowContext, datastore.DataStoreStatus) {
|
||||
if ji.documents == nil {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
|
||||
if len(ji.buffer) > 0 {
|
||||
row := ji.buffer[0]
|
||||
ji.buffer = ji.buffer[1:]
|
||||
return row, datastore.StatusOk
|
||||
}
|
||||
|
||||
doc, status := ji.documents.Next()
|
||||
if status != datastore.StatusOk {
|
||||
ji.documents = nil
|
||||
return rowContext{}, status
|
||||
}
|
||||
|
||||
ji.buffer = []rowContext{doc}
|
||||
for _, joinItem := range ji.query.JoinItems {
|
||||
nextDocuments := make([]rowContext, 0)
|
||||
for _, row := range ji.buffer {
|
||||
joinedItems := row.resolveJoinItemSelect(joinItem.SelectItem)
|
||||
for _, joinedItem := range joinedItems {
|
||||
tablesCopy := copyMap(row.tables)
|
||||
tablesCopy[joinItem.Table.Value] = joinedItem
|
||||
nextDocuments = append(nextDocuments, rowContext{
|
||||
parameters: row.parameters,
|
||||
tables: tablesCopy,
|
||||
})
|
||||
}
|
||||
}
|
||||
ji.buffer = nextDocuments
|
||||
}
|
||||
|
||||
return ji.Next()
|
||||
}
|
||||
|
||||
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
|
||||
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
selectValue := r.parseArray(selectItem)
|
||||
documents := make([]RowType, len(selectValue))
|
||||
for i, newRowData := range selectValue {
|
||||
documents[i] = newRowData
|
||||
}
|
||||
return documents
|
||||
}
|
||||
|
||||
return []RowType{}
|
||||
}
|
19
query_executors/memory_executor/limit_iterator.go
Normal file
@@ -0,0 +1,19 @@
package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

type limitIterator struct {
	documents rowTypeIterator
	limit     int
	count     int
}

func (li *limitIterator) Next() (RowType, datastore.DataStoreStatus) {
	if li.count >= li.limit {
		li.documents = nil
		return rowContext{}, datastore.IterEOF
	}

	li.count++
	return li.documents.Next()
}
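Taken together, the iterators added in this change form a lazy pipeline that executeQuery (rewritten later in this diff) assembles in clause order: FROM, JOIN, WHERE, ORDER BY, GROUP BY, SELECT, DISTINCT, OFFSET, LIMIT. A condensed sketch of that composition, assuming the struct fields shown in the new iterator files; the authoritative wiring is the executeQuery body below, not this helper:

```go
// buildPipeline is an illustrative restatement of the composition done by
// executeQuery in this change; it is not part of the diff itself.
func buildPipeline(query parsers.SelectStmt, source rowIterator) rowTypeIterator {
	var rows rowIterator = &fromIterator{documents: source, table: query.Table}

	if len(query.JoinItems) > 0 {
		rows = &joinIterator{documents: rows, query: query}
	}
	if query.Filters != nil {
		rows = &filterIterator{documents: rows, filters: query.Filters}
	}
	if len(query.OrderExpressions) > 0 {
		rows = &orderIterator{documents: rows, orderExpressions: query.OrderExpressions}
	}
	if len(query.GroupBy) > 0 {
		rows = &groupByIterator{documents: rows, groupBy: query.GroupBy}
	}

	var projected rowTypeIterator = &projectIterator{
		documents:   rows,
		selectItems: query.SelectItems,
		groupBy:     query.GroupBy,
	}
	if query.Distinct {
		projected = &distinctIterator{documents: projected}
	}
	if query.Offset > 0 {
		projected = &offsetIterator{documents: projected, offset: query.Offset}
	}
	if query.Count > 0 {
		projected = &limitIterator{documents: projected, limit: query.Count}
	}
	return projected
}
```

Stages such as filter, offset, and limit pull rows one at a time; order and group-by still buffer their input internally, as their implementations show.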
@@ -605,10 +605,30 @@ func numToInt(ex interface{}) (int, bool) {
 
 func numToFloat64(num interface{}) (float64, bool) {
 	switch val := num.(type) {
-	case float64:
-		return val, true
 	case int:
 		return float64(val), true
+	case int8:
+		return float64(val), true
+	case int16:
+		return float64(val), true
+	case int32:
+		return float64(val), true
+	case int64:
+		return float64(val), true
+	case uint:
+		return float64(val), true
+	case uint8:
+		return float64(val), true
+	case uint16:
+		return float64(val), true
+	case uint32:
+		return float64(val), true
+	case uint64:
+		return float64(val), true
+	case float32:
+		return float64(val), true
+	case float64:
+		return val, true
 	default:
 		return 0, false
 	}
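A tiny illustration (not part of the diff) of what the widened conversion now accepts; the sample values and the helper name are made up, and "fmt" is assumed to be imported:

```go
// exampleNumToFloat64 exercises the newly supported numeric widths.
func exampleNumToFloat64() {
	inputs := []interface{}{int8(3), uint16(7), float32(1.5), int64(42), "not a number"}
	for _, in := range inputs {
		if f, ok := numToFloat64(in); ok {
			fmt.Printf("%T %v -> %v\n", in, in, f) // every integer and float width converts
		} else {
			fmt.Printf("%T %v -> not numeric\n", in, in) // other types report false
		}
	}
}
```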
@ -1,752 +1,92 @@
|
||||
package memoryexecutor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/pikami/cosmium/internal/logger"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
type RowType interface{}
|
||||
type rowContext struct {
|
||||
tables map[string]RowType
|
||||
parameters map[string]interface{}
|
||||
grouppedRows []rowContext
|
||||
func ExecuteQuery(query parsers.SelectStmt, documents rowTypeIterator) []RowType {
|
||||
resultIter := executeQuery(query, &rowTypeToRowContextIterator{documents: documents, query: query})
|
||||
result := make([]RowType, 0)
|
||||
for {
|
||||
row, status := resultIter.Next()
|
||||
if status != datastore.StatusOk {
|
||||
break
|
||||
}
|
||||
|
||||
result = append(result, row)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func ExecuteQuery(query parsers.SelectStmt, documents []RowType) []RowType {
|
||||
currentDocuments := make([]rowContext, 0)
|
||||
for _, doc := range documents {
|
||||
currentDocuments = append(currentDocuments, resolveFrom(query, doc)...)
|
||||
func executeQuery(query parsers.SelectStmt, documents rowIterator) rowTypeIterator {
|
||||
// Resolve FROM
|
||||
var iter rowIterator = &fromIterator{
|
||||
documents: documents,
|
||||
table: query.Table,
|
||||
}
|
||||
|
||||
// Handle JOINS
|
||||
nextDocuments := make([]rowContext, 0)
|
||||
for _, currentDocument := range currentDocuments {
|
||||
rowContexts := currentDocument.handleJoin(query)
|
||||
nextDocuments = append(nextDocuments, rowContexts...)
|
||||
}
|
||||
currentDocuments = nextDocuments
|
||||
|
||||
// Apply filters
|
||||
nextDocuments = make([]rowContext, 0)
|
||||
for _, currentDocument := range currentDocuments {
|
||||
if currentDocument.applyFilters(query.Filters) {
|
||||
nextDocuments = append(nextDocuments, currentDocument)
|
||||
// Apply JOIN
|
||||
if len(query.JoinItems) > 0 {
|
||||
iter = &joinIterator{
|
||||
documents: iter,
|
||||
query: query,
|
||||
}
|
||||
}
|
||||
currentDocuments = nextDocuments
|
||||
|
||||
// Apply order
|
||||
// Apply WHERE
|
||||
if query.Filters != nil {
|
||||
iter = &filterIterator{
|
||||
documents: iter,
|
||||
filters: query.Filters,
|
||||
}
|
||||
}
|
||||
|
||||
// Apply ORDER BY
|
||||
if len(query.OrderExpressions) > 0 {
|
||||
applyOrder(currentDocuments, query.OrderExpressions)
|
||||
iter = &orderIterator{
|
||||
documents: iter,
|
||||
orderExpressions: query.OrderExpressions,
|
||||
}
|
||||
}
|
||||
|
||||
// Apply group by
|
||||
// Apply GROUP BY
|
||||
if len(query.GroupBy) > 0 {
|
||||
currentDocuments = applyGroupBy(currentDocuments, query.GroupBy)
|
||||
iter = &groupByIterator{
|
||||
documents: iter,
|
||||
groupBy: query.GroupBy,
|
||||
}
|
||||
}
|
||||
|
||||
// Apply select
|
||||
projectedDocuments := applyProjection(currentDocuments, query.SelectItems, query.GroupBy)
|
||||
// Apply SELECT
|
||||
var projectedIterator rowTypeIterator = &projectIterator{
|
||||
documents: iter,
|
||||
selectItems: query.SelectItems,
|
||||
groupBy: query.GroupBy,
|
||||
}
|
||||
|
||||
// Apply distinct
|
||||
// Apply DISTINCT
|
||||
if query.Distinct {
|
||||
projectedDocuments = deduplicate(projectedDocuments)
|
||||
projectedIterator = &distinctIterator{
|
||||
documents: projectedIterator,
|
||||
}
|
||||
}
|
||||
|
||||
// Apply offset
|
||||
// Apply OFFSET
|
||||
if query.Offset > 0 {
|
||||
if query.Offset < len(projectedDocuments) {
|
||||
projectedDocuments = projectedDocuments[query.Offset:]
|
||||
} else {
|
||||
projectedDocuments = []RowType{}
|
||||
projectedIterator = &offsetIterator{
|
||||
documents: projectedIterator,
|
||||
offset: query.Offset,
|
||||
}
|
||||
}
|
||||
|
||||
// Apply result limit
|
||||
if query.Count > 0 && len(projectedDocuments) > query.Count {
|
||||
projectedDocuments = projectedDocuments[:query.Count]
|
||||
// Apply LIMIT
|
||||
if query.Count > 0 {
|
||||
projectedIterator = &limitIterator{
|
||||
documents: projectedIterator,
|
||||
limit: query.Count,
|
||||
}
|
||||
}
|
||||
|
||||
return projectedDocuments
|
||||
}
|
||||
|
||||
func resolveFrom(query parsers.SelectStmt, doc RowType) []rowContext {
|
||||
initialRow, gotParentContext := doc.(rowContext)
|
||||
if !gotParentContext {
|
||||
var initialTableName string
|
||||
if query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
initialTableName = query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value
|
||||
}
|
||||
|
||||
if initialTableName == "" {
|
||||
initialTableName = query.Table.Value
|
||||
}
|
||||
|
||||
if initialTableName == "" {
|
||||
initialTableName = resolveDestinationColumnName(query.Table.SelectItem, 0, query.Parameters)
|
||||
}
|
||||
|
||||
initialRow = rowContext{
|
||||
parameters: query.Parameters,
|
||||
tables: map[string]RowType{
|
||||
initialTableName: doc,
|
||||
"$root": doc,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if query.Table.SelectItem.Path != nil || query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
destinationTableName := query.Table.SelectItem.Alias
|
||||
if destinationTableName == "" {
|
||||
destinationTableName = query.Table.Value
|
||||
}
|
||||
if destinationTableName == "" {
|
||||
destinationTableName = resolveDestinationColumnName(query.Table.SelectItem, 0, initialRow.parameters)
|
||||
}
|
||||
|
||||
if query.Table.IsInSelect || query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
selectValue := initialRow.parseArray(query.Table.SelectItem)
|
||||
rowContexts := make([]rowContext, len(selectValue))
|
||||
for i, newRowData := range selectValue {
|
||||
rowContexts[i].parameters = initialRow.parameters
|
||||
rowContexts[i].tables = copyMap(initialRow.tables)
|
||||
rowContexts[i].tables[destinationTableName] = newRowData
|
||||
}
|
||||
return rowContexts
|
||||
}
|
||||
|
||||
if len(query.Table.SelectItem.Path) > 0 {
|
||||
sourceTableName := query.Table.SelectItem.Path[0]
|
||||
sourceTableData := initialRow.tables[sourceTableName]
|
||||
if sourceTableData == nil {
|
||||
// When source table is not found, assume it's root document
|
||||
initialRow.tables[sourceTableName] = initialRow.tables["$root"]
|
||||
}
|
||||
}
|
||||
|
||||
newRowData := initialRow.resolveSelectItem(query.Table.SelectItem)
|
||||
initialRow.tables[destinationTableName] = newRowData
|
||||
return []rowContext{initialRow}
|
||||
}
|
||||
|
||||
return []rowContext{initialRow}
|
||||
}
|
||||
|
||||
func (r rowContext) handleJoin(query parsers.SelectStmt) []rowContext {
|
||||
currentDocuments := []rowContext{r}
|
||||
|
||||
for _, joinItem := range query.JoinItems {
|
||||
nextDocuments := make([]rowContext, 0)
|
||||
for _, currentDocument := range currentDocuments {
|
||||
joinedItems := currentDocument.resolveJoinItemSelect(joinItem.SelectItem)
|
||||
for _, joinedItem := range joinedItems {
|
||||
tablesCopy := copyMap(currentDocument.tables)
|
||||
tablesCopy[joinItem.Table.Value] = joinedItem
|
||||
nextDocuments = append(nextDocuments, rowContext{
|
||||
parameters: currentDocument.parameters,
|
||||
tables: tablesCopy,
|
||||
})
|
||||
}
|
||||
}
|
||||
currentDocuments = nextDocuments
|
||||
}
|
||||
|
||||
return currentDocuments
|
||||
}
|
||||
|
||||
func (r rowContext) resolveJoinItemSelect(selectItem parsers.SelectItem) []RowType {
|
||||
if selectItem.Path != nil || selectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
selectValue := r.parseArray(selectItem)
|
||||
documents := make([]RowType, len(selectValue))
|
||||
for i, newRowData := range selectValue {
|
||||
documents[i] = newRowData
|
||||
}
|
||||
return documents
|
||||
}
|
||||
|
||||
return []RowType{}
|
||||
}
|
||||
|
||||
func (r rowContext) applyFilters(filters interface{}) bool {
|
||||
if filters == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
switch typedFilters := filters.(type) {
|
||||
case parsers.ComparisonExpression:
|
||||
return r.filters_ComparisonExpression(typedFilters)
|
||||
case parsers.LogicalExpression:
|
||||
return r.filters_LogicalExpression(typedFilters)
|
||||
case parsers.Constant:
|
||||
if value, ok := typedFilters.Value.(bool); ok {
|
||||
return value
|
||||
}
|
||||
return false
|
||||
case parsers.SelectItem:
|
||||
resolvedValue := r.resolveSelectItem(typedFilters)
|
||||
if value, ok := resolvedValue.(bool); ok {
|
||||
if typedFilters.Invert {
|
||||
return !value
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (r rowContext) filters_ComparisonExpression(expression parsers.ComparisonExpression) bool {
|
||||
leftExpression, leftExpressionOk := expression.Left.(parsers.SelectItem)
|
||||
rightExpression, rightExpressionOk := expression.Right.(parsers.SelectItem)
|
||||
|
||||
if !leftExpressionOk || !rightExpressionOk {
|
||||
logger.ErrorLn("ComparisonExpression has incorrect Left or Right type")
|
||||
return false
|
||||
}
|
||||
|
||||
leftValue := r.resolveSelectItem(leftExpression)
|
||||
rightValue := r.resolveSelectItem(rightExpression)
|
||||
|
||||
cmp := compareValues(leftValue, rightValue)
|
||||
switch expression.Operation {
|
||||
case "=":
|
||||
return cmp == 0
|
||||
case "!=":
|
||||
return cmp != 0
|
||||
case "<":
|
||||
return cmp < 0
|
||||
case ">":
|
||||
return cmp > 0
|
||||
case "<=":
|
||||
return cmp <= 0
|
||||
case ">=":
|
||||
return cmp >= 0
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (r rowContext) filters_LogicalExpression(expression parsers.LogicalExpression) bool {
|
||||
var result bool
|
||||
for i, subExpression := range expression.Expressions {
|
||||
expressionResult := r.applyFilters(subExpression)
|
||||
if i == 0 {
|
||||
result = expressionResult
|
||||
}
|
||||
|
||||
switch expression.Operation {
|
||||
case parsers.LogicalExpressionTypeAnd:
|
||||
result = result && expressionResult
|
||||
if !result {
|
||||
return false
|
||||
}
|
||||
case parsers.LogicalExpressionTypeOr:
|
||||
result = result || expressionResult
|
||||
if result {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func applyOrder(documents []rowContext, orderExpressions []parsers.OrderExpression) {
|
||||
less := func(i, j int) bool {
|
||||
for _, order := range orderExpressions {
|
||||
val1 := documents[i].resolveSelectItem(order.SelectItem)
|
||||
val2 := documents[j].resolveSelectItem(order.SelectItem)
|
||||
|
||||
cmp := compareValues(val1, val2)
|
||||
if cmp != 0 {
|
||||
if order.Direction == parsers.OrderDirectionDesc {
|
||||
return cmp > 0
|
||||
}
|
||||
return cmp < 0
|
||||
}
|
||||
}
|
||||
return i < j
|
||||
}
|
||||
|
||||
sort.SliceStable(documents, less)
|
||||
}
|
||||
|
||||
func applyGroupBy(documents []rowContext, groupBy []parsers.SelectItem) []rowContext {
|
||||
groupedRows := make(map[string][]rowContext)
|
||||
groupedKeys := make([]string, 0)
|
||||
|
||||
for _, row := range documents {
|
||||
key := row.generateGroupByKey(groupBy)
|
||||
if _, ok := groupedRows[key]; !ok {
|
||||
groupedKeys = append(groupedKeys, key)
|
||||
}
|
||||
groupedRows[key] = append(groupedRows[key], row)
|
||||
}
|
||||
|
||||
grouppedRows := make([]rowContext, 0)
|
||||
for _, key := range groupedKeys {
|
||||
grouppedRowContext := rowContext{
|
||||
tables: groupedRows[key][0].tables,
|
||||
parameters: groupedRows[key][0].parameters,
|
||||
grouppedRows: groupedRows[key],
|
||||
}
|
||||
grouppedRows = append(grouppedRows, grouppedRowContext)
|
||||
}
|
||||
|
||||
return grouppedRows
|
||||
}
|
||||
|
||||
func (r rowContext) generateGroupByKey(groupBy []parsers.SelectItem) string {
|
||||
var keyBuilder strings.Builder
|
||||
for _, selectItem := range groupBy {
|
||||
value := r.resolveSelectItem(selectItem)
|
||||
keyBuilder.WriteString(fmt.Sprintf("%v", value))
|
||||
keyBuilder.WriteString(":")
|
||||
}
|
||||
return keyBuilder.String()
|
||||
}
|
||||
|
||||
func applyProjection(documents []rowContext, selectItems []parsers.SelectItem, groupBy []parsers.SelectItem) []RowType {
|
||||
if len(documents) == 0 {
|
||||
return []RowType{}
|
||||
}
|
||||
|
||||
if hasAggregateFunctions(selectItems) && len(groupBy) == 0 {
|
||||
// When can have aggregate functions without GROUP BY clause,
|
||||
// we should aggregate all rows in that case
|
||||
rowContext := rowContext{
|
||||
tables: documents[0].tables,
|
||||
parameters: documents[0].parameters,
|
||||
grouppedRows: documents,
|
||||
}
|
||||
return []RowType{rowContext.applyProjection(selectItems)}
|
||||
}
|
||||
|
||||
projectedDocuments := make([]RowType, len(documents))
|
||||
for index, row := range documents {
|
||||
projectedDocuments[index] = row.applyProjection(selectItems)
|
||||
}
|
||||
|
||||
return projectedDocuments
|
||||
}
|
||||
|
||||
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
|
||||
// When the first value is top level, select it instead
|
||||
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
|
||||
return r.resolveSelectItem(selectItems[0])
|
||||
}
|
||||
|
||||
// Construct a new row based on the selected columns
|
||||
row := make(map[string]interface{})
|
||||
for index, selectItem := range selectItems {
|
||||
destinationName := resolveDestinationColumnName(selectItem, index, r.parameters)
|
||||
|
||||
row[destinationName] = r.resolveSelectItem(selectItem)
|
||||
}
|
||||
|
||||
return row
|
||||
}
|
||||
|
||||
func resolveDestinationColumnName(selectItem parsers.SelectItem, itemIndex int, queryParameters map[string]interface{}) string {
|
||||
if selectItem.Alias != "" {
|
||||
return selectItem.Alias
|
||||
}
|
||||
|
||||
destinationName := fmt.Sprintf("$%d", itemIndex+1)
|
||||
if len(selectItem.Path) > 0 {
|
||||
destinationName = selectItem.Path[len(selectItem.Path)-1]
|
||||
}
|
||||
|
||||
if destinationName[0] == '@' {
|
||||
destinationName = queryParameters[destinationName].(string)
|
||||
}
|
||||
|
||||
return destinationName
|
||||
}
|
||||
|
||||
func (r rowContext) resolveSelectItem(selectItem parsers.SelectItem) interface{} {
|
||||
if selectItem.Type == parsers.SelectItemTypeArray {
|
||||
return r.selectItem_SelectItemTypeArray(selectItem)
|
||||
}
|
||||
|
||||
if selectItem.Type == parsers.SelectItemTypeObject {
|
||||
return r.selectItem_SelectItemTypeObject(selectItem)
|
||||
}
|
||||
|
||||
if selectItem.Type == parsers.SelectItemTypeConstant {
|
||||
return r.selectItem_SelectItemTypeConstant(selectItem)
|
||||
}
|
||||
|
||||
if selectItem.Type == parsers.SelectItemTypeSubQuery {
|
||||
return r.selectItem_SelectItemTypeSubQuery(selectItem)
|
||||
}
|
||||
|
||||
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
|
||||
if typedFunctionCall, ok := selectItem.Value.(parsers.FunctionCall); ok {
|
||||
return r.selectItem_SelectItemTypeFunctionCall(typedFunctionCall)
|
||||
}
|
||||
|
||||
logger.ErrorLn("parsers.SelectItem has incorrect Value type (expected parsers.FunctionCall)")
|
||||
return nil
|
||||
}
|
||||
|
||||
return r.selectItem_SelectItemTypeField(selectItem)
|
||||
}
|
||||
|
||||
func (r rowContext) selectItem_SelectItemTypeArray(selectItem parsers.SelectItem) interface{} {
|
||||
arrayValue := make([]interface{}, 0)
|
||||
for _, subSelectItem := range selectItem.SelectItems {
|
||||
arrayValue = append(arrayValue, r.resolveSelectItem(subSelectItem))
|
||||
}
|
||||
return arrayValue
|
||||
}
|
||||
|
||||
func (r rowContext) selectItem_SelectItemTypeObject(selectItem parsers.SelectItem) interface{} {
|
||||
objectValue := make(map[string]interface{})
|
||||
for _, subSelectItem := range selectItem.SelectItems {
|
||||
objectValue[subSelectItem.Alias] = r.resolveSelectItem(subSelectItem)
|
||||
}
|
||||
return objectValue
|
||||
}
|
||||
|
||||
func (r rowContext) selectItem_SelectItemTypeConstant(selectItem parsers.SelectItem) interface{} {
|
||||
var typedValue parsers.Constant
|
||||
var ok bool
|
||||
if typedValue, ok = selectItem.Value.(parsers.Constant); !ok {
|
||||
// TODO: Handle error
|
||||
logger.ErrorLn("parsers.Constant has incorrect Value type")
|
||||
}
|
||||
|
||||
if typedValue.Type == parsers.ConstantTypeParameterConstant &&
|
||||
r.parameters != nil {
|
||||
if key, ok := typedValue.Value.(string); ok {
|
||||
return r.parameters[key]
|
||||
}
|
||||
}
|
||||
|
||||
return typedValue.Value
|
||||
}
|
||||
|
||||
func (r rowContext) selectItem_SelectItemTypeSubQuery(selectItem parsers.SelectItem) interface{} {
|
||||
subQuery := selectItem.Value.(parsers.SelectStmt)
|
||||
subQueryResult := ExecuteQuery(
|
||||
subQuery,
|
||||
[]RowType{r},
|
||||
)
|
||||
|
||||
if subQuery.Exists {
|
||||
return len(subQueryResult) > 0
|
||||
}
|
||||
|
||||
return subQueryResult
|
||||
}
|
||||
|
||||
func (r rowContext) selectItem_SelectItemTypeFunctionCall(functionCall parsers.FunctionCall) interface{} {
|
||||
switch functionCall.Type {
|
||||
case parsers.FunctionCallStringEquals:
|
||||
return r.strings_StringEquals(functionCall.Arguments)
|
||||
case parsers.FunctionCallContains:
|
||||
return r.strings_Contains(functionCall.Arguments)
|
||||
case parsers.FunctionCallEndsWith:
|
||||
return r.strings_EndsWith(functionCall.Arguments)
|
||||
case parsers.FunctionCallStartsWith:
|
||||
return r.strings_StartsWith(functionCall.Arguments)
|
||||
case parsers.FunctionCallConcat:
|
||||
return r.strings_Concat(functionCall.Arguments)
|
||||
case parsers.FunctionCallIndexOf:
|
||||
return r.strings_IndexOf(functionCall.Arguments)
|
||||
case parsers.FunctionCallToString:
|
||||
return r.strings_ToString(functionCall.Arguments)
|
||||
case parsers.FunctionCallUpper:
|
||||
return r.strings_Upper(functionCall.Arguments)
|
||||
case parsers.FunctionCallLower:
|
||||
return r.strings_Lower(functionCall.Arguments)
|
||||
case parsers.FunctionCallLeft:
|
||||
return r.strings_Left(functionCall.Arguments)
|
||||
case parsers.FunctionCallLength:
|
||||
return r.strings_Length(functionCall.Arguments)
|
||||
case parsers.FunctionCallLTrim:
|
||||
return r.strings_LTrim(functionCall.Arguments)
|
||||
case parsers.FunctionCallReplace:
|
||||
return r.strings_Replace(functionCall.Arguments)
|
||||
case parsers.FunctionCallReplicate:
|
||||
return r.strings_Replicate(functionCall.Arguments)
|
||||
case parsers.FunctionCallReverse:
|
||||
return r.strings_Reverse(functionCall.Arguments)
|
||||
case parsers.FunctionCallRight:
|
||||
return r.strings_Right(functionCall.Arguments)
|
||||
case parsers.FunctionCallRTrim:
|
||||
return r.strings_RTrim(functionCall.Arguments)
|
||||
case parsers.FunctionCallSubstring:
|
||||
return r.strings_Substring(functionCall.Arguments)
|
||||
case parsers.FunctionCallTrim:
|
||||
return r.strings_Trim(functionCall.Arguments)
|
||||
|
||||
case parsers.FunctionCallIsDefined:
|
||||
return r.typeChecking_IsDefined(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsArray:
|
||||
return r.typeChecking_IsArray(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsBool:
|
||||
return r.typeChecking_IsBool(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsFiniteNumber:
|
||||
return r.typeChecking_IsFiniteNumber(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsInteger:
|
||||
return r.typeChecking_IsInteger(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsNull:
|
||||
return r.typeChecking_IsNull(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsNumber:
|
||||
return r.typeChecking_IsNumber(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsObject:
|
||||
return r.typeChecking_IsObject(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsPrimitive:
|
||||
return r.typeChecking_IsPrimitive(functionCall.Arguments)
|
||||
case parsers.FunctionCallIsString:
|
||||
return r.typeChecking_IsString(functionCall.Arguments)
|
||||
|
||||
case parsers.FunctionCallArrayConcat:
|
||||
return r.array_Concat(functionCall.Arguments)
|
||||
case parsers.FunctionCallArrayContains:
|
||||
return r.array_Contains(functionCall.Arguments)
|
||||
case parsers.FunctionCallArrayContainsAny:
|
||||
return r.array_Contains_Any(functionCall.Arguments)
|
||||
case parsers.FunctionCallArrayContainsAll:
|
||||
return r.array_Contains_All(functionCall.Arguments)
|
||||
case parsers.FunctionCallArrayLength:
|
||||
return r.array_Length(functionCall.Arguments)
|
||||
case parsers.FunctionCallArraySlice:
|
||||
return r.array_Slice(functionCall.Arguments)
|
||||
case parsers.FunctionCallSetIntersect:
|
||||
return r.set_Intersect(functionCall.Arguments)
|
||||
case parsers.FunctionCallSetUnion:
|
||||
return r.set_Union(functionCall.Arguments)
|
||||
|
||||
case parsers.FunctionCallMathAbs:
|
||||
return r.math_Abs(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathAcos:
|
||||
return r.math_Acos(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathAsin:
|
||||
return r.math_Asin(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathAtan:
|
||||
return r.math_Atan(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathCeiling:
|
||||
return r.math_Ceiling(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathCos:
|
||||
return r.math_Cos(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathCot:
|
||||
return r.math_Cot(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathDegrees:
|
||||
return r.math_Degrees(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathExp:
|
||||
return r.math_Exp(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathFloor:
|
||||
return r.math_Floor(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntBitNot:
|
||||
return r.math_IntBitNot(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathLog10:
|
||||
return r.math_Log10(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathRadians:
|
||||
return r.math_Radians(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathRound:
|
||||
return r.math_Round(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathSign:
|
||||
return r.math_Sign(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathSin:
|
||||
return r.math_Sin(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathSqrt:
|
||||
return r.math_Sqrt(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathSquare:
|
||||
return r.math_Square(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathTan:
|
||||
return r.math_Tan(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathTrunc:
|
||||
return r.math_Trunc(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathAtn2:
|
||||
return r.math_Atn2(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntAdd:
|
||||
return r.math_IntAdd(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntBitAnd:
|
||||
return r.math_IntBitAnd(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntBitLeftShift:
|
||||
return r.math_IntBitLeftShift(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntBitOr:
|
||||
return r.math_IntBitOr(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntBitRightShift:
|
||||
return r.math_IntBitRightShift(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntBitXor:
|
||||
return r.math_IntBitXor(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntDiv:
|
||||
return r.math_IntDiv(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntMod:
|
||||
return r.math_IntMod(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntMul:
|
||||
return r.math_IntMul(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathIntSub:
|
||||
return r.math_IntSub(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathPower:
|
||||
return r.math_Power(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathLog:
|
||||
return r.math_Log(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathNumberBin:
|
||||
return r.math_NumberBin(functionCall.Arguments)
|
||||
case parsers.FunctionCallMathPi:
|
||||
return r.math_Pi()
|
||||
case parsers.FunctionCallMathRand:
|
||||
return r.math_Rand()
|
||||
|
||||
case parsers.FunctionCallAggregateAvg:
|
||||
return r.aggregate_Avg(functionCall.Arguments)
|
||||
case parsers.FunctionCallAggregateCount:
|
||||
return r.aggregate_Count(functionCall.Arguments)
|
||||
case parsers.FunctionCallAggregateMax:
|
||||
return r.aggregate_Max(functionCall.Arguments)
|
||||
case parsers.FunctionCallAggregateMin:
|
||||
return r.aggregate_Min(functionCall.Arguments)
|
||||
case parsers.FunctionCallAggregateSum:
|
||||
return r.aggregate_Sum(functionCall.Arguments)
|
||||
|
||||
case parsers.FunctionCallIn:
|
||||
return r.misc_In(functionCall.Arguments)
|
||||
}
|
||||
|
||||
logger.Errorf("Unknown function call type: %v", functionCall.Type)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r rowContext) selectItem_SelectItemTypeField(selectItem parsers.SelectItem) interface{} {
|
||||
value := r.tables[selectItem.Path[0]]
|
||||
|
||||
if len(selectItem.Path) > 1 {
|
||||
for _, pathSegment := range selectItem.Path[1:] {
|
||||
if pathSegment[0] == '@' {
|
||||
pathSegment = r.parameters[pathSegment].(string)
|
||||
}
|
||||
|
||||
switch nestedValue := value.(type) {
|
||||
case map[string]interface{}:
|
||||
value = nestedValue[pathSegment]
|
||||
case map[string]RowType:
|
||||
value = nestedValue[pathSegment]
|
||||
case []int, []string, []interface{}:
|
||||
slice := reflect.ValueOf(nestedValue)
|
||||
if arrayIndex, err := strconv.Atoi(pathSegment); err == nil && slice.Len() > arrayIndex {
|
||||
value = slice.Index(arrayIndex).Interface()
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
|
||||
if selectItems == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, selectItem := range selectItems {
|
||||
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
|
||||
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
if hasAggregateFunctions(selectItem.SelectItems) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func compareValues(val1, val2 interface{}) int {
|
||||
if val1 == nil && val2 == nil {
|
||||
return 0
|
||||
} else if val1 == nil {
|
||||
return -1
|
||||
} else if val2 == nil {
|
||||
return 1
|
||||
}
|
||||
|
||||
if reflect.TypeOf(val1) != reflect.TypeOf(val2) {
|
||||
return 1
|
||||
}
|
||||
|
||||
switch val1 := val1.(type) {
|
||||
case int:
|
||||
val2 := val2.(int)
|
||||
if val1 < val2 {
|
||||
return -1
|
||||
} else if val1 > val2 {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
case float64:
|
||||
val2 := val2.(float64)
|
||||
if val1 < val2 {
|
||||
return -1
|
||||
} else if val1 > val2 {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
case string:
|
||||
val2 := val2.(string)
|
||||
return strings.Compare(val1, val2)
|
||||
case bool:
|
||||
val2 := val2.(bool)
|
||||
if val1 == val2 {
|
||||
return 0
|
||||
} else if val1 {
|
||||
return 1
|
||||
} else {
|
||||
return -1
|
||||
}
|
||||
// TODO: Add more types
|
||||
default:
|
||||
if reflect.DeepEqual(val1, val2) {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
}
|
||||
|
||||
func deduplicate[T RowType | interface{}](slice []T) []T {
|
||||
var result []T
|
||||
result = make([]T, 0)
|
||||
|
||||
for i := 0; i < len(slice); i++ {
|
||||
unique := true
|
||||
for j := 0; j < len(result); j++ {
|
||||
if compareValues(slice[i], result[j]) == 0 {
|
||||
unique = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if unique {
|
||||
result = append(result, slice[i])
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func copyMap[T RowType | []RowType](originalMap map[string]T) map[string]T {
|
||||
targetMap := make(map[string]T)
|
||||
|
||||
for k, v := range originalMap {
|
||||
targetMap[k] = v
|
||||
}
|
||||
|
||||
return targetMap
|
||||
return projectedIterator
|
||||
}
|
||||
|
@@ -16,3 +16,16 @@ func (r rowContext) misc_In(arguments []interface{}) bool {
 
 	return false
 }
+
+func (r rowContext) misc_Iif(arguments []interface{}) interface{} {
+	if len(arguments) != 3 {
+		return nil
+	}
+
+	condition := r.resolveSelectItem(arguments[0].(parsers.SelectItem))
+	if condition != nil && condition == true {
+		return r.resolveSelectItem(arguments[1].(parsers.SelectItem))
+	}
+
+	return r.resolveSelectItem(arguments[2].(parsers.SelectItem))
+}
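The new misc_Iif helper backs the IIF() function exercised by the test added further below. As an illustration only (neither snippet is part of the diff), this is roughly the query shape that test models, plus a standalone restatement of the selection rule: anything other than a boolean true falls through to the third argument.

```go
// Illustrative query shape; the test builds the equivalent AST directly.
const exampleIifQuery = `SELECT c.id, IIF(c.isCool, "real cool", "not cool") AS coolness FROM c`

// iifRule mirrors misc_Iif's behaviour on already-resolved values:
// false, nil, or any non-boolean condition selects whenFalse.
func iifRule(condition, whenTrue, whenFalse interface{}) interface{} {
	if b, ok := condition.(bool); ok && b {
		return whenTrue
	}
	return whenFalse
}
```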
@@ -4,18 +4,41 @@ import (
 	"reflect"
 	"testing"
 
+	"github.com/pikami/cosmium/internal/datastore"
 	"github.com/pikami/cosmium/parsers"
 	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
 	testutils "github.com/pikami/cosmium/test_utils"
 )
 
+type TestDocumentIterator struct {
+	documents []memoryexecutor.RowType
+	index     int
+}
+
+func NewTestDocumentIterator(documents []memoryexecutor.RowType) *TestDocumentIterator {
+	return &TestDocumentIterator{
+		documents: documents,
+		index:     -1,
+	}
+}
+
+func (i *TestDocumentIterator) Next() (memoryexecutor.RowType, datastore.DataStoreStatus) {
+	i.index++
+	if i.index >= len(i.documents) {
+		return nil, datastore.IterEOF
+	}
+
+	return i.documents[i.index], datastore.StatusOk
+}
+
 func testQueryExecute(
 	t *testing.T,
 	query parsers.SelectStmt,
 	data []memoryexecutor.RowType,
 	expectedData []memoryexecutor.RowType,
 ) {
-	result := memoryexecutor.ExecuteQuery(query, data)
+	iter := NewTestDocumentIterator(data)
+	result := memoryexecutor.ExecuteQuery(query, iter)
 
 	if !reflect.DeepEqual(result, expectedData) {
 		t.Errorf("execution result does not match expected data.\nExpected: %+v\nGot: %+v", expectedData, result)
@@ -187,4 +210,35 @@ func Test_Execute(t *testing.T) {
 			},
 		)
 	})
+
+	t.Run("Should execute function IIF()", func(t *testing.T) {
+		testQueryExecute(
+			t,
+			parsers.SelectStmt{
+				SelectItems: []parsers.SelectItem{
+					testutils.SelectItem_Path("c", "id"),
+					{
+						Alias: "coolness",
+						Type: parsers.SelectItemTypeFunctionCall,
+						Value: parsers.FunctionCall{
+							Type: parsers.FunctionCallIif,
+							Arguments: []interface{}{
+								testutils.SelectItem_Path("c", "isCool"),
+								testutils.SelectItem_Constant_String("real cool"),
+								testutils.SelectItem_Constant_String("not cool"),
+							},
+						},
+					},
+				},
+				Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
+			},
+			mockData,
+			[]memoryexecutor.RowType{
+				map[string]interface{}{"id": "12345", "coolness": "not cool"},
+				map[string]interface{}{"id": "67890", "coolness": "real cool"},
+				map[string]interface{}{"id": "456", "coolness": "real cool"},
+				map[string]interface{}{"id": "123", "coolness": "real cool"},
+			},
+		)
+	})
 }
22
query_executors/memory_executor/offset_iterator.go
Normal file
@@ -0,0 +1,22 @@
package memoryexecutor

import "github.com/pikami/cosmium/internal/datastore"

type offsetIterator struct {
	documents rowTypeIterator
	offset    int
	skipped   bool
}

func (oi *offsetIterator) Next() (RowType, datastore.DataStoreStatus) {
	if oi.skipped {
		return oi.documents.Next()
	}

	for i := 0; i < oi.offset; i++ {
		oi.documents.Next()
	}

	oi.skipped = true
	return oi.Next()
}
63
query_executors/memory_executor/order_iterator.go
Normal file
63
query_executors/memory_executor/order_iterator.go
Normal file
@ -0,0 +1,63 @@
|
||||
package memoryexecutor
|
||||
|
||||
import (
|
||||
"sort"
|
||||
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
)
|
||||
|
||||
type orderIterator struct {
|
||||
documents rowIterator
|
||||
orderExpressions []parsers.OrderExpression
|
||||
orderedDocs []rowContext
|
||||
docsIndex int
|
||||
}
|
||||
|
||||
func (oi *orderIterator) Next() (rowContext, datastore.DataStoreStatus) {
|
||||
if oi.orderedDocs != nil {
|
||||
if oi.docsIndex >= len(oi.orderedDocs) {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
row := oi.orderedDocs[oi.docsIndex]
|
||||
oi.orderedDocs[oi.docsIndex] = rowContext{}
|
||||
oi.docsIndex++
|
||||
return row, datastore.StatusOk
|
||||
}
|
||||
|
||||
oi.orderedDocs = make([]rowContext, 0)
|
||||
for {
|
||||
row, status := oi.documents.Next()
|
||||
if status != datastore.StatusOk {
|
||||
break
|
||||
}
|
||||
|
||||
oi.orderedDocs = append(oi.orderedDocs, row)
|
||||
}
|
||||
oi.documents = nil
|
||||
|
||||
less := func(i, j int) bool {
|
||||
for _, order := range oi.orderExpressions {
|
||||
val1 := oi.orderedDocs[i].resolveSelectItem(order.SelectItem)
|
||||
val2 := oi.orderedDocs[j].resolveSelectItem(order.SelectItem)
|
||||
|
||||
cmp := compareValues(val1, val2)
|
||||
if cmp != 0 {
|
||||
if order.Direction == parsers.OrderDirectionDesc {
|
||||
return cmp > 0
|
||||
}
|
||||
return cmp < 0
|
||||
}
|
||||
}
|
||||
return i < j
|
||||
}
|
||||
|
||||
sort.SliceStable(oi.orderedDocs, less)
|
||||
|
||||
if len(oi.orderedDocs) == 0 {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
|
||||
oi.docsIndex = 1
|
||||
return oi.orderedDocs[0], datastore.StatusOk
|
||||
}
|
90
query_executors/memory_executor/project_iterator.go
Normal file
90
query_executors/memory_executor/project_iterator.go
Normal file
@ -0,0 +1,90 @@
|
||||
package memoryexecutor
|
||||
|
||||
import (
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
"github.com/pikami/cosmium/parsers"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
type projectIterator struct {
|
||||
documents rowIterator
|
||||
selectItems []parsers.SelectItem
|
||||
groupBy []parsers.SelectItem
|
||||
}
|
||||
|
||||
func (pi *projectIterator) Next() (RowType, datastore.DataStoreStatus) {
|
||||
if pi.documents == nil {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
|
||||
row, status := pi.documents.Next()
|
||||
if status != datastore.StatusOk {
|
||||
pi.documents = nil
|
||||
return rowContext{}, status
|
||||
}
|
||||
|
||||
if hasAggregateFunctions(pi.selectItems) && len(pi.groupBy) == 0 {
|
||||
// When can have aggregate functions without GROUP BY clause,
|
||||
// we should aggregate all rows in that case.
|
||||
allDocuments := []rowContext{row}
|
||||
for {
|
||||
row, status := pi.documents.Next()
|
||||
if status != datastore.StatusOk {
|
||||
break
|
||||
}
|
||||
|
||||
allDocuments = append(allDocuments, row)
|
||||
}
|
||||
|
||||
if len(allDocuments) == 0 {
|
||||
return rowContext{}, datastore.IterEOF
|
||||
}
|
||||
|
||||
aggRow := rowContext{
|
||||
tables: row.tables,
|
||||
parameters: row.parameters,
|
||||
grouppedRows: allDocuments,
|
||||
}
|
||||
|
||||
return aggRow.applyProjection(pi.selectItems), datastore.StatusOk
|
||||
}
|
||||
|
||||
return row.applyProjection(pi.selectItems), datastore.StatusOk
|
||||
}
|
||||
|
||||
func (r rowContext) applyProjection(selectItems []parsers.SelectItem) RowType {
|
||||
// When the first value is top level, select it instead
|
||||
if len(selectItems) > 0 && selectItems[0].IsTopLevel {
|
||||
return r.resolveSelectItem(selectItems[0])
|
||||
}
|
||||
|
||||
// Construct a new row based on the selected columns
|
||||
row := make(map[string]interface{})
|
||||
for index, selectItem := range selectItems {
|
||||
destinationName := resolveDestinationColumnName(selectItem, index, r.parameters)
|
||||
|
||||
row[destinationName] = r.resolveSelectItem(selectItem)
|
||||
}
|
||||
|
||||
return row
|
||||
}
|
||||
|
||||
func hasAggregateFunctions(selectItems []parsers.SelectItem) bool {
|
||||
if selectItems == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, selectItem := range selectItems {
|
||||
if selectItem.Type == parsers.SelectItemTypeFunctionCall {
|
||||
if typedValue, ok := selectItem.Value.(parsers.FunctionCall); ok && slices.Contains[[]parsers.FunctionCallType](parsers.AggregateFunctions, typedValue.Type) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
if hasAggregateFunctions(selectItem.SelectItems) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
@@ -0,0 +1,44 @@
package memoryexecutor

import (
	"github.com/pikami/cosmium/internal/datastore"
	"github.com/pikami/cosmium/parsers"
)

type rowTypeToRowContextIterator struct {
	documents rowTypeIterator
	query     parsers.SelectStmt
}

func (di *rowTypeToRowContextIterator) Next() (rowContext, datastore.DataStoreStatus) {
	if di.documents == nil {
		return rowContext{}, datastore.IterEOF
	}

	doc, status := di.documents.Next()
	if status != datastore.StatusOk {
		di.documents = nil
		return rowContext{}, status
	}

	var initialTableName string
	if di.query.Table.SelectItem.Type == parsers.SelectItemTypeSubQuery {
		initialTableName = di.query.Table.SelectItem.Value.(parsers.SelectStmt).Table.Value
	}

	if initialTableName == "" {
		initialTableName = di.query.Table.Value
	}

	if initialTableName == "" {
		initialTableName = resolveDestinationColumnName(di.query.Table.SelectItem, 0, di.query.Parameters)
	}

	return rowContext{
		parameters: di.query.Parameters,
		tables: map[string]RowType{
			initialTableName: doc,
			"$root":          doc,
		},
	}, status
}
@ -205,4 +205,27 @@ func Test_Execute_Select(t *testing.T) {
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("Should execute SELECT empty object", func(t *testing.T) {
|
||||
testQueryExecute(
|
||||
t,
|
||||
parsers.SelectStmt{
|
||||
SelectItems: []parsers.SelectItem{
|
||||
{
|
||||
Alias: "obj",
|
||||
Type: parsers.SelectItemTypeObject,
|
||||
SelectItems: []parsers.SelectItem{},
|
||||
},
|
||||
},
|
||||
Table: parsers.Table{SelectItem: testutils.SelectItem_Path("c")},
|
||||
},
|
||||
mockData,
|
||||
[]memoryexecutor.RowType{
|
||||
map[string]interface{}{"obj": map[string]interface{}{}},
|
||||
map[string]interface{}{"obj": map[string]interface{}{}},
|
||||
map[string]interface{}{"obj": map[string]interface{}{}},
|
||||
map[string]interface{}{"obj": map[string]interface{}{}},
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
)
|
||||
|
||||
//export CreateCollection
|
||||
@ -20,15 +20,15 @@ func CreateCollection(serverName *C.char, databaseId *C.char, collectionJson *C.
|
||||
return ResponseServerInstanceNotFound
|
||||
}
|
||||
|
||||
var collection repositorymodels.Collection
|
||||
var collection datastore.Collection
|
||||
err := json.NewDecoder(strings.NewReader(collectionStr)).Decode(&collection)
|
||||
if err != nil {
|
||||
return ResponseFailedToParseRequest
|
||||
}
|
||||
|
||||
_, code := serverInstance.repository.CreateCollection(databaseIdStr, collection)
|
||||
_, code := serverInstance.dataStore.CreateCollection(databaseIdStr, collection)
|
||||
|
||||
return repositoryStatusToResponseCode(code)
|
||||
return dataStoreStatusToResponseCode(code)
|
||||
}
|
||||
|
||||
//export GetCollection
|
||||
@ -43,8 +43,8 @@ func GetCollection(serverName *C.char, databaseId *C.char, collectionId *C.char)
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
collection, code := serverInstance.repository.GetCollection(databaseIdStr, collectionIdStr)
|
||||
if code != repositorymodels.StatusOk {
|
||||
collection, code := serverInstance.dataStore.GetCollection(databaseIdStr, collectionIdStr)
|
||||
if code != datastore.StatusOk {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
@ -66,8 +66,8 @@ func GetAllCollections(serverName *C.char, databaseId *C.char) *C.char {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
collections, code := serverInstance.repository.GetAllCollections(databaseIdStr)
|
||||
if code != repositorymodels.StatusOk {
|
||||
collections, code := serverInstance.dataStore.GetAllCollections(databaseIdStr)
|
||||
if code != datastore.StatusOk {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
@ -90,7 +90,7 @@ func DeleteCollection(serverName *C.char, databaseId *C.char, collectionId *C.ch
|
||||
return ResponseServerInstanceNotFound
|
||||
}
|
||||
|
||||
code := serverInstance.repository.DeleteCollection(databaseIdStr, collectionIdStr)
|
||||
code := serverInstance.dataStore.DeleteCollection(databaseIdStr, collectionIdStr)
|
||||
|
||||
return repositoryStatusToResponseCode(code)
|
||||
return dataStoreStatusToResponseCode(code)
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
)
|
||||
|
||||
//export CreateDatabase
|
||||
@ -19,15 +19,15 @@ func CreateDatabase(serverName *C.char, databaseJson *C.char) int {
|
||||
return ResponseServerInstanceNotFound
|
||||
}
|
||||
|
||||
var database repositorymodels.Database
|
||||
var database datastore.Database
|
||||
err := json.NewDecoder(strings.NewReader(databaseStr)).Decode(&database)
|
||||
if err != nil {
|
||||
return ResponseFailedToParseRequest
|
||||
}
|
||||
|
||||
_, code := serverInstance.repository.CreateDatabase(database)
|
||||
_, code := serverInstance.dataStore.CreateDatabase(database)
|
||||
|
||||
return repositoryStatusToResponseCode(code)
|
||||
return dataStoreStatusToResponseCode(code)
|
||||
}
|
||||
|
||||
//export GetDatabase
|
||||
@ -41,8 +41,8 @@ func GetDatabase(serverName *C.char, databaseId *C.char) *C.char {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
database, code := serverInstance.repository.GetDatabase(databaseIdStr)
|
||||
if code != repositorymodels.StatusOk {
|
||||
database, code := serverInstance.dataStore.GetDatabase(databaseIdStr)
|
||||
if code != datastore.StatusOk {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
@ -63,8 +63,8 @@ func GetAllDatabases(serverName *C.char) *C.char {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
databases, code := serverInstance.repository.GetAllDatabases()
|
||||
if code != repositorymodels.StatusOk {
|
||||
databases, code := serverInstance.dataStore.GetAllDatabases()
|
||||
if code != datastore.StatusOk {
|
||||
return C.CString("")
|
||||
}
|
||||
|
||||
@ -87,7 +87,7 @@ func DeleteDatabase(serverName *C.char, databaseId *C.char) int {
|
||||
return ResponseServerInstanceNotFound
|
||||
}
|
||||
|
||||
code := serverInstance.repository.DeleteDatabase(databaseIdStr)
|
||||
code := serverInstance.dataStore.DeleteDatabase(databaseIdStr)
|
||||
|
||||
return repositoryStatusToResponseCode(code)
|
||||
return dataStoreStatusToResponseCode(code)
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
|
||||
"github.com/pikami/cosmium/internal/datastore"
|
||||
)
|
||||
|
||||
//export CreateDocument
|
||||
@ -21,15 +21,15 @@ func CreateDocument(serverName *C.char, databaseId *C.char, collectionId *C.char
|
||||
return ResponseServerInstanceNotFound
|
||||
}
|
||||
|
||||
var document repositorymodels.Document
|
||||
var document datastore.Document
|
||||
err := json.NewDecoder(strings.NewReader(documentStr)).Decode(&document)
|
||||
if err != nil {
|
||||
return ResponseFailedToParseRequest
|
||||
}
|
||||
|
||||
_, code := serverInstance.repository.CreateDocument(databaseIdStr, collectionIdStr, document)
|
||||
_, code := serverInstance.dataStore.CreateDocument(databaseIdStr, collectionIdStr, document)
|
||||
|
||||
return repositoryStatusToResponseCode(code)
|
||||
return dataStoreStatusToResponseCode(code)
|
||||
}
|
||||
|
||||
//export GetDocument
|
||||
@ -45,8 +45,8 @@ func GetDocument(serverName *C.char, databaseId *C.char, collectionId *C.char, d
return C.CString("")
}

document, code := serverInstance.repository.GetDocument(databaseIdStr, collectionIdStr, documentIdStr)
if code != repositorymodels.StatusOk {
document, code := serverInstance.dataStore.GetDocument(databaseIdStr, collectionIdStr, documentIdStr)
if code != datastore.StatusOk {
return C.CString("")
}

@ -69,8 +69,8 @@ func GetAllDocuments(serverName *C.char, databaseId *C.char, collectionId *C.cha
return C.CString("")
}

documents, code := serverInstance.repository.GetAllDocuments(databaseIdStr, collectionIdStr)
if code != repositorymodels.StatusOk {
documents, code := serverInstance.dataStore.GetAllDocuments(databaseIdStr, collectionIdStr)
if code != datastore.StatusOk {
return C.CString("")
}

@ -95,19 +95,19 @@ func UpdateDocument(serverName *C.char, databaseId *C.char, collectionId *C.char
return ResponseServerInstanceNotFound
}

var document repositorymodels.Document
var document datastore.Document
err := json.Unmarshal([]byte(documentStr), &document)
if err != nil {
return ResponseFailedToParseRequest
}

code := serverInstance.repository.DeleteDocument(databaseIdStr, collectionIdStr, documentIdStr)
if code != repositorymodels.StatusOk {
return repositoryStatusToResponseCode(code)
code := serverInstance.dataStore.DeleteDocument(databaseIdStr, collectionIdStr, documentIdStr)
if code != datastore.StatusOk {
return dataStoreStatusToResponseCode(code)
}

_, code = serverInstance.repository.CreateDocument(databaseIdStr, collectionIdStr, document)
return repositoryStatusToResponseCode(code)
_, code = serverInstance.dataStore.CreateDocument(databaseIdStr, collectionIdStr, document)
return dataStoreStatusToResponseCode(code)
}

//export DeleteDocument
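`UpdateDocument` keeps its existing delete-then-recreate strategy; only the receiver and the status names change. The sketch below reproduces that sequencing against a small in-memory stand-in store (not the real `datastore` package) so the two exit paths are easy to see.

```go
package main

import "fmt"

type status int

const (
	statusOk status = iota
	statusNotFound
)

type store struct {
	// key: "databaseId/collectionId/documentId"
	docs map[string]map[string]interface{}
}

func (s *store) deleteDocument(key string) status {
	if _, ok := s.docs[key]; !ok {
		return statusNotFound
	}
	delete(s.docs, key)
	return statusOk
}

func (s *store) createDocument(key string, doc map[string]interface{}) status {
	s.docs[key] = doc
	return statusOk
}

// updateDocument mirrors the control flow of the exported UpdateDocument:
// bail out with the mapped status if the delete fails, otherwise recreate.
func (s *store) updateDocument(key string, doc map[string]interface{}) status {
	if code := s.deleteDocument(key); code != statusOk {
		return code
	}
	return s.createDocument(key, doc)
}

func main() {
	s := &store{docs: map[string]map[string]interface{}{"db1/coll1/doc1": {"id": "doc1"}}}
	fmt.Println(s.updateDocument("db1/coll1/doc1", map[string]interface{}{"id": "doc1", "v": 2})) // 0 (ok)
	fmt.Println(s.updateDocument("db1/coll1/missing", map[string]interface{}{"id": "x"}))         // 1 (not found)
}
```

As in the hunk above, a failure after the delete but before the create would leave the document removed; whether the underlying data store guards against that is not visible in this diff.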
@ -123,7 +123,7 @@ func DeleteDocument(serverName *C.char, databaseId *C.char, collectionId *C.char
return ResponseServerInstanceNotFound
}

code := serverInstance.repository.DeleteDocument(databaseIdStr, collectionIdStr, documentIdStr)
code := serverInstance.dataStore.DeleteDocument(databaseIdStr, collectionIdStr, documentIdStr)

return repositoryStatusToResponseCode(code)
return dataStoreStatusToResponseCode(code)
}

@ -4,13 +4,12 @@ import (
"sync"

"github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/internal/repositories"
repositorymodels "github.com/pikami/cosmium/internal/repository_models"
"github.com/pikami/cosmium/internal/datastore"
)

type ServerInstance struct {
server *api.ApiServer
repository *repositories.DataRepository
server *api.ApiServer
dataStore datastore.DataStore
}

var (
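`ServerInstance` now holds a `datastore.DataStore` instead of a concrete `*repositories.DataRepository`. The methods this shared-library layer relies on, as far as they are visible in this diff, look roughly like the sketch below; the method names are taken from the calls in the surrounding hunks, but the exact signatures, the status values, and the `Database`/`Document` shapes are assumptions (the real definitions live in `internal/datastore`).

```go
// Illustrative reconstruction only, not the actual contents of internal/datastore.
package datastore

type DataStoreStatus int

const (
	StatusOk DataStoreStatus = iota
	StatusNotFound
	Conflict
	BadRequest
)

// Database and Document stand in for the real model types.
type Database struct{ ID string }
type Document map[string]interface{}

// DataStore lists only the methods the C bindings call in this change.
type DataStore interface {
	GetDatabase(databaseId string) (Database, DataStoreStatus)
	GetAllDatabases() ([]Database, DataStoreStatus)
	DeleteDatabase(databaseId string) DataStoreStatus
	CreateDocument(databaseId string, collectionId string, document Document) (Document, DataStoreStatus)
	GetDocument(databaseId string, collectionId string, documentId string) (Document, DataStoreStatus)
	GetAllDocuments(databaseId string, collectionId string) ([]Document, DataStoreStatus)
	DeleteDocument(databaseId string, collectionId string, documentId string) DataStoreStatus
	DumpToJson() (string, error)
	Close()
}
```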
@ -21,17 +20,18 @@ var (
const (
ResponseSuccess = 0

ResponseUnknown = 100
ResponseFailedToParseConfiguration = 101
ResponseFailedToLoadState = 102
ResponseFailedToParseRequest = 103
ResponseServerInstanceAlreadyExists = 104
ResponseServerInstanceNotFound = 105
ResponseFailedToStartServer = 106
ResponseUnknown = 100
ResponseFailedToParseConfiguration = 101
ResponseFailedToLoadState = 102
ResponseFailedToParseRequest = 103
ResponseServerInstanceAlreadyExists = 104
ResponseServerInstanceNotFound = 105
ResponseFailedToStartServer = 106
ResponseCurrentDataStoreDoesNotSupportStateLoading = 107

ResponseRepositoryNotFound = 200
ResponseRepositoryConflict = 201
ResponseRepositoryBadRequest = 202
ResponseDataStoreNotFound = 200
ResponseDataStoreConflict = 201
ResponseDataStoreBadRequest = 202
)

func getInstance(serverName string) (*ServerInstance, bool) {
@ -61,16 +61,16 @@ func removeInstance(serverName string) {
delete(serverInstances, serverName)
}

func repositoryStatusToResponseCode(status repositorymodels.RepositoryStatus) int {
func dataStoreStatusToResponseCode(status datastore.DataStoreStatus) int {
switch status {
case repositorymodels.StatusOk:
case datastore.StatusOk:
return ResponseSuccess
case repositorymodels.StatusNotFound:
return ResponseRepositoryNotFound
case repositorymodels.Conflict:
return ResponseRepositoryConflict
case repositorymodels.BadRequest:
return ResponseRepositoryBadRequest
case datastore.StatusNotFound:
return ResponseDataStoreNotFound
case datastore.Conflict:
return ResponseDataStoreConflict
case datastore.BadRequest:
return ResponseDataStoreBadRequest
default:
return ResponseUnknown
}
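The status-to-response-code mapping is small enough to pin down with a table-driven test. The test below is hypothetical and not part of the change: it assumes it sits in the same package as the bindings above (so it can reach the unexported `dataStoreStatusToResponseCode` and the `Response...` constants), that `internal/datastore` exports the four status values used in the switch, and that `DataStoreStatus` is an integer-backed type (so `255` can stand in for an unmapped value).

```go
package main // assumption: same package as the shared-library bindings

import (
	"testing"

	"github.com/pikami/cosmium/internal/datastore"
)

// TestDataStoreStatusToResponseCode mirrors the switch statement above.
func TestDataStoreStatusToResponseCode(t *testing.T) {
	cases := []struct {
		name   string
		status datastore.DataStoreStatus
		want   int
	}{
		{"ok", datastore.StatusOk, ResponseSuccess},
		{"not found", datastore.StatusNotFound, ResponseDataStoreNotFound},
		{"conflict", datastore.Conflict, ResponseDataStoreConflict},
		{"bad request", datastore.BadRequest, ResponseDataStoreBadRequest},
		{"unknown", datastore.DataStoreStatus(255), ResponseUnknown},
	}
	for _, c := range cases {
		if got := dataStoreStatusToResponseCode(c.status); got != c.want {
			t.Errorf("%s: got %d, want %d", c.name, got, c.want)
		}
	}
}
```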
@ -11,7 +11,9 @@ import (

"github.com/pikami/cosmium/api"
"github.com/pikami/cosmium/api/config"
"github.com/pikami/cosmium/internal/repositories"
"github.com/pikami/cosmium/internal/datastore"
badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
)

//export CreateServerInstance
@ -32,20 +34,28 @@ func CreateServerInstance(serverName *C.char, configurationJSON *C.char) int {
configuration.ApplyDefaultsToEmptyFields()
configuration.PopulateCalculatedFields()

repository := repositories.NewDataRepository(repositories.RepositoryOptions{
InitialDataFilePath: configuration.InitialDataFilePath,
PersistDataFilePath: configuration.PersistDataFilePath,
})
var dataStore datastore.DataStore
switch configuration.DataStore {
case config.DataStoreBadger:
dataStore = badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
PersistDataFilePath: configuration.PersistDataFilePath,
})
default:
dataStore = jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
InitialDataFilePath: configuration.InitialDataFilePath,
PersistDataFilePath: configuration.PersistDataFilePath,
})
}

server := api.NewApiServer(repository, &configuration)
server := api.NewApiServer(dataStore, &configuration)
err = server.Start()
if err != nil {
return ResponseFailedToStartServer
}

addInstance(serverNameStr, &ServerInstance{
server: server,
repository: repository,
server: server,
dataStore: dataStore,
})

return ResponseSuccess
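The exported `CreateServerInstance` now picks the backend from the parsed configuration: `config.DataStoreBadger` selects BadgerDB, anything else falls back to the JSON store. The sketch below performs the same selection directly against the constructors that appear in this hunk; the option structs are as shown, while the `"badger"` string literal, the `newDataStore` wrapper, and the `main` function are illustrative only (the value of `config.DataStoreBadger` is not visible here).

```go
package main

import (
	"github.com/pikami/cosmium/internal/datastore"
	badgerdatastore "github.com/pikami/cosmium/internal/datastore/badger_datastore"
	jsondatastore "github.com/pikami/cosmium/internal/datastore/json_datastore"
)

// newDataStore mirrors the switch in CreateServerInstance. The Badger options
// in this hunk only carry a persistence path, while the JSON store can also
// seed itself from an initial data file.
func newDataStore(backend, initialDataPath, persistPath string) datastore.DataStore {
	switch backend {
	case "badger": // stands in for config.DataStoreBadger
		return badgerdatastore.NewBadgerDataStore(badgerdatastore.BadgerDataStoreOptions{
			PersistDataFilePath: persistPath,
		})
	default:
		return jsondatastore.NewJsonDataStore(jsondatastore.JsonDataStoreOptions{
			InitialDataFilePath: initialDataPath,
			PersistDataFilePath: persistPath,
		})
	}
}

func main() {
	ds := newDataStore("badger", "", "./cosmium.badger")
	defer ds.Close()
}
```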
@ -57,6 +67,7 @@ func StopServerInstance(serverName *C.char) int {

if serverInstance, ok := getInstance(serverNameStr); ok {
serverInstance.server.Stop()
serverInstance.dataStore.Close()
removeInstance(serverNameStr)
return ResponseSuccess
}
@ -69,7 +80,7 @@ func GetServerInstanceState(serverName *C.char) *C.char {
serverNameStr := C.GoString(serverName)

if serverInstance, ok := getInstance(serverNameStr); ok {
stateJSON, err := serverInstance.repository.GetState()
stateJSON, err := serverInstance.dataStore.DumpToJson()
if err != nil {
return nil
}
@ -85,11 +96,14 @@ func LoadServerInstanceState(serverName *C.char, stateJSON *C.char) int {
stateJSONStr := C.GoString(stateJSON)

if serverInstance, ok := getInstance(serverNameStr); ok {
err := serverInstance.repository.LoadStateJSON(stateJSONStr)
if err != nil {
return ResponseFailedToLoadState
if jsonDS, ok := serverInstance.dataStore.(*jsondatastore.JsonDataStore); ok {
err := jsonDS.LoadStateJSON(stateJSONStr)
if err != nil {
return ResponseFailedToLoadState
}
return ResponseSuccess
}
return ResponseSuccess
return ResponseCurrentDataStoreDoesNotSupportStateLoading
}

return ResponseServerInstanceNotFound
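State loading is now gated on the concrete backend: only `*jsondatastore.JsonDataStore` exposes `LoadStateJSON`, and every other backend gets the new `ResponseCurrentDataStoreDoesNotSupportStateLoading` code. An equivalent but looser guard could assert on a small capability interface instead of the concrete type; the sketch below shows that hypothetical alternative with fake stores, it is not what the change does.

```go
package main

import "fmt"

// jsonStateLoader is a hypothetical capability interface; the actual change
// asserts on the concrete *jsondatastore.JsonDataStore type instead.
type jsonStateLoader interface {
	LoadStateJSON(stateJSON string) error
}

// loadState returns true when the backend accepts a JSON state blob.
func loadState(ds interface{}, stateJSON string) bool {
	loader, ok := ds.(jsonStateLoader)
	if !ok {
		return false // maps to ResponseCurrentDataStoreDoesNotSupportStateLoading
	}
	return loader.LoadStateJSON(stateJSON) == nil
}

type fakeJSONStore struct{}

func (fakeJSONStore) LoadStateJSON(string) error { return nil }

type fakeBadgerStore struct{}

func main() {
	fmt.Println(loadState(fakeJSONStore{}, "{}"))   // true
	fmt.Println(loadState(fakeBadgerStore{}, "{}")) // false
}
```

The capability-interface variant would let a future backend opt into state loading without touching this function; the concrete assertion used in the change keeps the supported backend explicit. Either way, callers receive `ResponseCurrentDataStoreDoesNotSupportStateLoading` when the configured backend cannot load a JSON state blob.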