mirror of https://github.com/pikami/cosmium.git

Generate legit ResourceIds for SDK compatibility

This commit is contained in:
parent 6dd43ca7e0
commit 1158f93102

Makefile | 2
@@ -25,7 +25,7 @@ build-linux-amd64:
 
 build-windows-amd64:
 	@echo "Building Windows x64 binary..."
 	@GOOS=windows GOARCH=amd64 $(GOBUILD) -o $(DIST_DIR)/$(BINARY_NAME)-windows-amd64.exe .
 
 generate-parser-nosql:
 	pigeon -o ./parsers/nosql/nosql.go ./parsers/nosql/nosql.peg
@@ -13,7 +13,13 @@ func GetAllCollections(c *gin.Context) {
 
 	collections, status := repositories.GetAllCollections(databaseId)
 	if status == repositorymodels.StatusOk {
-		c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "DocumentCollections": collections, "_count": len(collections)})
+		database, _ := repositories.GetDatabase(databaseId)
+
+		c.IndentedJSON(http.StatusOK, gin.H{
+			"_rid":                database.ResourceID,
+			"DocumentCollections": collections,
+			"_count":              len(collections),
+		})
 		return
 	}
 
@@ -11,7 +11,11 @@ import (
 func GetAllDatabases(c *gin.Context) {
 	databases, status := repositories.GetAllDatabases()
 	if status == repositorymodels.StatusOk {
-		c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Databases": databases, "_count": len(databases)})
+		c.IndentedJSON(http.StatusOK, gin.H{
+			"_rid":      "",
+			"Databases": databases,
+			"_count":    len(databases),
+		})
 		return
 	}
 
@@ -15,7 +15,13 @@ func GetAllDocuments(c *gin.Context) {
 
 	documents, status := repositories.GetAllDocuments(databaseId, collectionId)
 	if status == repositorymodels.StatusOk {
-		c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Documents": documents, "_count": len(documents)})
+		collection, _ := repositories.GetCollection(databaseId, collectionId)
+
+		c.IndentedJSON(http.StatusOK, gin.H{
+			"_rid":      collection.ID,
+			"Documents": documents,
+			"_count":    len(documents),
+		})
 		return
 	}
 
@@ -121,7 +127,12 @@ func DocumentsPost(c *gin.Context) {
 		return
 	}
 
-	c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Documents": docs, "_count": len(docs)})
+	collection, _ := repositories.GetCollection(databaseId, collectionId)
+	c.IndentedJSON(http.StatusOK, gin.H{
+		"_rid":      collection.ResourceID,
+		"Documents": docs,
+		"_count":    len(docs),
+	})
 	return
 }
 
@@ -26,8 +26,9 @@ func GetPartitionKeyRanges(c *gin.Context) {
 	c.Header("x-ms-global-committed-lsn", "420")
 	c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
 
+	collection, _ := repositories.GetCollection(databaseId, collectionId)
 	c.IndentedJSON(http.StatusOK, gin.H{
-		"_rid":               "",
+		"_rid":               collection.ResourceID,
		"_count":             len(partitionKeyRanges),
 		"PartitionKeyRanges": partitionKeyRanges,
 	})
@@ -6,6 +6,7 @@ import (
 
 	"github.com/google/uuid"
 	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/resourceid"
 	structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
 	"golang.org/x/exp/maps"
 )
@@ -45,19 +46,22 @@ func DeleteCollection(databaseId string, collectionId string) repositorymodels.R
 }
 
 func CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
-	if _, ok := storeState.Databases[databaseId]; !ok {
+	var ok bool
+	var database repositorymodels.Database
+	if database, ok = storeState.Databases[databaseId]; !ok {
 		return repositorymodels.Collection{}, repositorymodels.StatusNotFound
 	}
 
-	if _, ok := storeState.Collections[databaseId][newCollection.ID]; ok {
+	if _, ok = storeState.Collections[databaseId][newCollection.ID]; ok {
 		return repositorymodels.Collection{}, repositorymodels.Conflict
 	}
 
 	newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
 
 	newCollection.TimeStamp = time.Now().Unix()
-	newCollection.UniqueID = uuid.New().String()
-	newCollection.ETag = fmt.Sprintf("\"%s\"", newCollection.UniqueID)
+	newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New())
+	newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
+	newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
 
 	storeState.Collections[databaseId][newCollection.ID] = newCollection
 	storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
@@ -6,6 +6,7 @@ import (
 
 	"github.com/google/uuid"
 	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/resourceid"
 	"golang.org/x/exp/maps"
 )
 
@@ -37,8 +38,10 @@ func CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Dat
 	}
 
 	newDatabase.TimeStamp = time.Now().Unix()
-	newDatabase.UniqueID = uuid.New().String()
-	newDatabase.ETag = fmt.Sprintf("\"%s\"", newDatabase.UniqueID)
+	newDatabase.ResourceID = resourceid.New()
+	newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
+	newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
 
 	storeState.Databases[newDatabase.ID] = newDatabase
 	storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
 	storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
@@ -7,6 +7,7 @@ import (
 
 	"github.com/google/uuid"
 	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/resourceid"
 	"github.com/pikami/cosmium/parsers"
 	"github.com/pikami/cosmium/parsers/nosql"
 	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
@@ -60,17 +61,19 @@ func DeleteDocument(databaseId string, collectionId string, documentId string) r
 }
 
 func CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
-	var documentId string
 	var ok bool
+	var documentId string
+	var database repositorymodels.Database
+	var collection repositorymodels.Collection
 	if documentId, ok = document["id"].(string); !ok || documentId == "" {
 		return repositorymodels.Document{}, repositorymodels.BadRequest
 	}
 
-	if _, ok := storeState.Databases[databaseId]; !ok {
+	if database, ok = storeState.Databases[databaseId]; !ok {
 		return repositorymodels.Document{}, repositorymodels.StatusNotFound
 	}
 
-	if _, ok = storeState.Collections[databaseId][collectionId]; !ok {
+	if collection, ok = storeState.Collections[databaseId][collectionId]; !ok {
 		return repositorymodels.Document{}, repositorymodels.StatusNotFound
 	}
 
@@ -79,8 +82,9 @@ func CreateDocument(databaseId string, collectionId string, document map[string]
 	}
 
 	document["_ts"] = time.Now().Unix()
-	document["_rid"] = uuid.New().String()
-	document["_etag"] = fmt.Sprintf("\"%s\"", document["_rid"])
+	document["_rid"] = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
+	document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
+	document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
 
 	storeState.Documents[databaseId][collectionId][documentId] = document
 
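Together with the database and collection changes above, databases, collections and documents now also carry a _self link derived from their ancestors' ResourceIDs. A minimal illustration of the link formats used in this commit; the rid values below are made-up placeholders, only the Sprintf patterns come from the diff:

package main

import "fmt"

func main() {
	// Made-up base64 rids standing in for resourceid.New() / resourceid.NewCombined() output.
	dbRid := "lDl0AA=="
	collRid := "lDl0AJ3WXUA="
	docRid := "lDl0AJ3WXUDOWQEAAAAAAA=="

	// _self formats written by CreateDatabase, CreateCollection and CreateDocument in this commit.
	fmt.Printf("dbs/%s/\n", dbRid)                                   // database
	fmt.Printf("dbs/%s/colls/%s/\n", dbRid, collRid)                 // collection
	fmt.Printf("dbs/%s/colls/%s/docs/%s/\n", dbRid, collRid, docRid) // document
}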
@@ -1,22 +1,43 @@
 package repositories
 
-import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+import (
+	"fmt"
+
+	"github.com/google/uuid"
+	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
+	"github.com/pikami/cosmium/internal/resourceid"
+)
 
+// I have no idea what this is tbh
 func GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
-	// I have no idea what this is tbh
+	var ok bool
+	var database repositorymodels.Database
+	var collection repositorymodels.Collection
+	if database, ok = storeState.Databases[databaseId]; !ok {
+		return make([]repositorymodels.PartitionKeyRange, 0), repositorymodels.StatusNotFound
+	}
+
+	if collection, ok = storeState.Collections[databaseId][collectionId]; !ok {
+		return make([]repositorymodels.PartitionKeyRange, 0), repositorymodels.StatusNotFound
+	}
+
+	pkrResourceId := resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
+	pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", database.ResourceID, collection.ResourceID, pkrResourceId)
+	etag := fmt.Sprintf("\"%s\"", uuid.New())
+
 	return []repositorymodels.PartitionKeyRange{
 		{
-			Rid:                "ZxlyAP7rKwACAAAAAAAAUA==",
+			ResourceID:         pkrResourceId,
 			ID:                 "0",
-			Etag:               "\"00005504-0000-0100-0000-65c555490000\"",
+			Etag:               etag,
 			MinInclusive:       "",
 			MaxExclusive:       "FF",
 			RidPrefix:          0,
-			Self:               "dbs/ZxlyAA==/colls/ZxlyAP7rKwA=/pkranges/ZxlyAP7rKwACAAAAAAAAUA==/",
+			Self:               pkrSelf,
 			ThroughputFraction: 1,
 			Status:             "online",
 			Parents:            []interface{}{},
-			Ts:                 1707431241,
+			TimeStamp:          collection.TimeStamp,
 			Lsn:                17,
 		},
 	}, repositorymodels.StatusOk
@@ -1,10 +1,11 @@
 package repositorymodels
 
 type Database struct {
 	ID         string `json:"id"`
 	TimeStamp  int64  `json:"_ts"`
-	UniqueID   string `json:"_rid"`
+	ResourceID string `json:"_rid"`
 	ETag       string `json:"_etag"`
+	Self       string `json:"_self"`
 }
 
 type RepositoryStatus int
@@ -20,7 +21,7 @@ type Collection struct {
 	ID             string                   `json:"id"`
 	IndexingPolicy CollectionIndexingPolicy `json:"indexingPolicy"`
 	PartitionKey   CollectionPartitionKey   `json:"partitionKey"`
-	UniqueID       string                   `json:"_rid"`
+	ResourceID     string                   `json:"_rid"`
 	TimeStamp      int64                    `json:"_ts"`
 	Self           string                   `json:"_self"`
 	ETag           string                   `json:"_etag"`
@@ -54,21 +55,21 @@ type CollectionPartitionKey struct {
 }
 
 type UserDefinedFunction struct {
 	Body       string `json:"body"`
 	ID         string `json:"id"`
-	Rid        string `json:"_rid"`
-	Ts         int    `json:"_ts"`
+	ResourceID string `json:"_rid"`
+	TimeStamp  int    `json:"_ts"`
 	Self       string `json:"_self"`
 	Etag       string `json:"_etag"`
 }
 
 type StoredProcedure struct {
 	Body       string `json:"body"`
 	ID         string `json:"id"`
-	Rid        string `json:"_rid"`
-	Ts         int    `json:"_ts"`
+	ResourceID string `json:"_rid"`
+	TimeStamp  int    `json:"_ts"`
 	Self       string `json:"_self"`
 	Etag       string `json:"_etag"`
 }
 
 type Trigger struct {
@@ -76,8 +77,8 @@ type Trigger struct {
 	ID               string `json:"id"`
 	TriggerOperation string `json:"triggerOperation"`
 	TriggerType      string `json:"triggerType"`
-	Rid              string `json:"_rid"`
-	Ts               int    `json:"_ts"`
+	ResourceID       string `json:"_rid"`
+	TimeStamp        int    `json:"_ts"`
 	Self             string `json:"_self"`
 	Etag             string `json:"_etag"`
 }
@@ -85,7 +86,7 @@ type Trigger struct {
 type Document map[string]interface{}
 
 type PartitionKeyRange struct {
-	Rid          string `json:"_rid"`
+	ResourceID   string `json:"_rid"`
 	ID           string `json:"id"`
 	Etag         string `json:"_etag"`
 	MinInclusive string `json:"minInclusive"`
@@ -95,7 +96,7 @@ type PartitionKeyRange struct {
 	ThroughputFraction int    `json:"throughputFraction"`
 	Status             string `json:"status"`
 	Parents            []any  `json:"parents"`
-	Ts                 int    `json:"_ts"`
+	TimeStamp          int64  `json:"_ts"`
 	Lsn                int    `json:"lsn"`
 }
 
@@ -0,0 +1,34 @@
+package resourceid
+
+import (
+	"encoding/base64"
+
+	"github.com/google/uuid"
+)
+
+func New() string {
+	id := uuid.New().ID()
+	idBytes := uintToBytes(id)
+
+	return base64.StdEncoding.EncodeToString(idBytes)
+}
+
+func NewCombined(ids ...string) string {
+	combinedIdBytes := make([]byte, 0)
+
+	for _, id := range ids {
+		idBytes, _ := base64.StdEncoding.DecodeString(id)
+		combinedIdBytes = append(combinedIdBytes, idBytes...)
+	}
+
+	return base64.StdEncoding.EncodeToString(combinedIdBytes)
+}
+
+func uintToBytes(id uint32) []byte {
+	buf := make([]byte, 4)
+	for i := 0; i < 4; i++ {
+		buf[i] = byte(id >> (i * 8))
+	}
+
+	return buf
+}
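The new resourceid package is the core of the change: an id is four random bytes (taken from a UUID) encoded as standard base64, and a child id is the concatenation of its ancestors' decoded bytes plus four new ones. A small standalone sketch of how the repositories above use it; New and NewCombined are adapted from the file added here (uintToBytes inlined) so the snippet compiles outside the module, and main plus the dbRid/collRid/docRid names are illustrative only:

package main

import (
	"encoding/base64"
	"fmt"

	"github.com/google/uuid"
)

// Adapted from the internal/resourceid package introduced in this commit.
func New() string {
	id := uuid.New().ID()
	buf := make([]byte, 4)
	for i := 0; i < 4; i++ {
		buf[i] = byte(id >> (i * 8))
	}
	return base64.StdEncoding.EncodeToString(buf)
}

// Adapted from the internal/resourceid package introduced in this commit.
func NewCombined(ids ...string) string {
	combined := make([]byte, 0)
	for _, id := range ids {
		idBytes, _ := base64.StdEncoding.DecodeString(id)
		combined = append(combined, idBytes...)
	}
	return base64.StdEncoding.EncodeToString(combined)
}

func main() {
	dbRid := New()                               // 4 bytes, as used by CreateDatabase
	collRid := NewCombined(dbRid, New())         // 8 bytes, as used by CreateCollection
	docRid := NewCombined(dbRid, collRid, New()) // 16 bytes, as used by CreateDocument

	for _, rid := range []string{dbRid, collRid, docRid} {
		raw, _ := base64.StdEncoding.DecodeString(rid)
		fmt.Printf("%-28s %d bytes\n", rid, len(raw))
	}
}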
@@ -20,13 +20,13 @@ var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
 		Kind:    "Hash",
 		Version: 2,
 	},
-	UniqueID:   "nFFFFFFFFFF=",
+	ResourceID: "nFFFFFFFFFF=",
 	TimeStamp:  0,
 	Self:       "",
 	ETag:       "\"00000000-0000-0000-0000-000000000000\"",
 	Docs:       "docs/",
 	Sprocs:     "sprocs/",
 	Triggers:   "triggers/",
 	Udfs:       "udfs/",
 	Conflicts:  "conflicts/",
 }