Mirror of https://github.com/pikami/cosmium.git (synced 2024-11-28 08:27:37 +00:00)
Compare commits
No commits in common. "d426dc23c06f5bf1a4238e145b8d187fe4bbfbc2" and "6dd43ca7e0d199b1e06645f84a0bceeac6e8ee2c" have entirely different histories.
d426dc23c0...6dd43ca7e0
.github/workflows/release.yml (vendored)
@@ -1,30 +0,0 @@
-name: goreleaser
-on:
-  push:
-    tags:
-      - '*'
-
-permissions:
-  contents: write
-
-jobs:
-  goreleaser:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Set up Go
-        uses: actions/setup-go@v5
-        with:
-          go-version: 1.21.6
-      - name: Run GoReleaser
-        uses: goreleaser/goreleaser-action@v5
-        with:
-          distribution: goreleaser
-          version: ${{ env.GITHUB_REF_NAME }}
-          args: release --clean
-        env:
-          GITHUB_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
@@ -1,35 +0,0 @@
-builds:
-  - binary: cosmium
-    goos:
-      - darwin
-      - linux
-      - windows
-    goarch:
-      - amd64
-      - arm64
-    env:
-      - CGO_ENABLED=0
-    ignore:
-      - goos: linux
-        goarch: arm64
-      - goos: windows
-        goarch: arm64
-
-release:
-  prerelease: auto
-
-universal_binaries:
-  - replace: true
-
-brews:
-  - name: cosmium
-    homepage: 'https://github.com/pikami/cosmium'
-    repository:
-      owner: pikami
-      name: homebrew-pikami
-    commit_author:
-      name: pikami
-      email: git@pikami.org
-
-checksum:
-  name_template: 'checksums.txt'
@@ -13,13 +13,7 @@ func GetAllCollections(c *gin.Context) {
 
 	collections, status := repositories.GetAllCollections(databaseId)
 	if status == repositorymodels.StatusOk {
-		database, _ := repositories.GetDatabase(databaseId)
-
-		c.IndentedJSON(http.StatusOK, gin.H{
-			"_rid": database.ResourceID,
-			"DocumentCollections": collections,
-			"_count": len(collections),
-		})
+		c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "DocumentCollections": collections, "_count": len(collections)})
 		return
 	}
 
@@ -11,11 +11,7 @@ import (
 func GetAllDatabases(c *gin.Context) {
 	databases, status := repositories.GetAllDatabases()
 	if status == repositorymodels.StatusOk {
-		c.IndentedJSON(http.StatusOK, gin.H{
-			"_rid": "",
-			"Databases": databases,
-			"_count": len(databases),
-		})
+		c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Databases": databases, "_count": len(databases)})
 		return
 	}
 
@@ -15,13 +15,7 @@ func GetAllDocuments(c *gin.Context) {
 
 	documents, status := repositories.GetAllDocuments(databaseId, collectionId)
 	if status == repositorymodels.StatusOk {
-		collection, _ := repositories.GetCollection(databaseId, collectionId)
-
-		c.IndentedJSON(http.StatusOK, gin.H{
-			"_rid": collection.ID,
-			"Documents": documents,
-			"_count": len(documents),
-		})
+		c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Documents": documents, "_count": len(documents)})
 		return
 	}
 
@@ -127,12 +121,7 @@ func DocumentsPost(c *gin.Context) {
 		return
 	}
 
-	collection, _ := repositories.GetCollection(databaseId, collectionId)
-	c.IndentedJSON(http.StatusOK, gin.H{
-		"_rid": collection.ResourceID,
-		"Documents": docs,
-		"_count": len(docs),
-	})
+	c.IndentedJSON(http.StatusOK, gin.H{"_rid": "", "Documents": docs, "_count": len(docs)})
 	return
 }
 
@@ -26,9 +26,8 @@ func GetPartitionKeyRanges(c *gin.Context) {
 	c.Header("x-ms-global-committed-lsn", "420")
 	c.Header("x-ms-item-count", fmt.Sprintf("%d", len(partitionKeyRanges)))
 
-	collection, _ := repositories.GetCollection(databaseId, collectionId)
 	c.IndentedJSON(http.StatusOK, gin.H{
-		"_rid": collection.ResourceID,
+		"_rid": "",
 		"_count": len(partitionKeyRanges),
 		"PartitionKeyRanges": partitionKeyRanges,
 	})
@@ -6,7 +6,6 @@ import (
 
 	"github.com/google/uuid"
 	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-	"github.com/pikami/cosmium/internal/resourceid"
 	structhidrators "github.com/pikami/cosmium/internal/struct_hidrators"
 	"golang.org/x/exp/maps"
 )
@@ -46,22 +45,19 @@ func DeleteCollection(databaseId string, collectionId string) repositorymodels.R
 }
 
 func CreateCollection(databaseId string, newCollection repositorymodels.Collection) (repositorymodels.Collection, repositorymodels.RepositoryStatus) {
-	var ok bool
-	var database repositorymodels.Database
-	if database, ok = storeState.Databases[databaseId]; !ok {
+	if _, ok := storeState.Databases[databaseId]; !ok {
 		return repositorymodels.Collection{}, repositorymodels.StatusNotFound
 	}
 
-	if _, ok = storeState.Collections[databaseId][newCollection.ID]; ok {
+	if _, ok := storeState.Collections[databaseId][newCollection.ID]; ok {
 		return repositorymodels.Collection{}, repositorymodels.Conflict
 	}
 
 	newCollection = structhidrators.Hidrate(newCollection).(repositorymodels.Collection)
 
 	newCollection.TimeStamp = time.Now().Unix()
-	newCollection.ResourceID = resourceid.NewCombined(database.ResourceID, resourceid.New())
-	newCollection.ETag = fmt.Sprintf("\"%s\"", uuid.New())
-	newCollection.Self = fmt.Sprintf("dbs/%s/colls/%s/", database.ResourceID, newCollection.ResourceID)
+	newCollection.UniqueID = uuid.New().String()
+	newCollection.ETag = fmt.Sprintf("\"%s\"", newCollection.UniqueID)
 
 	storeState.Collections[databaseId][newCollection.ID] = newCollection
 	storeState.Documents[databaseId][newCollection.ID] = make(map[string]repositorymodels.Document)
@@ -6,7 +6,6 @@ import (
 
 	"github.com/google/uuid"
 	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-	"github.com/pikami/cosmium/internal/resourceid"
 	"golang.org/x/exp/maps"
 )
 
@@ -38,10 +37,8 @@ func CreateDatabase(newDatabase repositorymodels.Database) (repositorymodels.Dat
 	}
 
 	newDatabase.TimeStamp = time.Now().Unix()
-	newDatabase.ResourceID = resourceid.New()
-	newDatabase.ETag = fmt.Sprintf("\"%s\"", uuid.New())
-	newDatabase.Self = fmt.Sprintf("dbs/%s/", newDatabase.ResourceID)
+	newDatabase.UniqueID = uuid.New().String()
+	newDatabase.ETag = fmt.Sprintf("\"%s\"", newDatabase.UniqueID)
 
 	storeState.Databases[newDatabase.ID] = newDatabase
 	storeState.Collections[newDatabase.ID] = make(map[string]repositorymodels.Collection)
 	storeState.Documents[newDatabase.ID] = make(map[string]map[string]repositorymodels.Document)
@@ -7,7 +7,6 @@ import (
 
 	"github.com/google/uuid"
 	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-	"github.com/pikami/cosmium/internal/resourceid"
 	"github.com/pikami/cosmium/parsers"
 	"github.com/pikami/cosmium/parsers/nosql"
 	memoryexecutor "github.com/pikami/cosmium/query_executors/memory_executor"
@@ -61,19 +60,17 @@ func DeleteDocument(databaseId string, collectionId string, documentId string) r
 }
 
 func CreateDocument(databaseId string, collectionId string, document map[string]interface{}) (repositorymodels.Document, repositorymodels.RepositoryStatus) {
-	var ok bool
 	var documentId string
-	var database repositorymodels.Database
-	var collection repositorymodels.Collection
+	var ok bool
 	if documentId, ok = document["id"].(string); !ok || documentId == "" {
 		return repositorymodels.Document{}, repositorymodels.BadRequest
 	}
 
-	if database, ok = storeState.Databases[databaseId]; !ok {
+	if _, ok := storeState.Databases[databaseId]; !ok {
 		return repositorymodels.Document{}, repositorymodels.StatusNotFound
 	}
 
-	if collection, ok = storeState.Collections[databaseId][collectionId]; !ok {
+	if _, ok = storeState.Collections[databaseId][collectionId]; !ok {
 		return repositorymodels.Document{}, repositorymodels.StatusNotFound
 	}
 
@@ -82,9 +79,8 @@ func CreateDocument(databaseId string, collectionId string, document map[string]
 	}
 
 	document["_ts"] = time.Now().Unix()
-	document["_rid"] = resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
-	document["_etag"] = fmt.Sprintf("\"%s\"", uuid.New())
-	document["_self"] = fmt.Sprintf("dbs/%s/colls/%s/docs/%s/", database.ResourceID, collection.ResourceID, document["_rid"])
+	document["_rid"] = uuid.New().String()
+	document["_etag"] = fmt.Sprintf("\"%s\"", document["_rid"])
 
 	storeState.Documents[databaseId][collectionId][documentId] = document
 
@@ -1,43 +1,22 @@
 package repositories
 
-import (
-	"fmt"
-
-	"github.com/google/uuid"
-	repositorymodels "github.com/pikami/cosmium/internal/repository_models"
-	"github.com/pikami/cosmium/internal/resourceid"
-)
+import repositorymodels "github.com/pikami/cosmium/internal/repository_models"
 
-// I have no idea what this is tbh
 func GetPartitionKeyRanges(databaseId string, collectionId string) ([]repositorymodels.PartitionKeyRange, repositorymodels.RepositoryStatus) {
-	var ok bool
-	var database repositorymodels.Database
-	var collection repositorymodels.Collection
-	if database, ok = storeState.Databases[databaseId]; !ok {
-		return make([]repositorymodels.PartitionKeyRange, 0), repositorymodels.StatusNotFound
-	}
-
-	if collection, ok = storeState.Collections[databaseId][collectionId]; !ok {
-		return make([]repositorymodels.PartitionKeyRange, 0), repositorymodels.StatusNotFound
-	}
-
-	pkrResourceId := resourceid.NewCombined(database.ResourceID, collection.ResourceID, resourceid.New())
-	pkrSelf := fmt.Sprintf("dbs/%s/colls/%s/pkranges/%s/", database.ResourceID, collection.ResourceID, pkrResourceId)
-	etag := fmt.Sprintf("\"%s\"", uuid.New())
-
+	// I have no idea what this is tbh
 	return []repositorymodels.PartitionKeyRange{
 		{
-			ResourceID: pkrResourceId,
+			Rid: "ZxlyAP7rKwACAAAAAAAAUA==",
 			ID: "0",
-			Etag: etag,
+			Etag: "\"00005504-0000-0100-0000-65c555490000\"",
 			MinInclusive: "",
 			MaxExclusive: "FF",
 			RidPrefix: 0,
-			Self: pkrSelf,
+			Self: "dbs/ZxlyAA==/colls/ZxlyAP7rKwA=/pkranges/ZxlyAP7rKwACAAAAAAAAUA==/",
 			ThroughputFraction: 1,
 			Status: "online",
 			Parents: []interface{}{},
-			TimeStamp: collection.TimeStamp,
+			Ts: 1707431241,
 			Lsn: 17,
 		},
 	}, repositorymodels.StatusOk
@@ -3,9 +3,8 @@ package repositorymodels
 type Database struct {
 	ID string `json:"id"`
 	TimeStamp int64 `json:"_ts"`
-	ResourceID string `json:"_rid"`
+	UniqueID string `json:"_rid"`
 	ETag string `json:"_etag"`
-	Self string `json:"_self"`
 }
 
 type RepositoryStatus int
@@ -21,7 +20,7 @@ type Collection struct {
 	ID string `json:"id"`
 	IndexingPolicy CollectionIndexingPolicy `json:"indexingPolicy"`
 	PartitionKey CollectionPartitionKey `json:"partitionKey"`
-	ResourceID string `json:"_rid"`
+	UniqueID string `json:"_rid"`
 	TimeStamp int64 `json:"_ts"`
 	Self string `json:"_self"`
 	ETag string `json:"_etag"`
@@ -57,8 +56,8 @@ type CollectionPartitionKey struct {
 type UserDefinedFunction struct {
 	Body string `json:"body"`
 	ID string `json:"id"`
-	ResourceID string `json:"_rid"`
-	TimeStamp int `json:"_ts"`
+	Rid string `json:"_rid"`
+	Ts int `json:"_ts"`
 	Self string `json:"_self"`
 	Etag string `json:"_etag"`
 }
@@ -66,8 +65,8 @@ type UserDefinedFunction struct {
 type StoredProcedure struct {
 	Body string `json:"body"`
 	ID string `json:"id"`
-	ResourceID string `json:"_rid"`
-	TimeStamp int `json:"_ts"`
+	Rid string `json:"_rid"`
+	Ts int `json:"_ts"`
 	Self string `json:"_self"`
 	Etag string `json:"_etag"`
 }
@@ -77,8 +76,8 @@ type Trigger struct {
 	ID string `json:"id"`
 	TriggerOperation string `json:"triggerOperation"`
 	TriggerType string `json:"triggerType"`
-	ResourceID string `json:"_rid"`
-	TimeStamp int `json:"_ts"`
+	Rid string `json:"_rid"`
+	Ts int `json:"_ts"`
 	Self string `json:"_self"`
 	Etag string `json:"_etag"`
 }
@@ -86,7 +85,7 @@ type Trigger struct {
 type Document map[string]interface{}
 
 type PartitionKeyRange struct {
-	ResourceID string `json:"_rid"`
+	Rid string `json:"_rid"`
 	ID string `json:"id"`
 	Etag string `json:"_etag"`
 	MinInclusive string `json:"minInclusive"`
@@ -96,7 +95,7 @@ type PartitionKeyRange struct {
 	ThroughputFraction int `json:"throughputFraction"`
 	Status string `json:"status"`
 	Parents []any `json:"parents"`
-	TimeStamp int64 `json:"_ts"`
+	Ts int `json:"_ts"`
 	Lsn int `json:"lsn"`
 }
 
@@ -1,34 +0,0 @@
-package resourceid
-
-import (
-	"encoding/base64"
-
-	"github.com/google/uuid"
-)
-
-func New() string {
-	id := uuid.New().ID()
-	idBytes := uintToBytes(id)
-
-	return base64.StdEncoding.EncodeToString(idBytes)
-}
-
-func NewCombined(ids ...string) string {
-	combinedIdBytes := make([]byte, 0)
-
-	for _, id := range ids {
-		idBytes, _ := base64.StdEncoding.DecodeString(id)
-		combinedIdBytes = append(combinedIdBytes, idBytes...)
-	}
-
-	return base64.StdEncoding.EncodeToString(combinedIdBytes)
-}
-
-func uintToBytes(id uint32) []byte {
-	buf := make([]byte, 4)
-	for i := 0; i < 4; i++ {
-		buf[i] = byte(id >> (i * 8))
-	}
-
-	return buf
-}
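The file removed above (present only in d426dc23c0) supplies the base64 resource-id helpers that the CreateDatabase, CreateCollection, and CreateDocument changes in this compare call. A minimal, self-contained sketch of the same encoding follows, assuming a throwaway main package and fixed uint32 values in place of uuid.New().ID(), so it can be run outside the repository:

// Hypothetical sketch (not part of the repository): mirrors the removed
// internal/resourceid helpers so the encoding can be tried in isolation.
package main

import (
	"encoding/base64"
	"fmt"
)

// uintToBytes serializes a uint32 into 4 little-endian bytes,
// as the removed uintToBytes helper does.
func uintToBytes(id uint32) []byte {
	buf := make([]byte, 4)
	for i := 0; i < 4; i++ {
		buf[i] = byte(id >> (i * 8))
	}
	return buf
}

// newID base64-encodes a 4-byte id, like resourceid.New().
func newID(id uint32) string {
	return base64.StdEncoding.EncodeToString(uintToBytes(id))
}

// newCombined decodes each id, concatenates the raw bytes, and re-encodes
// the result, like resourceid.NewCombined(ids ...string).
func newCombined(ids ...string) string {
	combined := make([]byte, 0)
	for _, id := range ids {
		b, _ := base64.StdEncoding.DecodeString(id)
		combined = append(combined, b...)
	}
	return base64.StdEncoding.EncodeToString(combined)
}

func main() {
	dbRid := newID(1)                               // database _rid
	collRid := newCombined(dbRid, newID(2))         // collection _rid, as in CreateCollection
	docRid := newCombined(dbRid, collRid, newID(3)) // document _rid, as in CreateDocument

	// _self paths follow the same shape as the removed fmt.Sprintf calls.
	fmt.Printf("dbs/%s/\n", dbRid)
	fmt.Printf("dbs/%s/colls/%s/\n", dbRid, collRid)
	fmt.Printf("dbs/%s/colls/%s/docs/%s/\n", dbRid, collRid, docRid)
}

Because combined ids are just concatenations of decoded 4-byte chunks, a document _rid embeds the database and collection ids, which is what makes the dbs/.../colls/.../docs/.../ _self paths above consistent.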
@@ -20,7 +20,7 @@ var defaultCollection repositorymodels.Collection = repositorymodels.Collection{
 		Kind: "Hash",
 		Version: 2,
 	},
-	ResourceID: "nFFFFFFFFFF=",
+	UniqueID: "nFFFFFFFFFF=",
 	TimeStamp: 0,
 	Self: "",
 	ETag: "\"00000000-0000-0000-0000-000000000000\"",