Compare commits
5 Commits
260d2ec24b ... b89a1a2a7e

Author | SHA1 | Date
---|---|---
 | b89a1a2a7e |
 | 4ddd9abd2e |
 | f66fbcc454 |
 | be637f4540 |
 | d801debaf6 |
constants/constants.go | 24 (new file)
@@ -0,0 +1,24 @@
+package constants
+
+import "time"
+
+// time.Duration constants
+const (
+    // RecalculateTimeWindowHighSharesJob interval
+    RecalculateTimeWindowHighSharesJobInterval = 1 * time.Minute
+    // RecalculateTopSharesJob interval
+    RecalculateTopSharesJobInterval = 30 * time.Second
+    // IngestSharesJob interval
+    IngestSharesJobInterval = 30 * time.Second
+)
+
+// counts and stuff
+const (
+    // TopSharesAmount is the number of top shares to keep
+    TopSharesAmount = 15
+    // DailyStatsPerPage is the number of daily stats per page
+    DailyStatsPerPage = 15
+)
+
+// EpochTime is the start time for daily stats
+var EpochTime = time.Date(2025, 5, 1, 0, 0, 0, 0, time.UTC)
database/db.go | 229
@@ -3,7 +3,9 @@ package database
 import (
     "fmt"
     "log"
+    "pool-stats/helpers"
     "pool-stats/models"
+    "sort"
     "time"
 
     "github.com/ostafen/clover/v2"
@@ -14,6 +16,9 @@ import (
 
 const (
     CollectionName = "shares"
+    TopSharesCollectionName = "TopShares"
+    TimeWindowHighShareCollectionName = "TimeWindowHighShareStat"
+    DailyStatsCollectionName = "DailyStats"
 )
 
 func InitDatabase(path string) (*clover.DB, error) {
@@ -42,10 +47,59 @@ func InitDatabase(path string) (*clover.DB, error) {
         }
     }
 
+    // Init TopShares collection
+    hasTopSharesCollection, err := db.HasCollection(TopSharesCollectionName)
+    if err != nil {
+        return nil, fmt.Errorf("failed to check TopShares collection: %v", err)
+    }
+
+    if !hasTopSharesCollection {
+        if err := db.CreateCollection(TopSharesCollectionName); err != nil {
+            return nil, fmt.Errorf("failed to create TopShares collection: %v", err)
+        }
+
+        if err := db.CreateIndex(TopSharesCollectionName, "CreateDate"); err != nil {
+            return nil, fmt.Errorf("failed to create index for TopShares: %v", err)
+        }
+
+        if err := db.CreateIndex(TopSharesCollectionName, "SDiff"); err != nil {
+            return nil, fmt.Errorf("failed to create index for TopShares SDiff: %v", err)
+        }
+    }
+
+    // Init TimeWindowHighShareStat collection
+    hasTimeWindowCollection, err := db.HasCollection(TimeWindowHighShareCollectionName)
+    if err != nil {
+        return nil, fmt.Errorf("failed to check TimeWindowHighShare collection: %v", err)
+    }
+
+    if !hasTimeWindowCollection {
+        if err := db.CreateCollection(TimeWindowHighShareCollectionName); err != nil {
+            return nil, fmt.Errorf("failed to create TimeWindowHighShare collection: %v", err)
+        }
+        if err := db.CreateIndex(TimeWindowHighShareCollectionName, "TimeWindowID"); err != nil {
+            return nil, fmt.Errorf("failed to create index for TimeWindowHighShare: %v", err)
+        }
+    }
+
+    // Init DailyStats collection
+    hasDailyStatsCollection, err := db.HasCollection(DailyStatsCollectionName)
+    if err != nil {
+        return nil, fmt.Errorf("failed to check DailyStats collection: %v", err)
+    }
+    if !hasDailyStatsCollection {
+        if err := db.CreateCollection(DailyStatsCollectionName); err != nil {
+            return nil, fmt.Errorf("failed to create DailyStats collection: %v", err)
+        }
+        if err := db.CreateIndex(DailyStatsCollectionName, "Date"); err != nil {
+            return nil, fmt.Errorf("failed to create index for DailyStats: %v", err)
+        }
+    }
+
     return db, nil
 }
 
-func GetHighestShareInRange(db *clover.DB, collection string, since time.Time) (*document.Document, error) {
+func GetHighestSharesInRange(db *clover.DB, collection string, since time.Time, count int) ([]models.ShareLog, error) {
     // Convert `since` to the format in `createdate`
     lower := since.Unix()
     upper := time.Now().Unix()
@@ -58,12 +112,22 @@ func GetHighestShareInRange(db *clover.DB, collection string, since time.Time) (
     results, err := db.FindAll(c.NewQuery(collection).
         Where(criteria).
         Sort(c.SortOption{Field: "SDiff", Direction: -1}).
-        Limit(1))
+        Limit(count))
 
     if err != nil || len(results) == 0 {
         return nil, err
     }
-    return results[0], nil
+
+    var shares []models.ShareLog
+    for _, doc := range results {
+        var s models.ShareLog
+        if err := doc.Unmarshal(&s); err != nil {
+            return nil, err
+        }
+        shares = append(shares, s)
+    }
+
+    return shares, nil
 }
 
 func PrintAllHashes(db *clover.DB) {
@@ -99,3 +163,162 @@ func ListShares(db *clover.DB, offset int, count int) []models.ShareLog {
 
     return shareLogs
 }
+
+func ListTopShares(db *clover.DB) []models.ShareLog {
+    results, err := db.FindAll(
+        c.NewQuery(TopSharesCollectionName).
+            Sort(c.SortOption{Field: "SDiff", Direction: -1}),
+    )
+    if err != nil {
+        log.Printf("failed to list top shares: %v", err)
+        return nil
+    }
+
+    topShares := make([]models.ShareLog, len(results))
+    for idx, doc := range results {
+        var shareLog models.ShareLog
+        doc.Unmarshal(&shareLog)
+        topShares[idx] = shareLog
+    }
+
+    return topShares
+}
+
+func ReplaceTopShares(db *clover.DB, shares []models.ShareLog) {
+    db.Delete(c.NewQuery(TopSharesCollectionName))
+
+    for _, share := range shares {
+        doc := document.NewDocumentOf(&share)
+        if _, err := db.InsertOne(TopSharesCollectionName, doc); err != nil {
+            return
+        }
+    }
+}
+
+func GetTimeWindowHighShares(db *clover.DB) []models.TimeWindowHighShare {
+    results, err := db.FindAll(
+        c.NewQuery(TimeWindowHighShareCollectionName).
+            Sort(c.SortOption{Field: "TimeWindowID", Direction: 1}),
+    )
+    if err != nil {
+        log.Printf("failed to list time window high shares: %v", err)
+        return nil
+    }
+
+    timeWindowHighShares := make([]models.TimeWindowHighShare, len(results))
+    for idx, doc := range results {
+        var timeWindowHighShare models.TimeWindowHighShare
+        doc.Unmarshal(&timeWindowHighShare)
+        timeWindowHighShares[idx] = timeWindowHighShare
+    }
+
+    return timeWindowHighShares
+}
+
+func SetTimeWindowHighShare(db *clover.DB, share models.TimeWindowHighShare) error {
+    doc := document.NewDocumentOf(&share)
+
+    existingDoc, _ := db.FindFirst(c.NewQuery(TimeWindowHighShareCollectionName).
+        Where(c.Field("TimeWindowID").Eq(share.TimeWindowID)))
+    if existingDoc != nil {
+        db.ReplaceById(TimeWindowHighShareCollectionName, existingDoc.ObjectId(), doc)
+    } else {
+        db.InsertOne(TimeWindowHighShareCollectionName, doc)
+    }
+
+    return nil
+}
+
+func ListSharesInTimeRange(db *clover.DB, since time.Time, till time.Time) []models.ShareLog {
+    lower := since.Unix()
+    upper := till.Unix()
+
+    results, err := db.FindAll(c.NewQuery(CollectionName).
+        Where(c.Field("CreateDate").GtEq(fmt.Sprint(lower)).
+            And(c.Field("CreateDate").LtEq(fmt.Sprint(upper)))).
+        Sort(c.SortOption{Field: "CreateDate", Direction: -1}))
+
+    if err != nil {
+        log.Printf("failed to list shares in time range: %v", err)
+        return nil
+    }
+
+    shareLogs := make([]models.ShareLog, len(results))
+    for idx, doc := range results {
+        var shareLog models.ShareLog
+        doc.Unmarshal(&shareLog)
+        shareLogs[idx] = shareLog
+    }
+
+    return shareLogs
+}
+
+// GetDailyStats retrieves daily statistics for a given date.
+// It tries to find them in the DailyStats collection; if not found, it calculates them on the fly and stores the result.
+func GetDailyStats(db *clover.DB, date time.Time) (*models.DailyStats, error) {
+    dateStr := date.Format(time.DateOnly)
+
+    // Check if stats already exist
+    isToday := dateStr == time.Now().UTC().Format(time.DateOnly)
+    existingDoc, err := db.FindFirst(c.NewQuery(DailyStatsCollectionName).
+        Where(c.Field("Date").Eq(dateStr)))
+    if !isToday && err == nil && existingDoc != nil {
+        var stats models.DailyStats
+        if err := existingDoc.Unmarshal(&stats); err != nil {
+            return nil, fmt.Errorf("failed to unmarshal daily stats: %v", err)
+        }
+        return &stats, nil
+    }
+
+    // Get shares in range
+    since := date.Truncate(24 * time.Hour)
+    till := since.Add(24 * time.Hour)
+    shares := ListSharesInTimeRange(db, since, till)
+    sort.Slice(shares, func(i, j int) bool {
+        return shares[i].SDiff > shares[j].SDiff
+    })
+
+    // Calculate daily stats
+    stats := &models.DailyStats{
+        Date:       dateStr,
+        ShareCount: len(shares),
+        Workers:    make(map[string]models.WorkerDailyStats),
+    }
+
+    if len(shares) > 0 {
+        stats.TopShare = shares[0]
+        stats.PoolHashrate = helpers.CalculateAverageHashrate(shares)
+    }
+
+    // Calculate worker stats
+    sharesByWorker := make(map[string][]models.ShareLog)
+    for _, share := range shares {
+        sharesByWorker[share.WorkerName] = append(sharesByWorker[share.WorkerName], share)
+    }
+    for workerName, workerShares := range sharesByWorker {
+        workerHashrate := helpers.CalculateAverageHashrate(workerShares)
+        workerTopShare := workerShares[0] // Already sorted by SDiff
+
+        stats.Workers[workerName] = models.WorkerDailyStats{
+            TopShare: workerTopShare,
+            Hashrate: workerHashrate,
+            Shares:   len(workerShares),
+        }
+    }
+
+    // Insert or update the daily stats in the collection
+    doc := document.NewDocumentOf(stats)
+    if _, err := db.InsertOne(DailyStatsCollectionName, doc); err != nil {
+        return nil, fmt.Errorf("failed to insert daily stats: %v", err)
+    }
+
+    return stats, nil
+}
+
+func ClearDailyStats(db *clover.DB) error {
+    // Delete all documents in the DailyStats collection
+    if err := db.Delete(c.NewQuery(DailyStatsCollectionName)); err != nil {
+        return fmt.Errorf("failed to clear DailyStats collection: %v", err)
+    }
+    return nil
+}
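A minimal usage sketch of the reworked database helpers (not part of the changeset; it assumes the "pool-stats" module path used throughout this compare and a placeholder "./data" database directory). GetHighestSharesInRange now returns unmarshalled models.ShareLog values instead of a single clover document, and GetDailyStats computes and caches one day's statistics on first access:

package main

import (
    "fmt"
    "log"
    "time"

    "pool-stats/database"
)

func main() {
    // "./data" is a placeholder path for the clover database directory.
    db, err := database.InitDatabase("./data")
    if err != nil {
        log.Fatalf("init database: %v", err)
    }
    defer db.Close()

    // Top 5 shares (by SDiff) submitted in the past 24 hours.
    shares, err := database.GetHighestSharesInRange(
        db, database.CollectionName, time.Now().Add(-24*time.Hour), 5)
    if err != nil {
        log.Fatalf("query shares: %v", err)
    }
    for _, s := range shares {
        fmt.Println(s.WorkerName, s.SDiff)
    }

    // Daily stats for yesterday; computed from raw shares and stored on first call.
    stats, err := database.GetDailyStats(db, time.Now().AddDate(0, 0, -1))
    if err != nil {
        log.Fatalf("daily stats: %v", err)
    }
    fmt.Println(stats.Date, stats.ShareCount, stats.PoolHashrate)
}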
@@ -2,6 +2,9 @@ package helpers
 
 import (
     "fmt"
+    "math"
+    "pool-stats/models"
+    "sort"
     "strconv"
     "strings"
     "time"
@@ -35,5 +38,43 @@ func FormatCreateDate(createdate string) string {
         t := time.Unix(sec, nsec)
         return t.Format(time.DateTime)
     }
-    return ""
+    return "-"
+}
+
+func CalculateAverageHashrate(shares []models.ShareLog) float64 {
+    if len(shares) == 0 {
+        return 0.0
+    }
+
+    sort.Slice(shares, func(i, j int) bool {
+        return shares[i].CreateDate < shares[j].CreateDate
+    })
+
+    first := ParseCreateDate(shares[0].CreateDate)
+    last := ParseCreateDate(shares[len(shares)-1].CreateDate)
+    timeSpan := last.Sub(first).Seconds()
+    if timeSpan <= 0 {
+        return 0.0
+    }
+
+    var totalAssignedDiff float64
+    for _, s := range shares {
+        totalAssignedDiff += s.Diff
+    }
+
+    avgAssignedDiff := totalAssignedDiff / float64(len(shares))
+
+    // Hashrate = avg diff * 2^32 / avg time per share
+    hashrate := (avgAssignedDiff * math.Pow(2, 32)) / (timeSpan / float64(len(shares)))
+    return hashrate
+}
+
+func FormatHashrate(hps float64) string {
+    units := []string{"H/s", "kH/s", "MH/s", "GH/s", "TH/s", "PH/s", "EH/s"}
+    i := 0
+    for hps >= 1000 && i < len(units)-1 {
+        hps /= 1000
+        i++
+    }
+    return fmt.Sprintf("%.2f %s", hps, units[i])
 }
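The formula in CalculateAverageHashrate (average assigned difficulty times 2^32, divided by the average seconds per share) can be sanity-checked with round numbers; a small self-contained sketch with made-up values:

package main

import (
    "fmt"
    "math"
)

func main() {
    avgAssignedDiff := 1000.0 // average of ShareLog.Diff over the window (made up)
    timeSpan := 600.0         // seconds between first and last share (made up)
    shareCount := 60.0        // number of shares in the window (made up)

    // Same formula as helpers.CalculateAverageHashrate:
    // hashrate = avg diff * 2^32 / avg seconds per share
    hashrate := (avgAssignedDiff * math.Pow(2, 32)) / (timeSpan / shareCount)

    // Prints 429496729600.00 H/s, which helpers.FormatHashrate renders as "429.50 GH/s".
    fmt.Printf("%.2f H/s\n", hashrate)
}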
@@ -1,4 +1,4 @@
-package ingest
+package jobs
 
 import (
     "encoding/json"
@@ -12,21 +12,22 @@ import (
     "github.com/ostafen/clover/v2"
     "github.com/ostafen/clover/v2/document"
 
+    "pool-stats/constants"
     "pool-stats/database"
     "pool-stats/models"
 )
 
-type Ingestor struct {
+type IngestSharesJob struct {
     db      *clover.DB
     logPath string
 }
 
-func NewIngestor(db *clover.DB, path string) *Ingestor {
-    return &Ingestor{db: db, logPath: path}
+func NewIngestSharesJob(db *clover.DB, path string) *IngestSharesJob {
+    return &IngestSharesJob{db: db, logPath: path}
 }
 
-func (this *Ingestor) WatchAndIngest() {
-    ticker := time.NewTicker(30 * time.Second)
+func (this *IngestSharesJob) WatchAndIngest() {
+    ticker := time.NewTicker(constants.IngestSharesJobInterval)
     defer ticker.Stop()
 
     for {
@@ -35,7 +36,7 @@ func (this *Ingestor) WatchAndIngest() {
         }
     }
 
-func (this *Ingestor) ingestClosedBlocks() {
+func (this *IngestSharesJob) ingestClosedBlocks() {
     entries, err := os.ReadDir(this.logPath)
     if err != nil {
         log.Println("Error reading logsDir:", err)
@@ -66,7 +67,7 @@ func (this *Ingestor) ingestClosedBlocks() {
     }
 }
 
-func (this *Ingestor) ingestBlockDir(db *clover.DB, dirPath string) {
+func (this *IngestSharesJob) ingestBlockDir(db *clover.DB, dirPath string) {
     files, err := os.ReadDir(dirPath)
     if err != nil {
         log.Printf("Failed to read block dir %s: %v", dirPath, err)
jobs/recalculateTimeWindowHighShares.go | 106 (new file)
@@ -0,0 +1,106 @@
+package jobs
+
+import (
+    "pool-stats/constants"
+    "pool-stats/database"
+    "pool-stats/models"
+    "pool-stats/notlinq"
+    "sort"
+    "time"
+
+    "github.com/ostafen/clover/v2"
+)
+
+type RecalculateTimeWindowHighSharesJob struct {
+    DB *clover.DB
+}
+
+func NewRecalculateTimeWindowHighSharesJob(db *clover.DB) *RecalculateTimeWindowHighSharesJob {
+    return &RecalculateTimeWindowHighSharesJob{DB: db}
+}
+
+func (job *RecalculateTimeWindowHighSharesJob) Run() error {
+    ticker := time.NewTicker(constants.RecalculateTimeWindowHighSharesJobInterval)
+    defer ticker.Stop()
+
+    for {
+        select {
+        case <-ticker.C:
+            job.recalculateTimeWindowHighShares()
+        }
+    }
+}
+
+func (job *RecalculateTimeWindowHighSharesJob) recalculateTimeWindowHighShares() {
+    topShares := database.ListTopShares(job.DB)
+    sort.Slice(topShares, func(i, j int) bool {
+        return topShares[i].SDiff > topShares[j].SDiff
+    })
+
+    // All time high share
+    if len(topShares) > 0 {
+        allTimeHighShare := topShares[0]
+        allTimeHighShareStat := &models.TimeWindowHighShare{
+            TimeWindowID:   "0-all-time",
+            TimeWindowName: "All Time",
+            SDiff:          allTimeHighShare.SDiff,
+            Time:           allTimeHighShare.CreateDate,
+        }
+        database.SetTimeWindowHighShare(job.DB, *allTimeHighShareStat)
+    }
+
+    // Other ranges
+    timeWindows := []struct {
+        ID    string
+        Name  string
+        Since time.Time
+    }{
+        {"1-hour", "Past Hour", time.Now().Add(-1 * time.Hour)},
+        {"2-day", "Past 24h", time.Now().Add(-24 * time.Hour)},
+        {"3-week", "Past 7d", time.Now().Add(-7 * 24 * time.Hour)},
+    }
+    for _, tw := range timeWindows {
+        // Can use one of top shares if in range,
+        // otherwise get highest share in range
+        var highestShare models.ShareLog
+        topSharesInRange := notlinq.
+            Where(topShares, func(s models.ShareLog) bool {
+                shareTime, err := s.ParseCreateDate()
+                if err != nil {
+                    return false
+                }
+                return shareTime.After(tw.Since)
+            })
+        sort.Slice(topSharesInRange, func(i, j int) bool {
+            return topSharesInRange[i].SDiff > topSharesInRange[j].SDiff
+        })
+        if len(topSharesInRange) > 0 {
+            highestShare = topSharesInRange[0]
+        } else {
+            highestShareDocs, _ := database.GetHighestSharesInRange(
+                job.DB, database.CollectionName, tw.Since, 1)
+            if len(highestShareDocs) > 0 {
+                highestShare = highestShareDocs[0]
+            }
+        }
+
+        var timeWindowStat models.TimeWindowHighShare
+        if highestShare.SDiff > 0 {
+            timeWindowStat = models.TimeWindowHighShare{
+                TimeWindowID:   tw.ID,
+                TimeWindowName: tw.Name,
+                SDiff:          highestShare.SDiff,
+                Time:           highestShare.CreateDate,
+            }
+        } else {
+            timeWindowStat = models.TimeWindowHighShare{
+                TimeWindowID:   tw.ID,
+                TimeWindowName: tw.Name,
+                SDiff:          0,
+                Time:           "-",
+            }
+        }
+
+        database.SetTimeWindowHighShare(job.DB, timeWindowStat)
+    }
+}
jobs/recalculateTopShares.go | 60 (new file)
@@ -0,0 +1,60 @@
+package jobs
+
+import (
+    "pool-stats/constants"
+    "pool-stats/database"
+    "pool-stats/helpers"
+    "pool-stats/models"
+    "pool-stats/notlinq"
+    "sort"
+    "time"
+
+    "github.com/ostafen/clover/v2"
+)
+
+type RecalculateTopSharesJob struct {
+    DB *clover.DB
+}
+
+func NewRecalculateTopSharesJob(db *clover.DB) *RecalculateTopSharesJob {
+    return &RecalculateTopSharesJob{DB: db}
+}
+
+func (job *RecalculateTopSharesJob) Run() error {
+    ticker := time.NewTicker(constants.RecalculateTopSharesJobInterval)
+    defer ticker.Stop()
+
+    for {
+        <-ticker.C
+        job.recalculateTopShares()
+    }
+}
+
+func (job *RecalculateTopSharesJob) recalculateTopShares() {
+    currentTopShares := database.ListTopShares(job.DB)
+
+    var newTopShares []models.ShareLog
+    if currentTopShares == nil || len(currentTopShares) < constants.TopSharesAmount {
+        newTopShares, _ = database.GetHighestSharesInRange(job.DB, database.CollectionName, time.Unix(0, 0), constants.TopSharesAmount)
+    } else {
+        sort.Slice(currentTopShares, func(i, j int) bool {
+            return currentTopShares[i].CreateDate > currentTopShares[j].CreateDate
+        })
+        lastTopShareDate := currentTopShares[0].CreateDate
+        lastTopShareDateTime := helpers.ParseCreateDate(lastTopShareDate)
+        newTopShares, _ = database.GetHighestSharesInRange(job.DB, database.CollectionName, lastTopShareDateTime, constants.TopSharesAmount)
+    }
+
+    newTopShares = append(newTopShares, currentTopShares...)
+    sort.Slice(newTopShares, func(i, j int) bool {
+        return newTopShares[i].SDiff > newTopShares[j].SDiff
+    })
+    newTopShares = notlinq.UniqueBy(newTopShares, func(s models.ShareLog) string {
+        return s.Hash
+    })
+    if len(newTopShares) > constants.TopSharesAmount {
+        newTopShares = newTopShares[:constants.TopSharesAmount]
+    }
+
+    database.ReplaceTopShares(job.DB, newTopShares)
+}
main.go | 10
@@ -9,7 +9,7 @@ import (
 
     "pool-stats/config"
     "pool-stats/database"
-    "pool-stats/ingest"
+    "pool-stats/jobs"
     "pool-stats/web"
 )
 
@@ -22,9 +22,15 @@ func main() {
     }
     defer db.Close()
 
-    ingestor := ingest.NewIngestor(db, config.LogPath)
+    ingestor := jobs.NewIngestSharesJob(db, config.LogPath)
     go ingestor.WatchAndIngest()
 
+    topSharesRecalcJob := jobs.NewRecalculateTopSharesJob(db)
+    go topSharesRecalcJob.Run()
+
+    timeWindowHighSharesRecalcJob := jobs.NewRecalculateTimeWindowHighSharesJob(db)
+    go timeWindowHighSharesRecalcJob.Run()
+
     webServer := web.NewWebServer(db, config.Port)
     if err := webServer.Start(); err != nil {
         log.Fatalf("Failed to start web server: %v", err)
@@ -29,10 +29,25 @@ type ShareLog struct {
     Agent string `json:"agent"` // Miner agent string (e.g., bitaxe/BM1370)
 }
 
-type ShareStat struct {
-    Label string
-    Diff  string
-    Time  string
+type TimeWindowHighShare struct {
+    TimeWindowID   string  `json:"time_window_id"`   // Unique ID for the time window
+    TimeWindowName string  `json:"time_window_name"` // Name of the time window (e.g., "Past Hour")
+    SDiff          float64 `json:"share_diff"`       // Difficulty of the highest share
+    Time           string  `json:"share_time"`       // Time of the highest share
+}
+
+type DailyStats struct {
+    Date         string                      `json:"date"`         // Format: "2006-01-02" in UTC
+    ShareCount   int                         `json:"sharecount"`   // Total shares submitted that day
+    TopShare     ShareLog                    `json:"topshare"`     // Highest share (by SDiff)
+    PoolHashrate float64                     `json:"poolhashrate"` // In H/s (averaged)
+    Workers      map[string]WorkerDailyStats `json:"workers"`      // key = workername
+}
+
+type WorkerDailyStats struct {
+    TopShare ShareLog `json:"topshare"` // Highest share by this worker
+    Hashrate float64  `json:"hashrate"` // avg hashrate in H/s
+    Shares   int      `json:"shares"`   // shares submitted
 }
 
 // ParseCreateDate can be used to convert ShareLog.CreateDate to time.Time
notlinq/notlinq.go | 24 (new file)
@@ -0,0 +1,24 @@
+package notlinq
+
+func UniqueBy[T any, K comparable](items []T, keySelector func(T) K) []T {
+    seen := make(map[K]struct{})
+    result := make([]T, 0, len(items))
+    for _, item := range items {
+        key := keySelector(item)
+        if _, exists := seen[key]; !exists {
+            seen[key] = struct{}{}
+            result = append(result, item)
+        }
+    }
+    return result
+}
+
+func Where[T any](items []T, predicate func(T) bool) []T {
+    result := make([]T, 0)
+    for _, item := range items {
+        if predicate(item) {
+            result = append(result, item)
+        }
+    }
+    return result
+}
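A short usage sketch for the generic notlinq helpers above (not part of the changeset; the values are made up, and the "pool-stats" module path is assumed):

package main

import (
    "fmt"

    "pool-stats/notlinq"
)

func main() {
    nums := []int{1, 2, 2, 3, 3, 3, 4}

    // Keep only even numbers, then drop duplicates (keyed by the value itself).
    evens := notlinq.Where(nums, func(n int) bool { return n%2 == 0 })
    unique := notlinq.UniqueBy(evens, func(n int) int { return n })

    fmt.Println(evens)  // [2 2 4]
    fmt.Println(unique) // [2 4]
}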
@@ -1,50 +0,0 @@
-package stats
-
-import (
-    "time"
-
-    "github.com/ostafen/clover/v2"
-
-    "pool-stats/database"
-    "pool-stats/helpers"
-    "pool-stats/models"
-)
-
-func GetStats(db *clover.DB) ([]models.ShareStat, error) {
-    now := time.Now()
-    ranges := []struct {
-        Label string
-        Since time.Time
-    }{
-        {"Past Hour", now.Add(-1 * time.Hour)},
-        {"Past 24h", now.Add(-24 * time.Hour)},
-        {"Past 7d", now.Add(-7 * 24 * time.Hour)},
-    }
-
-    stats := []models.ShareStat{}
-
-    // All-time highest
-    doc, _ := database.GetHighestShareInRange(db, database.CollectionName, time.Unix(0, 0))
-    if doc != nil {
-        stats = append(stats, models.ShareStat{
-            Label: "All Time",
-            Diff:  helpers.HumanDiff(doc.Get("SDiff").(float64)),
-            Time:  helpers.ParseCreateDate(doc.Get("CreateDate").(string)).Format(time.RFC822),
-        })
-    }
-
-    for _, r := range ranges {
-        doc, _ := database.GetHighestShareInRange(db, database.CollectionName, r.Since)
-        if doc != nil {
-            stats = append(stats, models.ShareStat{
-                Label: r.Label,
-                Diff:  helpers.HumanDiff(doc.Get("SDiff").(float64)),
-                Time:  helpers.ParseCreateDate(doc.Get("CreateDate").(string)).Format(time.RFC822),
-            })
-        } else {
-            stats = append(stats, models.ShareStat{Label: r.Label, Diff: "-", Time: "-"})
-        }
-    }
-
-    return stats, nil
-}
templates/daily_stats.html | 45 (new file)
@@ -0,0 +1,45 @@
+{{ define "title" }}Daily Stats{{ end }} {{ define "header" }}📊 Pool Daily
+Stats{{ end }} {{ define "content" }}
+<table>
+  <thead>
+    <tr>
+      <th>Date (UTC)</th>
+      <th>Share Count</th>
+      <th>Top Share Diff</th>
+      <th>Pool Hashrate</th>
+    </tr>
+  </thead>
+  <tbody>
+    {{ range .DailyStats }}
+    <tr>
+      <td>{{ .Date }}</td>
+      <td>{{ .ShareCount }}</td>
+      <td>{{ humanDiff .TopShare.SDiff }}</td>
+      <td>{{ formatHashrate .PoolHashrate }}</td>
+    </tr>
+    {{ else }}
+    <tr>
+      <td colspan="4">No stats found for this date range.</td>
+    </tr>
+    {{ end }}
+  </tbody>
+</table>
+
+<div>
+  {{ if .PrevPageAvailable }}
+  <a class="page-link" href="?start={{ .PrevPageStart }}&end={{ .PrevPageEnd }}"
+    >« Prev</a
+  >
+  {{ end }}
+
+  <a class="page-link current" href="?start={{ .Start }}&end={{ .End }}"
+    >{{ .Start }} - {{ .End }}</a
+  >
+
+  {{ if .NextPageAvailable }}
+  <a class="page-link" href="?start={{ .NextPageStart }}&end={{ .NextPageEnd }}"
+    >Next »</a
+  >
+  {{ end }}
+</div>
+{{ end }} {{ template "layout" . }}
@@ -1,62 +1,19 @@
-<!DOCTYPE html>
-<html>
-  <head>
-    <meta charset="UTF-8" />
-    <title>Share Stats</title>
-    <style>
-      body {
-        font-family: sans-serif;
-        background: #111;
-        color: #eee;
-        text-align: center;
-        padding: 2em;
-      }
-      table {
-        margin: auto;
-        border-collapse: collapse;
-      }
-      th,
-      td {
-        padding: 0.5em 1em;
-        border: 1px solid #444;
-      }
-      th {
-        background-color: #222;
-      }
-      tr:nth-child(even) {
-        background-color: #1a1a1a;
-      }
-      a {
-        color: #0af;
-        text-decoration: none;
-      }
-      li {
-        display: inline;
-        margin: 0 10px;
-      }
-    </style>
-  </head>
-  <body>
-    <h1>🌟 Pool Share Stats</h1>
-
-    <table>
+{{ define "title" }}Share Stats{{ end }} {{ define "header" }}🌟 Pool Share
+Stats{{ end }} {{ define "content" }}
+<table>
   <tr>
     <th>Range</th>
    <th>Highest Share Diff</th>
     <th>Time</th>
   </tr>
-  {{range .}}
+  {{ range .Stats }}
   <tr>
-    <td>{{.Label}}</td>
-    <td>{{.Diff}}</td>
-    <td>{{.Time}}</td>
+    <td>{{ .TimeWindowName }}</td>
+    <td>
+      {{ if ne .SDiff 0.0 }} {{ humanDiff .SDiff }} {{ else }} - {{ end }}
+    </td>
+    <td>{{ formatCreateDate .Time }}</td>
   </tr>
-  {{end}}
+  {{ end }}
 </table>
-
-<ul>
-  <li><a href="/">Home</a></li>
-  <li><a href="/shares">View Shares</a></li>
-</ul>
-</body>
-</html>
+{{ end }} {{ template "layout" . }}
templates/layout.html | 68 (new file)
@@ -0,0 +1,68 @@
+{{ define "layout" }}
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8" />
+    <title>{{ template "title" . }}</title>
+    <style>
+      body {
+        font-family: sans-serif;
+        background: #111;
+        color: #eee;
+        padding: 20px;
+        text-align: center;
+      }
+      table {
+        border-collapse: collapse;
+        margin: auto;
+        margin-bottom: 20px;
+      }
+      table.fw {
+        width: 100%;
+      }
+      th,
+      td {
+        padding: 8px 12px;
+        border: 1px solid #444;
+        text-align: left;
+        white-space: nowrap;
+      }
+      th {
+        background-color: #222;
+      }
+      tr:nth-child(even) {
+        background-color: #1a1a1a;
+      }
+      a.page-link {
+        margin: 0 5px;
+        text-decoration: none;
+        color: #0af;
+      }
+      a.page-link.current {
+        font-weight: bold;
+        color: #fff;
+      }
+      a {
+        color: #0af;
+        text-decoration: none;
+      }
+      li {
+        display: inline;
+        margin: 0 10px;
+      }
+    </style>
+  </head>
+  <body>
+    <h1>{{ template "header" . }}</h1>
+
+    {{ template "content" . }} {{ template "navigation" . }}
+  </body>
+</html>
+{{ end }} {{ define "navigation" }}
+<ul>
+  <li><a href="/">Home</a></li>
+  <li><a href="/shares">View Shares</a></li>
+  <li><a href="/top-shares">Top Shares</a></li>
+  <li><a href="/daily-stats">Daily Stats</a></li>
+</ul>
+{{ end }}
@@ -1,59 +1,10 @@
-{{ define "share_list" }}
-<!DOCTYPE html>
-<html lang="en">
-  <head>
-    <meta charset="UTF-8" />
-    <title>ckpool Share Browser</title>
-    <style>
-      body {
-        font-family: sans-serif;
-        background: #111;
-        color: #eee;
-        padding: 20px;
-      }
-      table {
-        width: 100%;
-        border-collapse: collapse;
-        margin-bottom: 20px;
-      }
-      th,
-      td {
-        padding: 8px 12px;
-        border: 1px solid #444;
-        text-align: left;
-        white-space: nowrap;
-      }
-      th {
-        background-color: #222;
-      }
-      a.page-link {
-        margin: 0 5px;
-        text-decoration: none;
-        color: #0af;
-      }
-      a.page-link.current {
-        font-weight: bold;
-        color: #fff;
-      }
-      a {
-        color: #0af;
-        text-decoration: none;
-      }
-      li {
-        display: inline;
-        margin: 0 10px;
-      }
-    </style>
-  </head>
-  <body>
-    <h1>☀️ Pool Share Browser</h1>
-
-    <table>
+{{ define "title" }}Share Browser{{ end }} {{ define "header" }}☀️ Pool Share
+Browser{{ end }} {{ define "content" }}
+<table>
   <thead>
     <tr>
       <th>Time</th>
      <th>Worker</th>
-      <th>Address</th>
       <th>SDiff</th>
       <th>Result</th>
       <th>Hash</th>
@@ -64,20 +15,19 @@
     <tr>
       <td>{{ formatCreateDate .CreateDate }}</td>
       <td>{{ .WorkerName }}</td>
-      <td>{{ .Address }}</td>
       <td>{{ humanDiff .SDiff }}</td>
       <td>{{ if .Result }}✔️{{ else }}❌{{ end }}</td>
       <td><code style="font-size: small">{{ .Hash }}</code></td>
     </tr>
     {{ else }}
     <tr>
-      <td colspan="6">No shares found.</td>
+      <td colspan="5">No shares found.</td>
     </tr>
     {{ end }}
   </tbody>
 </table>
 
 <div>
   {{ if gt .Page 1 }}
   <a class="page-link" href="?page={{ sub .Page 1 }}">« Prev</a>
   {{ end }} {{ if gt .Page 2 }}
@@ -92,12 +42,5 @@
   <span class="page-link">...</span>
   <a class="page-link" href="?page={{ add .Page 1 }}">Next »</a>
   {{ end }}
 </div>
-
-<ul>
-  <li><a href="/">Home</a></li>
-  <li><a href="/shares">View Shares</a></li>
-</ul>
-</body>
-</html>
-{{ end }}
+{{ end }} {{ template "layout" . }}
templates/top_shares.html | 27 (new file)
@@ -0,0 +1,27 @@
+{{ define "title" }}Top Shares{{ end }} {{ define "header" }}☀️ Pool Top
+Shares{{ end }} {{ define "content" }}
+<table>
+  <thead>
+    <tr>
+      <th>Time</th>
+      <th>Worker</th>
+      <th>SDiff</th>
+      <th>Hash</th>
+    </tr>
+  </thead>
+  <tbody>
+    {{ range .Shares }}
+    <tr>
+      <td>{{ formatCreateDate .CreateDate }}</td>
+      <td>{{ .WorkerName }}</td>
+      <td>{{ humanDiff .SDiff }}</td>
+      <td><code style="font-size: small">{{ .Hash }}</code></td>
+    </tr>
+    {{ else }}
+    <tr>
+      <td colspan="4">No shares found.</td>
+    </tr>
+    {{ end }}
+  </tbody>
+</table>
+{{ end }} {{ template "layout" . }}
web/dailyStatsHandler.go | 99 (new file)
@@ -0,0 +1,99 @@
+package web
+
+import (
+    "html/template"
+    "net/http"
+    "pool-stats/constants"
+    "pool-stats/database"
+    "pool-stats/models"
+    "time"
+)
+
+type DailyStatsPageData struct {
+    DailyStats []models.DailyStats
+
+    Start string
+    End   string
+
+    NextPageAvailable bool
+    NextPageStart     string
+    NextPageEnd       string
+
+    PrevPageAvailable bool
+    PrevPageStart     string
+    PrevPageEnd       string
+}
+
+func (ws *WebServer) DailyStatsHandler(w http.ResponseWriter, r *http.Request) {
+    tmpl, err := template.Must(ws.templates.Clone()).ParseFiles("templates/daily_stats.html")
+    if err != nil {
+        http.Error(w, "Failed to parse template", http.StatusInternalServerError)
+        println("Error parsing template:", err.Error())
+        return
+    }
+
+    startParam := r.URL.Query().Get("start")
+    endParam := r.URL.Query().Get("end")
+    var startTime, endTime time.Time
+
+    if startParam == "" || endParam == "" {
+        endTime = time.Now().Truncate(24 * time.Hour)
+        startTime = endTime.AddDate(0, 0, -constants.DailyStatsPerPage+1)
+    } else {
+        startTime, err = time.Parse(time.DateOnly, startParam)
+        if err != nil {
+            http.Error(w, "Invalid start time format", http.StatusBadRequest)
+            return
+        }
+
+        endTime, err = time.Parse(time.DateOnly, endParam)
+        if err != nil {
+            http.Error(w, "Invalid end time format", http.StatusBadRequest)
+            return
+        }
+    }
+
+    daysCount := int(endTime.Sub(startTime).Hours() / 24)
+    if daysCount < 0 {
+        http.Error(w, "End time must be after start time", http.StatusBadRequest)
+        return
+    }
+    if daysCount > constants.DailyStatsPerPage {
+        http.Error(w, "Too many days requested", http.StatusBadRequest)
+        return
+    }
+
+    dailyStats := make([]models.DailyStats, 0)
+    for t := endTime; !t.Before(startTime); t = t.AddDate(0, 0, -1) {
+        stats, err := database.GetDailyStats(ws.db, t)
+        if err != nil {
+            http.Error(w, "Failed to fetch daily stats", http.StatusInternalServerError)
+            return
+        }
+        dailyStats = append(dailyStats, *stats)
+    }
+
+    nextPageStart := endTime.AddDate(0, 0, 1)
+    nextPageEnd := endTime.AddDate(0, 0, constants.DailyStatsPerPage)
+    prevPageEnd := startTime.AddDate(0, 0, -1)
+    prevPageStart := startTime.AddDate(0, 0, -constants.DailyStatsPerPage)
+
+    data := DailyStatsPageData{
+        DailyStats: dailyStats,
+        Start:      startTime.Format(time.DateOnly),
+        End:        endTime.Format(time.DateOnly),
+
+        NextPageAvailable: nextPageStart.Before(time.Now()),
+        NextPageStart:     nextPageStart.Format(time.DateOnly),
+        NextPageEnd:       nextPageEnd.Format(time.DateOnly),
+
+        PrevPageAvailable: prevPageStart.After(constants.EpochTime),
+        PrevPageStart:     prevPageStart.Format(time.DateOnly),
+        PrevPageEnd:       prevPageEnd.Format(time.DateOnly),
+    }
+    if err := tmpl.ExecuteTemplate(w, "daily_stats.html", data); err != nil {
+        http.Error(w, "Failed to render template", http.StatusInternalServerError)
+        println("Error rendering template:", err.Error())
+        return
+    }
+}
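To make the paging arithmetic in DailyStatsHandler concrete, a worked sketch (not part of the changeset) using a made-up window of 2025-06-01 through 2025-06-15 and the DailyStatsPerPage = 15 constant from this compare:

package main

import (
    "fmt"
    "time"
)

func main() {
    const perPage = 15 // constants.DailyStatsPerPage
    start, _ := time.Parse(time.DateOnly, "2025-06-01")
    end, _ := time.Parse(time.DateOnly, "2025-06-15")

    nextStart := end.AddDate(0, 0, 1)          // 2025-06-16
    nextEnd := end.AddDate(0, 0, perPage)      // 2025-06-30
    prevEnd := start.AddDate(0, 0, -1)         // 2025-05-31
    prevStart := start.AddDate(0, 0, -perPage) // 2025-05-17

    fmt.Println("prev:", prevStart.Format(time.DateOnly), "-", prevEnd.Format(time.DateOnly))
    fmt.Println("next:", nextStart.Format(time.DateOnly), "-", nextEnd.Format(time.DateOnly))
}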
@@ -1,19 +0,0 @@
-package web
-
-import (
-    "html/template"
-    "net/http"
-
-    "pool-stats/stats"
-)
-
-func (ws *WebServer) IndexHandler(w http.ResponseWriter, r *http.Request) {
-    shareStats, err := stats.GetStats(ws.db)
-    if err != nil {
-        http.Error(w, "Failed to load stats", 500)
-        return
-    }
-
-    tmpl := template.Must(template.ParseFiles("templates/index.html"))
-    tmpl.Execute(w, shareStats)
-}
web/indexHandler.go | 38 (new file)
@@ -0,0 +1,38 @@
+package web
+
+import (
+    "html/template"
+    "net/http"
+
+    "pool-stats/database"
+    "pool-stats/models"
+)
+
+type IndexPageData struct {
+    Stats []models.TimeWindowHighShare
+}
+
+func (ws *WebServer) IndexHandler(w http.ResponseWriter, r *http.Request) {
+    tmpl, err := template.Must(ws.templates.Clone()).ParseFiles("templates/index.html")
+    if err != nil {
+        http.Error(w, "Failed to parse template", 500)
+        println("Error parsing template:", err.Error())
+        return
+    }
+
+    tws := database.GetTimeWindowHighShares(ws.db)
+    if tws == nil {
+        http.Error(w, "Failed to load time window high shares", 500)
+        return
+    }
+
+    indexData := IndexPageData{
+        Stats: tws,
+    }
+
+    if err := tmpl.ExecuteTemplate(w, "index.html", indexData); err != nil {
+        http.Error(w, "Failed to render template", 500)
+        println("Error rendering template:", err.Error())
+        return
+    }
+}
@@ -1,7 +1,9 @@
 package web
 
 import (
+    "html/template"
     "net/http"
+    "pool-stats/helpers"
 
     "fmt"
 
@@ -11,18 +13,34 @@ import (
 type WebServer struct {
     db   *clover.DB
     port int
+    templates *template.Template
 }
 
 func NewWebServer(db *clover.DB, port int) *WebServer {
+    templates := template.New("base").Funcs(template.FuncMap{
+        "add":              func(a, b int) int { return a + b },
+        "sub":              func(a, b int) int { return a - b },
+        "humanDiff":        helpers.HumanDiff,
+        "formatHashrate":   helpers.FormatHashrate,
+        "formatCreateDate": helpers.FormatCreateDate,
+    })
+
+    templates = template.Must(templates.ParseFiles(
+        "templates/layout.html",
+    ))
+
     return &WebServer{
         db:        db,
         port:      port,
+        templates: templates,
     }
 }
 
 func (ws *WebServer) Start() error {
     http.HandleFunc("/", ws.IndexHandler)
     http.HandleFunc("/shares", ws.SharesHandler)
+    http.HandleFunc("/top-shares", ws.TopSharesHandler)
+    http.HandleFunc("/daily-stats", ws.DailyStatsHandler)
+
     address := ":" + fmt.Sprint(ws.port)
     println("Listening on", address)
@@ -4,7 +4,6 @@ import (
     "html/template"
     "net/http"
     "pool-stats/database"
-    "pool-stats/helpers"
     "pool-stats/models"
     "strconv"
 )
@@ -16,15 +15,10 @@ type SharePageData struct {
 }
 
 func (ws *WebServer) SharesHandler(w http.ResponseWriter, r *http.Request) {
-    tmpl := template.New("share_list").Funcs(template.FuncMap{
-        "add":              func(a, b int) int { return a + b },
-        "sub":              func(a, b int) int { return a - b },
-        "humanDiff":        helpers.HumanDiff,
-        "formatCreateDate": helpers.FormatCreateDate,
-    })
-    tmpl, err := tmpl.ParseFiles("templates/shares.html")
+    tmpl, err := template.Must(ws.templates.Clone()).ParseFiles("templates/shares.html")
     if err != nil {
-        http.Error(w, "Failed to load template", 500)
+        http.Error(w, "Failed to parse template", 500)
+        println("Error parsing template:", err.Error())
         return
     }
 
@@ -52,7 +46,7 @@ func (ws *WebServer) SharesHandler(w http.ResponseWriter, r *http.Request) {
         Page:    offset/entriesPerPage + 1,
         HasMore: len(shareLogs) == entriesPerPage,
     }
-    if err := tmpl.Execute(w, data); err != nil {
+    if err := tmpl.ExecuteTemplate(w, "shares.html", data); err != nil {
         http.Error(w, "Failed to render template", 500)
         return
     }
web/topSharesHandler.go | 35 (new file)
@@ -0,0 +1,35 @@
+package web
+
+import (
+    "html/template"
+    "net/http"
+    "pool-stats/database"
+    "pool-stats/models"
+)
+
+type TopSharesPageData struct {
+    Shares []models.ShareLog
+}
+
+func (ws *WebServer) TopSharesHandler(w http.ResponseWriter, r *http.Request) {
+    tmpl, err := template.Must(ws.templates.Clone()).ParseFiles("templates/top_shares.html")
+    if err != nil {
+        http.Error(w, "Failed to parse template", 500)
+        println("Error parsing template:", err.Error())
+        return
+    }
+
+    topShares := database.ListTopShares(ws.db)
+    if topShares == nil {
+        http.Error(w, "Failed to load top shares", 500)
+        return
+    }
+
+    data := TopSharesPageData{
+        Shares: topShares,
+    }
+    if err := tmpl.ExecuteTemplate(w, "top_shares.html", data); err != nil {
+        http.Error(w, "Failed to render template", 500)
+        return
+    }
+}