Implement daily stats
parent 4ddd9abd2e · commit b89a1a2a7e
@@ -16,4 +16,9 @@ const (
const (
	// TopSharesAmount is the number of top shares to keep
	TopSharesAmount = 15
	// DailyStatsPerPage is the number of daily stats per page
	DailyStatsPerPage = 15
)

// EpochTime is the start time for daily stats
var EpochTime = time.Date(2025, 5, 1, 0, 0, 0, 0, time.UTC)
database/db.go
@@ -3,7 +3,9 @@ package database
import (
	"fmt"
	"log"
	"pool-stats/helpers"
	"pool-stats/models"
	"sort"
	"time"

	"github.com/ostafen/clover/v2"
@@ -16,6 +18,7 @@ const (
	CollectionName                    = "shares"
	TopSharesCollectionName           = "TopShares"
	TimeWindowHighShareCollectionName = "TimeWindowHighShareStat"
	DailyStatsCollectionName          = "DailyStats"
)

func InitDatabase(path string) (*clover.DB, error) {
@@ -79,6 +82,20 @@ func InitDatabase(path string) (*clover.DB, error) {
		}
	}

	// Init DailyStats collection
	hasDailyStatsCollection, err := db.HasCollection(DailyStatsCollectionName)
	if err != nil {
		return nil, fmt.Errorf("failed to check DailyStats collection: %v", err)
	}
	if !hasDailyStatsCollection {
		if err := db.CreateCollection(DailyStatsCollectionName); err != nil {
			return nil, fmt.Errorf("failed to create DailyStats collection: %v", err)
		}
		if err := db.CreateIndex(DailyStatsCollectionName, "Date"); err != nil {
			return nil, fmt.Errorf("failed to create index for DailyStats: %v", err)
		}
	}

	return db, nil
}
@@ -211,3 +228,97 @@ func SetTimeWindowHighShare(db *clover.DB, share models.TimeWindowHighShare) err

	return nil
}

func ListSharesInTimeRange(db *clover.DB, since time.Time, till time.Time) []models.ShareLog {
	lower := since.Unix()
	upper := till.Unix()

	results, err := db.FindAll(c.NewQuery(CollectionName).
		Where(c.Field("CreateDate").GtEq(fmt.Sprint(lower)).
			And(c.Field("CreateDate").LtEq(fmt.Sprint(upper)))).
		Sort(c.SortOption{Field: "CreateDate", Direction: -1}))

	if err != nil {
		log.Printf("failed to list shares in time range: %v", err)
		return nil
	}

	shareLogs := make([]models.ShareLog, len(results))
	for idx, doc := range results {
		var shareLog models.ShareLog
		doc.Unmarshal(&shareLog)
		shareLogs[idx] = shareLog
	}

	return shareLogs
}

// GetDailyStats retrieves daily statistics for a given date.
// It first looks in the DailyStats collection; if nothing is stored (or the date is today),
// the stats are calculated on the fly from the shares and stored.
func GetDailyStats(db *clover.DB, date time.Time) (*models.DailyStats, error) {
	dateStr := date.Format(time.DateOnly)

	// Check if stats already exist
	isToday := dateStr == time.Now().UTC().Format(time.DateOnly)
	existingDoc, err := db.FindFirst(c.NewQuery(DailyStatsCollectionName).
		Where(c.Field("Date").Eq(dateStr)))
	if !isToday && err == nil && existingDoc != nil {
		var stats models.DailyStats
		if err := existingDoc.Unmarshal(&stats); err != nil {
			return nil, fmt.Errorf("failed to unmarshal daily stats: %v", err)
		}
		return &stats, nil
	}

	// Get shares in range
	since := date.Truncate(24 * time.Hour)
	till := since.Add(24 * time.Hour)
	shares := ListSharesInTimeRange(db, since, till)
	sort.Slice(shares, func(i, j int) bool {
		return shares[i].SDiff > shares[j].SDiff
	})

	// Calculate daily stats
	stats := &models.DailyStats{
		Date:       dateStr,
		ShareCount: len(shares),
		Workers:    make(map[string]models.WorkerDailyStats),
	}

	if len(shares) > 0 {
		stats.TopShare = shares[0]
		stats.PoolHashrate = helpers.CalculateAverageHashrate(shares)
	}

	// Calculate worker stats
	sharesByWorker := make(map[string][]models.ShareLog)
	for _, share := range shares {
		sharesByWorker[share.WorkerName] = append(sharesByWorker[share.WorkerName], share)
	}
	for workerName, workerShares := range sharesByWorker {
		workerHashrate := helpers.CalculateAverageHashrate(workerShares)
		workerTopShare := workerShares[0] // Already sorted by SDiff

		stats.Workers[workerName] = models.WorkerDailyStats{
			TopShare: workerTopShare,
			Hashrate: workerHashrate,
			Shares:   len(workerShares),
		}
	}

	// Insert the computed daily stats into the collection
	doc := document.NewDocumentOf(stats)
	if _, err := db.InsertOne(DailyStatsCollectionName, doc); err != nil {
		return nil, fmt.Errorf("failed to insert daily stats: %v", err)
	}

	return stats, nil
}

func ClearDailyStats(db *clover.DB) error {
	// Delete all documents in DailyStats collection
	if err := db.Delete(c.NewQuery(DailyStatsCollectionName)); err != nil {
		return fmt.Errorf("failed to clear DailyStats collection: %v", err)
	}
	return nil
}
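GetDailyStats only recomputes (and stores) stats when the date is missing from the DailyStats collection or is today. A minimal usage sketch, not part of the commit — the database path is hypothetical, and it assumes the pool-stats/database and pool-stats/helpers packages shown above:

package main

import (
	"fmt"
	"log"
	"time"

	"pool-stats/database"
	"pool-stats/helpers"
)

func main() {
	db, err := database.InitDatabase("pool-stats.db") // hypothetical DB path
	if err != nil {
		log.Fatalf("open db: %v", err)
	}
	defer db.Close()

	day := time.Date(2025, 5, 2, 0, 0, 0, 0, time.UTC)
	stats, err := database.GetDailyStats(db, day) // computed from shares and stored on the first call
	if err != nil {
		log.Fatalf("daily stats: %v", err)
	}
	fmt.Printf("%s: %d shares, pool hashrate %s\n",
		stats.Date, stats.ShareCount, helpers.FormatHashrate(stats.PoolHashrate))
}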
@@ -2,6 +2,9 @@ package helpers

import (
	"fmt"
	"math"
	"pool-stats/models"
	"sort"
	"strconv"
	"strings"
	"time"
@@ -37,3 +40,41 @@ func FormatCreateDate(createdate string) string {
	}
	return "-"
}

func CalculateAverageHashrate(shares []models.ShareLog) float64 {
	if len(shares) == 0 {
		return 0.0
	}

	// Sort a copy by CreateDate so the caller's ordering (e.g. by SDiff in GetDailyStats) is not disturbed.
	sorted := make([]models.ShareLog, len(shares))
	copy(sorted, shares)
	sort.Slice(sorted, func(i, j int) bool {
		return sorted[i].CreateDate < sorted[j].CreateDate
	})

	first := ParseCreateDate(sorted[0].CreateDate)
	last := ParseCreateDate(sorted[len(sorted)-1].CreateDate)
	timeSpan := last.Sub(first).Seconds()
	if timeSpan <= 0 {
		return 0.0
	}

	var totalAssignedDiff float64
	for _, s := range sorted {
		totalAssignedDiff += s.Diff
	}

	avgAssignedDiff := totalAssignedDiff / float64(len(sorted))

	// Hashrate = avg diff * 2^32 / avg time per share
	hashrate := (avgAssignedDiff * math.Pow(2, 32)) / (timeSpan / float64(len(sorted)))
	return hashrate
}

func FormatHashrate(hps float64) string {
	units := []string{"H/s", "kH/s", "MH/s", "GH/s", "TH/s", "PH/s", "EH/s"}
	i := 0
	for hps >= 1000 && i < len(units)-1 {
		hps /= 1000
		i++
	}
	return fmt.Sprintf("%.2f %s", hps, units[i])
}
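As a sanity check of the formula above (average assigned difficulty × 2^32 ÷ average seconds per share), a standalone sketch with hypothetical numbers — 100 shares of difficulty 1000 spread over 600 seconds come out to roughly 715.83 GH/s:

package main

import (
	"fmt"
	"math"
)

func main() {
	avgDiff := 1000.0   // hypothetical average assigned difficulty
	timeSpan := 600.0   // hypothetical seconds between first and last share
	shareCount := 100.0 // hypothetical number of shares in the window

	avgSecondsPerShare := timeSpan / shareCount // 6 s per share
	hashrate := avgDiff * math.Pow(2, 32) / avgSecondsPerShare

	fmt.Printf("%.2f GH/s\n", hashrate/1e9) // prints 715.83 GH/s
}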
@@ -36,6 +36,20 @@ type TimeWindowHighShare struct {
	Time string `json:"share_time"` // Time of the highest share
}

type DailyStats struct {
	Date         string                      `json:"date"`         // Format: "2006-01-02" in UTC
	ShareCount   int                         `json:"sharecount"`   // Total shares submitted that day
	TopShare     ShareLog                    `json:"topshare"`     // Highest share (by SDiff)
	PoolHashrate float64                     `json:"poolhashrate"` // In H/s (averaged)
	Workers      map[string]WorkerDailyStats `json:"workers"`      // key = workername
}

type WorkerDailyStats struct {
	TopShare ShareLog `json:"topshare"` // Highest share by this worker
	Hashrate float64  `json:"hashrate"` // avg hashrate in H/s
	Shares   int      `json:"shares"`   // shares submitted
}

// ParseCreateDate can be used to convert ShareLog.CreateDate to time.Time
func (s *ShareLog) ParseCreateDate() (time.Time, error) {
	var sec, nsec int64
templates/daily_stats.html (new file)
@@ -0,0 +1,45 @@
{{ define "title" }}Daily Stats{{ end }}
{{ define "header" }}📊 Pool Daily Stats{{ end }}
{{ define "content" }}
<table>
  <thead>
    <tr>
      <th>Date (UTC)</th>
      <th>Share Count</th>
      <th>Top Share Diff</th>
      <th>Pool Hashrate</th>
    </tr>
  </thead>
  <tbody>
    {{ range .DailyStats }}
    <tr>
      <td>{{ .Date }}</td>
      <td>{{ .ShareCount }}</td>
      <td>{{ humanDiff .TopShare.SDiff }}</td>
      <td>{{ formatHashrate .PoolHashrate }}</td>
    </tr>
    {{ else }}
    <tr>
      <td colspan="4">No stats found for this date range.</td>
    </tr>
    {{ end }}
  </tbody>
</table>

<div>
  {{ if .PrevPageAvailable }}
  <a class="page-link" href="?start={{ .PrevPageStart }}&end={{ .PrevPageEnd }}">« Prev</a>
  {{ end }}

  <a class="page-link current" href="?start={{ .Start }}&end={{ .End }}">{{ .Start }} - {{ .End }}</a>

  {{ if .NextPageAvailable }}
  <a class="page-link" href="?start={{ .NextPageStart }}&end={{ .NextPageEnd }}">Next »</a>
  {{ end }}
</div>
{{ end }}

{{ template "layout" . }}
@@ -63,5 +63,6 @@
      <li><a href="/">Home</a></li>
      <li><a href="/shares">View Shares</a></li>
      <li><a href="/top-shares">Top Shares</a></li>
      <li><a href="/daily-stats">Daily Stats</a></li>
    </ul>
{{ end }}
web/dailyStatsHandler.go (new file)
@@ -0,0 +1,99 @@
package web

import (
	"html/template"
	"net/http"
	"pool-stats/constants"
	"pool-stats/database"
	"pool-stats/models"
	"time"
)

type DailyStatsPageData struct {
	DailyStats []models.DailyStats

	Start string
	End   string

	NextPageAvailable bool
	NextPageStart     string
	NextPageEnd       string

	PrevPageAvailable bool
	PrevPageStart     string
	PrevPageEnd       string
}

func (ws *WebServer) DailyStatsHandler(w http.ResponseWriter, r *http.Request) {
	tmpl, err := template.Must(ws.templates.Clone()).ParseFiles("templates/daily_stats.html")
	if err != nil {
		http.Error(w, "Failed to parse template", http.StatusInternalServerError)
		println("Error parsing template:", err.Error())
		return
	}

	startParam := r.URL.Query().Get("start")
	endParam := r.URL.Query().Get("end")
	var startTime, endTime time.Time

	if startParam == "" || endParam == "" {
		endTime = time.Now().Truncate(24 * time.Hour)
		startTime = endTime.AddDate(0, 0, -constants.DailyStatsPerPage+1)
	} else {
		startTime, err = time.Parse(time.DateOnly, startParam)
		if err != nil {
			http.Error(w, "Invalid start time format", http.StatusBadRequest)
			return
		}

		endTime, err = time.Parse(time.DateOnly, endParam)
		if err != nil {
			http.Error(w, "Invalid end time format", http.StatusBadRequest)
			return
		}
	}

	daysCount := int(endTime.Sub(startTime).Hours() / 24)
	if daysCount < 0 {
		http.Error(w, "End time must be after start time", http.StatusBadRequest)
		return
	}
	if daysCount > constants.DailyStatsPerPage {
		http.Error(w, "Too many days requested", http.StatusBadRequest)
		return
	}

	dailyStats := make([]models.DailyStats, 0)
	for t := endTime; !t.Before(startTime); t = t.AddDate(0, 0, -1) {
		stats, err := database.GetDailyStats(ws.db, t)
		if err != nil {
			http.Error(w, "Failed to fetch daily stats", http.StatusInternalServerError)
			return
		}
		dailyStats = append(dailyStats, *stats)
	}

	nextPageStart := endTime.AddDate(0, 0, 1)
	nextPageEnd := endTime.AddDate(0, 0, constants.DailyStatsPerPage)
	prevPageEnd := startTime.AddDate(0, 0, -1)
	prevPageStart := startTime.AddDate(0, 0, -constants.DailyStatsPerPage)

	data := DailyStatsPageData{
		DailyStats: dailyStats,
		Start:      startTime.Format(time.DateOnly),
		End:        endTime.Format(time.DateOnly),

		NextPageAvailable: nextPageStart.Before(time.Now()),
		NextPageStart:     nextPageStart.Format(time.DateOnly),
		NextPageEnd:       nextPageEnd.Format(time.DateOnly),

		PrevPageAvailable: prevPageStart.After(constants.EpochTime),
		PrevPageStart:     prevPageStart.Format(time.DateOnly),
		PrevPageEnd:       prevPageEnd.Format(time.DateOnly),
	}
	if err := tmpl.ExecuteTemplate(w, "daily_stats.html", data); err != nil {
		http.Error(w, "Failed to render template", http.StatusInternalServerError)
		println("Error rendering template:", err.Error())
		return
	}
}
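The prev/next links shift the visible window by whole pages. A hypothetical worked example of that arithmetic, assuming constants.DailyStatsPerPage = 15 and a current page covering 2025-06-01 .. 2025-06-15:

package main

import (
	"fmt"
	"time"
)

func main() {
	perPage := 15 // assumed value of constants.DailyStatsPerPage
	start, _ := time.Parse(time.DateOnly, "2025-06-01")
	end, _ := time.Parse(time.DateOnly, "2025-06-15")

	fmt.Println(end.AddDate(0, 0, 1).Format(time.DateOnly))          // next page start: 2025-06-16
	fmt.Println(end.AddDate(0, 0, perPage).Format(time.DateOnly))    // next page end:   2025-06-30
	fmt.Println(start.AddDate(0, 0, -1).Format(time.DateOnly))       // prev page end:   2025-05-31
	fmt.Println(start.AddDate(0, 0, -perPage).Format(time.DateOnly)) // prev page start: 2025-05-17
}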
@@ -21,6 +21,7 @@ func NewWebServer(db *clover.DB, port int) *WebServer {
		"add":              func(a, b int) int { return a + b },
		"sub":              func(a, b int) int { return a - b },
		"humanDiff":        helpers.HumanDiff,
		"formatHashrate":   helpers.FormatHashrate,
		"formatCreateDate": helpers.FormatCreateDate,
	})

@@ -39,6 +40,7 @@ func (ws *WebServer) Start() error {
	http.HandleFunc("/", ws.IndexHandler)
	http.HandleFunc("/shares", ws.SharesHandler)
	http.HandleFunc("/top-shares", ws.TopSharesHandler)
	http.HandleFunc("/daily-stats", ws.DailyStatsHandler)

	address := ":" + fmt.Sprint(ws.port)
	println("Listening on", address)