Rewrite how historical stats are stored and calculated.

Closes #310.
This commit is contained in:
Eugene Bujak 2018-09-06 02:11:36 +03:00
parent c7a5275d42
commit 04562dece3
3 changed files with 142 additions and 180 deletions

View File

@ -199,21 +199,23 @@ func handleStatus(w http.ResponseWriter, r *http.Request) {
// stats
// -----
func handleStats(w http.ResponseWriter, r *http.Request) {
snap := &statistics.lastsnap
histrical := generateMapFromStats(&statistics.perMinute, 0, 2)
// sum them up
summed := map[string]interface{}{}
for key, values := range histrical {
summedValue := 0.0
floats, ok := values.([]float64)
if !ok {
continue
}
for _, v := range floats {
summedValue += v
}
summed[key] = summedValue
}
summed["stats_period"] = "3 minutes"
// generate from last 3 minutes
var last3mins statsSnapshot
last3mins.filteredTotal = snap.filteredTotal - statistics.perMinute.filteredTotal[2]
last3mins.filteredLists = snap.filteredLists - statistics.perMinute.filteredLists[2]
last3mins.filteredSafebrowsing = snap.filteredSafebrowsing - statistics.perMinute.filteredSafebrowsing[2]
last3mins.filteredParental = snap.filteredParental - statistics.perMinute.filteredParental[2]
last3mins.totalRequests = snap.totalRequests - statistics.perMinute.totalRequests[2]
last3mins.processingTimeSum = snap.processingTimeSum - statistics.perMinute.processingTimeSum[2]
last3mins.processingTimeCount = snap.processingTimeCount - statistics.perMinute.processingTimeCount[2]
// rate := computeRate(append([]float64(snap.totalRequests}, statistics.perMinute.totalRequests[0:2])
data := generateMapFromSnap(last3mins)
json, err := json.Marshal(data)
json, err := json.Marshal(summed)
if err != nil {
errortext := fmt.Sprintf("Unable to marshal status json: %s", err)
log.Println(errortext)
@ -232,28 +234,29 @@ func handleStats(w http.ResponseWriter, r *http.Request) {
func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
// handle time unit and prepare our time window size
limitTime := time.Now()
timeUnit := r.URL.Query().Get("time_unit")
now := time.Now()
timeUnitString := r.URL.Query().Get("time_unit")
var stats *periodicStats
switch timeUnit {
var timeUnit time.Duration
switch timeUnitString {
case "seconds":
limitTime = limitTime.Add(statsHistoryElements * -1 * time.Second)
timeUnit = time.Second
stats = &statistics.perSecond
case "minutes":
limitTime = limitTime.Add(statsHistoryElements * -1 * time.Minute)
timeUnit = time.Minute
stats = &statistics.perMinute
case "hours":
limitTime = limitTime.Add(statsHistoryElements * -1 * time.Hour)
timeUnit = time.Hour
stats = &statistics.perHour
case "days":
limitTime = limitTime.Add(statsHistoryElements * -1 * time.Hour * 24)
timeUnit = time.Hour * 24
stats = &statistics.perDay
default:
http.Error(w, "Must specify valid time_unit parameter", 400)
return
}
// check if start time is within supported time range
// parse start and end time
startTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("start_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid start_time parameter: %s", err)
@ -261,12 +264,6 @@ func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
http.Error(w, errortext, 400)
return
}
if startTime.Before(limitTime) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
// check if end time is within supported time range
endTime, err := time.Parse(time.RFC3339, r.URL.Query().Get("end_time"))
if err != nil {
errortext := fmt.Sprintf("Must specify valid end_time parameter: %s", err)
@ -274,28 +271,22 @@ func handleStatsHistory(w http.ResponseWriter, r *http.Request) {
http.Error(w, errortext, 400)
return
}
if endTime.Before(limitTime) {
// check if start and end times are within supported time range
timeRange := timeUnit * statsHistoryElements
if startTime.Add(timeRange).Before(now) {
http.Error(w, "start_time parameter is outside of supported range", 501)
return
}
if endTime.Add(timeRange).Before(now) {
http.Error(w, "end_time parameter is outside of supported range", 501)
return
}
// calculate which slice range we need to provide
var start int
var end int
switch timeUnit {
case "seconds":
start = int(startTime.Sub(limitTime).Seconds())
end = int(endTime.Sub(limitTime).Seconds())
case "minutes":
start = int(startTime.Sub(limitTime).Minutes())
end = int(endTime.Sub(limitTime).Minutes())
case "hours":
start = int(startTime.Sub(limitTime).Hours())
end = int(endTime.Sub(limitTime).Hours())
case "days":
start = int(startTime.Sub(limitTime).Hours() / 24.0)
end = int(endTime.Sub(limitTime).Hours() / 24.0)
}
// calculate start and end of our array
// basically it's how many hours/minutes/etc have passed since now
start := int(now.Sub(endTime) / timeUnit)
end := int(now.Sub(startTime) / timeUnit)
// swap them around if they're inverted
if start > end {

View File

@ -51,33 +51,14 @@ func ensureDELETE(handler func(http.ResponseWriter, *http.Request)) func(http.Re
// --------------------------
// helper functions for stats
// --------------------------
func computeRate(input []float64) []float64 {
func getSlice(input [statsHistoryElements]float64, start int, end int) []float64 {
output := make([]float64, 0)
for i := len(input) - 2; i >= 0; i-- {
value := input[i]
diff := value - input[i+1]
output = append([]float64{diff}, output...)
for i := start; i <= end; i++ {
output = append(output, input[i])
}
return output
}
func generateMapFromSnap(snap statsSnapshot) map[string]interface{} {
var avgProcessingTime float64
if snap.processingTimeCount > 0 {
avgProcessingTime = snap.processingTimeSum / snap.processingTimeCount
}
result := map[string]interface{}{
"dns_queries": snap.totalRequests,
"blocked_filtering": snap.filteredLists,
"replaced_safebrowsing": snap.filteredSafebrowsing,
"replaced_safesearch": snap.filteredSafesearch,
"replaced_parental": snap.filteredParental,
"avg_processing_time": avgProcessingTime,
}
return result
}
func generateMapFromStats(stats *periodicStats, start int, end int) map[string]interface{} {
// clamp
start = clamp(start, 0, statsHistoryElements)
@ -85,8 +66,8 @@ func generateMapFromStats(stats *periodicStats, start int, end int) map[string]i
avgProcessingTime := make([]float64, 0)
count := computeRate(stats.processingTimeCount[start:end])
sum := computeRate(stats.processingTimeSum[start:end])
count := getSlice(stats.entries[processingTimeCount], start, end)
sum := getSlice(stats.entries[processingTimeSum], start, end)
for i := 0; i < len(count); i++ {
var avg float64
if count[i] != 0 {
@ -97,11 +78,11 @@ func generateMapFromStats(stats *periodicStats, start int, end int) map[string]i
}
result := map[string]interface{}{
"dns_queries": computeRate(stats.totalRequests[start:end]),
"blocked_filtering": computeRate(stats.filteredLists[start:end]),
"replaced_safebrowsing": computeRate(stats.filteredSafebrowsing[start:end]),
"replaced_safesearch": computeRate(stats.filteredSafesearch[start:end]),
"replaced_parental": computeRate(stats.filteredParental[start:end]),
"dns_queries": getSlice(stats.entries[totalRequests], start, end),
"blocked_filtering": getSlice(stats.entries[filteredLists], start, end),
"replaced_safebrowsing": getSlice(stats.entries[filteredSafebrowsing], start, end),
"replaced_safesearch": getSlice(stats.entries[filteredSafesearch], start, end),
"replaced_parental": getSlice(stats.entries[filteredParental], start, end),
"avg_processing_time": avgProcessingTime,
}
return result

202
stats.go
View File

@ -8,70 +8,50 @@ import (
"net/http"
"net/url"
"os"
"regexp"
"strconv"
"strings"
"syscall"
"time"
)
// periodicStats holds one ring of historical counters per metric, one slot
// per time unit (second/minute/hour/day depending on which instance this is).
// Slot 0 is the current period; statsRotate shifts values toward the end.
type periodicStats struct {
totalRequests []float64
filteredTotal []float64
filteredLists []float64
filteredSafebrowsing []float64
filteredSafesearch []float64
filteredParental []float64
processingTimeSum []float64 // cumulative processing time, seconds
processingTimeCount []float64 // number of requests the sum covers
lastRotate time.Time // last time this data was rotated
}
// statsSnapshot is the most recent set of cumulative counter values scraped
// from the coredns metrics endpoint (see collectStats).
type statsSnapshot struct {
totalRequests float64
filteredTotal float64
filteredLists float64
filteredSafebrowsing float64
filteredSafesearch float64
filteredParental float64
processingTimeSum float64 // cumulative processing time, seconds
processingTimeCount float64 // number of requests the sum covers
}
// statsCollection groups the per-period history rings together with the
// latest raw snapshot they were derived from.
type statsCollection struct {
perSecond periodicStats
perMinute periodicStats
perHour periodicStats
perDay periodicStats
lastsnap statsSnapshot // most recently scraped cumulative values
}
var statistics statsCollection
var client = &http.Client{
Timeout: time.Second * 30,
}
const statsHistoryElements = 60 + 1 // +1 for calculating delta
// as seen over HTTP
type statsEntry map[string]float64
type statsEntries map[string][statsHistoryElements]float64
var requestCountTotalRegex = regexp.MustCompile(`^coredns_dns_request_count_total`)
var requestDurationSecondsSum = regexp.MustCompile(`^coredns_dns_request_duration_seconds_sum`)
var requestDurationSecondsCount = regexp.MustCompile(`^coredns_dns_request_duration_seconds_count`)
// The metric-name constants below serve double duty: they are the keys used
// in statsEntries maps, and they match the Prometheus metric names emitted by
// coredns and parsed in collectStats.
const (
statsHistoryElements = 60 + 1 // +1 for calculating delta
totalRequests = `coredns_dns_request_count_total`
filteredTotal = `coredns_dnsfilter_filtered_total`
filteredLists = `coredns_dnsfilter_filtered_lists_total`
filteredSafebrowsing = `coredns_dnsfilter_filtered_safebrowsing_total`
filteredSafesearch = `coredns_dnsfilter_safesearch_total`
filteredParental = `coredns_dnsfilter_filtered_parental_total`
processingTimeSum = `coredns_dns_request_duration_seconds_sum`
processingTimeCount = `coredns_dns_request_duration_seconds_count`
)
func initPeriodicStats(stats *periodicStats) {
stats.totalRequests = make([]float64, statsHistoryElements)
stats.filteredTotal = make([]float64, statsHistoryElements)
stats.filteredLists = make([]float64, statsHistoryElements)
stats.filteredSafebrowsing = make([]float64, statsHistoryElements)
stats.filteredSafesearch = make([]float64, statsHistoryElements)
stats.filteredParental = make([]float64, statsHistoryElements)
stats.processingTimeSum = make([]float64, statsHistoryElements)
stats.processingTimeCount = make([]float64, statsHistoryElements)
// periodicStats is one history ring: for each metric key, an array of
// statsHistoryElements values, where index 0 is the current period and
// statsRotate shifts values toward higher indexes.
type periodicStats struct {
entries statsEntries
lastRotate time.Time // last time this data was rotated
}
// stats groups the per-period history rings together with the raw counter
// values seen on the previous scrape (used to compute deltas).
type stats struct {
perSecond periodicStats
perMinute periodicStats
perHour periodicStats
perDay periodicStats
lastSeen statsEntry // cumulative values from the previous scrape
}
var statistics stats
// initPeriodicStats resets the given history ring to an empty set of entries.
func initPeriodicStats(periodic *periodicStats) {
	periodic.entries = make(statsEntries)
}
func init() {
@ -106,37 +86,22 @@ func isConnRefused(err error) bool {
return false
}
func sliceRotate(slice *[]float64) {
a := (*slice)[:len(*slice)-1]
*slice = append([]float64{0}, a...)
}
func statsRotate(stats *periodicStats, now time.Time) {
sliceRotate(&stats.totalRequests)
sliceRotate(&stats.filteredTotal)
sliceRotate(&stats.filteredLists)
sliceRotate(&stats.filteredSafebrowsing)
sliceRotate(&stats.filteredSafesearch)
sliceRotate(&stats.filteredParental)
sliceRotate(&stats.processingTimeSum)
sliceRotate(&stats.processingTimeCount)
stats.lastRotate = now
}
func handleValue(input string, target *float64) {
value, err := strconv.ParseFloat(input, 64)
if err != nil {
log.Println("Failed to parse number input:", err)
return
func statsRotate(periodic *periodicStats, now time.Time) {
for key, values := range periodic.entries {
newValues := [statsHistoryElements]float64{}
for i := 1; i < len(values); i++ {
newValues[i] = values[i-1]
}
*target = value
periodic.entries[key] = newValues
}
periodic.lastRotate = now
}
// called every second, accumulates stats for each second, minute, hour and day
func collectStats() {
now := time.Now()
// rotate each second
// NOTE: since we are called every second, always rotate, otherwise aliasing problems cause the rotation to skip
// NOTE: since we are called every second, always rotate perSecond, otherwise aliasing problems cause the rotation to skip
if true {
statsRotate(&statistics.perSecond, now)
}
@ -172,6 +137,8 @@ func collectStats() {
return
}
entry := statsEntry{}
// handle body
scanner := bufio.NewScanner(strings.NewReader(string(body)))
for scanner.Scan() {
@ -181,38 +148,61 @@ func collectStats() {
continue
}
splitted := strings.Split(line, " ")
switch {
case splitted[0] == "coredns_dnsfilter_filtered_total":
handleValue(splitted[1], &statistics.lastsnap.filteredTotal)
case splitted[0] == "coredns_dnsfilter_filtered_lists_total":
handleValue(splitted[1], &statistics.lastsnap.filteredLists)
case splitted[0] == "coredns_dnsfilter_filtered_safebrowsing_total":
handleValue(splitted[1], &statistics.lastsnap.filteredSafebrowsing)
case splitted[0] == "coredns_dnsfilter_filtered_parental_total":
handleValue(splitted[1], &statistics.lastsnap.filteredParental)
case requestCountTotalRegex.MatchString(splitted[0]):
handleValue(splitted[1], &statistics.lastsnap.totalRequests)
case requestDurationSecondsSum.MatchString(splitted[0]):
handleValue(splitted[1], &statistics.lastsnap.processingTimeSum)
case requestDurationSecondsCount.MatchString(splitted[0]):
handleValue(splitted[1], &statistics.lastsnap.processingTimeCount)
}
if len(splitted) < 2 {
continue
}
// put the snap into per-second, per-minute, per-hour and per-day
assignSnapToStats(&statistics.perSecond)
assignSnapToStats(&statistics.perMinute)
assignSnapToStats(&statistics.perHour)
assignSnapToStats(&statistics.perDay)
value, err := strconv.ParseFloat(splitted[1], 64)
if err != nil {
log.Printf("Failed to parse number input %s: %s", splitted[1], err)
continue
}
key := splitted[0]
index := strings.IndexByte(key, '{')
if index >= 0 {
key = key[:index]
}
// empty keys are not ok
if key == "" {
continue
}
got, ok := entry[key]
if ok {
value += got
}
entry[key] = value
}
// calculate delta
delta := calcDelta(entry, statistics.lastSeen)
// apply delta to second/minute/hour/day
applyDelta(&statistics.perSecond, delta)
applyDelta(&statistics.perMinute, delta)
applyDelta(&statistics.perHour, delta)
applyDelta(&statistics.perDay, delta)
// save last seen
statistics.lastSeen = entry
}
func assignSnapToStats(stats *periodicStats) {
stats.totalRequests[0] = statistics.lastsnap.totalRequests
stats.filteredTotal[0] = statistics.lastsnap.filteredTotal
stats.filteredLists[0] = statistics.lastsnap.filteredLists
stats.filteredSafebrowsing[0] = statistics.lastsnap.filteredSafebrowsing
stats.filteredSafesearch[0] = statistics.lastsnap.filteredSafesearch
stats.filteredParental[0] = statistics.lastsnap.filteredParental
stats.processingTimeSum[0] = statistics.lastsnap.processingTimeSum
stats.processingTimeCount[0] = statistics.lastsnap.processingTimeCount
// calcDelta returns, for every key present in current, the difference between
// its current value and the previously seen value. Keys missing from seen
// read as zero, so a newly-appeared metric contributes its full value.
func calcDelta(current, seen statsEntry) statsEntry {
	result := statsEntry{}
	for key, now := range current {
		result[key] = now - seen[key]
	}
	return result
}
// applyDelta adds each delta value into slot 0 (the current period) of the
// matching entry ring. The ring value is an array, so it is copied out,
// updated, and stored back.
func applyDelta(current *periodicStats, delta statsEntry) {
	for key, d := range delta {
		ring := current.entries[key]
		ring[0] += d
		current.entries[key] = ring
	}
}