test(pkg/storage): add test for compression and rounding
@@ -80,12 +80,15 @@ func (cl *csvLogfile) Read() ([]*types.MeasuredValue, error) {
 		}
 
 		// Creation date
-		creationDate, err := time.Parse(format.TimeFormat, record[6])
-		if err != nil {
-			return nil, fmt.Errorf("%v %v: %v", errorParseTime, record[6], err)
+		if record[6] != "null" {
+			creationDate, err := time.Parse(format.TimeFormat, record[6])
+			if err != nil {
+				return nil, fmt.Errorf("%v %v: %v", errorParseTime, record[6], err)
+			}
+			measuredValue.CreationDate = &creationDate
 		}
-		measuredValue.CreationDate = creationDate
 
 		// Update date
 		if record[7] != "null" {
 			updateDate, err := time.Parse(format.TimeFormat, record[7])
 			if err != nil {
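The hunk above makes the creation date optional: a literal "null" in column 6 now leaves CreationDate unset, which is why the field is assigned through a pointer (&creationDate). The same parse-or-skip pattern, isolated as a minimal self-contained sketch; the timeFormat constant and the sample records are stand-ins, not the package's actual values:

    package main

    import (
        "fmt"
        "time"
    )

    // Assumption: stand-in for format.TimeFormat, which is not shown in this diff.
    const timeFormat = time.RFC3339

    // parseOptionalTime returns a nil pointer for the literal "null", mirroring
    // how the reader above leaves CreationDate unset for absent values.
    func parseOptionalTime(field string) (*time.Time, error) {
        if field == "null" {
            return nil, nil
        }
        t, err := time.Parse(timeFormat, field)
        if err != nil {
            return nil, fmt.Errorf("can not parse time %v: %v", field, err)
        }
        return &t, nil
    }

    func main() {
        for _, field := range []string{"null", "2019-06-05T10:15:00Z"} {
            t, err := parseOptionalTime(field)
            if err != nil {
                panic(err)
            }
            fmt.Println(field, "->", t) // a nil pointer prints as <nil>
        }
    }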
@@ -1,9 +1,7 @@
 package logfile
 
 import (
-	"math"
 	"path/filepath"
-	"sort"
 
 	"github.com/go-flucky/flucky/pkg/internal/format"
 	"github.com/go-flucky/flucky/pkg/types"
@@ -12,13 +10,7 @@ import (
 // var validUUID = regexp.MustCompile("^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[8|9|aA|bB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$")
 
 // Append adds an array of several measured values to a logfile
-func Append(logfile Logfile, compression bool, round float64, measuredValues []*types.MeasuredValue) error {
-
-	if round != 0 {
-		for _, measuredValue := range measuredValues {
-			measuredValue.Value = math.Round(measuredValue.Value/round) * round
-		}
-	}
+func Append(logfile Logfile, measuredValues []*types.MeasuredValue) error {
 
 	allMeasuredValues, err := logfile.Read()
 	if err != nil {
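The removed rounding step snaps each value to the nearest multiple of the step width round, e.g. round = 0.5 turns 21.37 into 21.5. Isolated as a runnable sketch (the sample values are made up):

    package main

    import (
        "fmt"
        "math"
    )

    // roundToStep snaps v to the nearest multiple of step, exactly as the
    // removed line in Append did: math.Round(v/step) * step.
    func roundToStep(v, step float64) float64 {
        return math.Round(v/step) * step
    }

    func main() {
        fmt.Println(roundToStep(21.37, 0.5))  // 21.5
        fmt.Println(roundToStep(21.37, 0.25)) // 21.25
    }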
@@ -27,10 +19,6 @@ func Append(logfile Logfile, compression bool, round float64, measuredValues []*
 
 	allMeasuredValues = append(allMeasuredValues, measuredValues...)
 
-	if compression {
-		allMeasuredValues = Compression(allMeasuredValues)
-	}
-
 	err = logfile.Write(allMeasuredValues)
 	if err != nil {
 		return err
@@ -40,61 +28,6 @@ func Append(logfile Logfile, compression bool, round float64, measuredValues []*
 	return nil
 }
 
-// Compression the measured values. The system checks whether the measured values
-// of the same type correspond to those of the predecessor. If this is the case,
-// the current value is discarded and the validity date of the previous value is
-// set to that of the current value. This means that no information is lost.
-// Only the validity period of the measured value is increased.
-func Compression(measuredValues []*types.MeasuredValue) []*types.MeasuredValue {
-	compressedMeasuredValues := make([]*types.MeasuredValue, 0)
-	lastMeasuredValuesBySensors := make(map[string]map[types.MeasuredValueType]*types.MeasuredValue, 0)
-
-	// Sort all measured values according to the start time of the validity date
-	// in order to successfully implement the subsequent compression.
-	sort.SliceStable(measuredValues, func(i int, j int) bool {
-		return measuredValues[i].FromDate.Before(measuredValues[j].TillDate)
-	})
-
-	now := format.FormatedTime()
-
-	for _, measuredValue := range measuredValues {
-		if _, ok := lastMeasuredValuesBySensors[measuredValue.SensorID]; !ok {
-			lastMeasuredValuesBySensors[measuredValue.SensorID] = make(map[types.MeasuredValueType]*types.MeasuredValue, 0)
-		}
-
-		if _, ok := lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType]; !ok {
-			lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType] = measuredValue
-			continue
-		}
-
-		if lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType].Value == measuredValue.Value {
-			lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType].TillDate = measuredValue.TillDate
-			lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType].UpdateDate = &now
-		} else if lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType].Value != measuredValue.Value {
-			compressedMeasuredValues = append(compressedMeasuredValues, lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType])
-			delete(lastMeasuredValuesBySensors[measuredValue.SensorID], measuredValue.ValueType)
-			lastMeasuredValuesBySensors[measuredValue.SensorID][measuredValue.ValueType] = measuredValue
-		}
-	}
-
-	// Copy all remaining entries from the map into the cache array
-	for _, lastMeasuredValuesBySensor := range lastMeasuredValuesBySensors {
-		for _, measuredValueType := range types.MeasuredValueTypes {
-			if measuredValue, ok := lastMeasuredValuesBySensor[measuredValueType]; ok {
-				compressedMeasuredValues = append(compressedMeasuredValues, measuredValue)
-			}
-		}
-	}
-
-	// Sort all measured values again to include the measured values from the
-	// cache.
-	sort.SliceStable(compressedMeasuredValues, func(i int, j int) bool {
-		return compressedMeasuredValues[i].FromDate.Before(compressedMeasuredValues[j].FromDate)
-	})
-
-	return compressedMeasuredValues
-}
-
 // New returns a log file with basic functions for reading and writing data. The
 // file extension of the logfile is taken into account to format the logfile
 // into the correct format.
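For reference, the behaviour the removed Compression doc comment describes, reduced to a single sensor and a single value type: a run of equal readings collapses into one entry whose validity window spans the whole run, so no information is lost and only the validity period grows. A self-contained sketch with made-up data and a stand-in struct, not the real types.MeasuredValue:

    package main

    import (
        "fmt"
        "time"
    )

    type measuredValue struct {
        Value    float64
        FromDate time.Time
        TillDate time.Time
    }

    // compress merges consecutive equal values by extending the predecessor's
    // TillDate, mirroring the removed Compression function for the simple case.
    func compress(values []*measuredValue) []*measuredValue {
        out := make([]*measuredValue, 0, len(values))
        var last *measuredValue
        for _, v := range values {
            if last != nil && last.Value == v.Value {
                last.TillDate = v.TillDate // extend validity, discard duplicate
                continue
            }
            last = v
            out = append(out, v)
        }
        return out
    }

    func main() {
        start := time.Date(2019, 6, 5, 10, 0, 0, 0, time.UTC)
        values := []*measuredValue{}
        for i, v := range []float64{21.5, 21.5, 21.5, 22.0} {
            t := start.Add(time.Duration(i) * time.Minute)
            values = append(values, &measuredValue{Value: v, FromDate: t, TillDate: t})
        }
        // Prints two entries: 21.5 valid 10:00-10:02, then 22.0 valid 10:03-10:03.
        for _, v := range compress(values) {
            fmt.Printf("%.1f %s - %s\n", v.Value, v.FromDate.Format("15:04"), v.TillDate.Format("15:04"))
        }
    }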
@@ -124,9 +57,11 @@ func New(logfile string) Logfile {
 }
 
 func writeCreationDate(measuredValues []*types.MeasuredValue) error {
-	now := format.FormatedTime()
 	for _, measuredValue := range measuredValues {
-		measuredValue.CreationDate = now
+		now := format.FormatedTime()
+		if measuredValue.CreationDate == nil {
+			measuredValue.CreationDate = &now
+		}
 	}
 	return nil
 }
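Two details of the writeCreationDate change above are easy to miss: the nil check preserves creation dates that are already set instead of overwriting them, and now is declared inside the loop so each value receives the address of a fresh variable rather than all sharing one pointer. A sketch with a stand-in struct (assumption: CreationDate is *time.Time, as the &now assignment implies):

    package main

    import (
        "fmt"
        "time"
    )

    type measuredValue struct {
        CreationDate *time.Time
    }

    func writeCreationDate(values []*measuredValue) {
        for _, v := range values {
            now := time.Now() // fresh variable per iteration, so &now is unique
            if v.CreationDate == nil {
                v.CreationDate = &now // only fill in missing dates
            }
        }
    }

    func main() {
        existing := time.Date(2019, 6, 5, 10, 0, 0, 0, time.UTC)
        values := []*measuredValue{{CreationDate: &existing}, {}}
        writeCreationDate(values)
        for _, v := range values {
            fmt.Println(v.CreationDate) // first keeps 2019-06-05, second gets now
        }
    }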
@@ -1 +0,0 @@
-package logfile_test