commit 2c4366ff1f61ff189866417878b2676dcd682179
Author: NiseVoid
Date:   Mon May 7 14:07:37 2018 +0200

    Initial commit

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..899ead2
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+**/model/tables.go
+bindata.go
diff --git a/db.go b/db.go
new file mode 100644
index 0000000..35fb976
--- /dev/null
+++ b/db.go
@@ -0,0 +1,25 @@
+package main
+
+import (
+	"database/sql"
+
+	"git.fuyu.moe/5GPowerQuality/parser/internal/migrations"
+	"git.ultraware.nl/NiseVoid/qb/driver/autoqb"
+
+	_ "github.com/lib/pq" // PostgreSQL driver
+)
+
+func initDB() {
+	d, err := sql.Open(`postgres`, `host=/tmp dbname=power_quality`)
+	if err != nil {
+		panic(err)
+	}
+
+	err = migrations.RunMigrations(d)
+	if err != nil {
+		panic(err)
+	}
+
+	db = autoqb.New(d)
+
+}
diff --git a/internal/migrations/files/0001.sql b/internal/migrations/files/0001.sql
new file mode 100644
index 0000000..baece72
--- /dev/null
+++ b/internal/migrations/files/0001.sql
@@ -0,0 +1,30 @@
+CREATE TABLE measurement (
+	time timestamptz NOT NULL,
+	c_gem_1 DOUBLE PRECISION NOT NULL,
+	c_gem_2 DOUBLE PRECISION NOT NULL,
+	c_gem_3 DOUBLE PRECISION NOT NULL,
+	ep_1 DOUBLE PRECISION NOT NULL,
+	ep_2 DOUBLE PRECISION NOT NULL,
+	ep_3 DOUBLE PRECISION NOT NULL,
+	i_gem_1 DOUBLE PRECISION NOT NULL,
+	i_gem_2 DOUBLE PRECISION NOT NULL,
+	i_gem_3 DOUBLE PRECISION NOT NULL,
+	i_max_1 DOUBLE PRECISION NOT NULL,
+	i_max_2 DOUBLE PRECISION NOT NULL,
+	i_max_3 DOUBLE PRECISION NOT NULL,
+	p_gem_1 DOUBLE PRECISION NOT NULL,
+	p_gem_2 DOUBLE PRECISION NOT NULL,
+	p_gem_3 DOUBLE PRECISION NOT NULL,
+	p_max_1 DOUBLE PRECISION NOT NULL,
+	p_max_2 DOUBLE PRECISION NOT NULL,
+	p_max_3 DOUBLE PRECISION NOT NULL,
+	s_gem_1 DOUBLE PRECISION NOT NULL,
+	s_gem_2 DOUBLE PRECISION NOT NULL,
+	s_gem_3 DOUBLE PRECISION NOT NULL,
+	s_max_1 DOUBLE PRECISION NOT NULL,
+	s_max_2 DOUBLE PRECISION NOT NULL,
+	s_max_3 DOUBLE PRECISION NOT NULL,
+	u_gem_1 DOUBLE PRECISION NOT NULL,
+	u_gem_2 DOUBLE PRECISION NOT NULL,
+	u_gem_3 DOUBLE PRECISION NOT NULL
+);
diff --git a/internal/migrations/migrations.go b/internal/migrations/migrations.go
new file mode 100644
index 0000000..92b9319
--- /dev/null
+++ b/internal/migrations/migrations.go
@@ -0,0 +1,15 @@
+package migrations
+
+import (
+	"database/sql"
+
+	"git.fuyu.moe/Fuyu/migrate"
+)
+
+//go:generate go-bindata -pkg migrations --prefix files -nomemcopy -nometadata files
+
+// RunMigrations runs all migrations
+func RunMigrations(db *sql.DB) error {
+	version := len(AssetNames())
+	return migrate.Migrate(db, version, migrate.Options{}, Asset)
+}
diff --git a/internal/model/db.json b/internal/model/db.json
new file mode 100644
index 0000000..ba3c843
--- /dev/null
+++ b/internal/model/db.json
@@ -0,0 +1,175 @@
+[
+	{
+		"name": "public.measurement",
+		"fields": [
+			{
+				"name": "c_gem_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "c_gem_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "c_gem_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "ep_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "ep_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "ep_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "i_gem_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "i_gem_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "i_gem_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "i_max_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "i_max_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "i_max_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "p_gem_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "p_gem_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "p_gem_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "p_max_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "p_max_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "p_max_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "s_gem_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "s_gem_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "s_gem_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "s_max_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "s_max_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "s_max_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "time",
+				"type": "timestamp with time zone",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "u_gem_1",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "u_gem_2",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			},
+			{
+				"name": "u_gem_3",
+				"type": "double precision",
+				"null": false,
+				"size": 8
+			}
+		]
+	}
+]
diff --git a/internal/model/model.go b/internal/model/model.go
new file mode 100644
index 0000000..80e7ea9
--- /dev/null
+++ b/internal/model/model.go
@@ -0,0 +1,3 @@
+package model
+
+//go:generate qb-generator db.json tables.go
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..d245225
--- /dev/null
+++ b/main.go
@@ -0,0 +1,132 @@
+package main
+
+import (
+	"database/sql"
+	"encoding/xml"
+	"fmt"
+	"net/http"
+	"strconv"
+	"time"
+
+	"git.fuyu.moe/5GPowerQuality/parser/internal/model"
+	"git.ultraware.nl/NiseVoid/qb/qbdb"
+	"git.ultraware.nl/NiseVoid/qb/qf"
+	"golang.org/x/net/html/charset"
+)
+
+var db *qbdb.DB
+
+func main() {
+	initDB()
+
+	startDate := GetStartDate()
+
+	for time.Now().After(startDate) {
+		start, end := startDate, startDate.AddDate(0, 0, 7)
+
+		data := GetFortopData(start, end)
+		ranges := map[string][]Range{}
+		for _, v := range data {
+			ranges[v.Naam] = v.Range
+		}
+
+		sets := Sets{}
+		for k, v := range ranges {
+			for _, r := range v {
+				date := time.Time(r.Date)
+				set, ok := sets[date]
+				if !ok {
+					sets[date] = Set{}
+					set = sets[date]
+				}
+
+				set[k] = r.Value
+			}
+		}
+
+		insertSets(sets)
+
+		startDate = end
+	}
+}
+
+// GetStartDate gets the last measurement date
+func GetStartDate() time.Time {
+	m := model.Measurement()
+	var t *time.Time
+
+	q := m.Select(qf.Max(m.Time))
+	err := db.QueryRow(q).Scan(&t)
+	if t == nil || err == sql.ErrNoRows {
+		return time.Date(2018, 1, 1, 0, 0, 0, 0, time.Local)
+	}
+	if err != nil {
+		panic(err)
+	}
+
+	*t = t.Add(time.Second)
+
+	return *t
+}
+
+func insertSets(sets Sets) {
+	if len(sets) == 0 {
+		return
+	}
+
+	m := model.Measurement()
+	q := m.Insert(m.Time,
+		m.CGem1, m.CGem2, m.CGem3,
+		m.Ep1, m.Ep2, m.Ep3,
+		m.IGem1, m.IGem2, m.IGem3,
+		m.IMax1, m.IMax2, m.IMax3,
+		m.PGem1, m.PGem2, m.PGem3,
+		m.PMax1, m.PMax2, m.PMax3,
+		m.SGem1, m.SGem2, m.SGem3,
+		m.SMax1, m.SMax2, m.SMax3,
+		m.UGem1, m.UGem2, m.UGem3,
+	)
+
+	for k, v := range sets {
+		q.Values(k,
+			v[`CGem1`], v[`CGem2`], v[`CGem3`],
+			v[`EP1`], v[`EP2`], v[`EP3`],
+			v[`IGem1`], v[`IGem2`], v[`IGem3`],
+			v[`IMax1`], v[`IMax2`], v[`IMax3`],
+			v[`PGem1`], v[`PGem2`], v[`PGem3`],
+			v[`PMax1`], v[`PMax2`], v[`PMax3`],
+			v[`SGem1`], v[`SGem2`], v[`SGem3`],
+			v[`SMax1`], v[`SMax2`], v[`SMax3`],
+			v[`UGem1`], v[`UGem2`], v[`UGem3`],
+		)
+	}
+
+	err := db.Exec(q)
+	if err != nil {
+		panic(err)
+	}
+}
+
+// GetFortopData retrieves measurement data from the Fortop energy portal for the given period
+func GetFortopData(startDate, endDate time.Time) []Meetwaarde {
+	url := `https://energy4all.energyportal.online/index?p5g=S_1_` + strconv.FormatInt(startDate.Unix(), 10) + `_` + strconv.FormatInt(endDate.Unix(), 10)
+	resp, err := http.Get(url)
+	if err != nil {
+		panic(err)
+	}
+	if resp.StatusCode != 200 {
+		fmt.Println(`Got status code:`, resp.StatusCode)
+		return nil
+	}
+
+	decoder := xml.NewDecoder(resp.Body)
+	decoder.CharsetReader = charset.NewReaderLabel // handle responses that declare a non-UTF-8 charset
+
+	var data FortopFormat
+	err = decoder.Decode(&data)
+	if err != nil {
+		panic(err)
+	}
+
+	return data.Trafo.Meter.Meetwaarde
+}
diff --git a/type.go b/type.go
new file mode 100644
index 0000000..7c7566c
--- /dev/null
+++ b/type.go
@@ -0,0 +1,56 @@
+package main
+
+import (
+	"encoding/xml"
+	"strconv"
+	"time"
+)
+
+// FortopFormat describes the XML document returned by the Fortop portal
+type FortopFormat struct {
+	XMLName xml.Name `xml:"Trafo-Details"`
+	Trafo   struct {
+		TrafoNummer int `xml:"trafonummer"`
+		Meter       struct {
+			MeterID    int           `xml:"meter-id"`
+			StartDate  UnixTimestamp `xml:"startdate"`
+			EndDate    int           `xml:"enddate"`
+			Meetwaarde []Meetwaarde  `xml:"meetwaarde"`
+		} `xml:"meter"`
+	} `xml:"trafo"`
+}
+
+// Meetwaarde holds the part of the response that is actually used
+type Meetwaarde struct {
+	Naam  string  `xml:"naam"`
+	Range []Range `xml:"range"`
+}
+
+// Range is a single data point: a timestamp and a value
+type Range struct {
+	Date  UnixTimestamp `xml:"date"`
+	Value float64       `xml:"value"`
+}
+
+// UnixTimestamp is a time.Time that can parse Unix timestamps
+type UnixTimestamp time.Time
+
+// UnmarshalText implements encoding.TextUnmarshaler
+func (t *UnixTimestamp) UnmarshalText(b []byte) error {
+	i, err := strconv.ParseInt(string(b), 10, 64)
+	if err != nil {
+		return err
+	}
+	*t = UnixTimestamp(time.Unix(i, 0))
+	return nil
+}
+
+func (t UnixTimestamp) String() string {
+	return time.Time(t).Format(`2006-01-02 15:04`)
+}
+
+// Sets are multiple sets of data, keyed by measurement time
+type Sets map[time.Time]Set
+
+// Set maps measurement names to their values
+type Set map[string]float64
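
Not part of the commit above: a minimal sketch, for reference, of how the type.go structs decode a portal response. It assumes a hypothetical extra file placed next to type.go in package main, and the sample payload below is invented from the struct tags, so the real portal response may contain more elements and different values.

package main

import (
	"encoding/xml"
	"fmt"
	"strings"
	"time"
)

// decodeSample decodes an invented payload shaped after the xml tags in type.go
// and prints the parsed measurement series.
func decodeSample() {
	// Invented sample; element names follow the struct tags on FortopFormat.
	const sample = `<?xml version="1.0" encoding="UTF-8"?>
<Trafo-Details>
  <trafo>
    <trafonummer>1</trafonummer>
    <meter>
      <meter-id>1</meter-id>
      <startdate>1514764800</startdate>
      <enddate>1515369600</enddate>
      <meetwaarde>
        <naam>IGem1</naam>
        <range>
          <date>1514764800</date>
          <value>12.5</value>
        </range>
      </meetwaarde>
    </meter>
  </trafo>
</Trafo-Details>`

	var data FortopFormat
	if err := xml.NewDecoder(strings.NewReader(sample)).Decode(&data); err != nil {
		panic(err)
	}

	for _, m := range data.Trafo.Meter.Meetwaarde {
		for _, r := range m.Range {
			fmt.Println(m.Naam, time.Time(r.Date).UTC(), r.Value)
		}
	}
}

The decoder calls UnixTimestamp.UnmarshalText for the startdate and date elements, which is how the epoch seconds in the feed end up as time.Time values.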