sungrow/main.go

/*
Copyright 2023 Josh Deprez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// The sungrow binary periodically reads inverter data from a sungrow inverter
// and exports the data as prometheus metrics.
package main

import (
	"context"
	"errors"
	"flag"
	"fmt"
	"log"
	"math/rand"
	"net/http"
	"strings"
	"sync"
	"time"

	"gitea.drjosh.dev/josh/sungrow/modbus"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)
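
// maxScrapeAge is the maximum staleness of the last successful scrape before a
// /metrics request triggers a fresh read of the inverter registers.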
const maxScrapeAge = 3 * time.Second

var (
	httpAddr       = flag.String("http-addr", ":9455", "Address to listen on")
	inverterAddrs  = flag.String("inverter-addrs", "rakmodule_00DBC1:502,192.168.86.6:502", "Comma-separated list of inverter addresses (modbus-tcp with 'encryption')")
	scrapeInterval = flag.Duration("scrape-interval", 15*time.Second, "Period of modbus scraping loop")

	promHandler = promhttp.Handler()

	// scrapeMu guards lastScrape and lastValues.
	scrapeMu   sync.RWMutex
	lastScrape time.Time
	lastValues = make(map[uint16]float64)

	scrapeCounter = promauto.NewCounter(prometheus.CounterOpts{
		Namespace: "sungrow",
		Subsystem: "scraper",
		Name:      "scrapes_total",
		Help:      "Number of successful scrapes of the inverter input registers",
	})
	scrapeStart = promauto.NewGauge(prometheus.GaugeOpts{
		Namespace: "sungrow",
		Subsystem: "scraper",
		Name:      "scrape_start",
		Help:      "Start time of the most recent scrape attempt",
	})
	scrapeEnd = promauto.NewGauge(prometheus.GaugeOpts{
		Namespace: "sungrow",
		Subsystem: "scraper",
		Name:      "scrape_end",
		Help:      "End time of the most recent successful scrape",
	})
	scrapeDuration = promauto.NewGauge(prometheus.GaugeOpts{
		Namespace: "sungrow",
		Subsystem: "scraper",
		Name:      "scrape_duration",
		Help:      "units:s",
	})
)
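
// init registers gauge funcs for the configured scrape interval and for the
// electricity tariff values (dailySupplyCharge, tariff93, and
// solarFeedInTariff appear to be defined elsewhere in this package).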
func init() {
	promauto.NewGaugeFunc(
		prometheus.GaugeOpts{
			Namespace: "sungrow",
			Subsystem: "scraper",
			Name:      "scrape_interval",
			Help:      "units:s",
		},
		func() float64 { return scrapeInterval.Seconds() },
	)
	promauto.NewGaugeFunc(
		prometheus.GaugeOpts{
			Namespace: "sungrow",
			Subsystem: "tariff",
			Name:      "daily_charge",
			Help:      "units:$",
		},
		func() float64 { return dailySupplyCharge },
	)
	promauto.NewGaugeFunc(
		prometheus.GaugeOpts{
			Namespace: "sungrow",
			Subsystem: "tariff",
			Name:      "import_tariff",
			Help:      "units:$",
		},
		func() float64 { return tariff93.pricePerKWh(time.Now()) },
	)
	promauto.NewGaugeFunc(
		prometheus.GaugeOpts{
			Namespace: "sungrow",
			Subsystem: "tariff",
			Name:      "export_tariff",
			Help:      "units:$",
		},
		func() float64 { return solarFeedInTariff.pricePerKWh(time.Now()) },
	)
}
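
// statusHandler answers all non-/metrics requests with a trivial status page,
// currently just the server's current time.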
func statusHandler(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "current time: %v\n", time.Now())
}
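
// dialInverter tries each address in -inverter-addrs in order and returns a
// connection to the first inverter that answers, or an error if none are
// reachable. (dialSungrow and sungrowConn appear to be defined elsewhere in
// this package.)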
func dialInverter() (*sungrowConn, error) {
	for _, addr := range strings.Split(*inverterAddrs, ",") {
		conn, err := dialSungrow(addr)
		if err != nil {
			log.Printf("Couldn't dial inverter: %v", err)
			continue
		}
		return conn, nil
	}
	return nil, fmt.Errorf("all addresses unreachable")
}
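
// readRegs reads qty input registers starting at protocol address start
// (register numbers start+1 through start+qty) and decodes every register
// known to sungrowInputRegs in that range into vals.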
// Called under scrapeMu.
func readRegs(vals map[uint16]float64, client modbus.Client, start, qty uint16) error {
	data, err := client.ReadInputRegisters(start, qty)
	if err != nil {
		return fmt.Errorf("read input registers %d-%d: %v", start+1, start+qty, err)
	}
	if len(data) != int(2*qty) {
		return fmt.Errorf("read input registers %d-%d: len(data) = %d != %d = 2*qty", start+1, start+qty, len(data), 2*qty)
	}
	for addr, reg := range sungrowInputRegs {
		if addr <= start || addr > start+qty {
			continue
		}
		// sungrowInputRegs appears to be keyed by 1-based register number
		// (protocol address + 1), hence the -1 here.
		val, err := reg.read(data[(addr-start-1)*2:])
		if err != nil {
			if !errors.Is(err, errSkippableRead) {
				return fmt.Errorf("parse input register data at %d: %v", addr, err)
			}
			// Skippable read errors fall back to the last scraped value.
			log.Printf("Couldn't parse input register data at %d, skipping: %v", addr, err)
			val = lastValues[addr]
		}
		//fmt.Printf("%s: %v %s\n", reg.name, val, reg.unit)
		vals[addr] = val
	}
	return nil
}
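
// scrape dials the inverter, reads each interesting input register range, and
// on success replaces lastValues and updates the scraper metrics.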
// Called under scrapeMu.
func scrape() error {
	sgc, err := dialInverter()
	if err != nil {
		return err
	}
	defer sgc.Close()

	handler := modbus.TCPHandlerFromConnection(sgc)
	handler.SlaveId = 0x01
	if err := handler.Connect(); err != nil {
		return err
	}
	defer handler.Close()
	client := modbus.NewClient(handler)

	vals := make(map[uint16]float64)
	start := time.Now()
	scrapeStart.SetToCurrentTime()

	// The interesting input registers live in a few disjoint ranges; each
	// range is one modbus read, issued in a random order each scrape.
	ranges := []struct{ start, qty uint16 }{
		{5000, 24}, {5030, 7}, {5048, 1},
		{5082, 18}, {5112, 1}, {5143, 6},
	}
	rand.Shuffle(len(ranges), func(i, j int) {
		ranges[i], ranges[j] = ranges[j], ranges[i]
	})
	for _, r := range ranges {
		if err := readRegs(vals, client, r.start, r.qty); err != nil {
			return err
		}
	}

	lastValues = vals
	scrapeEnd.SetToCurrentTime()
	lastScrape = time.Now()
	scrapeDuration.Set(time.Since(start).Seconds())
	scrapeCounter.Inc()
	return nil
}
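
// retries returns a channel that yields up to tries attempt numbers (0, 1, ...),
// sleeping for a random duration of up to base between attempts and multiplying
// base by mul after each sleep (jittered exponential backoff). The channel is
// closed after the last attempt or when ctx is done. Sketch of typical use
// (attempt is a placeholder; see metricsHandler below for the real call site):
//
//	for range retries(ctx, 4, 2*time.Second, 2) {
//		if err := attempt(); err != nil {
//			continue // try again
//		}
//		return // success
//	}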
func retries(ctx context.Context, tries int, base time.Duration, mul float64) <-chan int {
	ch := make(chan int)
	go func() {
		defer close(ch)
		i := 0
		for {
			select {
			case ch <- i:
				i++
				if i == tries {
					return
				}
				t := time.NewTimer(time.Duration(rand.Int63n(int64(base))))
				select {
				case <-t.C:
					// next iteration
				case <-ctx.Done():
					t.Stop()
					return
				}
				base = time.Duration(mul * float64(base))
			case <-ctx.Done():
				return
			}
		}
	}()
	return ch
}
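
// metricsHandler serves the Prometheus metrics. If the last successful modbus
// scrape is older than maxScrapeAge, it refreshes the data first, retrying a
// few times before giving up. A Prometheus scrape_config for this exporter
// might look like the following sketch (target host is hypothetical; the port
// is the -http-addr default):
//
//	scrape_configs:
//	  - job_name: sungrow
//	    static_configs:
//	      - targets: ["solar-pi:9455"]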
func metricsHandler(w http.ResponseWriter, r *http.Request) {
	// Always serve metrics, even if the refresh below fails. Deferred calls
	// run last-in-first-out, so scrapeMu is unlocked before promHandler runs
	// (the per-register GaugeFuncs need the read lock).
	defer promHandler.ServeHTTP(w, r)

	scrapeMu.Lock()
	defer scrapeMu.Unlock()
	if time.Since(lastScrape) <= maxScrapeAge {
		return
	}

	ctx, canc := context.WithCancel(context.Background())
	defer canc()
	for range retries(ctx, 4, 2*time.Second, 2) {
		if err := scrape(); err != nil {
			log.Printf("Scrape error: %v", err)
			continue
		}
		return
	}
	log.Fatal("Multiple scrape attempts failed, aborting entirely")
}
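
// main registers one GaugeFunc per register in sungrowInputRegs (exported as
// sungrow_inverter_<name>), checks that an inverter is reachable, and then
// serves HTTP. A typical invocation, using this file's flags (addresses are
// illustrative):
//
//	sungrow -http-addr=:9455 -inverter-addrs=192.168.86.6:502 -scrape-interval=15s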
func main() {
	flag.Parse()

	// These are GaugeFuncs to more closely align Prometheus scrape time with
	// the modbus scrape time.
	for addr, reg := range sungrowInputRegs {
		// Per-iteration copies, so the closure below sees this iteration's addr.
		addr, reg := addr, reg
		promauto.NewGaugeFunc(
			prometheus.GaugeOpts{
				Namespace: "sungrow",
				Subsystem: "inverter",
				Name:      reg.name,
				Help:      fmt.Sprintf("addr: %d, unit: %s", addr, reg.unit),
			},
			func() float64 {
				scrapeMu.RLock()
				defer scrapeMu.RUnlock()
				return lastValues[addr]
			},
		)
	}

	// Startup paranoia check: Is the inverter reachable?
	sgc, err := dialInverter()
	if err != nil {
		log.Fatal("Couldn't dial any addresses, aborting")
	}
	sgc.Close()

	// HTTP setup
	http.HandleFunc("/metrics", metricsHandler)
	http.HandleFunc("/", statusHandler)
	log.Fatalf("http.ListenAndServe: %v", http.ListenAndServe(*httpAddr, nil))
}