Compare commits
182 Commits
main...develop-an
| SHA1 | Author | Date | |
|---|---|---|---|
| bc47fb5379 | |||
| 6d276b1760 | |||
| 2c2d5232f8 | |||
| 19ab65dec6 | |||
| 8ca1435591 | |||
| b210ebc075 | |||
| 13c181f5f7 | |||
| da75f4a93e | |||
| 376a36df22 | |||
| eb49f3c313 | |||
| 17ed76eb84 | |||
| ae929bcb0d | |||
| 4efc4cc3ce | |||
| c420608387 | |||
| 2efe4c6c66 | |||
| 3161a89857 | |||
| e2878477de | |||
| f3dabb887e | |||
| 36ff64fd96 | |||
| ae88d465ee | |||
| f974fed489 | |||
| f42ef0446a | |||
| 97c0df46bf | |||
| 1cc2d5f519 | |||
| de18d4f297 | |||
| 8123ecb5c8 | |||
| 5ec67da170 | |||
| f49aa3ba3a | |||
| 584e9218b5 | |||
| 13d24a1322 | |||
| 9b40bde332 | |||
| 85c7691d0b | |||
| 4a73a41a6c | |||
| 34b7bf3ff2 | |||
| 54fa9d6186 | |||
| f4ba47d9f0 | |||
| d879c76d07 | |||
| 59472de103 | |||
| dab7b06638 | |||
| dd3912545d | |||
| 573ef9ef3e | |||
| edf3e93eef | |||
| d88f64a87b | |||
| ca914355ba | |||
| abb45a13ec | |||
| b8530cb614 | |||
| 299932f01a | |||
| 65f95662ca | |||
| 19ea80614c | |||
| d1be656e13 | |||
| 7bc2337549 | |||
| 2ee30db410 | |||
| d78bfd381b | |||
| 0caa1da229 | |||
| 582270ce95 | |||
| 35ba93a435 | |||
| bc7443ca8b | |||
| 93a94c3149 | |||
| 7b3ce46f04 | |||
| 08fa1e8a04 | |||
| 0fd915a2ee | |||
| 229bbe76e8 | |||
| 4f68fdc28e | |||
| cfa69d4469 | |||
| c421aed925 | |||
| e8fcd550c1 | |||
| 5b7ec80473 | |||
| 0b5b26d5b0 | |||
| b0086a984f | |||
| 2556ccf351 | |||
| 050e189442 | |||
| 8797af23b5 | |||
| cbeb623f20 | |||
| 0398b82675 | |||
| 6f3dd6a3d0 | |||
| fef1825cb4 | |||
| 4ca575d86c | |||
| dc03e83562 | |||
| 28ce15ce13 | |||
| 9aaff59042 | |||
| 4696a46c8c | |||
| 63e484870d | |||
| 0b0512f5b2 | |||
| aa53a21685 | |||
| 9f960c6411 | |||
| 13b4117d75 | |||
| 5d8202311f | |||
| 0449971bcb | |||
| d0f96710e0 | |||
| b7183812fe | |||
| 504e39f0a5 | |||
| 9604c62f4c | |||
| ef1d2f57e1 | |||
| e41a242223 | |||
| c14759933f | |||
| a127ddfeba | |||
| f1e54aab9f | |||
| 5d6e967794 | |||
| a2a7cfd744 | |||
| 2085fd9a31 | |||
| 12b2ad5ace | |||
| e6f9d500ea | |||
| 11e5c73275 | |||
| da67660fe7 | |||
| a03c60469f | |||
| 2c7f9a0f47 | |||
| cfb0bca723 | |||
| 0da2c838c2 | |||
| 317e12900a | |||
| df7358530f | |||
| 6bc0610c2d | |||
| 4fa9822405 | |||
| 448b13c2f6 | |||
| ff9ab1f6c2 | |||
| d2a73a7dc3 | |||
| 93533aa76c | |||
| a612b511b2 | |||
| a5ddaea5a8 | |||
| 87ff8f44d7 | |||
| 0a5a6ec4e2 | |||
| b37a2801cc | |||
| 9cd26d3df3 | |||
| d26c2c025a | |||
| c81cd572d4 | |||
| 4c0f0da515 | |||
| 781a93cefc | |||
| 7e24cc52c9 | |||
| 3799b9fac8 | |||
| 701292c54b | |||
| d4fb3f5986 | |||
| 3b3cfe8a49 | |||
| 1ad2eb6e60 | |||
| eeeffa3e95 | |||
| 3e50260c51 | |||
| f969c2fe0f | |||
| 77d85816bd | |||
| 9f3331a09f | |||
| a5d382493a | |||
| f5174fe156 | |||
| 8fbdcb1e5b | |||
| 29a3e9305b | |||
| 67ba5cf21c | |||
| 7f4b8bb18b | |||
| 0444df3b4c | |||
| 9e1c4979c0 | |||
| 7ca0198b33 | |||
| 75cb5722f8 | |||
| bc8027a3d8 | |||
| 1defe32470 | |||
| 24dca2f489 | |||
| 6fb4655a15 | |||
| ac7c699530 | |||
| 85cca73799 | |||
| 4acb2b62ca | |||
| 91c881d066 | |||
| 753d4dcbc7 | |||
| e4b1c19064 | |||
| 6936734f7e | |||
| 337ee06caf | |||
| 3152c6bb14 | |||
| cc5c607457 | |||
| c55f089247 | |||
| 03d42ac3eb | |||
| a206138362 | |||
| f8fe5bd1e1 | |||
| c931eb6af5 | |||
| 2e62ce0501 | |||
| 8cfc1c0563 | |||
| ecf3a153f0 | |||
| 1c88bde080 | |||
| f2deb5512f | |||
| 26c13351d7 | |||
| 89fc15b5c4 | |||
| 26b261a663 | |||
| 419a5c940e | |||
| 6e643497a1 | |||
| bc3290c501 | |||
| cb1728ef00 | |||
| 523f489e11 | |||
| a6fa18f5cc | |||
| 8cbde597fd | |||
| ff2c0d6919 |
.gitignore (vendored, new file, +5)
@@ -0,0 +1,5 @@
/log/*
/rain_data/*
export_data/*
/.gopath/*
/tech/*
.idea/.gitignore (generated, vendored, new file, +8)
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
cmd/caiyun_parse/main.go (new file, +279)
@@ -0,0 +1,279 @@
package main

import (
	"bufio"
	"encoding/csv"
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"os"
	"sort"
	"strings"
	"time"
)

// Minimal Caiyun hourly model focusing on required fields.
type caiyunHourly struct {
	Status string `json:"status"`
	Result struct {
		Hourly struct {
			Status string `json:"status"`
			Temperature []valTime `json:"temperature"`
			Humidity []valTime `json:"humidity"`
			Visibility []valTime `json:"visibility"`
			Dswrf []valTime `json:"dswrf"`
			Pressure []valTime `json:"pressure"`
			Wind []windTime `json:"wind"`
		} `json:"hourly"`
	} `json:"result"`
}

type valTime struct {
	Datetime string `json:"datetime"`
	Value float64 `json:"value"`
}

type windTime struct {
	Datetime string `json:"datetime"`
	Speed float64 `json:"speed"`
	Direction float64 `json:"direction"`
}

type row struct {
	t time.Time
	temperature *float64
	humidity *float64
	windSpeed *float64
	windDir *float64
	pressure *float64
	visibility *float64
	dswrf *float64
}

func main() {
	var file string
	var tz string
	var mode string
	var alias string
	var lat float64
	var lon float64
	var sqlTable string
	flag.StringVar(&file, "file", "", "Path to Caiyun hourly JSON; if empty, read from stdin")
	flag.StringVar(&tz, "tz", "Asia/Shanghai", "Timezone for output timestamps")
	flag.StringVar(&mode, "mode", "csv", "Output mode: csv | sql")
	flag.StringVar(&alias, "alias", "", "Station alias for SQL output (required for mode=sql)")
	flag.Float64Var(&lat, "lat", 0, "Latitude for SQL output")
	flag.Float64Var(&lon, "lon", 0, "Longitude for SQL output")
	flag.StringVar(&sqlTable, "sqltable", "radar_weather", "SQL table name for inserts")
	flag.Parse()

	var r io.Reader
	if file == "" {
		r = bufio.NewReader(os.Stdin)
	} else {
		f, err := os.Open(file)
		if err != nil {
			fatalf("open file: %v", err)
		}
		defer f.Close()
		r = f
	}

	var payload caiyunHourly
	dec := json.NewDecoder(r)
	if err := dec.Decode(&payload); err != nil {
		fatalf("decode json: %v", err)
	}
	if strings.ToLower(payload.Status) != "ok" && payload.Status != "" {
		fatalf("top-level status not ok: %s", payload.Status)
	}

	loc, _ := time.LoadLocation(tz)
	if loc == nil {
		loc = time.FixedZone("CST", 8*3600)
	}

	// Merge series by timestamp
	rowsByTime := map[time.Time]*row{}
	upsert := func(ts string) *row {
		t, ok := parseTime(ts, loc)
		if !ok {
			fatalf("parse time failed: %s", ts)
		}
		if v, exists := rowsByTime[t]; exists {
			return v
		}
		nr := &row{t: t}
		rowsByTime[t] = nr
		return nr
	}

	for _, v := range payload.Result.Hourly.Temperature {
		rr := upsert(v.Datetime)
		rr.temperature = ptr(v.Value)
	}
	for _, v := range payload.Result.Hourly.Humidity {
		rr := upsert(v.Datetime)
		rr.humidity = ptr(v.Value)
	}
	for _, v := range payload.Result.Hourly.Visibility {
		rr := upsert(v.Datetime)
		rr.visibility = ptr(v.Value)
	}
	for _, v := range payload.Result.Hourly.Dswrf {
		rr := upsert(v.Datetime)
		rr.dswrf = ptr(v.Value)
	}
	for _, v := range payload.Result.Hourly.Pressure {
		rr := upsert(v.Datetime)
		rr.pressure = ptr(v.Value)
	}
	for _, w := range payload.Result.Hourly.Wind {
		rr := upsert(w.Datetime)
		rr.windSpeed = ptr(w.Speed)
		rr.windDir = ptr(w.Direction)
	}

	// Sort by time
	times := make([]time.Time, 0, len(rowsByTime))
	for t := range rowsByTime {
		times = append(times, t)
	}
	sort.Slice(times, func(i, j int) bool { return times[i].Before(times[j]) })

	if mode == "sql" {
		if alias == "" {
			fatalf("-alias is required for mode=sql")
		}
		// Emit upserts into radar_weather; convert wind_speed km/h -> m/s, keep humidity as ratio (0..1)
		fmt.Println("BEGIN;")
		for _, t := range times {
			rr := rowsByTime[t]
			// wind speed conversion
			var ws string
			if rr.windSpeed != nil {
				v := *rr.windSpeed / 3.6
				ws = trimZeros(fmt.Sprintf("%.6f", v))
			}
			// Build SQL with NULLs where missing
			q := fmt.Sprintf(
				"INSERT INTO %s (alias, lat, lon, dt, temperature, humidity, cloudrate, visibility, dswrf, wind_speed, wind_direction, pressure) "+
					"VALUES (%s, %s, %s, %s, %s, %s, NULL, %s, %s, %s, %s, %s) "+
					"ON CONFLICT (alias, dt) DO UPDATE SET "+
					"lat=EXCLUDED.lat, lon=EXCLUDED.lon, temperature=EXCLUDED.temperature, humidity=EXCLUDED.humidity, "+
					"visibility=EXCLUDED.visibility, dswrf=EXCLUDED.dswrf, wind_speed=EXCLUDED.wind_speed, "+
					"wind_direction=EXCLUDED.wind_direction, pressure=EXCLUDED.pressure;",
				sqlTable,
				sqlQuote(alias),
				sqlNum(lat),
				sqlNum(lon),
				sqlTime(t),
				sqlOpt(rr.temperature),
				sqlOpt(rr.humidity),
				sqlOpt(rr.visibility),
				sqlOpt(rr.dswrf),
				sqlStrOrNull(ws),
				sqlOpt(rr.windDir),
				sqlOpt(rr.pressure),
			)
			fmt.Println(q)
		}
		fmt.Println("COMMIT;")
		return
	}

	// CSV output
	w := csv.NewWriter(os.Stdout)
	_ = w.Write([]string{"datetime", "temperature", "humidity", "wind_speed", "wind_direction", "pressure", "visibility", "dswrf"})
	for _, t := range times {
		rr := rowsByTime[t]
		var ws string
		if rr.windSpeed != nil {
			v := *rr.windSpeed / 3.6
			ws = trimZeros(fmt.Sprintf("%.6f", v))
		}
		rec := []string{
			t.Format("2006-01-02 15:04:05"),
			optf(rr.temperature),
			optf(rr.humidity),
			ws,
			optf(rr.windDir),
			optf(rr.pressure),
			optf(rr.visibility),
			optf(rr.dswrf),
		}
		_ = w.Write(rec)
	}
	w.Flush()
	if err := w.Error(); err != nil {
		fatalf("write csv: %v", err)
	}
}

func ptr(f float64) *float64 { return &f }

func optf(p *float64) string {
	if p == nil {
		return ""
	}
	// Trim trailing zeros via fmt
	return trimZeros(fmt.Sprintf("%.6f", *p))
}

func trimZeros(s string) string {
	if !strings.Contains(s, ".") {
		return s
	}
	s = strings.TrimRight(s, "0")
	s = strings.TrimRight(s, ".")
	return s
}

// parseTime attempts RFC3339 and common Caiyun formats without seconds.
func parseTime(s string, loc *time.Location) (time.Time, bool) {
	// Try RFC3339 first
	if t, err := time.Parse(time.RFC3339, s); err == nil {
		return t.In(loc), true
	}
	// Try without seconds, with offset, e.g. 2006-01-02T15:04+08:00
	if t, err := time.Parse("2006-01-02T15:04-07:00", s); err == nil {
		return t.In(loc), true
	}
	// Try without offset (assume loc)
	if t, err := time.ParseInLocation("2006-01-02 15:04", s, loc); err == nil {
		return t.In(loc), true
	}
	return time.Time{}, false
}

func fatalf(format string, args ...any) {
	fmt.Fprintf(os.Stderr, format+"\n", args...)
	os.Exit(1)
}

func sqlQuote(s string) string {
	return "'" + strings.ReplaceAll(s, "'", "''") + "'"
}

func sqlNum(f float64) string {
	return trimZeros(fmt.Sprintf("%.8f", f))
}

func sqlTime(t time.Time) string {
	return sqlQuote(t.Format("2006-01-02 15:04:05"))
}

func sqlOpt(p *float64) string {
	if p == nil {
		return "NULL"
	}
	return trimZeros(fmt.Sprintf("%.6f", *p))
}

func sqlStrOrNull(s string) string {
	if s == "" {
		return "NULL"
	}
	return s
}
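In SQL mode the tool converts Caiyun wind speed from km/h to m/s and writes NULL for any missing series value. The standalone sketch below is not part of the diff; it isolates that conversion and the NULL handling using the same trimZeros/sqlOpt behaviour shown above, with a made-up sample value and under the assumption that Caiyun reports wind speed in km/h.

package main

import (
	"fmt"
	"strings"
)

// kmhToMS converts a wind speed in km/h to m/s, matching the
// *rr.windSpeed / 3.6 step in cmd/caiyun_parse/main.go.
func kmhToMS(kmh float64) float64 { return kmh / 3.6 }

// trimZeros mirrors the helper above: drop trailing zeros and a dangling dot.
func trimZeros(s string) string {
	if !strings.Contains(s, ".") {
		return s
	}
	return strings.TrimRight(strings.TrimRight(s, "0"), ".")
}

// sqlOpt mirrors the NULL handling: a missing value becomes SQL NULL.
func sqlOpt(p *float64) string {
	if p == nil {
		return "NULL"
	}
	return trimZeros(fmt.Sprintf("%.6f", *p))
}

func main() {
	ws := 18.0 // hypothetical km/h value from a Caiyun payload
	fmt.Println(trimZeros(fmt.Sprintf("%.6f", kmhToMS(ws)))) // "5"
	var missing *float64
	fmt.Println(sqlOpt(missing)) // "NULL"
}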
cmd/imdroidmix/main.go (new file, +1419)
File diff suppressed because it is too large
cmd/radar_export_csv/main.go (new file, +448)
@@ -0,0 +1,448 @@
package main

import (
	"context"
	"database/sql"
	"encoding/binary"
	"encoding/csv"
	"errors"
	"flag"
	"fmt"
	"log"
	"math"
	"os"
	"strings"
	"time"
	"weatherstation/internal/database"
)

type stationInfo struct {
	ID string
	Alias string
	Lat float64
	Lon float64
	Z int
	Y int
	X int
}

type tileRec struct {
	DT time.Time
	Width, Height int
	West, South float64
	East, North float64
	ResDeg float64
	Data []byte
}

func main() {
	var stationID string
	var startStr string
	var endStr string
	var outPath string
	var verbose bool

	flag.StringVar(&stationID, "station_id", "", "站点ID(留空表示全部WH65LP且有经纬度的站)")
	flag.StringVar(&startStr, "start", "", "起始时间(YYYY-MM-DD HH:MM:SS,CST)")
	flag.StringVar(&endStr, "end", "", "结束时间(YYYY-MM-DD HH:MM:SS,CST)")
	flag.StringVar(&outPath, "out", "radar_stats.csv", "输出CSV文件路径")
	flag.BoolVar(&verbose, "info", false, "输出详细过程信息")
	flag.Parse()

	if strings.TrimSpace(startStr) == "" || strings.TrimSpace(endStr) == "" {
		log.Fatalln("必须提供 --start 与 --end,格式 YYYY-MM-DD HH:MM:SS")
	}
	loc, _ := time.LoadLocation("Asia/Shanghai")
	if loc == nil {
		loc = time.FixedZone("CST", 8*3600)
	}
	startT, err := time.ParseInLocation("2006-01-02 15:04:05", startStr, loc)
	if err != nil {
		log.Fatalf("解析 start 失败: %v", err)
	}
	endT, err := time.ParseInLocation("2006-01-02 15:04:05", endStr, loc)
	if err != nil {
		log.Fatalf("解析 end 失败: %v", err)
	}
	if !endT.After(startT) {
		log.Fatalln("结束时间必须大于起始时间")
	}

	// Initialize the database
	_ = database.GetDB()
	defer database.Close()

	// Fetch the station list
	stations, err := listStations(database.GetDB(), stationID)
	if err != nil {
		log.Fatalf("查询站点失败: %v", err)
	}
	if len(stations) == 0 {
		log.Fatalln("没有符合条件的站点")
	}
	if verbose {
		log.Printf("站点数量: %d", len(stations))
		for _, s := range stations {
			log.Printf("站点: id=%s alias=%s lat=%.5f lon=%.5f z/y/x=%d/%d/%d", s.ID, s.Alias, s.Lat, s.Lon, s.Z, s.Y, s.X)
		}
	}

	// Create the CSV output
	f, err := os.Create(outPath)
	if err != nil {
		log.Fatalf("创建输出文件失败: %v", err)
	}
	defer f.Close()
	w := csv.NewWriter(f)
	defer w.Flush()

	header := []string{
		"station_id", "station_alias", "dt", "lat", "lon", "wind_speed_ms", "wind_dir_deg",
		"sector_ge40_cnt", "sector_ge40_sum", "sector_ge30_cnt", "sector_ge30_sum",
		"circle_ge40_cnt", "circle_ge40_sum", "circle_ge30_cnt", "circle_ge30_sum",
		"rs485_rain_total_mm",
	}
	if err := w.Write(header); err != nil {
		log.Fatalf("写入CSV表头失败: %v", err)
	}

	ctx := context.Background()
	totalRows := 0
	var totalTiles, skipNoZYX, skipNoWind, skipDecode int
	for _, s := range stations {
		if s.Z == 0 && s.Y == 0 && s.X == 0 {
			log.Printf("跳过站点 %s(无z/y/x映射)", s.ID)
			skipNoZYX++
			continue
		}
		tiles, err := listTiles(ctx, database.GetDB(), s.Z, s.Y, s.X, startT, endT)
		if err != nil {
			log.Printf("查询瓦片失败 station=%s: %v", s.ID, err)
			continue
		}
		totalTiles += len(tiles)
		if verbose {
			log.Printf("站点 %s 瓦片数量: %d", s.ID, len(tiles))
		}
		if len(tiles) == 0 {
			log.Printf("站点 %s 在范围内无瓦片", s.ID)
		}

		for _, t := range tiles {
			// Floor the tile time to its 10-minute bucket
			bucket := bucket10(t.DT, loc)
			// NOTE: switch to matching radar_weather.alias by station_id if needed
			windSpeed, windDir, ok, err := loadWindAt(database.GetDB(), s.ID, bucket)
			if err != nil {
				log.Printf("读取风失败 %s @%s: %v", s.ID, t.DT.Format(time.RFC3339), err)
				continue
			}
			if !ok { // no wind record: skip this time step
				skipNoWind++
				if verbose {
					log.Printf("跳过: %s 瓦片@%s(桶=%s)在 radar_weather(alias=%s) 无记录", s.ID, t.DT.In(loc).Format("2006-01-02 15:04:05"), bucket.Format("2006-01-02 15:04:05"), s.ID)
				}
				continue
			}

			// Decode the dBZ grid
			vals, xs, ys, err := decodeTile(t)
			if err != nil {
				log.Printf("解码瓦片失败 %s @%s: %v", s.ID, t.DT.Format(time.RFC3339), err)
				skipDecode++
				continue
			}

			// Compute statistics
			sec40Cnt, sec40Sum, sec30Cnt, sec30Sum,
				cir40Cnt, cir40Sum, cir30Cnt, cir30Sum := computeStats(vals, xs, ys, s.Lat, s.Lon, windSpeed, windDir)

			// Nearest cumulative rainfall record
			rainTotal, rainOK := loadNearestRain(database.GetDB(), s.ID, t.DT)
			if verbose {
				log.Printf("写出: %s dt=%s wind=%.3f m/s %.1f° 扇形(>=40:%d/%.1f >=30:%d/%.1f) 圆形(>=40:%d/%.1f >=30:%d/%.1f) rain_total=%v(%.3f)",
					s.ID,
					t.DT.In(loc).Format("2006-01-02 15:04:05"),
					windSpeed, windDir,
					sec40Cnt, sec40Sum, sec30Cnt, sec30Sum,
					cir40Cnt, cir40Sum, cir30Cnt, cir30Sum,
					rainOK, rainTotal,
				)
			}

			rec := []string{
				s.ID,
				s.Alias,
				t.DT.In(loc).Format("2006-01-02 15:04:05"),
				fmt.Sprintf("%.6f", s.Lat),
				fmt.Sprintf("%.6f", s.Lon),
				fmt.Sprintf("%.3f", windSpeed),
				fmt.Sprintf("%.2f", windDir),
				fmt.Sprintf("%d", sec40Cnt),
				fmt.Sprintf("%.1f", sec40Sum),
				fmt.Sprintf("%d", sec30Cnt),
				fmt.Sprintf("%.1f", sec30Sum),
				fmt.Sprintf("%d", cir40Cnt),
				fmt.Sprintf("%.1f", cir40Sum),
				fmt.Sprintf("%d", cir30Cnt),
				fmt.Sprintf("%.1f", cir30Sum),
				fmt.Sprintf("%.3f", rainTotal),
			}
			if err := w.Write(rec); err != nil {
				log.Printf("写入CSV失败: %v", err)
			}
			totalRows++
		}
	}
	w.Flush()
	if err := w.Error(); err != nil {
		log.Fatalf("写入CSV失败: %v", err)
	}
	if verbose {
		log.Printf("汇总: 站点数=%d 瓦片总数=%d 跳过(无z/y/x)=%d 跳过(无风)=%d 跳过(解码失败)=%d", len(stations), totalTiles, skipNoZYX, skipNoWind, skipDecode)
	}
	log.Printf("完成,输出 %d 行到 %s", totalRows, outPath)
}

func listStations(db *sql.DB, stationID string) ([]stationInfo, error) {
	// Same filter as the frontend: device_type='WH65LP' with non-null, non-zero lat/lon
	if strings.TrimSpace(stationID) != "" {
		const q = `
SELECT station_id,
       CASE WHEN COALESCE(station_alias,'')='' THEN station_id ELSE station_alias END AS alias,
       latitude, longitude,
       COALESCE(z,0), COALESCE(y,0), COALESCE(x,0)
FROM stations
WHERE device_type='WH65LP' AND station_id=$1
  AND latitude IS NOT NULL AND longitude IS NOT NULL
  AND latitude<>0 AND longitude<>0`
		var s stationInfo
		err := db.QueryRow(q, stationID).Scan(&s.ID, &s.Alias, &s.Lat, &s.Lon, &s.Z, &s.Y, &s.X)
		if err != nil {
			if errors.Is(err, sql.ErrNoRows) {
				return nil, nil
			}
			return nil, err
		}
		return []stationInfo{s}, nil
	}
	const qAll = `
SELECT station_id,
       CASE WHEN COALESCE(station_alias,'')='' THEN station_id ELSE station_alias END AS alias,
       latitude, longitude,
       COALESCE(z,0), COALESCE(y,0), COALESCE(x,0)
FROM stations
WHERE device_type='WH65LP'
  AND latitude IS NOT NULL AND longitude IS NOT NULL
  AND latitude<>0 AND longitude<>0
ORDER BY station_id`
	rows, err := db.Query(qAll)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var out []stationInfo
	for rows.Next() {
		var s stationInfo
		if err := rows.Scan(&s.ID, &s.Alias, &s.Lat, &s.Lon, &s.Z, &s.Y, &s.X); err == nil {
			out = append(out, s)
		}
	}
	return out, nil
}

func listTiles(ctx context.Context, db *sql.DB, z, y, x int, from, to time.Time) ([]tileRec, error) {
	const q = `
SELECT dt, width, height, west, south, east, north, res_deg, data
FROM radar_tiles
WHERE z=$1 AND y=$2 AND x=$3 AND dt BETWEEN $4 AND $5
ORDER BY dt ASC`
	rows, err := db.QueryContext(ctx, q, z, y, x, from, to)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var out []tileRec
	for rows.Next() {
		var r tileRec
		if err := rows.Scan(&r.DT, &r.Width, &r.Height, &r.West, &r.South, &r.East, &r.North, &r.ResDeg, &r.Data); err == nil {
			out = append(out, r)
		}
	}
	return out, nil
}

func bucket10(t time.Time, loc *time.Location) time.Time {
	tt := t.In(loc)
	m := (tt.Minute() / 10) * 10
	return time.Date(tt.Year(), tt.Month(), tt.Day(), tt.Hour(), m, 0, 0, loc)
}

// loadWindAt matches radar_weather exactly by alias; this exporter passes station_id as the alias argument as needed.
func loadWindAt(db *sql.DB, alias string, dt time.Time) (speedMS float64, dirDeg float64, ok bool, err error) {
	const q = `
SELECT wind_speed, wind_direction
FROM radar_weather
WHERE alias=$1 AND dt=$2
LIMIT 1`
	var s, d sql.NullFloat64
	err = db.QueryRow(q, alias, dt).Scan(&s, &d)
	if err == sql.ErrNoRows {
		return 0, 0, false, nil
	}
	if err != nil {
		return 0, 0, false, err
	}
	if !s.Valid || !d.Valid {
		return 0, 0, false, nil
	}
	return s.Float64, d.Float64, true, nil
}

func loadNearestRain(db *sql.DB, stationID string, dt time.Time) (rainTotal float64, ok bool) {
	// Take the nearest cumulative rainfall record (mm); return 0,false if none exists
	const q = `
SELECT rainfall
FROM rs485_weather_data
WHERE station_id=$1
ORDER BY ABS(EXTRACT(EPOCH FROM (timestamp - $2))) ASC
LIMIT 1`
	var r sql.NullFloat64
	if err := db.QueryRow(q, stationID, dt).Scan(&r); err != nil {
		return 0, false
	}
	if !r.Valid {
		return 0, false
	}
	return r.Float64, true
}

func decodeTile(t tileRec) (vals [][]*float64, xs []float64, ys []float64, err error) {
	w, h := t.Width, t.Height
	if w <= 0 || h <= 0 {
		return nil, nil, nil, fmt.Errorf("非法尺寸")
	}
	if len(t.Data) < w*h*2 {
		return nil, nil, nil, fmt.Errorf("数据长度不足")
	}
	xs = make([]float64, w)
	for c := 0; c < w; c++ {
		xs[c] = t.West + (float64(c)+0.5)*t.ResDeg
	}
	ys = make([]float64, h)
	for r := 0; r < h; r++ {
		ys[r] = t.South + (float64(r)+0.5)*t.ResDeg
	}
	vals = make([][]*float64, h)
	off := 0
	for r := 0; r < h; r++ {
		row := make([]*float64, w)
		for c := 0; c < w; c++ {
			v := int16(binary.BigEndian.Uint16(t.Data[off : off+2]))
			off += 2
			if v >= 32766 {
				row[c] = nil
				continue
			}
			dbz := float64(v) / 10.0
			if dbz < 0 {
				dbz = 0
			} else if dbz > 75 {
				dbz = 75
			}
			vv := dbz
			row[c] = &vv
		}
		vals[r] = row
	}
	return vals, xs, ys, nil
}

func computeStats(vals [][]*float64, xs, ys []float64, stLat, stLon, windMS, windFromDeg float64) (
	sec40Cnt int, sec40Sum float64, sec30Cnt int, sec30Sum float64,
	cir40Cnt int, cir40Sum float64, cir30Cnt int, cir30Sum float64,
) {
	h := len(vals)
	if h == 0 {
		return
	}
	w := len(vals[0])
	// Radius (metres) and half-angle
	halfAngle := 30.0
	rangeM := windMS * 3 * 3600
	circleR := 8000.0

	for r := 0; r < h; r++ {
		lat := ys[r]
		row := vals[r]
		for c := 0; c < w; c++ {
			if row[c] == nil {
				continue
			}
			dbz := *row[c]
			lon := xs[c]
			dist := haversine(stLat, stLon, lat, lon)

			// 8 km circle
			if dist <= circleR {
				if dbz >= 40 {
					cir40Cnt++
					cir40Sum += dbz
				}
				if dbz >= 30 {
					cir30Cnt++
					cir30Sum += dbz
				}
			}

			// Sector (must satisfy both distance and angle)
			if dist <= rangeM {
				brg := bearingDeg(stLat, stLon, lat, lon)
				if angDiff(brg, windFromDeg) <= halfAngle {
					if dbz >= 40 {
						sec40Cnt++
						sec40Sum += dbz
					}
					if dbz >= 30 {
						sec30Cnt++
						sec30Sum += dbz
					}
				}
			}
		}
	}
	return
}

func toRad(d float64) float64 { return d * math.Pi / 180 }
func toDeg(r float64) float64 { return r * 180 / math.Pi }

func haversine(lat1, lon1, lat2, lon2 float64) float64 {
	const R = 6371000.0
	dLat := toRad(lat2 - lat1)
	dLon := toRad(lon2 - lon1)
	a := math.Sin(dLat/2)*math.Sin(dLat/2) + math.Cos(toRad(lat1))*math.Cos(toRad(lat2))*math.Sin(dLon/2)*math.Sin(dLon/2)
	c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return R * c
}

func bearingDeg(lat1, lon1, lat2, lon2 float64) float64 {
	φ1 := toRad(lat1)
	φ2 := toRad(lat2)
	Δλ := toRad(lon2 - lon1)
	y := math.Sin(Δλ) * math.Cos(φ2)
	x := math.Cos(φ1)*math.Sin(φ2) - math.Sin(φ1)*math.Cos(φ2)*math.Cos(Δλ)
	brg := toDeg(math.Atan2(y, x))
	if brg < 0 {
		brg += 360
	}
	return brg
}

func angDiff(a, b float64) float64 {
	d := math.Mod(a-b+540, 360) - 180
	if d < 0 {
		d = -d
	}
	return math.Abs(d)
}
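computeStats counts a grid cell toward the sector totals only when it lies within windMS*3*3600 metres of the station and within 30° of the wind bearing. The sketch below is not part of the diff; it isolates that membership test using the same haversine/bearing/angDiff formulas, and the station and cell coordinates are made up for illustration.

package main

import (
	"fmt"
	"math"
)

func toRad(d float64) float64 { return d * math.Pi / 180 }
func toDeg(r float64) float64 { return r * 180 / math.Pi }

func haversine(lat1, lon1, lat2, lon2 float64) float64 {
	const R = 6371000.0
	dLat := toRad(lat2 - lat1)
	dLon := toRad(lon2 - lon1)
	a := math.Sin(dLat/2)*math.Sin(dLat/2) + math.Cos(toRad(lat1))*math.Cos(toRad(lat2))*math.Sin(dLon/2)*math.Sin(dLon/2)
	return R * 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
}

func bearingDeg(lat1, lon1, lat2, lon2 float64) float64 {
	φ1, φ2 := toRad(lat1), toRad(lat2)
	Δλ := toRad(lon2 - lon1)
	y := math.Sin(Δλ) * math.Cos(φ2)
	x := math.Cos(φ1)*math.Sin(φ2) - math.Sin(φ1)*math.Cos(φ2)*math.Cos(Δλ)
	brg := toDeg(math.Atan2(y, x))
	if brg < 0 {
		brg += 360
	}
	return brg
}

func angDiff(a, b float64) float64 {
	return math.Abs(math.Mod(a-b+540, 360) - 180)
}

// inSector reports whether a grid cell falls in the wind sector used by
// computeStats: within windMS*3h of the station and within 30° of windFromDeg.
func inSector(stLat, stLon, cellLat, cellLon, windMS, windFromDeg float64) bool {
	dist := haversine(stLat, stLon, cellLat, cellLon)
	if dist > windMS*3*3600 {
		return false
	}
	return angDiff(bearingDeg(stLat, stLon, cellLat, cellLon), windFromDeg) <= 30
}

func main() {
	// Hypothetical station with a cell roughly 10 km to the north-east;
	// 2 m/s over 3 hours gives a 21.6 km sector range, so the cell qualifies.
	fmt.Println(inSector(30.0, 120.0, 30.07, 120.07, 2.0, 45)) // true
}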
cmd/radar_rain_export_csv/main.go (new file, +553)
@@ -0,0 +1,553 @@
package main

import (
	"context"
	"database/sql"
	"encoding/binary"
	"encoding/csv"
	"errors"
	"flag"
	"fmt"
	"log"
	"math"
	"os"
	"strings"
	"time"

	"weatherstation/internal/database"
)

type stationInfo struct {
	ID string
	Alias string
	Lat float64
	Lon float64
	Z int
	Y int
	X int
}

type tileRec struct {
	DT time.Time
	Width, Height int
	West, South float64
	East, North float64
	ResDeg float64
	Data []byte
}

func main() {
	var stationID string
	var startStr string
	var endStr string
	var outPath string
	var verbose bool
	var useImdroid bool

	flag.StringVar(&stationID, "station_id", "", "站点ID(留空表示全部符合条件的站)")
	flag.StringVar(&startStr, "start", "", "起始时间(YYYY-MM-DD HH:MM:SS,CST,表示区间左端点)")
	flag.StringVar(&endStr, "end", "", "结束时间(YYYY-MM-DD HH:MM:SS,CST,表示区间左端点,非包含)")
	flag.StringVar(&outPath, "out", "radar_hourly_stats.csv", "输出CSV文件路径")
	flag.BoolVar(&verbose, "info", false, "输出详细过程信息")
	flag.BoolVar(&useImdroid, "use_imdroid", false, "输出 imdroid 预报(右端点)")
	flag.Parse()

	if strings.TrimSpace(startStr) == "" || strings.TrimSpace(endStr) == "" {
		log.Fatalln("必须提供 --start 与 --end,格式 YYYY-MM-DD HH:MM:SS")
	}
	loc, _ := time.LoadLocation("Asia/Shanghai")
	if loc == nil {
		loc = time.FixedZone("CST", 8*3600)
	}
	startT, err := time.ParseInLocation("2006-01-02 15:04:05", startStr, loc)
	if err != nil {
		log.Fatalf("解析 start 失败: %v", err)
	}
	endT, err := time.ParseInLocation("2006-01-02 15:04:05", endStr, loc)
	if err != nil {
		log.Fatalf("解析 end 失败: %v", err)
	}
	if !endT.After(startT) {
		log.Fatalln("结束时间必须大于起始时间")
	}

	_ = database.GetDB()
	defer database.Close()

	stations, err := listStations(database.GetDB(), stationID)
	if err != nil {
		log.Fatalf("查询站点失败: %v", err)
	}
	if len(stations) == 0 {
		log.Fatalln("没有符合条件的站点")
	}
	if verbose {
		log.Printf("站点数量: %d", len(stations))
		for _, s := range stations {
			log.Printf("站点: id=%s alias=%s lat=%.5f lon=%.5f z/y/x=%d/%d/%d", s.ID, s.Alias, s.Lat, s.Lon, s.Z, s.Y, s.X)
		}
	}

	f, err := os.Create(outPath)
	if err != nil {
		log.Fatalf("创建输出文件失败: %v", err)
	}
	defer f.Close()
	w := csv.NewWriter(f)
	defer w.Flush()

	header := []string{
		"station_id",
		"station_alias",
		"hour_end",
		"rain_actual_mm",
		"wind_speed_ms",
		"wind_dir_deg",
		"openmeteo_rain_mm",
		"openmeteo_issued",
		"caiyun_rain_mm",
		"caiyun_issued",
	}
	if useImdroid {
		header = append(header, "imdroid_rain_mm", "imdroid_issued")
	}
	header = append(header, "radar_circle_max_dbz", "radar_sector_max_dbz")
	if err := w.Write(header); err != nil {
		log.Fatalf("写入CSV表头失败: %v", err)
	}

	ctx := context.Background()
	totalRows := 0
	hours := buildHourSlots(startT, endT)

	for _, s := range stations {
		if verbose {
			log.Printf("处理站点 %s,共 %d 个小时区间", s.ID, len(hours))
		}
		for _, slot := range hours {
			actual, windSpeed, windDir, hasObs, err := aggregateHourlyObs(ctx, database.GetDB(), s.ID, slot.from, slot.to)
			if err != nil {
				log.Printf("站点 %s 聚合观测失败 @%s: %v", s.ID, slot.to.Format(time.RFC3339), err)
				continue
			}

			openRain, openIssued, hasOpen, err := findLatestForecast(ctx, database.GetDB(), s.ID, "open-meteo", slot.to)
			if err != nil {
				log.Printf("站点 %s 读取 open-meteo 预报失败 @%s: %v", s.ID, slot.to.Format(time.RFC3339), err)
			}
			caiyunRain, caiyunIssued, hasCaiyun, err := findLatestForecast(ctx, database.GetDB(), s.ID, "caiyun", slot.to)
			if err != nil {
				log.Printf("站点 %s 读取 caiyun 预报失败 @%s: %v", s.ID, slot.to.Format(time.RFC3339), err)
			}

			var (
				imdroidRain float64
				imdroidIssued time.Time
				hasImdroid bool
			)
			if useImdroid {
				var errImdroid error
				imdroidRain, imdroidIssued, hasImdroid, errImdroid = findLatestForecast(ctx, database.GetDB(), s.ID, "imdroid", slot.to)
				if errImdroid != nil {
					log.Printf("站点 %s 读取 imdroid 预报失败 @%s: %v", s.ID, slot.to.Format(time.RFC3339), errImdroid)
				}
			}

			circleMax, sectorMax, hasRadar, err := hourlyRadarMax(ctx, database.GetDB(), s, slot.from, slot.to, loc, verbose)
			if err != nil {
				log.Printf("站点 %s 统计雷达失败 @%s: %v", s.ID, slot.to.Format(time.RFC3339), err)
			}

			rec := []string{
				s.ID,
				s.Alias,
				slot.to.Format("2006-01-02 15:04:05"),
				formatFloat(actual, hasObs, 3),
				formatFloat(windSpeed, hasObs && !math.IsNaN(windSpeed), 3),
				formatFloat(windDir, hasObs && !math.IsNaN(windDir), 1),
				formatFloat(openRain, hasOpen, 3),
				formatTime(openIssued, hasOpen),
				formatFloat(caiyunRain, hasCaiyun, 3),
				formatTime(caiyunIssued, hasCaiyun),
			}
			if useImdroid {
				rec = append(rec,
					formatFloat(imdroidRain, hasImdroid, 3),
					formatTime(imdroidIssued, hasImdroid),
				)
			}
			rec = append(rec,
				formatFloat(circleMax, hasRadar && !math.IsNaN(circleMax), 1),
				formatFloat(sectorMax, hasRadar && !math.IsNaN(sectorMax), 1),
			)
			if err := w.Write(rec); err != nil {
				log.Printf("写入CSV失败: %v", err)
			} else {
				totalRows++
			}
		}
	}

	w.Flush()
	if err := w.Error(); err != nil {
		log.Fatalf("写入CSV失败: %v", err)
	}
	log.Printf("完成,输出 %d 行到 %s", totalRows, outPath)
}

type hourSlot struct {
	from time.Time
	to time.Time
}

func buildHourSlots(from, to time.Time) []hourSlot {
	var slots []hourSlot
	cursor := from
	for cursor.Before(to) {
		end := cursor.Add(time.Hour)
		if end.After(to) {
			end = to
		}
		slots = append(slots, hourSlot{from: cursor, to: end})
		cursor = end
	}
	return slots
}

func listStations(db *sql.DB, stationID string) ([]stationInfo, error) {
	if strings.TrimSpace(stationID) != "" {
		const q = `
SELECT station_id,
       CASE WHEN COALESCE(station_alias,'')='' THEN station_id ELSE station_alias END AS alias,
       latitude, longitude,
       COALESCE(z,0), COALESCE(y,0), COALESCE(x,0)
FROM stations
WHERE station_id=$1
  AND latitude IS NOT NULL AND longitude IS NOT NULL
  AND latitude<>0 AND longitude<>0
  AND COALESCE(z,0)=7 AND COALESCE(y,0)=40 AND COALESCE(x,0)=102`
		var s stationInfo
		err := db.QueryRow(q, stationID).Scan(&s.ID, &s.Alias, &s.Lat, &s.Lon, &s.Z, &s.Y, &s.X)
		if err != nil {
			if errors.Is(err, sql.ErrNoRows) {
				return nil, nil
			}
			return nil, err
		}
		return []stationInfo{s}, nil
	}
	const qAll = `
SELECT station_id,
       CASE WHEN COALESCE(station_alias,'')='' THEN station_id ELSE station_alias END AS alias,
       latitude, longitude,
       COALESCE(z,0), COALESCE(y,0), COALESCE(x,0)
FROM stations
WHERE device_type='WH65LP'
  AND latitude IS NOT NULL AND longitude IS NOT NULL
  AND latitude<>0 AND longitude<>0
  AND COALESCE(z,0)=7 AND COALESCE(y,0)=40 AND COALESCE(x,0)=102
ORDER BY station_id`
	rows, err := db.Query(qAll)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var out []stationInfo
	for rows.Next() {
		var s stationInfo
		if err := rows.Scan(&s.ID, &s.Alias, &s.Lat, &s.Lon, &s.Z, &s.Y, &s.X); err == nil {
			out = append(out, s)
		}
	}
	return out, nil
}

func aggregateHourlyObs(ctx context.Context, db *sql.DB, stationID string, from, to time.Time) (rain float64, windSpeed float64, windDir float64, ok bool, err error) {
	const q = `
SELECT wind_speed_ms_x1000, wind_dir_deg, rain_10m_mm_x1000
FROM rs485_weather_10min
WHERE station_id=$1 AND bucket_start >= $2 AND bucket_start < $3`
	rows, err := db.QueryContext(ctx, q, stationID, from, to)
	if err != nil {
		return 0, 0, 0, false, err
	}
	defer rows.Close()

	var totalRain int64
	var count int
	var sumX, sumY float64

	for rows.Next() {
		var ws sql.NullInt64
		var wd sql.NullInt64
		var rainX sql.NullInt64
		if err := rows.Scan(&ws, &wd, &rainX); err != nil {
			return 0, 0, 0, false, err
		}
		if rainX.Valid {
			totalRain += rainX.Int64
		}
		if ws.Valid && wd.Valid {
			speed := float64(ws.Int64) / 1000.0
			dir := float64(wd.Int64)
			rad := toRad(dir)
			sumX += speed * math.Cos(rad)
			sumY += speed * math.Sin(rad)
			count++
		}
	}
	if err := rows.Err(); err != nil {
		return 0, 0, 0, false, err
	}

	rain = float64(totalRain) / 1000.0
	windSpeed = math.NaN()
	windDir = math.NaN()
	if count > 0 {
		avgX := sumX / float64(count)
		avgY := sumY / float64(count)
		windSpeed = math.Hypot(avgX, avgY)
		if windSpeed == 0 {
			windDir = 0
		} else {
			dir := toDeg(math.Atan2(avgY, avgX))
			if dir < 0 {
				dir += 360
			}
			windDir = dir
		}
		ok = true
	}
	return rain, windSpeed, windDir, totalRain > 0 || count > 0, nil
}

func findLatestForecast(ctx context.Context, db *sql.DB, stationID, provider string, forecastTime time.Time) (rain float64, issued time.Time, ok bool, err error) {
	const q = `
SELECT issued_at, rain_mm_x1000
FROM forecast_hourly
WHERE station_id=$1 AND provider=$2 AND forecast_time=$3
ORDER BY issued_at DESC
LIMIT 1`
	var issuedAt time.Time
	var rainX sql.NullInt64
	err = db.QueryRowContext(ctx, q, stationID, provider, forecastTime).Scan(&issuedAt, &rainX)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return 0, time.Time{}, false, nil
		}
		return 0, time.Time{}, false, err
	}
	if !rainX.Valid {
		return 0, issuedAt, true, nil
	}
	return float64(rainX.Int64) / 1000.0, issuedAt, true, nil
}

func hourlyRadarMax(ctx context.Context, db *sql.DB, s stationInfo, from, to time.Time, loc *time.Location, verbose bool) (circleMax float64, sectorMax float64, ok bool, err error) {
	tiles, err := listTiles(ctx, db, s.Z, s.Y, s.X, from, to)
	if err != nil {
		return math.NaN(), math.NaN(), false, err
	}
	if len(tiles) == 0 {
		return math.NaN(), math.NaN(), false, nil
	}
	circleMax = math.NaN()
	sectorMax = math.NaN()
	alias := strings.TrimSpace(s.Alias)
	if alias == "" {
		alias = s.ID
	}

	for _, t := range tiles {
		bucket := bucket10(t.DT, loc)
		windSpeed, windDir, windOK, err := loadWindAt(db, s.ID, alias, bucket)
		if err != nil {
			if verbose {
				log.Printf("站点 %s 瓦片@%s 读取风失败: %v", s.ID, t.DT.Format(time.RFC3339), err)
			}
			continue
		}

		vals, xs, ys, err := decodeTile(t)
		if err != nil {
			if verbose {
				log.Printf("站点 %s 解码瓦片失败: %v", s.ID, err)
			}
			continue
		}

		for r := 0; r < len(vals); r++ {
			row := vals[r]
			lat := ys[r]
			for c := 0; c < len(row); c++ {
				v := row[c]
				if v == nil {
					continue
				}
				dbz := *v
				lon := xs[c]
				dist := haversine(s.Lat, s.Lon, lat, lon)

				if dist <= 8000.0 {
					if math.IsNaN(circleMax) || dbz > circleMax {
						circleMax = dbz
					}
				}

				if windOK && windSpeed > 0 {
					brg := bearingDeg(s.Lat, s.Lon, lat, lon)
					if angDiff(brg, windDir) <= 30.0 && dist <= windSpeed*3*3600 {
						if math.IsNaN(sectorMax) || dbz > sectorMax {
							sectorMax = dbz
						}
					}
				}
			}
		}
	}
	return circleMax, sectorMax, !math.IsNaN(circleMax) || !math.IsNaN(sectorMax), nil
}

func listTiles(ctx context.Context, db *sql.DB, z, y, x int, from, to time.Time) ([]tileRec, error) {
	const q = `
SELECT dt, width, height, west, south, east, north, res_deg, data
FROM radar_tiles
WHERE z=$1 AND y=$2 AND x=$3 AND dt >= $4 AND dt < $5
ORDER BY dt ASC`
	rows, err := db.QueryContext(ctx, q, z, y, x, from, to)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var out []tileRec
	for rows.Next() {
		var r tileRec
		if err := rows.Scan(&r.DT, &r.Width, &r.Height, &r.West, &r.South, &r.East, &r.North, &r.ResDeg, &r.Data); err == nil {
			out = append(out, r)
		}
	}
	return out, nil
}

func bucket10(t time.Time, loc *time.Location) time.Time {
	tt := t.In(loc)
	m := (tt.Minute() / 10) * 10
	return time.Date(tt.Year(), tt.Month(), tt.Day(), tt.Hour(), m, 0, 0, loc)
}

func loadWindAt(db *sql.DB, stationID, alias string, dt time.Time) (speedMS float64, dirDeg float64, ok bool, err error) {
	const q = `
SELECT wind_speed, wind_direction
FROM radar_weather
WHERE alias=$1 AND dt=$2
LIMIT 1`
	tryAlias := func(a string) (float64, float64, bool, error) {
		var s, d sql.NullFloat64
		err := db.QueryRow(q, a, dt).Scan(&s, &d)
		if err == sql.ErrNoRows {
			return 0, 0, false, nil
		}
		if err != nil {
			return 0, 0, false, err
		}
		if !s.Valid || !d.Valid {
			return 0, 0, false, nil
		}
		return s.Float64, d.Float64, true, nil
	}
	if speed, dir, ok, err := tryAlias(stationID); err != nil {
		return 0, 0, false, err
	} else if ok {
		return speed, dir, true, nil
	}
	return tryAlias(alias)
}

func decodeTile(t tileRec) (vals [][]*float64, xs []float64, ys []float64, err error) {
	w, h := t.Width, t.Height
	if w <= 0 || h <= 0 {
		return nil, nil, nil, fmt.Errorf("非法尺寸")
	}
	if len(t.Data) < w*h*2 {
		return nil, nil, nil, fmt.Errorf("数据长度不足")
	}
	xs = make([]float64, w)
	for c := 0; c < w; c++ {
		xs[c] = t.West + (float64(c)+0.5)*t.ResDeg
	}
	ys = make([]float64, h)
	for r := 0; r < h; r++ {
		ys[r] = t.South + (float64(r)+0.5)*t.ResDeg
	}
	vals = make([][]*float64, h)
	off := 0
	for r := 0; r < h; r++ {
		row := make([]*float64, w)
		for c := 0; c < w; c++ {
			v := int16(binary.BigEndian.Uint16(t.Data[off : off+2]))
			off += 2
			if v >= 32766 {
				row[c] = nil
				continue
			}
			dbz := float64(v) / 10.0
			if dbz < 0 {
				dbz = 0
			} else if dbz > 75 {
				dbz = 75
			}
			value := dbz
			row[c] = &value
		}
		vals[r] = row
	}
	return vals, xs, ys, nil
}

func haversine(lat1, lon1, lat2, lon2 float64) float64 {
	const R = 6371000.0
	dLat := toRad(lat2 - lat1)
	dLon := toRad(lon2 - lon1)
	a := math.Sin(dLat/2)*math.Sin(dLat/2) + math.Cos(toRad(lat1))*math.Cos(toRad(lat2))*math.Sin(dLon/2)*math.Sin(dLon/2)
	c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return R * c
}

func bearingDeg(lat1, lon1, lat2, lon2 float64) float64 {
	φ1 := toRad(lat1)
	φ2 := toRad(lat2)
	Δλ := toRad(lon2 - lon1)
	y := math.Sin(Δλ) * math.Cos(φ2)
	x := math.Cos(φ1)*math.Sin(φ2) - math.Sin(φ1)*math.Cos(φ2)*math.Cos(Δλ)
	brg := toDeg(math.Atan2(y, x))
	if brg < 0 {
		brg += 360
	}
	return brg
}

func angDiff(a, b float64) float64 {
	d := math.Mod(a-b+540, 360) - 180
	if d < 0 {
		d = -d
	}
	return math.Abs(d)
}

func toRad(d float64) float64 { return d * math.Pi / 180 }
func toDeg(r float64) float64 { return r * 180 / math.Pi }

func formatFloat(v float64, ok bool, digits int) string {
	if !ok || math.IsNaN(v) {
		return ""
	}
	format := fmt.Sprintf("%%.%df", digits)
	return fmt.Sprintf(format, v)
}

func formatTime(t time.Time, ok bool) string {
	if !ok || t.IsZero() {
		return ""
	}
	return t.Format("2006-01-02 15:04:05")
}
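aggregateHourlyObs averages the 10-minute wind samples as vectors (cosine/sine components) rather than averaging speeds and directions separately, so opposing winds largely cancel instead of producing a misleading mean direction. A standalone sketch of that step, not part of the diff, with made-up sample values:

package main

import (
	"fmt"
	"math"
)

type sample struct {
	speedMS float64 // m/s
	dirDeg  float64 // degrees
}

// vectorMean reproduces the component averaging used in aggregateHourlyObs:
// decompose each sample, average the components, then recombine.
func vectorMean(samples []sample) (speed, dir float64) {
	var sumX, sumY float64
	for _, s := range samples {
		rad := s.dirDeg * math.Pi / 180
		sumX += s.speedMS * math.Cos(rad)
		sumY += s.speedMS * math.Sin(rad)
	}
	n := float64(len(samples))
	avgX, avgY := sumX/n, sumY/n
	speed = math.Hypot(avgX, avgY)
	if speed == 0 {
		return 0, 0
	}
	dir = math.Atan2(avgY, avgX) * 180 / math.Pi
	if dir < 0 {
		dir += 360
	}
	return speed, dir
}

func main() {
	// Two equal samples in opposite directions cancel to a near-zero mean speed...
	fmt.Println(vectorMean([]sample{{3, 0}, {3, 180}})) // speed ≈ 0
	// ...while aligned samples keep their magnitude and direction.
	fmt.Println(vectorMean([]sample{{3, 90}, {5, 90}})) // ≈ 4 m/s at ≈ 90°
}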
cmd/rainfetch/main.go (new file, +227)
@@ -0,0 +1,227 @@
package main

import (
	"context"
	"database/sql"
	"flag"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"time"

	dbpkg "weatherstation/internal/database"
	"weatherstation/internal/rain"
)

// Simple hourly-rain (CMPA) downloader over a time range:
// - Input times are Beijing time (Asia/Shanghai)
// - Download paths are built in UTC (local top of hour minus 8h)
// - Before storing, rain.StoreTileBytes parses the URL to convert UTC back to Beijing time and writes to the DB
// Usage example:
//
//	go run ./cmd/rainfetch --from "2025-10-07 09:00:00" --to "2025-10-07 11:00:00" \
//		--tiles "7/40/102,7/40/104" --outdir rain_data
func main() {
	var (
		fromStr = flag.String("from", "", "起始时间(北京时间,YYYY-MM-DD HH:MM:SS 或 YYYY-MM-DD)")
		toStr = flag.String("to", "", "结束时间(北京时间,YYYY-MM-DD HH:MM:SS 或 YYYY-MM-DD)")
		tiles = flag.String("tiles", "7/40/102,7/40/104", "瓦片列表,逗号分隔,每项为 z/y/x,如 7/40/102")
		outDir = flag.String("outdir", "rain_data", "保存目录(同时也会写入数据库)")
		baseURL = flag.String("base", "https://image.data.cma.cn/tiles/China/CMPA_RT_China_0P01_HOR-PRE_GISJPG_Tiles/%Y%m%d/%H/%M/{z}/{y}/{x}.bin", "下载基础URL模板(UTC路径时间)")
		dryRun = flag.Bool("dry", false, "仅打印将要下载的URL与目标,不实际下载写库")
	)
	flag.Parse()

	if strings.TrimSpace(*fromStr) == "" || strings.TrimSpace(*toStr) == "" {
		log.Fatalln("必须提供 --from 与 --to(北京时间)")
	}

	loc, _ := time.LoadLocation("Asia/Shanghai")
	if loc == nil {
		loc = time.FixedZone("CST", 8*3600)
	}

	parseCST := func(s string) (time.Time, error) {
		s = strings.TrimSpace(s)
		var t time.Time
		var err error
		if len(s) == len("2006-01-02") {
			// Date only: treat as 00:00:00
			if tm, e := time.ParseInLocation("2006-01-02", s, loc); e == nil {
				t = tm
			} else {
				err = e
			}
		} else {
			t, err = time.ParseInLocation("2006-01-02 15:04:05", s, loc)
		}
		return t, err
	}

	start, err1 := parseCST(*fromStr)
	end, err2 := parseCST(*toStr)
	if err1 != nil || err2 != nil {
		log.Fatalf("解析时间失败: from=%v to=%v", err1, err2)
	}
	if end.Before(start) {
		log.Fatalln("结束时间需不小于起始时间")
	}

	// Hourly stepping (endpoints inclusive): align to the hour first
	cur := start.Truncate(time.Hour)
	end = end.Truncate(time.Hour)

	// Parse the tiles argument
	type tcoord struct{ z, y, x int }
	var tlist []tcoord
	for _, part := range strings.Split(*tiles, ",") {
		p := strings.TrimSpace(part)
		if p == "" {
			continue
		}
		var z, y, x int
		if _, err := fmt.Sscanf(p, "%d/%d/%d", &z, &y, &x); err != nil {
			log.Fatalf("无效的 tiles 项: %s", p)
		}
		tlist = append(tlist, tcoord{z, y, x})
	}
	if len(tlist) == 0 {
		log.Fatalln("tiles 解析后为空")
	}

	if err := os.MkdirAll(*outDir, 0o755); err != nil {
		log.Fatalf("创建输出目录失败: %v", err)
	}

	ctx := context.Background()
	total := 0
	success := 0
	for !cur.After(end) {
		// Local top of hour (CST) -> UTC path time
		slotLocal := cur
		slotUTC := slotLocal.Add(-8 * time.Hour).In(time.UTC)
		dateStr := slotUTC.Format("20060102")
		hh := slotUTC.Format("15")
		mm := "00"
		log.Printf("[rainfetch] 时次 local=%s, utc=%s", slotLocal.Format("2006-01-02 15:04"), slotUTC.Format("2006-01-02 15:04"))

		for _, tc := range tlist {
			total++
			// Build the URL
			url := *baseURL
			url = strings.ReplaceAll(url, "%Y%m%d", dateStr)
			url = strings.ReplaceAll(url, "%H", hh)
			url = strings.ReplaceAll(url, "%M", mm)
			url = strings.ReplaceAll(url, "{z}", fmt.Sprintf("%d", tc.z))
			url = strings.ReplaceAll(url, "{y}", fmt.Sprintf("%d", tc.y))
			url = strings.ReplaceAll(url, "{x}", fmt.Sprintf("%d", tc.x))

			fname := fmt.Sprintf("rain_z%d_y%d_x%d_%s.bin", tc.z, tc.y, tc.x, slotLocal.Format("20060102_1504"))
			dest := filepath.Join(*outDir, fname)

			if *dryRun {
				log.Printf("[rainfetch] DRY url=%s -> %s", url, dest)
				continue
			}

			// Skip if the DB already has this tile
			if ref, e := rain.ParseCMPATileURL(url); e == nil {
				exists, e2 := databaseHas(ctx, ref.Product, ref.DT, tc.z, tc.y, tc.x)
				if e2 == nil && exists {
					log.Printf("[rainfetch] skip exists in DB z=%d y=%d x=%d dt=%s", tc.z, tc.y, tc.x, ref.DT.Format("2006-01-02 15:04"))
					continue
				}
			}

			if err := httpDownloadTo(ctx, url, dest); err != nil {
				log.Printf("[rainfetch] 下载失败 z=%d y=%d x=%d: %v", tc.z, tc.y, tc.x, err)
				continue
			}
			log.Printf("[rainfetch] 保存 %s", dest)

			// Store to DB (parse UTC from the URL, convert to CST, then upsert)
			b, rerr := os.ReadFile(dest)
			if rerr != nil {
				log.Printf("[rainfetch] 读文件失败: %v", rerr)
				continue
			}
			if err := rain.StoreTileBytes(ctx, url, b); err != nil {
				log.Printf("[rainfetch] 入库失败: %v", err)
				continue
			}
			success++
		}
		cur = cur.Add(1 * time.Hour)
	}
	log.Printf("[rainfetch] 完成:尝试 %d,成功 %d", total, success)
}

// Lightweight DB existence check (to avoid pulling the internal database package too deeply into this command):
// to avoid a circular dependency, a minimal SQL call is copied here; a larger project could extract a shared helper.
// In the current repo, database.GetDB lives in internal/database and the rain API uses it directly.

// Note: to stay minimally invasive, rain.StoreTileBytes performs the actual insert;
// the "already exists" query only runs before downloading to avoid duplicate downloads, which is why internal/database is needed.

func databaseHas(ctx context.Context, product string, dt time.Time, z, y, x int) (bool, error) {
	const q = `SELECT 1 FROM rain_tiles WHERE product=$1 AND dt=$2 AND z=$3 AND y=$4 AND x=$5 LIMIT 1`
	var one int
	err := dbpkg.GetDB().QueryRowContext(ctx, q, product, dt, z, y, x).Scan(&one)
	if err == sql.ErrNoRows {
		return false, nil
	}
	if err != nil {
		return false, err
	}
	return true, nil
}

func httpDownloadTo(ctx context.Context, url, dest string) error {
	client := &http.Client{Timeout: 20 * time.Second}
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return fmt.Errorf("build request: %w", err)
	}
	req.Header.Set("Referer", "https://data.cma.cn/")
	req.Header.Set("Origin", "https://data.cma.cn")
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36")
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("http get: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}
	tmp := dest + ".part"
	f, err := os.Create(tmp)
	if err != nil {
		return fmt.Errorf("create temp: %w", err)
	}
	_, copyErr := io.Copy(f, resp.Body)
	closeErr := f.Close()
	if copyErr != nil {
		_ = os.Remove(tmp)
		return fmt.Errorf("write body: %w", copyErr)
	}
	if closeErr != nil {
		_ = os.Remove(tmp)
		return fmt.Errorf("close temp: %w", closeErr)
	}
	if err := os.Rename(tmp, dest); err != nil {
		// Cross-device fallback
		data, rerr := os.ReadFile(tmp)
		if rerr != nil {
			return fmt.Errorf("read temp: %w", rerr)
		}
		if werr := os.WriteFile(dest, data, 0o644); werr != nil {
			return fmt.Errorf("write final: %w", werr)
		}
		_ = os.Remove(tmp)
	}
	return nil
}
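Each download URL is produced by substituting the date, hour, minute and z/y/x placeholders of the -base template with the UTC slot time. The sketch below is not part of the diff; it shows only the placeholder substitution for an already-computed UTC slot, reusing the same default template string with an arbitrary sample timestamp.

package main

import (
	"fmt"
	"strings"
	"time"
)

// buildTileURL mirrors the ReplaceAll chain in cmd/rainfetch/main.go for a
// slot time that has already been converted to UTC.
func buildTileURL(base string, slotUTC time.Time, z, y, x int) string {
	url := base
	url = strings.ReplaceAll(url, "%Y%m%d", slotUTC.Format("20060102"))
	url = strings.ReplaceAll(url, "%H", slotUTC.Format("15"))
	url = strings.ReplaceAll(url, "%M", "00")
	url = strings.ReplaceAll(url, "{z}", fmt.Sprintf("%d", z))
	url = strings.ReplaceAll(url, "{y}", fmt.Sprintf("%d", y))
	url = strings.ReplaceAll(url, "{x}", fmt.Sprintf("%d", x))
	return url
}

func main() {
	base := "https://image.data.cma.cn/tiles/China/CMPA_RT_China_0P01_HOR-PRE_GISJPG_Tiles/%Y%m%d/%H/%M/{z}/{y}/{x}.bin"
	slotUTC := time.Date(2025, 10, 7, 1, 0, 0, 0, time.UTC) // hypothetical UTC slot
	fmt.Println(buildTileURL(base, slotUTC, 7, 40, 102))
	// .../20251007/01/00/7/40/102.bin
}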
cmd/service-api/main.go (new file, +21)
@@ -0,0 +1,21 @@
package main

import (
	"log"
	"weatherstation/internal/server"
)

func main() {
	server.SetupLogger()

	// Start one Gin server on the configured port and another on port 8888
	go func() {
		if err := server.StartGinServer(); err != nil {
			log.Fatalf("service-api (config port) failed: %v", err)
		}
	}()
	// Bigscreen only: point the root path at the big-screen page on port 10008
	if err := server.StartBigscreenServerOn(10008); err != nil {
		log.Fatalf("service-api (10008 bigscreen) failed: %v", err)
	}
}
cmd/service-exporter/main.go (new file, +33)
@@ -0,0 +1,33 @@
package main

import (
	"context"
	"errors"
	"log"
	"os"
	"os/signal"
	"syscall"
	"weatherstation/internal/server"
	"weatherstation/internal/tools"
)

func main() {
	server.SetupLogger()

	// If CAIYUN_TOKEN is provided, enable wind override automatically.
	opts := tools.ExporterOptions{}
	if token := os.Getenv("CAIYUN_TOKEN"); token != "" {
		opts.OverrideWindWithCaiyun = true
		opts.CaiyunToken = token
		log.Printf("[service-exporter] wind override enabled via CAIYUN_TOKEN")
	}

	exp := tools.NewExporterWithOptions(opts)

	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	if err := exp.Start(ctx); err != nil && !errors.Is(err, context.Canceled) {
		log.Fatalf("service-exporter failed: %v", err)
	}
}
cmd/service-forecast/main.go (new file, +65)
@@ -0,0 +1,65 @@
package main

import (
	"context"
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"
	"weatherstation/internal/config"
	"weatherstation/internal/forecast"
	"weatherstation/internal/server"
)

func hourlyLoop(ctx context.Context, fn func() error, name string) {
	for {
		select {
		case <-ctx.Done():
			return
		default:
		}
		now := time.Now()
		next := now.Truncate(time.Hour).Add(time.Hour)
		t := time.NewTimer(time.Until(next))
		select {
		case <-ctx.Done():
			t.Stop()
			return
		case <-t.C:
		}
		if err := fn(); err != nil {
			log.Printf("[%s] scheduled run failed: %v", name, err)
		} else {
			log.Printf("[%s] scheduled run completed", name)
		}
	}
}

func main() {
	server.SetupLogger()

	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	// Open-Meteo hourly fetch
	go hourlyLoop(ctx, func() error { return forecast.RunOpenMeteoFetch(context.Background()) }, "open-meteo")

	// Caiyun hourly fetch (if token configured)
	token := os.Getenv("CAIYUN_TOKEN")
	if token == "" {
		token = config.GetConfig().Forecast.CaiyunToken
	}
	if token == "" {
		log.Printf("[caiyun] token not set; caiyun scheduler disabled")
	} else {
		t := token
		go hourlyLoop(ctx, func() error { return forecast.RunCaiyunFetch(context.Background(), t) }, "caiyun")
	}

	// CMA hourly fetch
	// go hourlyLoop(ctx, func() error { return forecast.RunCMAFetch(context.Background()) }, "cma")

	<-ctx.Done()
	log.Println("service-forecast shutting down")
}
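hourlyLoop wakes at the next top of the hour: Truncate(time.Hour) drops the minutes and seconds, and Add(time.Hour) moves to the following hour, so a loop iteration observed at 13:47:10 waits until 14:00:00 before running the fetch. A tiny standalone sketch of just that alignment step, not part of the diff:

package main

import (
	"fmt"
	"time"
)

// nextTopOfHour mirrors the wake-up time computed inside hourlyLoop.
func nextTopOfHour(now time.Time) time.Time {
	return now.Truncate(time.Hour).Add(time.Hour)
}

func main() {
	now := time.Date(2025, 10, 7, 13, 47, 10, 0, time.UTC) // hypothetical clock reading
	fmt.Println(nextTopOfHour(now)) // 2025-10-07 14:00:00 +0000 UTC
}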
cmd/service-fusion/main.go (new file, +50)
@@ -0,0 +1,50 @@
package main

import (
	"context"
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"
	"weatherstation/internal/fusion"
	"weatherstation/internal/server"
)

func main() {
	server.SetupLogger()

	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	go func() {
		for {
			if ctx.Err() != nil {
				return
			}
			now := time.Now()
			next := now.Truncate(time.Hour).Add(time.Hour).Add(5 * time.Minute)
			sleep := time.Until(next)
			if sleep < 0 {
				sleep = 0
			}
			t := time.NewTimer(sleep)
			select {
			case <-ctx.Done():
				t.Stop()
				return
			case <-t.C:
			}

			issued := next.Truncate(time.Hour)
			if err := fusion.RunForIssued(context.Background(), issued); err != nil {
				log.Printf("[service-fusion] run failed: %v", err)
			} else {
				log.Printf("[service-fusion] completed issued=%s", issued.Format("2006-01-02 15:04:05"))
			}
		}
	}()

	<-ctx.Done()
	log.Println("service-fusion shutting down")
}
33
cmd/service-radar/main.go
Normal file
33
cmd/service-radar/main.go
Normal file
@ -0,0 +1,33 @@
|
||||
package main

import (
    "context"
    "log"
    "os"
    "os/signal"
    "syscall"

    "weatherstation/internal/radar"
    "weatherstation/internal/rain"
    "weatherstation/internal/server"
)

func main() {
    server.SetupLogger()

    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
    defer stop()

    // Start radar scheduler with defaults (StoreToDB=true; tile defaults read inside package)
    if err := radar.Start(ctx, radar.Options{StoreToDB: true}); err != nil {
        log.Fatalf("service-radar start error: %v", err)
    }

    // Also start CMPA hourly rain scheduler (StoreToDB=true; tiles/dir/url from env inside package)
    if err := rain.Start(ctx, rain.Options{StoreToDB: true}); err != nil {
        log.Fatalf("service-rain (embedded) start error: %v", err)
    }

    // Keep process alive until signal
    <-ctx.Done()
    log.Println("service-radar shutting down")
}
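radar.Options can also pin the tile explicitly instead of relying on the package defaults; the monolithic cmd/weatherstation entry point later in this diff does exactly that. A short sketch of the same scheduler started against a fixed tile (field values mirror the 7/40/102 call in cmd/weatherstation/main.go):

```go
package main

import (
    "context"
    "log"
    "os"
    "os/signal"
    "syscall"

    "weatherstation/internal/radar"
)

func main() {
    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
    defer stop()
    // Pin the tile rather than reading package defaults.
    if err := radar.Start(ctx, radar.Options{StoreToDB: true, Z: 7, Y: 40, X: 102}); err != nil {
        log.Fatalf("radar start error: %v", err)
    }
    <-ctx.Done()
}
```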
28  cmd/service-rain/main.go  Normal file
@@ -0,0 +1,28 @@
package main

import (
    "context"
    "log"
    "os"
    "os/signal"
    "syscall"

    "weatherstation/internal/rain"
    "weatherstation/internal/server"
)

// service-rain: standalone CMPA hourly rain tile downloader
// - Uses internal/rain scheduler with defaults
// - Controlled by env vars in internal/rain (e.g., RAIN_ENABLED, RAIN_DIR, RAIN_BASE_URL)
func main() {
    server.SetupLogger()

    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
    defer stop()

    if err := rain.Start(ctx, rain.Options{StoreToDB: true}); err != nil {
        log.Fatalf("service-rain start error: %v", err)
    }

    <-ctx.Done()
    log.Println("service-rain shutting down")
}
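The rain scheduler reads its configuration from environment variables inside internal/rain; only the names RAIN_ENABLED, RAIN_DIR and RAIN_BASE_URL appear in the comment above, and their exact semantics are not shown in this diff. A hedged sketch that sets them from Go before a one-off run (values and interpretation are assumptions, not taken from the repository):

```go
package main

import (
    "context"
    "log"
    "os"

    "weatherstation/internal/rain"
)

func main() {
    // Variable names come from the comment in cmd/service-rain/main.go;
    // how internal/rain interprets the values is an assumption here.
    os.Setenv("RAIN_ENABLED", "true")
    os.Setenv("RAIN_DIR", "export_data/rain")
    os.Setenv("RAIN_BASE_URL", "https://example.invalid/tiles") // placeholder URL
    if err := rain.Start(context.Background(), rain.Options{StoreToDB: false}); err != nil {
        log.Fatalf("rain start error: %v", err)
    }
    select {}
}
```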
418  cmd/service-splitarea/main.go  Normal file
@@ -0,0 +1,418 @@
package main

import (
    "context"
    "encoding/csv"
    "encoding/json"
    "errors"
    "flag"
    "fmt"
    "log"
    "math"
    "net/http"
    "net/url"
    "os"
    "path/filepath"
    "strconv"
    "strings"
    "time"

    "weatherstation/internal/database"
)

// Service that, at hh:45 each hour, processes current hour's :30 radar tile for z/y/x,
// splits a user region into 0.1° grid, averages dBZ (linear domain), fetches Open-Meteo
// hourly variables at grid centers, and writes a CSV.

type radarTileRecord struct {
    DT                       time.Time
    Z, Y, X                  int
    Width, Height            int
    West, South, East, North float64
    ResDeg                   float64
    Data                     []byte
}

func getRadarTileAt(ctx context.Context, z, y, x int, dt time.Time) (*radarTileRecord, error) {
    const q = `SELECT dt, z, y, x, width, height, west, south, east, north, res_deg, data FROM radar_tiles WHERE z=$1 AND y=$2 AND x=$3 AND dt=$4 LIMIT 1`
    row := database.GetDB().QueryRowContext(ctx, q, z, y, x, dt)
    var r radarTileRecord
    if err := row.Scan(&r.DT, &r.Z, &r.Y, &r.X, &r.Width, &r.Height, &r.West, &r.South, &r.East, &r.North, &r.ResDeg, &r.Data); err != nil {
        return nil, err
    }
    return &r, nil
}

// parseBounds parses "W,S,E,N"
func parseBounds(s string) (float64, float64, float64, float64, error) {
    parts := strings.Split(s, ",")
    if len(parts) != 4 {
        return 0, 0, 0, 0, fmt.Errorf("bounds must be 'W,S,E,N'")
    }
    var vals [4]float64
    for i := 0; i < 4; i++ {
        v, err := parseFloat(strings.TrimSpace(parts[i]))
        if err != nil {
            return 0, 0, 0, 0, fmt.Errorf("invalid bound %q: %v", parts[i], err)
        }
        vals[i] = v
    }
    w, s1, e, n := vals[0], vals[1], vals[2], vals[3]
    if !(w < e && s1 < n) {
        return 0, 0, 0, 0, errors.New("invalid bounds: require W<E and S<N")
    }
    return w, s1, e, n, nil
}

func parseFloat(s string) (float64, error) {
    // Simple parser to avoid locale issues
    return strconvParseFloat(s)
}

// Wrap strconv.ParseFloat to keep imports minimal in patch header
func strconvParseFloat(s string) (float64, error) { return strconv.ParseFloat(s, 64) }

// align0p1 snaps to 0.1° grid
func align0p1(w, s, e, n float64) (float64, float64, float64, float64) {
    w2 := math.Floor(w*10.0) / 10.0
    s2 := math.Floor(s*10.0) / 10.0
    e2 := math.Ceil(e*10.0) / 10.0
    n2 := math.Ceil(n*10.0) / 10.0
    return w2, s2, e2, n2
}

func lonToCol(west, res float64, lon float64) int { return int(math.Floor((lon - west) / res)) }
func latToRow(south, res float64, lat float64) int { return int(math.Floor((lat - south) / res)) }

// dbzFromRaw converts raw big-endian int16 to dBZ, applying validity checks as in API
func dbzFromRaw(v int16) (float64, bool) {
    if v >= 32766 { // invalid mask
        return 0, false
    }
    dbz := float64(v) / 10.0
    if dbz < 0 { // clip negative
        return 0, false
    }
    return dbz, true
}

// linearZ average over dBZs
func avgDbzLinear(vals []float64) float64 {
    if len(vals) == 0 {
        return math.NaN()
    }
    zsum := 0.0
    for _, d := range vals {
        zsum += math.Pow(10, d/10.0)
    }
    meanZ := zsum / float64(len(vals))
    return 10.0 * math.Log10(meanZ)
}

// open-meteo client
type meteoResp struct {
    Hourly struct {
        Time []string  `json:"time"`
        Temp []float64 `json:"temperature_2m"`
        RH   []float64 `json:"relative_humidity_2m"`
        Dew  []float64 `json:"dew_point_2m"`
        WS   []float64 `json:"wind_speed_10m"`
        WD   []float64 `json:"wind_direction_10m"`
    } `json:"hourly"`
}

type meteoVals struct {
    Temp, RH, Dew, WS, WD *float64
}

func fetchMeteo(ctx context.Context, client *http.Client, lon, lat float64, utcHour time.Time) (*meteoVals, error) {
    base := "https://api.open-meteo.com/v1/forecast"
    datePart := utcHour.UTC().Format("2006-01-02")
    q := url.Values{}
    q.Set("latitude", fmt.Sprintf("%.4f", lat))
    q.Set("longitude", fmt.Sprintf("%.4f", lon))
    q.Set("hourly", "dew_point_2m,wind_speed_10m,wind_direction_10m,relative_humidity_2m,temperature_2m")
    q.Set("timezone", "UTC")
    q.Set("start_date", datePart)
    q.Set("end_date", datePart)
    q.Set("wind_speed_unit", "ms")
    req, _ := http.NewRequestWithContext(ctx, http.MethodGet, base+"?"+q.Encode(), nil)
    req.Header.Set("User-Agent", "WeatherStation-splitarea/1.0")
    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("open-meteo status %d", resp.StatusCode)
    }
    var obj meteoResp
    if err := json.NewDecoder(resp.Body).Decode(&obj); err != nil {
        return nil, err
    }
    target := utcHour.UTC().Format("2006-01-02T15:00")
    idx := -1
    for i, t := range obj.Hourly.Time {
        if t == target {
            idx = i
            break
        }
    }
    if idx < 0 {
        return &meteoVals{}, nil
    }
    mv := meteoVals{}
    pick := func(arr []float64) *float64 {
        if arr == nil || idx >= len(arr) {
            return nil
        }
        v := arr[idx]
        return &v
    }
    mv.Temp = pick(obj.Hourly.Temp)
    mv.RH = pick(obj.Hourly.RH)
    mv.Dew = pick(obj.Hourly.Dew)
    mv.WS = pick(obj.Hourly.WS)
    mv.WD = pick(obj.Hourly.WD)
    return &mv, nil
}

// job executes the split+augment for a specific local time (CST) and fixed minute=30 of the same hour.
func job(ctx context.Context, z, y, x int, bounds string, tzOffset int, outDir string, runAt time.Time) error {
    loc, _ := time.LoadLocation("Asia/Shanghai")
    if loc == nil {
        loc = time.FixedZone("CST", 8*3600)
    }
    runAt = runAt.In(loc)

    // Current hour's :30
    targetLocal := runAt.Truncate(time.Hour).Add(24 * time.Minute)

    // Fetch tile
    rec, err := getRadarTileAt(ctx, z, y, x, targetLocal)
    if err != nil {
        return fmt.Errorf("load radar tile z=%d y=%d x=%d at %s: %w", z, y, x, targetLocal.Format("2006-01-02 15:04:05"), err)
    }

    // Bounds
    Bw, Bs, Be, Bn, err := parseBounds(bounds)
    if err != nil {
        return err
    }
    // Clamp to tile
    if !(rec.West <= Bw && Be <= rec.East && rec.South <= Bs && Bn <= rec.North) {
        return fmt.Errorf("bounds not inside tile: tile=(%.4f,%.4f,%.4f,%.4f) B=(%.4f,%.4f,%.4f,%.4f)", rec.West, rec.South, rec.East, rec.North, Bw, Bs, Be, Bn)
    }
    Gw, Gs, Ge, Gn := align0p1(Bw, Bs, Be, Bn)
    // clamp
    Gw = math.Max(Gw, rec.West)
    Gs = math.Max(Gs, rec.South)
    Ge = math.Min(Ge, rec.East)
    Gn = math.Min(Gn, rec.North)
    if Ge <= Gw || Gn <= Gs {
        return fmt.Errorf("aligned bounds empty")
    }

    // Grid iterate with 0.1°
    d := 0.1
    ncols := int(math.Round((Ge - Gw) / d))
    nrows := int(math.Round((Gn - Gs) / d))
    if ncols <= 0 || nrows <= 0 {
        return fmt.Errorf("grid size zero")
    }

    // Decode int16 big-endian as we go; avoid full decode into []int16 to save mem
    w, h := rec.Width, rec.Height
    if w <= 0 || h <= 0 || len(rec.Data) < w*h*2 {
        return fmt.Errorf("invalid tile data")
    }

    // Prepare output dir: export_data/split_area/YYYYMMDD/HH/30 (keep legacy minute path)
    ymd := targetLocal.Format("20060102")
    hh := targetLocal.Format("15")
    dir := filepath.Join(outDir, ymd, hh, "30")
    if err := os.MkdirAll(dir, 0o755); err != nil {
        return err
    }
    base := fmt.Sprintf("%d-%d-%d", z, y, x)
    outPath := filepath.Join(dir, base+"_radar.csv")

    // Prepare Open-Meteo time window
    // Target UTC hour = floor(local to hour) - tzOffset
    floored := targetLocal.Truncate(time.Hour)
    utcHour := floored.Add(-time.Duration(tzOffset) * time.Hour)
    client := &http.Client{Timeout: 15 * time.Second}
    cache := map[string]*meteoVals{}
    keyOf := func(lon, lat float64) string { return fmt.Sprintf("%.4f,%.4f", lon, lat) }

    // CSV output
    f, err := os.Create(outPath)
    if err != nil {
        return err
    }
    defer f.Close()
    wcsv := csv.NewWriter(f)
    defer wcsv.Flush()
    // Header
    _ = wcsv.Write([]string{"longitude", "latitude", "reflectivity_dbz", "temperature_2m", "relative_humidity_2m", "dew_point_2m", "wind_speed_10m", "wind_direction_10m"})

    // Helper to read raw at (row,col)
    readRaw := func(rr, cc int) int16 {
        off := (rr*w + cc) * 2
        return int16(uint16(rec.Data[off])<<8 | uint16(rec.Data[off+1]))
    }

    // Iterate grid cells
    for ri := 0; ri < nrows; ri++ {
        cellS := Gs + float64(ri)*d
        cellN := cellS + d
        row0 := maxInt(0, latToRow(rec.South, rec.ResDeg, cellS))
        row1 := minInt(h, int(math.Ceil((cellN-rec.South)/rec.ResDeg)))
        for ci := 0; ci < ncols; ci++ {
            cellW := Gw + float64(ci)*d
            cellE := cellW + d
            col0 := maxInt(0, lonToCol(rec.West, rec.ResDeg, cellW))
            col1 := minInt(w, int(math.Ceil((cellE-rec.West)/rec.ResDeg)))
            // accumulate
            dbzs := make([]float64, 0, (row1-row0)*(col1-col0))
            for rr := row0; rr < row1; rr++ {
                for cc := col0; cc < col1; cc++ {
                    draw := readRaw(rr, cc)
                    if d, ok := dbzFromRaw(draw); ok {
                        dbzs = append(dbzs, d)
                    }
                }
            }
            var cellDBZStr string
            if len(dbzs) > 0 {
                cellDBZ := avgDbzLinear(dbzs)
                cellDBZStr = fmt.Sprintf("%.1f", cellDBZ)
            } else {
                cellDBZStr = ""
            }
            lon := (cellW + cellE) / 2.0
            lat := (cellS + cellN) / 2.0

            // Fetch meteo (cache by rounded lon,lat)
            k := keyOf(lon, lat)
            mv := cache[k]
            if mv == nil {
                mv, _ = fetchMeteo(ctx, client, lon, lat, utcHour)
                cache[k] = mv
            }
            // write row
            wcsv.Write([]string{
                fmt.Sprintf("%.4f", lon), fmt.Sprintf("%.4f", lat), cellDBZStr,
                fmtOptF(mv, func(m *meteoVals) *float64 {
                    if m == nil {
                        return nil
                    }
                    return m.Temp
                }),
                fmtOptF(mv, func(m *meteoVals) *float64 {
                    if m == nil {
                        return nil
                    }
                    return m.RH
                }),
                fmtOptF(mv, func(m *meteoVals) *float64 {
                    if m == nil {
                        return nil
                    }
                    return m.Dew
                }),
                fmtOptF(mv, func(m *meteoVals) *float64 {
                    if m == nil {
                        return nil
                    }
                    return m.WS
                }),
                fmtOptF(mv, func(m *meteoVals) *float64 {
                    if m == nil {
                        return nil
                    }
                    return m.WD
                }),
            })
        }
    }
    wcsv.Flush()
    if err := wcsv.Error(); err != nil {
        return err
    }
    log.Printf("[splitarea] saved %s", outPath)
    return nil
}

func fmtOptF(mv *meteoVals, pick func(*meteoVals) *float64) string {
    if mv == nil {
        return ""
    }
    p := pick(mv)
    if p == nil {
        return ""
    }
    return fmt.Sprintf("%.2f", *p)
}

func maxInt(a, b int) int {
    if a > b {
        return a
    }
    return b
}
func minInt(a, b int) int {
    if a < b {
        return a
    }
    return b
}

func getenvDefault(key, def string) string {
    if v := os.Getenv(key); v != "" {
        return v
    }
    return def
}

func main() {
    var (
        z        = flag.Int("z", 7, "tile z")
        y        = flag.Int("y", 40, "tile y")
        x        = flag.Int("x", 102, "tile x")
        b        = flag.String("b", getenvDefault("SPLITAREA_B", "108.15,22.83,109.27,23.61"), "region bounds W,S,E,N")
        outDir   = flag.String("out", "export_data/split_area", "output base directory")
        tzOffset = flag.Int("tz-offset", 8, "timezone offset hours to UTC for local time")
        once     = flag.Bool("once", false, "run once for previous hour and exit")
    )
    flag.Parse()
    // Bounds now have a sensible default; still validate format later in job()
    // Ensure DB initialized
    _ = database.GetDB()

    ctx := context.Background()
    if *once {
        if err := job(ctx, *z, *y, *x, *b, *tzOffset, *outDir, time.Now()); err != nil {
            log.Fatalf("run once: %v", err)
        }
        return
    }

    // Scheduler: run hourly at hh:45 for current hour's :30 radar tile
    loc, _ := time.LoadLocation("Asia/Shanghai")
    if loc == nil {
        loc = time.FixedZone("CST", 8*3600)
    }
    for {
        now := time.Now().In(loc)
        runAt := now.Truncate(time.Hour).Add(45 * time.Minute)
        if now.After(runAt) {
            runAt = runAt.Add(time.Hour)
        }
        time.Sleep(time.Until(runAt))
        // execute
        if err := job(ctx, *z, *y, *x, *b, *tzOffset, *outDir, runAt); err != nil {
            log.Printf("[splitarea] job error: %v", err)
        }
    }
}
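Two details above are worth making concrete: bounds are snapped outward to the 0.1° grid before iteration, and per-cell reflectivity is averaged in the linear Z domain, which weights strong echoes more heavily than a plain dBZ mean would. A standalone sketch that mirrors align0p1 and avgDbzLinear, reimplemented here because those helpers live in package main:

```go
package main

import (
    "fmt"
    "math"
)

func main() {
    // Snap the default bounds (108.15,22.83,109.27,23.61) outward to 0.1°.
    w, s := math.Floor(108.15*10)/10, math.Floor(22.83*10)/10 // 108.1, 22.8
    e, n := math.Ceil(109.27*10)/10, math.Ceil(23.61*10)/10   // 109.3, 23.7
    fmt.Println("aligned bounds:", w, s, e, n)

    // Average 20 dBZ and 40 dBZ in the linear domain: the result is about 37 dBZ,
    // much closer to the stronger echo than the 30 dBZ arithmetic mean.
    zsum := math.Pow(10, 20.0/10.0) + math.Pow(10, 40.0/10.0)
    fmt.Printf("linear-Z mean: %.1f dBZ\n", 10*math.Log10(zsum/2))
}
```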
14  cmd/service-udp/main.go  Normal file
@@ -0,0 +1,14 @@
package main

import (
    "log"

    "weatherstation/internal/server"
)

func main() {
    server.SetupLogger()

    if err := server.StartUDPServer(); err != nil {
        log.Fatalf("service-udp failed: %v", err)
    }
}
323  cmd/weatherstation/main.go  Normal file
@@ -0,0 +1,323 @@
package main

import (
    "context"
    "flag"
    "log"
    "os"
    "sync"
    "time"

    "weatherstation/internal/config"
    "weatherstation/internal/database"
    "weatherstation/internal/forecast"
    "weatherstation/internal/radar"
    "weatherstation/internal/rain"
    "weatherstation/internal/selftest"
    "weatherstation/internal/server"
    "weatherstation/internal/tools"
)

func main() {
    // Command-line flags
    var webOnly = flag.Bool("web", false, "start only the Web server (Gin)")
    var udpOnly = flag.Bool("udp", false, "start only the UDP server")
    // Debug: backfill the 10-minute table
    var doBackfill = flag.Bool("backfill", false, "aggregate 16s raw data into the 10-minute table (debug use)")
    var bfStation = flag.String("station", "", "station ID (empty backfills all stations)")
    var bfFrom = flag.String("from", "", "backfill start time, format: YYYY-MM-DD HH:MM:SS")
    var bfTo = flag.String("to", "", "backfill end time, format: YYYY-MM-DD HH:MM:SS")
    var bfWrap = flag.Float64("wrap", 0, "millimeters per full counter wrap-around (mm); <=0 falls back to counting the current value only")
    // Self-test control
    var noSelftest = flag.Bool("no-selftest", false, "skip startup self-test")
    var selftestOnly = flag.Bool("selftest_only", false, "run the self-test only, then exit")
    // Forecast fetching
    var forecastOnly = flag.Bool("forecast_only", false, "run a single open-meteo fetch and exit")
    var caiyunOnly = flag.Bool("caiyun_only", false, "run a single Caiyun fetch and exit")
    var cmaCLI = flag.Bool("cma_cli", false, "run a single CMA fetch and print the next three hours")
    var cmaOnly = flag.Bool("cma_only", false, "run a single CMA fetch and exit")
    var forecastDay = flag.String("forecast_day", "", "fetch the given date from 00:00 to now+3h (format YYYY-MM-DD)")
    // Historical data completion
    var historicalOnly = flag.Bool("historical_only", false, "run historical data completion only, then exit")
    var historicalStart = flag.String("historical_start", "", "historical data start date (format YYYY-MM-DD)")
    var historicalEnd = flag.String("historical_end", "", "historical data end date (format YYYY-MM-DD)")
    // Wind override: replace wind speed/direction in exports with Caiyun realtime values
    var useWindOverride = flag.Bool("wind", false, "override wind speed/direction in exported CSVs with Caiyun realtime data")
    // Historical CSV export
    var exportRangeOnly = flag.Bool("export_range", false, "export 10-minute CSVs (with ZTD fusion) for a date range, then exit; dates accept YYYY-MM-DD or YYYYMMDD")
    var exportStart = flag.String("export_start", "", "export start date (inclusive), format YYYY-MM-DD or YYYYMMDD")
    var exportEnd = flag.String("export_end", "", "export end date (inclusive), format YYYY-MM-DD or YYYYMMDD")
    // Radar: import a single CMA tile into the database
    var importTile = flag.Bool("import_tile", false, "import one CMA radar tile into the database and exit")
    var tileURL = flag.String("tile_url", "", "tile URL or /tiles/... path, used to parse product/time/z/y/x")
    var tilePath = flag.String("tile_path", "", "local tile file path (.bin)")
    flag.Parse()

    // Set up logging
    server.SetupLogger()

    // Initialize database connections
    _ = database.GetDB() // ensure the PostgreSQL connection is initialized
    defer database.Close()
    // Initialize the MySQL connection (if configured)
    _ = database.GetMySQL()
    defer database.CloseMySQL()

    // Startup self-test
    if !*noSelftest {
        if err := selftest.Run(context.Background()); err != nil {
            log.Fatalf("startup self-test failed: %v", err)
        }
        if *selftestOnly {
            log.Println("self-test finished, exiting as requested by --selftest_only")
            return
        }
    }

    // Single open-meteo fetch
    if *forecastOnly {
        if err := forecast.RunOpenMeteoFetch(context.Background()); err != nil {
            log.Fatalf("open-meteo fetch failed: %v", err)
        }
        log.Println("open-meteo fetch completed")
        return
    }

    // Single Caiyun fetch (token read from env CAIYUN_TOKEN or the config file)
    if *caiyunOnly {
        token := os.Getenv("CAIYUN_TOKEN")
        if token == "" {
            // fall back to config
            token = config.GetConfig().Forecast.CaiyunToken
            if token == "" {
                log.Fatalf("no Caiyun token provided; set env CAIYUN_TOKEN or forecast.caiyun_token in the config file")
            }
        }
        if err := forecast.RunCaiyunFetch(context.Background(), token); err != nil {
            log.Fatalf("caiyun fetch failed: %v", err)
        }
        log.Println("caiyun fetch completed")
        return
    }

    // Single CMA fetch (fixed parameters), write to DB and exit
    if *cmaOnly {
        if err := forecast.RunCMAFetch(context.Background()); err != nil {
            log.Fatalf("CMA fetch failed: %v", err)
        }
        log.Println("CMA fetch completed")
        return
    }

    // Single CMA fetch (fixed parameters) and print three hours
    if *cmaCLI {
        if err := forecast.RunCMACLI(context.Background()); err != nil {
            log.Fatalf("CMA fetch failed: %v", err)
        }
        return
    }

    // Import one CMA radar tile into the database
    if *importTile {
        if *tileURL == "" || *tilePath == "" {
            log.Fatalln("import_tile requires --tile_url and --tile_path")
        }
        if err := radar.ImportTileFile(context.Background(), *tileURL, *tilePath); err != nil {
            log.Fatalf("importing radar tile failed: %v", err)
        }
        log.Println("radar tile imported")
        return
    }

    // Historical CSV range export
    if *exportRangeOnly {
        if *exportStart == "" || *exportEnd == "" {
            log.Fatalln("export_range requires --export_start and --export_end dates (YYYY-MM-DD or YYYYMMDD)")
        }
        var opts tools.ExporterOptions
        if *useWindOverride {
            token := os.Getenv("CAIYUN_TOKEN")
            if token == "" {
                token = config.GetConfig().Forecast.CaiyunToken
            }
            if token == "" {
                log.Println("warning: --wind specified but no Caiyun token provided; ignoring wind override")
            } else {
                opts.OverrideWindWithCaiyun = true
                opts.CaiyunToken = token
            }
        }
        exporter := tools.NewExporterWithOptions(opts)
        if err := exporter.ExportRange(context.Background(), *exportStart, *exportEnd); err != nil {
            log.Fatalf("export_range failed: %v", err)
        }
        log.Println("export_range completed")
        return
    }

    // Tool: fetch the given day from 00:00 to now+3h (both providers)
    if *forecastDay != "" {
        if err := tools.RunForecastFetchForDay(context.Background(), *forecastDay); err != nil {
            log.Fatalf("forecast_day run failed: %v", err)
        }
        log.Println("forecast_day completed")
        return
    }

    // Historical data completion
    if *historicalOnly {
        if *historicalStart == "" || *historicalEnd == "" {
            log.Fatalln("historical data completion requires --historical_start and --historical_end dates (format YYYY-MM-DD)")
        }
        if err := forecast.RunOpenMeteoHistoricalFetch(context.Background(), *historicalStart, *historicalEnd); err != nil {
            log.Fatalf("historical data completion failed: %v", err)
        }
        log.Println("historical data completion finished")
        return
    }

    // Backfill debug path
    if *doBackfill {
        if *bfFrom == "" || *bfTo == "" {
            log.Fatalln("backfill requires --from and --to times")
        }
        loc, _ := time.LoadLocation("Asia/Shanghai")
        if loc == nil {
            loc = time.FixedZone("CST", 8*3600)
        }
        fromT, err := time.ParseInLocation("2006-01-02 15:04:05", *bfFrom, loc)
        if err != nil {
            log.Fatalf("failed to parse from: %v", err)
        }
        toT, err := time.ParseInLocation("2006-01-02 15:04:05", *bfTo, loc)
        if err != nil {
            log.Fatalf("failed to parse to: %v", err)
        }
        ctx := context.Background()
        if err := tools.RunBackfill10Min(ctx, tools.BackfillOptions{
            StationID:     *bfStation,
            FromTime:      fromT,
            ToTime:        toT,
            WrapCycleMM:   *bfWrap,
            BucketMinutes: 10,
        }); err != nil {
            log.Fatalf("backfill failed: %v", err)
        }
        log.Println("backfill completed")
        return
    }

    // Start services according to command-line flags
    startExporterBackground := func(wg *sync.WaitGroup) {
        if wg != nil {
            wg.Add(1)
        }
        go func() {
            defer func() {
                if wg != nil {
                    wg.Done()
                }
            }()
            log.Println("starting data exporter (10-minute)...")
            ctx := context.Background()
            // Handle the --wind override
            var opts tools.ExporterOptions
            if *useWindOverride {
                token := os.Getenv("CAIYUN_TOKEN")
                if token == "" {
                    token = config.GetConfig().Forecast.CaiyunToken
                }
                if token == "" {
                    log.Println("warning: --wind specified but no Caiyun token provided; ignoring wind override")
                } else {
                    opts.OverrideWindWithCaiyun = true
                    opts.CaiyunToken = token
                }
            }
            exporter := tools.NewExporterWithOptions(opts)
            if err := exporter.Start(ctx); err != nil {
                log.Printf("exporter exited: %v", err)
            }
        }()
    }

    startRadarSchedulerBackground := func(wg *sync.WaitGroup) {
        if wg != nil {
            wg.Add(1)
        }
        go func() {
            defer func() {
                if wg != nil {
                    wg.Done()
                }
            }()
            log.Println("starting radar download task (every 10 minutes, no delay, fixed tile 7/40/102)...")
            ctx := context.Background()
            _ = radar.Start(ctx, radar.Options{StoreToDB: true, Z: 7, Y: 40, X: 102})
        }()
    }

    startRainSchedulerBackground := func(wg *sync.WaitGroup) {
        if wg != nil {
            wg.Add(1)
        }
        go func() {
            defer func() {
                if wg != nil {
                    wg.Done()
                }
            }()
            log.Println("starting hourly rainfall download task (every 10 minutes, fixed tiles 7/40/102 and 7/40/104)...")
            ctx := context.Background()
            _ = rain.Start(ctx, rain.Options{StoreToDB: true})
        }()
    }

    if *webOnly {
        // Web server + exporter only
        startExporterBackground(nil)
        startRadarSchedulerBackground(nil)
        startRainSchedulerBackground(nil)
        log.Println("starting Web server mode...")
        if err := server.StartGinServer(); err != nil {
            log.Fatalf("failed to start Web server: %v", err)
        }
    } else if *udpOnly {
        // UDP server + exporter only
        startExporterBackground(nil)
        startRadarSchedulerBackground(nil)
        startRainSchedulerBackground(nil)
        log.Println("starting UDP server mode...")
        if err := server.StartUDPServer(); err != nil {
            log.Fatalf("failed to start UDP server: %v", err)
        }
    } else {
        // Start UDP and Web servers plus the exporter together
        log.Println("starting full mode: UDP + Web server + exporter...")

        var wg sync.WaitGroup
        wg.Add(2)

        // Start the UDP server
        go func() {
            defer wg.Done()
            log.Println("starting UDP server...")
            if err := server.StartUDPServer(); err != nil {
                log.Printf("UDP server exited abnormally: %v", err)
            }
        }()

        // Start the Web server
        go func() {
            defer wg.Done()
            log.Println("starting Web server...")
            if err := server.StartGinServer(); err != nil {
                log.Printf("Web server exited abnormally: %v", err)
            }
        }()

        startExporterBackground(&wg)
        startRadarSchedulerBackground(&wg)
        startRainSchedulerBackground(&wg)
        wg.Wait()
    }
}
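The CAIYUN_TOKEN env-then-config fallback appears three times in this file (the --caiyun_only path, the --export_range path, and the background exporter). A hypothetical helper, not part of the repository, that would centralize the lookup order shown above:

```go
package main

import (
    "os"

    "weatherstation/internal/config"
)

// resolveCaiyunToken is a refactor sketch: environment first, then the
// forecast.caiyun_token value from config.yaml. The helper name is invented;
// only the lookup order is taken from the code above.
func resolveCaiyunToken() string {
    if t := os.Getenv("CAIYUN_TOKEN"); t != "" {
        return t
    }
    return config.GetConfig().Forecast.CaiyunToken
}

func main() {
    if resolveCaiyunToken() == "" {
        println("no caiyun token configured")
    }
}
```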
151  cmd/weatherstationctl/main.go  Normal file
@@ -0,0 +1,151 @@
package main

import (
    "fmt"
    "io"
    "io/fs"
    "os"
    "os/exec"
    "path/filepath"
    "strings"
)

// installPrefix is the base install directory.
// Binaries go to installPrefix/bin, assets and config go to installPrefix/.
const installPrefix = "/opt/weatherstation"

func main() {
    // Ensure target directories exist
    binDir := filepath.Join(installPrefix, "bin")
    if err := os.MkdirAll(binDir, 0o755); err != nil {
        fatalf("failed to create directory %s: %v", binDir, err)
    }

    // Build all service-* under cmd/
    serviceDirs, err := findServiceDirs()
    if err != nil {
        fatalf("failed to scan service directories: %v", err)
    }
    if len(serviceDirs) == 0 {
        fatalf("no service-* microservice directories found")
    }

    for _, svc := range serviceDirs {
        out := filepath.Join(binDir, svc)
        // The relative "./" prefix is required; otherwise go treats the path as a stdlib/module path
        pkg := "./" + filepath.ToSlash(filepath.Join("cmd", svc))
        fmt.Printf("building %s -> %s\n", pkg, out)
        if err := run("go", "build", "-o", out, pkg); err != nil {
            fatalf("build failed for %s: %v", pkg, err)
        }
    }

    // Copy templates, static, config.yaml to installPrefix
    // Replace existing files/directories
    if err := copyDirReplacing("templates", filepath.Join(installPrefix, "templates")); err != nil {
        fatalf("failed to copy templates: %v", err)
    }
    if err := copyDirReplacing("static", filepath.Join(installPrefix, "static")); err != nil {
        fatalf("failed to copy static: %v", err)
    }
    if err := copyFileReplacing("config.yaml", filepath.Join(installPrefix, "config.yaml"), 0o644); err != nil {
        // config.yaml may be absent from the repository; attempt the copy as required and warn if missing
        if !os.IsNotExist(err) {
            fatalf("failed to copy config.yaml: %v", err)
        } else {
            fmt.Println("note: config.yaml not found in the repository root, skipping copy")
        }
    }

    fmt.Printf("done: microservices installed to %s, assets synced to %s\n", binDir, installPrefix)
}

func fatalf(format string, a ...any) {
    fmt.Fprintf(os.Stderr, format+"\n", a...)
    os.Exit(1)
}

// findServiceDirs returns names like service-api, service-udp under cmd/.
func findServiceDirs() ([]string, error) {
    entries, err := os.ReadDir("cmd")
    if err != nil {
        return nil, err
    }
    var list []string
    for _, e := range entries {
        if !e.IsDir() {
            continue
        }
        name := e.Name()
        if strings.HasPrefix(name, "service-") {
            // ensure main.go exists to be buildable
            if _, err := os.Stat(filepath.Join("cmd", name, "main.go")); err == nil {
                list = append(list, name)
            }
        }
    }
    return list, nil
}

func run(name string, args ...string) error {
    cmd := exec.Command(name, args...)
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr
    return cmd.Run()
}

func copyDirReplacing(src, dst string) error {
    st, err := os.Stat(src)
    if err != nil {
        return err
    }
    if !st.IsDir() {
        return fmt.Errorf("%s is not a directory", src)
    }
    // Remove destination to ensure clean replace
    if err := os.RemoveAll(dst); err != nil {
        return err
    }
    if err := os.MkdirAll(dst, 0o755); err != nil {
        return err
    }
    return filepath.WalkDir(src, func(path string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        rel, err := filepath.Rel(src, path)
        if err != nil {
            return err
        }
        target := filepath.Join(dst, rel)
        if d.IsDir() {
            if rel == "." {
                return nil
            }
            return os.MkdirAll(target, 0o755)
        }
        return copyFileReplacing(path, target, 0o644)
    })
}

func copyFileReplacing(src, dst string, perm os.FileMode) error {
    in, err := os.Open(src)
    if err != nil {
        return err
    }
    defer in.Close()

    if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil {
        return err
    }
    out, err := os.OpenFile(dst, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, perm)
    if err != nil {
        return err
    }
    defer func() { _ = out.Close() }()

    if _, err := io.Copy(out, in); err != nil {
        return err
    }
    return out.Sync()
}
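weatherstationctl builds each service with a relative package path; without the leading "./", `go build cmd/service-udp` would be resolved as a module path rather than the local directory. A one-off sketch of the same invocation for a single service (the output path here is illustrative):

```go
package main

import (
    "fmt"
    "os"
    "os/exec"
    "path/filepath"
)

func main() {
    // Build one service the same way weatherstationctl does.
    pkg := "./" + filepath.ToSlash(filepath.Join("cmd", "service-udp"))
    cmd := exec.Command("go", "build", "-o", filepath.Join(os.TempDir(), "service-udp"), pkg)
    cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
    if err := cmd.Run(); err != nil {
        fmt.Fprintln(os.Stderr, "build failed:", err)
    }
}
```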
70  config.yaml
@@ -2,17 +2,65 @@ server:
  udp_port: 10006

database:
  host: "8.134.185.53"
  port: 5432
  user: "weatheruser"
  password: "yourpassword"
  host: "127.0.0.1"
  port: 15432
  user: "yarnom"
  password: "fKvVq6SHjLD2bl"
  dbname: "weatherdb"
  sslmode: "disable"
  sslmode: "disable"

heartbeat:
  interval: 5
  message: "Hello"
forecast:
  caiyun_token: "ZAcZq49qzibr10F0"

device_check:
  interval: 5
  message: "Hello"
radar:
  realtime_enabled: true
  realtime_interval_minutes: 60
  aliases:
    - alias: "海珠雷达站"
      lat: 23.09
      lon: 113.35
      z: 7
      y: 40
      x: 104
    - alias: "番禺雷达站"
      lat: 23.0225
      lon: 113.3313
      z: 7
      y: 40
      x: 104
    - alias: "武汉江夏雷达站"
      lat: 30.459015
      lon: 114.413052
      z: 7
      y: 42
      x: 104
    - alias: "GuangXiRadarStation1"
      lat: 23.39
      lon: 108.26
    - alias: "GuangXiRadarStation2"
      lat: 22.83
      lon: 108.76
    - alias: "GuangXiRadarStation3"
      lat: 23.28
      lon: 109.34
    - alias: "GuangXiRadarStation4"
      lat: 22.35
      lon: 108.74

mysql:
  host: "8.134.185.53"
  port: 3306
  user: "remote"
  password: "root"
  dbname: "rtk_data"
  params: "parseTime=true&loc=Asia%2FShanghai"

sms:
  enabled: false
  provider: "aliyun"
  aliyun:
    access_key_id: "LTAI5tGyoSky5ZG14qYTv2Fv"
    access_key_secret: "jzfEqr9WV7ltSjO7BXV0WxozyFrvZu"
    sign_name: "英卓科技"
    template_code: "SMS_498815351"
    endpoint: "dysmsapi.aliyuncs.com"
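The new mysql block carries the driver parameters in a single params string. Assuming the connection uses the standard go-sql-driver/mysql DSN format (the actual database code is not shown in this diff), the fields would be assembled roughly like this:

```go
package main

import "fmt"

func main() {
    // Illustrative values copied from the yaml above; the real ones are loaded from config.yaml.
    user, pass := "remote", "root"
    host, port := "8.134.185.53", 3306
    dbname := "rtk_data"
    params := "parseTime=true&loc=Asia%2FShanghai"
    dsn := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?%s", user, pass, host, port, dbname, params)
    fmt.Println(dsn)
}
```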
@@ -21,21 +21,9 @@ type DatabaseConfig struct {
    SSLMode string `yaml:"sslmode"`
}

type HeartbeatConfig struct {
    Interval int    `yaml:"interval"`
    Message  string `yaml:"message"`
}

type DeviceCheckConfig struct {
    Interval int    `yaml:"interval"`
    Message  string `yaml:"message"`
}

type Config struct {
    Server      ServerConfig      `yaml:"server"`
    Database    DatabaseConfig    `yaml:"database"`
    Heartbeat   HeartbeatConfig   `yaml:"heartbeat"`
    DeviceCheck DeviceCheckConfig `yaml:"device_check"`
    Server   ServerConfig   `yaml:"server"`
    Database DatabaseConfig `yaml:"database"`
}

var (
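The radar.aliases entries added to config.yaml need matching structs on the Go side; the real definitions in weatherstation/internal/config are not part of this diff, so the following is only an assumed shape, with yaml tags mirroring the keys shown above:

```go
package config

// Assumed shape only; not taken from the repository. Field names mirror the
// radar section of config.yaml in this change set.
type RadarAliasConfig struct {
    Alias string  `yaml:"alias"`
    Lat   float64 `yaml:"lat"`
    Lon   float64 `yaml:"lon"`
    Z     int     `yaml:"z"`
    Y     int     `yaml:"y"`
    X     int     `yaml:"x"`
}

type RadarConfig struct {
    RealtimeEnabled         bool               `yaml:"realtime_enabled"`
    RealtimeIntervalMinutes int                `yaml:"realtime_interval_minutes"`
    Aliases                 []RadarAliasConfig `yaml:"aliases"`
}
```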
33  core/cmd/core-api/main.go  Normal file
@@ -0,0 +1,33 @@
package main

import (
    "log"
    "os"

    "weatherstation/core/internal/config"
    "weatherstation/core/internal/data"
    "weatherstation/core/internal/server"
)

func main() {
    cfg := config.Load()
    _ = data.DB()
    r := server.NewRouter(server.Options{
        UIServeDir:   cfg.UIServeDir,
        BigscreenDir: cfg.BigscreenDir,
        TemplateDir:  cfg.TemplateDir,
        StaticDir:    cfg.StaticDir,
        EnableCORS:   cfg.DevEnableCORS,
        AuthSecret:   cfg.AuthSecret,
    })

    addr := cfg.Addr
    if env := os.Getenv("PORT"); env != "" {
        addr = ":" + env
    }

    log.Printf("core-api listening on %s", addr)
    if err := r.Run(addr); err != nil {
        log.Fatal(err)
    }
}
14  core/cmd/core-bigscreen/main.go  Normal file
@@ -0,0 +1,14 @@
package main

import (
    "log"

    oldserver "weatherstation/internal/server"
)

func main() {
    const port = 10008
    log.Printf("core-bigscreen listening on :%d", port)
    if err := oldserver.StartBigscreenServerOn(port); err != nil {
        log.Fatal(err)
    }
}
54  core/cmd/core-mqtt/main.go  Normal file
@@ -0,0 +1,54 @@
package main

import (
    "encoding/json"
    "fmt"
    "os"
    "time"

    mqtt "github.com/eclipse/paho.mqtt.golang"
)

func main() {
    broker := "wss://broker.emqx.io:8084/mqtt"
    clientID := "Mqttx_07c4e9ed"
    username := "1"
    password := "1"
    topic := "$dp"

    opts := mqtt.NewClientOptions()
    opts.AddBroker(broker)
    opts.SetClientID(clientID)
    opts.SetUsername(username)
    opts.SetPassword(password)
    opts.SetProtocolVersion(4)
    opts.SetKeepAlive(60 * time.Second)
    opts.SetAutoReconnect(true)
    opts.SetCleanSession(true)

    c := mqtt.NewClient(opts)
    if t := c.Connect(); t.Wait() && t.Error() != nil {
        fmt.Printf("connect error: %v\n", t.Error())
        os.Exit(1)
    }
    defer c.Disconnect(250)

    // Build the required payload format (only device and time change)
    payload := map[string]any{
        "type":   "hws",
        "device": "Z866",
        "Dm":     0.0001,
        "Pa":     976.7,
        "Rc":     0,
        "Sm":     0.0001,
        "Ta":     39,
        "Ua":     26.6,
        "time":   time.Now().UnixMilli(),
    }
    b, _ := json.Marshal(payload)
    if t := c.Publish(topic, 1, false, b); t.Wait() && t.Error() != nil {
        fmt.Printf("publish error: %v\n", t.Error())
        os.Exit(2)
    }
    fmt.Println("published to", topic)
}
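To check what core-mqtt actually publishes, the same paho client library can subscribe to the $dp topic. A small companion sketch reusing the broker settings above (the client ID suffix is arbitrary, and the listen window is only illustrative):

```go
package main

import (
    "fmt"
    "time"

    mqtt "github.com/eclipse/paho.mqtt.golang"
)

func main() {
    opts := mqtt.NewClientOptions()
    opts.AddBroker("wss://broker.emqx.io:8084/mqtt")
    opts.SetClientID("Mqttx_07c4e9ed-sub") // any unique ID
    c := mqtt.NewClient(opts)
    if t := c.Connect(); t.Wait() && t.Error() != nil {
        panic(t.Error())
    }
    defer c.Disconnect(250)

    // Print every message seen on the $dp topic used by core-mqtt.
    if t := c.Subscribe("$dp", 1, func(_ mqtt.Client, m mqtt.Message) {
        fmt.Printf("%s: %s\n", m.Topic(), m.Payload())
    }); t.Wait() && t.Error() != nil {
        panic(t.Error())
    }
    time.Sleep(30 * time.Second) // listen briefly, then exit
}
```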
235  core/cmd/im_export_data/main.go  Normal file
@@ -0,0 +1,235 @@
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/csv"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/data"
|
||||
)
|
||||
|
||||
type actualHour struct {
|
||||
HourEnd time.Time
|
||||
TempC float64
|
||||
HumidityPct float64
|
||||
PressureHpa float64
|
||||
WindSpeedMs float64
|
||||
WindDirDeg float64
|
||||
RainActualMM float64
|
||||
}
|
||||
|
||||
func main() {
|
||||
var stationID, startStr, endStr, providersCSV, outPath, tzName string
|
||||
flag.StringVar(&stationID, "station", "", "站点ID,如 RS485-002A6E")
|
||||
flag.StringVar(&startStr, "start", "", "开始时间,格式 2006-01-02 15:00")
|
||||
flag.StringVar(&endStr, "end", "", "结束时间,格式 2006-01-02 15:00(开区间)")
|
||||
flag.StringVar(&providersCSV, "providers", "caiyun,ec,wrf", "逗号分隔的预报源,默认 caiyun,ec,wrf")
|
||||
flag.StringVar(&outPath, "out", "", "输出 CSV 文件路径;留空输出到 stdout")
|
||||
flag.StringVar(&tzName, "tz", "Asia/Shanghai", "时区,例如 Asia/Shanghai")
|
||||
flag.Parse()
|
||||
|
||||
if strings.TrimSpace(stationID) == "" || strings.TrimSpace(startStr) == "" || strings.TrimSpace(endStr) == "" {
|
||||
log.Fatalf("用法: im_export_data --station RS485-XXXXXX --start '2024-08-01 00:00' --end '2024-08-02 00:00' [--providers caiyun,ec,wrf] [--out out.csv]")
|
||||
}
|
||||
|
||||
loc, _ := time.LoadLocation(tzName)
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
parse := func(s string) time.Time {
|
||||
var t time.Time
|
||||
var err error
|
||||
for _, ly := range []string{"2006-01-02 15:04", "2006-01-02 15", "2006-01-02"} {
|
||||
t, err = time.ParseInLocation(ly, s, loc)
|
||||
if err == nil {
|
||||
return t.Truncate(time.Hour)
|
||||
}
|
||||
}
|
||||
log.Fatalf("无法解析时间: %s", s)
|
||||
return time.Time{}
|
||||
}
|
||||
start := parse(startStr)
|
||||
end := parse(endStr)
|
||||
if !end.After(start) {
|
||||
log.Fatalf("end 必须大于 start")
|
||||
}
|
||||
|
||||
providers := splitCSV(providersCSV)
|
||||
if len(providers) == 0 {
|
||||
providers = []string{"caiyun"}
|
||||
}
|
||||
|
||||
// Prepare writer
|
||||
var out *csv.Writer
|
||||
var file *os.File
|
||||
if strings.TrimSpace(outPath) != "" {
|
||||
f, err := os.Create(outPath)
|
||||
if err != nil {
|
||||
log.Fatalf("打开输出文件失败: %v", err)
|
||||
}
|
||||
defer f.Close()
|
||||
out = csv.NewWriter(f)
|
||||
file = f
|
||||
} else {
|
||||
out = csv.NewWriter(os.Stdout)
|
||||
}
|
||||
defer out.Flush()
|
||||
|
||||
// Header
|
||||
header := []string{"station_id", "hour_end", "temp_c", "humidity_pct", "wind_dir_deg", "wind_speed_ms", "pressure_hpa", "rain_actual_mm"}
|
||||
for _, p := range providers {
|
||||
header = append(header, fmt.Sprintf("%s_lead1_rain_mm", p))
|
||||
header = append(header, fmt.Sprintf("%s_lead2_rain_mm", p))
|
||||
header = append(header, fmt.Sprintf("%s_lead3_rain_mm", p))
|
||||
}
|
||||
if err := out.Write(header); err != nil {
|
||||
log.Fatalf("写入 CSV 失败: %v", err)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
rows, err := loadActualHourly(ctx, stationID, start, end)
|
||||
if err != nil {
|
||||
log.Fatalf("查询实况失败: %v", err)
|
||||
}
|
||||
|
||||
for _, row := range rows {
|
||||
rec := []string{
|
||||
stationID,
|
||||
row.HourEnd.Format("2006-01-02 15:04:05"),
|
||||
fmt.Sprintf("%.2f", row.TempC),
|
||||
fmt.Sprintf("%.2f", row.HumidityPct),
|
||||
fmt.Sprintf("%.2f", row.WindDirDeg),
|
||||
fmt.Sprintf("%.3f", row.WindSpeedMs),
|
||||
fmt.Sprintf("%.2f", row.PressureHpa),
|
||||
fmt.Sprintf("%.3f", row.RainActualMM),
|
||||
}
|
||||
for _, p := range providers {
|
||||
// For each lead 1..3, get rain for forecast_time = hour_end, latest issued_at for that lead
|
||||
for lead := 1; lead <= 3; lead++ {
|
||||
v, _ := loadProviderRainAt(ctx, stationID, p, row.HourEnd, lead)
|
||||
if v < 0 {
|
||||
rec = append(rec, "")
|
||||
} else {
|
||||
rec = append(rec, fmt.Sprintf("%.3f", v))
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := out.Write(rec); err != nil {
|
||||
log.Fatalf("写入 CSV 失败: %v", err)
|
||||
}
|
||||
}
|
||||
out.Flush()
|
||||
if err := out.Error(); err != nil {
|
||||
log.Fatalf("写入 CSV 错误: %v", err)
|
||||
}
|
||||
if file != nil {
|
||||
log.Printf("导出完成: %s,共 %d 行", outPath, len(rows))
|
||||
}
|
||||
}
|
||||
|
||||
func splitCSV(s string) []string {
|
||||
parts := strings.Split(s, ",")
|
||||
out := make([]string, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if p != "" {
|
||||
out = append(out, p)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func loadActualHourly(ctx context.Context, stationID string, start, end time.Time) ([]actualHour, error) {
|
||||
// Right-endpoint hourly aggregation from rs485_weather_10min
|
||||
const q = `
|
||||
WITH base AS (
|
||||
SELECT * FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
), g AS (
|
||||
SELECT date_trunc('hour', bucket_start) AS grp,
|
||||
SUM(temp_c_x100 * sample_count)::bigint AS w_temp,
|
||||
SUM(humidity_pct * sample_count)::bigint AS w_hum,
|
||||
SUM(pressure_hpa_x100 * sample_count)::bigint AS w_p,
|
||||
SUM(solar_wm2_x100 * sample_count)::bigint AS w_solar,
|
||||
SUM(uv_index * sample_count)::bigint AS w_uv,
|
||||
SUM(wind_speed_ms_x1000 * sample_count)::bigint AS w_ws,
|
||||
MAX(wind_gust_ms_x1000) AS gust_max,
|
||||
SUM(sin(radians(wind_dir_deg)) * sample_count)::double precision AS sin_sum,
|
||||
SUM(cos(radians(wind_dir_deg)) * sample_count)::double precision AS cos_sum,
|
||||
SUM(rain_10m_mm_x1000) AS rain_sum,
|
||||
SUM(sample_count) AS n_sum
|
||||
FROM base GROUP BY 1
|
||||
)
|
||||
SELECT grp + interval '1 hour' AS hour_end,
|
||||
(w_temp/NULLIF(n_sum,0))/100.0 AS temp_c,
|
||||
(w_hum/NULLIF(n_sum,0))::double precision AS humidity_pct,
|
||||
(w_p/NULLIF(n_sum,0))/100.0 AS pressure_hpa,
|
||||
(w_ws/NULLIF(n_sum,0))/1000.0 AS wind_speed_ms,
|
||||
CASE WHEN sin_sum IS NULL OR cos_sum IS NULL THEN NULL
|
||||
ELSE (
|
||||
CASE WHEN degrees(atan2(sin_sum, cos_sum)) < 0
|
||||
THEN degrees(atan2(sin_sum, cos_sum)) + 360
|
||||
ELSE degrees(atan2(sin_sum, cos_sum)) END)
|
||||
END AS wind_dir_deg,
|
||||
(rain_sum/1000.0) AS rain_mm
|
||||
FROM g
|
||||
ORDER BY hour_end`
|
||||
|
||||
rows, err := data.DB().QueryContext(ctx, q, stationID, start, end)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var out []actualHour
|
||||
for rows.Next() {
|
||||
var t time.Time
|
||||
var ta, ua, pa, ws, dm, rain sql.NullFloat64
|
||||
if err := rows.Scan(&t, &ta, &ua, &pa, &ws, &dm, &rain); err != nil {
|
||||
continue
|
||||
}
|
||||
out = append(out, actualHour{
|
||||
HourEnd: t,
|
||||
TempC: nullF(ta),
|
||||
HumidityPct: nullF(ua),
|
||||
PressureHpa: nullF(pa),
|
||||
WindSpeedMs: nullF(ws),
|
||||
WindDirDeg: nullF(dm),
|
||||
RainActualMM: nullF(rain),
|
||||
})
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// loadProviderRainAt returns rain(mm) for a provider at forecast_time=t with fixed lead, picking latest issued_at.
|
||||
func loadProviderRainAt(ctx context.Context, stationID, provider string, t time.Time, lead int) (float64, error) {
|
||||
const q = `
|
||||
SELECT COALESCE(rain_mm_x1000,0)::bigint
|
||||
FROM (
|
||||
SELECT rain_mm_x1000, issued_at,
|
||||
CEIL(EXTRACT(EPOCH FROM ($3 - issued_at)) / 3600.0)::int AS lead_hours
|
||||
FROM forecast_hourly
|
||||
WHERE station_id=$1 AND provider=$2 AND forecast_time=$3
|
||||
) x
|
||||
WHERE lead_hours=$4
|
||||
ORDER BY issued_at DESC
|
||||
LIMIT 1`
|
||||
var v int64
|
||||
err := data.DB().QueryRowContext(ctx, q, stationID, provider, t, lead).Scan(&v)
|
||||
if err != nil {
|
||||
return -1, err
|
||||
}
|
||||
return float64(v) / 1000.0, nil
|
||||
}
|
||||
|
||||
func nullF(n sql.NullFloat64) float64 {
|
||||
if n.Valid {
|
||||
return n.Float64
|
||||
}
|
||||
return 0
|
||||
}
|
||||
399  core/cmd/radar_hour_export/main.go  Normal file
@@ -0,0 +1,399 @@
package main
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/binary"
|
||||
"encoding/csv"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"math"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/data"
|
||||
)
|
||||
|
||||
type station struct {
|
||||
ID string
|
||||
Lat float64
|
||||
Lon float64
|
||||
}
|
||||
|
||||
func main() {
|
||||
var stationArg string
|
||||
var timeArg string
|
||||
var startArg string
|
||||
var endArg string
|
||||
var outPath string
|
||||
var zoom int
|
||||
flag.StringVar(&stationArg, "station", "", "station id (e.g., RS485-XXXXXX or hex XXXXXX)")
|
||||
flag.StringVar(&timeArg, "time", "", "[deprecated] right-endpoint time 'YYYY-MM-DD HH:MM:SS' (e.g., 2024-08-01 17:00:00)")
|
||||
flag.StringVar(&startArg, "start", "", "range start time 'YYYY-MM-DD HH:MM:SS'")
|
||||
flag.StringVar(&endArg, "end", "", "range end time 'YYYY-MM-DD HH:MM:SS'")
|
||||
flag.StringVar(&outPath, "out", "", "output CSV file (default stdout)")
|
||||
flag.IntVar(&zoom, "z", 7, "radar tile zoom level (default 7)")
|
||||
flag.Parse()
|
||||
|
||||
if strings.TrimSpace(stationArg) == "" || (strings.TrimSpace(timeArg) == "" && (strings.TrimSpace(startArg) == "" || strings.TrimSpace(endArg) == "")) {
|
||||
log.Fatalf("usage: radar_hour_export -station RS485-XXXXXX (-time 'YYYY-MM-DD HH:MM:SS' | -start 'YYYY-MM-DD HH:MM:SS' -end 'YYYY-MM-DD HH:MM:SS') [-out file.csv] [-z 7]")
|
||||
}
|
||||
|
||||
st, err := resolveStation(stationArg)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if st == nil {
|
||||
log.Fatalf("station not found: %s", stationArg)
|
||||
}
|
||||
|
||||
// parse time(s) in Asia/Shanghai
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
var tStart, tEnd time.Time
|
||||
if strings.TrimSpace(timeArg) != "" {
|
||||
// legacy single-hour mode: time is right-endpoint, range = (t-1h, t]
|
||||
tEnd, err = time.ParseInLocation("2006-01-02 15:04:05", timeArg, loc)
|
||||
if err != nil {
|
||||
log.Fatalf("invalid time: %v", err)
|
||||
}
|
||||
tStart = tEnd.Add(-1 * time.Hour)
|
||||
} else {
|
||||
var err1, err2 error
|
||||
tStart, err1 = time.ParseInLocation("2006-01-02 15:04:05", startArg, loc)
|
||||
tEnd, err2 = time.ParseInLocation("2006-01-02 15:04:05", endArg, loc)
|
||||
if err1 != nil || err2 != nil {
|
||||
log.Fatalf("invalid start/end time")
|
||||
}
|
||||
if !tEnd.After(tStart) {
|
||||
log.Fatalf("end must be after start")
|
||||
}
|
||||
}
|
||||
|
||||
y, x, _, err := pickTileAt(st.Lat, st.Lon, zoom)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if y < 0 || x < 0 {
|
||||
log.Fatalf("no radar tile covering station at z=%d", zoom)
|
||||
}
|
||||
|
||||
// collect tile times within (tStart, tEnd]
|
||||
times, err := tileTimesInRange(zoom, y, x, tStart, tEnd)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if len(times) == 0 {
|
||||
log.Printf("no radar tiles in hour window for y=%d x=%d", y, x)
|
||||
}
|
||||
|
||||
// rainfall will be computed per-hour-end for each tile time
|
||||
|
||||
// CSV writer
|
||||
var w *csv.Writer
|
||||
var f *os.File
|
||||
if outPath == "" {
|
||||
w = csv.NewWriter(os.Stdout)
|
||||
} else {
|
||||
f, err = os.Create(outPath)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer f.Close()
|
||||
w = csv.NewWriter(f)
|
||||
}
|
||||
defer w.Flush()
|
||||
// header
|
||||
_ = w.Write([]string{"time", "rain_mm", "wind_dir", "wind_speed", "ge30", "ge35", "ge40"})
|
||||
|
||||
// cache rainfall per hour-end to avoid repeated queries
|
||||
rainCache := make(map[time.Time]float64)
|
||||
|
||||
for _, dt := range times {
|
||||
// wind from radar_weather nearest to station at this dt
|
||||
wd, ws := nearestWind(st.Lat, st.Lon, dt)
|
||||
// load tile values at dt for the chosen y/x tile
|
||||
vals, meta, err := loadRadarTile(zoom, y, x, dt)
|
||||
if err != nil {
|
||||
log.Printf("tile load failed at %v: %v", dt, err)
|
||||
continue
|
||||
}
|
||||
// right-endpoint hour for rainfall: (hourEnd-1h, hourEnd]
|
||||
hourEnd := rightEndpointHour(dt.In(loc))
|
||||
rainMM := rainCache[hourEnd]
|
||||
if _, ok := rainCache[hourEnd]; !ok {
|
||||
rmm, _ := hourRain(st.ID, hourEnd.Add(-1*time.Hour), hourEnd)
|
||||
rainCache[hourEnd] = rmm
|
||||
rainMM = rmm
|
||||
}
|
||||
// build sector polygon using wind (fallback to circle-only if no wind)
|
||||
ge30, ge35, ge40 := 0, 0, 0
|
||||
if wd != nil && ws != nil && *ws > 0.01 {
|
||||
poly := sectorPolygon(st.Lat, st.Lon, *wd, *ws, 3*time.Hour)
|
||||
ge30, ge35, ge40 = countInPolygon(vals, meta, poly)
|
||||
} else {
|
||||
// fallback to 8km circle approximated as polygon
|
||||
poly := circlePolygon(st.Lat, st.Lon, 8000)
|
||||
ge30, ge35, ge40 = countInPolygon(vals, meta, poly)
|
||||
}
|
||||
wdStr, wsStr := "", ""
|
||||
if wd != nil {
|
||||
wdStr = fmt.Sprintf("%.0f", *wd)
|
||||
}
|
||||
if ws != nil {
|
||||
wsStr = fmt.Sprintf("%.1f", *ws)
|
||||
}
|
||||
rec := []string{dt.In(loc).Format("2006-01-02 15:04:05"), fmt.Sprintf("%.3f", rainMM), wdStr, wsStr, fmt.Sprintf("%d", ge30), fmt.Sprintf("%d", ge35), fmt.Sprintf("%d", ge40)}
|
||||
if err := w.Write(rec); err != nil {
|
||||
log.Printf("csv write failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func resolveStation(arg string) (*station, error) {
|
||||
id := strings.TrimSpace(arg)
|
||||
if !strings.HasPrefix(strings.ToUpper(id), "RS485-") {
|
||||
// treat as hex suffix
|
||||
hex := strings.ToUpper(strings.TrimSpace(id))
|
||||
hex = filterHex(hex)
|
||||
if len(hex) > 6 {
|
||||
hex = hex[len(hex)-6:]
|
||||
}
|
||||
id = "RS485-" + hex
|
||||
}
|
||||
rows, err := data.DB().Query(`SELECT station_id, latitude, longitude FROM stations WHERE station_id = $1`, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
if rows.Next() {
|
||||
var s station
|
||||
if err := rows.Scan(&s.ID, &s.Lat, &s.Lon); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &s, nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func filterHex(s string) string {
|
||||
var b strings.Builder
|
||||
for i := 0; i < len(s); i++ {
|
||||
c := s[i]
|
||||
if (c >= '0' && c <= '9') || (c >= 'A' && c <= 'F') {
|
||||
b.WriteByte(c)
|
||||
}
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
type tileMeta struct {
|
||||
West, South, East, North float64
|
||||
W, H int
|
||||
}
|
||||
|
||||
func pickTileAt(lat, lon float64, z int) (int, int, tileMeta, error) {
|
||||
const q = `SELECT y,x,west,south,east,north FROM radar_tiles WHERE z=$1 AND $2 BETWEEN south AND north AND $3 BETWEEN west AND east ORDER BY dt DESC LIMIT 1`
|
||||
var y, x int
|
||||
var m tileMeta
|
||||
err := data.DB().QueryRow(q, z, lat, lon).Scan(&y, &x, &m.West, &m.South, &m.East, &m.North)
|
||||
if err == sql.ErrNoRows {
|
||||
return -1, -1, m, nil
|
||||
}
|
||||
return y, x, m, err
|
||||
}
|
||||
|
||||
func tileTimesInRange(z, y, x int, start, end time.Time) ([]time.Time, error) {
|
||||
const q = `SELECT dt FROM radar_tiles WHERE z=$1 AND y=$2 AND x=$3 AND dt > $4 AND dt <= $5 ORDER BY dt`
|
||||
rows, err := data.DB().Query(q, z, y, x, start, end)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var ts []time.Time
|
||||
for rows.Next() {
|
||||
var t time.Time
|
||||
if err := rows.Scan(&t); err == nil {
|
||||
ts = append(ts, t)
|
||||
}
|
||||
}
|
||||
return ts, nil
|
||||
}
|
||||
|
||||
func hourRain(stationID string, start, end time.Time) (float64, error) {
|
||||
const q = `SELECT COALESCE(SUM(rain_10m_mm_x1000)/1000.0,0) FROM rs485_weather_10min WHERE station_id=$1 AND bucket_start >= $2 AND bucket_start < $3`
|
||||
var mm float64
|
||||
err := data.DB().QueryRow(q, stationID, start, end).Scan(&mm)
|
||||
return mm, err
|
||||
}
|
||||
|
||||
type radarTile struct {
|
||||
DT time.Time
|
||||
Z, Y, X int
|
||||
W, H int
|
||||
West, South, East, North float64
|
||||
Res float64
|
||||
Data []byte
|
||||
}
|
||||
|
||||
func loadRadarTile(z, y, x int, dt time.Time) ([][]*float64, tileMeta, error) {
|
||||
const q = `SELECT dt,z,y,x,width,height,west,south,east,north,res_deg,data FROM radar_tiles WHERE z=$1 AND y=$2 AND x=$3 AND dt=$4 LIMIT 1`
|
||||
var r radarTile
|
||||
row := data.DB().QueryRow(q, z, y, x, dt)
|
||||
if err := row.Scan(&r.DT, &r.Z, &r.Y, &r.X, &r.W, &r.H, &r.West, &r.South, &r.East, &r.North, &r.Res, &r.Data); err != nil {
|
||||
return nil, tileMeta{}, err
|
||||
}
|
||||
w, h := r.W, r.H
|
||||
vals := make([][]*float64, h)
|
||||
off := 0
|
||||
for row := 0; row < h; row++ {
|
||||
rowVals := make([]*float64, w)
|
||||
for col := 0; col < w; col++ {
|
||||
v := int16(binary.BigEndian.Uint16(r.Data[off : off+2]))
|
||||
off += 2
|
||||
if v >= 32766 {
|
||||
rowVals[col] = nil
|
||||
continue
|
||||
}
|
||||
dbz := float64(v) / 10.0
|
||||
if dbz < 0 {
|
||||
dbz = 0
|
||||
} else if dbz > 75 {
|
||||
dbz = 75
|
||||
}
|
||||
vv := dbz
|
||||
rowVals[col] = &vv
|
||||
}
|
||||
vals[row] = rowVals
|
||||
}
|
||||
return vals, tileMeta{West: r.West, South: r.South, East: r.East, North: r.North, W: r.W, H: r.H}, nil
|
||||
}
|
||||
|
||||
func nearestWind(lat, lon float64, dt time.Time) (*float64, *float64) {
|
||||
// reuse server's nearest logic via data layer
|
||||
rw, err := data.RadarWeatherNearest(lat, lon, dt, 6*time.Hour)
|
||||
if err != nil || rw == nil {
|
||||
return nil, nil
|
||||
}
|
||||
var dir *float64
|
||||
var spd *float64
|
||||
if rw.WindDirection.Valid {
|
||||
d := rw.WindDirection.Float64 // normalize
|
||||
d = math.Mod(d, 360)
|
||||
if d < 0 {
|
||||
d += 360
|
||||
}
|
||||
dir = &d
|
||||
}
|
||||
if rw.WindSpeed.Valid {
|
||||
v := rw.WindSpeed.Float64
|
||||
spd = &v
|
||||
}
|
||||
return dir, spd
|
||||
}
|
||||
|
||||
// great-circle naive meter->degree approximation
|
||||
func sectorPolygon(lat, lon, windFromDeg, windSpeedMS float64, dur time.Duration) [][2]float64 {
|
||||
// convert to downwind (to-direction)
|
||||
bearingTo := math.Mod(windFromDeg+180, 360)
|
||||
radius := windSpeedMS * dur.Seconds() // meters
|
||||
// meters per degree
|
||||
latRad := lat * math.Pi / 180
|
||||
mPerDegLat := 111320.0
|
||||
mPerDegLon := 111320.0 * math.Cos(latRad)
|
||||
half := 25.0 // degrees
|
||||
var poly [][2]float64
|
||||
poly = append(poly, [2]float64{lon, lat})
|
||||
for a := -half; a <= half+1e-6; a += 2.5 {
|
||||
ang := (bearingTo + a) * math.Pi / 180
|
||||
dx := radius * math.Sin(ang)
|
||||
dy := radius * math.Cos(ang)
|
||||
dlon := dx / mPerDegLon
|
||||
dlat := dy / mPerDegLat
|
||||
poly = append(poly, [2]float64{lon + dlon, lat + dlat})
|
||||
}
|
||||
poly = append(poly, [2]float64{lon, lat})
|
||||
return poly
|
||||
}
|
||||
|
||||
func circlePolygon(lat, lon float64, radiusM float64) [][2]float64 {
|
||||
latRad := lat * math.Pi / 180
|
||||
mPerDegLat := 111320.0
|
||||
mPerDegLon := 111320.0 * math.Cos(latRad)
|
||||
var poly [][2]float64
|
||||
for a := 0.0; a <= 360.0; a += 6.0 {
|
||||
ang := a * math.Pi / 180
|
||||
dx := radiusM * math.Cos(ang)
|
||||
dy := radiusM * math.Sin(ang)
|
||||
poly = append(poly, [2]float64{lon + dx/mPerDegLon, lat + dy/mPerDegLat})
|
||||
}
|
||||
poly = append(poly, [2]float64{poly[0][0], poly[0][1]})
|
||||
return poly
|
||||
}
|
||||
|
||||
func countInPolygon(vals [][]*float64, meta tileMeta, poly [][2]float64) (int, int, int) {
	if len(vals) == 0 {
		return 0, 0, 0
	}
	w, h := meta.W, meta.H
	dlon := (meta.East - meta.West) / float64(w)
	dlat := (meta.North - meta.South) / float64(h)
	inPoly := func(x, y float64) bool {
		inside := false
		n := len(poly)
		for i, j := 0, n-1; i < n; j, i = i, i+1 {
			xi, yi := poly[i][0], poly[i][1]
			xj, yj := poly[j][0], poly[j][1]
			inter := ((yi > y) != (yj > y)) && (x < (xj-xi)*(y-yi)/((yj-yi)+1e-12)+xi)
			if inter {
				inside = !inside
			}
		}
		return inside
	}
	c30, c35, c40 := 0, 0, 0
	for row := 0; row < h; row++ {
		lat := meta.South + (float64(row)+0.5)*dlat
		vr := vals[row]
		if vr == nil {
			continue
		}
		for col := 0; col < w; col++ {
			v := vr[col]
			if v == nil {
				continue
			}
			vv := *v
			if vv < 30.0 {
				continue
			}
			lon := meta.West + (float64(col)+0.5)*dlon
			if !inPoly(lon, lat) {
				continue
			}
			if vv >= 30 {
				c30++
			}
			if vv >= 35 {
				c35++
			}
			if vv >= 40 {
				c40++
			}
		}
	}
	return c30, c35, c40
}

// rightEndpointHour returns the right endpoint hour for a dt, meaning:
// if dt is exactly at :00, return dt truncated to hour; otherwise, return next hour.
func rightEndpointHour(dt time.Time) time.Time {
	t := dt.Truncate(time.Hour)
	if dt.Equal(t) {
		return t
	}
	return t.Add(time.Hour)
}
551
core/cmd/service-alert/main.go
Normal file
@ -0,0 +1,551 @@
package main

import (
	"context"
	"database/sql"
	"flag"
	"fmt"
	"log"
	"math"
	"sort"
	"strings"
	"time"

	"weatherstation/core/internal/config"
	"weatherstation/core/internal/data"
	"weatherstation/core/internal/sms"
)

const (
	providerForecast    = "imdroid_mix"
	alertTypeForecast   = "forecast_3h_rain"
	alertTypeActual30m  = "actual_30m_rain"
	alertTypeNeighbor30 = "actual_30m_neighbor"
	levelRed            = "red"
	levelYellow         = "yellow"
	forecastRedMM       = 8.0
	forecastYellowMM    = 4.0
	actualRedMM         = 4.0
	actualYellowMM      = 2.0
	halfAngleDeg        = 90.0
	timeFormatShort     = "2006-01-02 15:04"
	defaultCheckTimeout = 20 * time.Second
)

var (
	flagOnce      bool
	flagTest      bool
	flagTestStIDs string
	flagTestTime  string
	flagWhy       bool
)

func main() {
	flag.BoolVar(&flagOnce, "once", false, "run checks once immediately (no scheduling)")
	flag.BoolVar(&flagTest, "test", false, "force alerts regardless of thresholds (for dry-run)")
	flag.StringVar(&flagTestStIDs, "station", "", "comma-separated station_id list for test mode")
	flag.StringVar(&flagTestTime, "time", "", "test mode: specify end time (YYYY-MM-DD HH:MM:SS, CST)")
	flag.BoolVar(&flagWhy, "why", false, "in test mode, log reasons when alert not triggered")
	flag.Parse()

	cfg := config.Load()

	scli := mustInitSMS(cfg)

	if flagOnce {
		tick := time.Now()
		runForecastCheck(scli, tick)
		runActualCheck(scli, tick)
		runNeighborActualCheck(scli, tick)
		return
	}

	go alignAndRunHour10(func(tick time.Time) { runForecastCheck(scli, tick) })
	go alignAndRunHalfHour(func(tick time.Time) { runActualCheck(scli, tick) })
	go alignAndRunHalfHour(func(tick time.Time) { runNeighborActualCheck(scli, tick) })

	select {}
}

func mustInitSMS(cfg config.Config) *sms.Client {
	cli, err := sms.New(sms.Config{
		AccessKeyID:     strings.TrimSpace(cfg.SMS.AccessKeyID),
		AccessKeySecret: strings.TrimSpace(cfg.SMS.AccessKeySecret),
		SignName:        strings.TrimSpace(cfg.SMS.SignName),
		TemplateCode:    strings.TrimSpace(cfg.SMS.TemplateCode),
		Endpoint:        strings.TrimSpace(cfg.SMS.Endpoint),
	})
	if err != nil {
		log.Printf("sms: disabled (%v)", err)
		return nil
	}
	return cli
}

func alignAndRunHour10(fn func(tick time.Time)) {
	now := time.Now()
	base := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 10, 0, 0, now.Location())
	var next time.Time
	if now.After(base) {
		next = base.Add(time.Hour)
	} else {
		next = base
	}
	time.Sleep(time.Until(next))
	for {
		tick := time.Now().Truncate(time.Minute)
		fn(tick)
		time.Sleep(time.Hour)
	}
}

func alignAndRunHalfHour(fn func(tick time.Time)) {
	now := time.Now()
	next := now.Truncate(30 * time.Minute).Add(30 * time.Minute)
	time.Sleep(time.Until(next))
	for {
		tick := time.Now().Truncate(time.Minute)
		fn(tick)
		time.Sleep(30 * time.Minute)
	}
}

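// runForecastCheck looks at the forecast issued at the current hour and alerts on the
// maximum single-hour rain within (issued+1h .. issued+3h]: red at >= 8 mm, yellow at >= 4 mm.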
func runForecastCheck(scli *sms.Client, tick time.Time) {
	ctx, cancel := context.WithTimeout(context.Background(), defaultCheckTimeout)
	defer cancel()

	stations := listFixedStations(ctx)
	stations = filterStations(stations, flagTestStIDs)
	if len(stations) == 0 {
		log.Printf("forecast: no stations after filter")
		return
	}

	recip := loadRecipients(ctx)
	loc := mustShanghai()
	now := tick.In(loc)
	issued := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, loc)
	next1 := issued.Add(time.Hour)
	next3 := issued.Add(3 * time.Hour)

	for _, st := range stations {
		points, err := data.ForecastRainAtIssued(ctx, st.ID, providerForecast, issued)
		if err != nil {
			log.Printf("forecast: query station=%s err=%v", st.ID, err)
			continue
		}
		var redMax, yellowMax int64
		for _, p := range points {
			if p.ForecastTime.Before(next1) || p.ForecastTime.After(next3) {
				continue
			}
			v := int64(p.RainMMx1000)
			if v >= 8000 {
				if v > redMax {
					redMax = v
				}
			} else if v >= 4000 {
				if v > yellowMax {
					yellowMax = v
				}
			}
		}
		level := ""
		value := 0.0
		threshold := 0.0
		if redMax > 0 {
			level = levelRed
			value = float64(redMax) / 1000.0
			threshold = forecastRedMM
		} else if yellowMax > 0 {
			level = levelYellow
			value = float64(yellowMax) / 1000.0
			threshold = forecastYellowMM
		}
		if level == "" {
			if !flagTest {
				if flagWhy {
					log.Printf("forecast why: station=%s no threshold hit redMax=%d yellowMax=%d", st.ID, redMax, yellowMax)
				}
				continue
			}
			level = levelYellow
			value = forecastYellowMM
			threshold = forecastYellowMM
		}
		if flagTest {
			msg := fmt.Sprintf("【测试】站点%s 强制触发未来3小时降水预警 level=%s", st.Name, levelLabel(level))
			targetPhones := recip.forLevel(level)
			if len(targetPhones) == 0 {
				recordAlert(ctx, alertTypeForecast, st.ID, level, issued, msg, sql.NullString{})
			} else {
				sendToPhones(ctx, scli, st.Name, value, level, issued, targetPhones, msg, alertTypeForecast, st.ID)
			}
			continue
		}
		msg := fmt.Sprintf("站点%s 未来3小时单小时最大降水 %.3fmm,达到%s阈值 %.1fmm,issued_at=%s", st.Name, value, levelLabel(level), threshold, issued.Format(timeFormatShort))
		targetPhones := recip.forLevel(level)
		if len(targetPhones) == 0 {
			recordAlert(ctx, alertTypeForecast, st.ID, level, issued, msg, sql.NullString{})
			continue
		}
		sendToPhones(ctx, scli, st.Name, value, level, issued, targetPhones, msg, alertTypeForecast, st.ID)
	}
}

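// runActualCheck sums observed rain over the trailing 30-minute window for each fixed
// station and alerts at >= 4 mm (red) or >= 2 mm (yellow); --test forces a yellow alert.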
func runActualCheck(scli *sms.Client, tick time.Time) {
	ctx, cancel := context.WithTimeout(context.Background(), defaultCheckTimeout)
	defer cancel()

	stations := listFixedStations(ctx)
	stations = filterStations(stations, flagTestStIDs)
	if len(stations) == 0 {
		log.Printf("actual: no stations after filter")
		return
	}

	recip := loadRecipients(ctx)
	loc := mustShanghai()
	end := tick.In(loc)
	if flagTestTime != "" {
		if t, err := time.ParseInLocation("2006-01-02 15:04:05", flagTestTime, loc); err == nil {
			end = t
		}
	}
	start := end.Add(-30 * time.Minute)

	for _, st := range stations {
		rain, ok, err := data.SumRainMM(ctx, st.ID, start, end)
		if err != nil {
			log.Printf("actual: sum station=%s err=%v", st.ID, err)
			continue
		}
		if flagWhy {
			log.Printf("actual why: station=%s window=%s~%s rain_sum=%.3f ok=%v", st.ID, start.Format(timeFormatShort), end.Format(timeFormatShort), rain, ok)
		}
		if !ok {
			if flagWhy {
				log.Printf("actual why: station=%s no rain data", st.ID)
			}
			continue
		}
		level := ""
		threshold := 0.0
		if rain >= actualRedMM {
			level = levelRed
			threshold = actualRedMM
		} else if rain >= actualYellowMM {
			level = levelYellow
			threshold = actualYellowMM
		}
		if level == "" {
			if flagWhy {
				log.Printf("actual why: station=%s rain=%.3f below threshold", st.ID, rain)
			}
			if !flagTest {
				continue
			}
			level = levelYellow
			threshold = actualYellowMM
			rain = actualYellowMM
		}
		if flagTest {
			msg := fmt.Sprintf("【测试】站点%s 强制触发30分钟降水预警 level=%s", st.Name, levelLabel(level))
			targetPhones := recip.forLevel(level)
			if len(targetPhones) == 0 {
				recordAlert(ctx, alertTypeActual30m, st.ID, level, end, msg, sql.NullString{})
			} else {
				sendToPhones(ctx, scli, st.Name, rain, level, end, targetPhones, msg, alertTypeActual30m, st.ID)
			}
			continue
		}
		msg := fmt.Sprintf("站点%s 过去30分钟降水 %.3fmm,达到%s阈值 %.1fmm,窗口 %s - %s", st.Name, rain, levelLabel(level), threshold, start.Format(timeFormatShort), end.Format(timeFormatShort))
		targetPhones := recip.forLevel(level)
		if len(targetPhones) == 0 {
			recordAlert(ctx, alertTypeActual30m, st.ID, level, end, msg, sql.NullString{})
			log.Printf("actual: triggered station=%s level=%s rain=%.3f (no phones)", st.ID, level, rain)
			continue
		}
		sendToPhones(ctx, scli, st.Name, rain, level, end, targetPhones, msg, alertTypeActual30m, st.ID)
		log.Printf("actual: triggered station=%s level=%s rain=%.3f phones=%d", st.ID, level, rain, len(targetPhones))
	}
}

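// runNeighborActualCheck uses the wind observed near each centre station: any other
// station within one hour of wind travel (speed × 3600 s) and within ±90° of the wind's
// from-direction (i.e. on the upwind side) that saw >= 2 mm / >= 4 mm in the last
// 30 minutes raises a yellow/red alert for the centre station.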
func runNeighborActualCheck(scli *sms.Client, tick time.Time) {
	ctx, cancel := context.WithTimeout(context.Background(), defaultCheckTimeout)
	defer cancel()

	allStations := listFixedStations(ctx)
	centers := filterStations(allStations, flagTestStIDs)
	if len(centers) == 0 {
		if flagWhy {
			log.Printf("neighbor why: no stations after filter")
		}
		return
	}

	recip := loadRecipients(ctx)
	loc := mustShanghai()
	end := tick.In(loc)
	if flagTestTime != "" {
		if t, err := time.ParseInLocation("2006-01-02 15:04:05", flagTestTime, loc); err == nil {
			end = t
		}
	}
	start := end.Add(-30 * time.Minute)

	for _, center := range centers {
		if center.Latitude == 0 || center.Longitude == 0 {
			if flagWhy {
				log.Printf("neighbor why: center %s missing lat/lon", center.ID)
			}
			continue
		}
		wind, err := data.RadarWeatherNearest(center.Latitude, center.Longitude, end, 6*time.Hour)
		if err != nil {
			log.Printf("neighbor: wind query failed station=%s: %v", center.ID, err)
			continue
		}
		if wind == nil || !wind.WindDirection.Valid || !wind.WindSpeed.Valid || wind.WindSpeed.Float64 <= 0.01 {
			if flagWhy {
				log.Printf("neighbor why: center %s no wind data", center.ID)
			}
			continue
		}
		dir := wind.WindDirection.Float64
		spd := wind.WindSpeed.Float64
		radius := spd * 3600
		for _, nb := range allStations {
			if nb.ID == center.ID {
				continue
			}
			if nb.Latitude == 0 || nb.Longitude == 0 {
				if flagWhy {
					log.Printf("neighbor why: neighbor %s missing lat/lon", nb.ID)
				}
				continue
			}
			dist := haversine(center.Latitude, center.Longitude, nb.Latitude, nb.Longitude)
			brg := bearingDeg(center.Latitude, center.Longitude, nb.Latitude, nb.Longitude)
			diff := angDiff(brg, dir)
			inSector := dist <= radius && diff <= halfAngleDeg
			if !inSector {
				if flagWhy {
					log.Printf("neighbor why: center=%s neighbor=%s not in sector dist=%.1fm radius=%.1fm bearing=%.1f windFrom=%.1f diff=%.1f half=%.1f",
						center.ID, nb.ID, dist, radius, brg, dir, diff, halfAngleDeg)
				}
				continue
			}
			rain, ok, err := data.SumRainMM(ctx, nb.ID, start, end)
			if err != nil {
				log.Printf("neighbor: sum rain station=%s err=%v", nb.ID, err)
				continue
			}
			if flagWhy {
				log.Printf("neighbor why: center=%s neighbor=%s window=%s~%s rain_sum=%.3f ok=%v", center.ID, nb.ID, start.Format(timeFormatShort), end.Format(timeFormatShort), rain, ok)
			}
			if !ok {
				if flagWhy {
					log.Printf("neighbor why: neighbor %s no rain data", nb.ID)
				}
				continue
			}
			level := ""
			threshold := 0.0
			if rain >= actualRedMM {
				level = levelRed
				threshold = actualRedMM
			} else if rain >= actualYellowMM {
				level = levelYellow
				threshold = actualYellowMM
			}
			if level == "" {
				if flagWhy {
					log.Printf("neighbor why: neighbor %s rain=%.3f below threshold", nb.ID, rain)
				}
				continue
			}
			atype := alertTypeNeighbor30 + "_" + nb.ID
			msg := fmt.Sprintf("站点%s 迎风扇区内站点%s 30分钟降水 %.3fmm,达到%s阈值 %.1fmm,窗口 %s - %s", center.Name, nb.Name, rain, levelLabel(level), threshold, start.Format(timeFormatShort), end.Format(timeFormatShort))
			targetPhones := recip.forLevel(level)
			if len(targetPhones) == 0 {
				recordAlert(ctx, atype, center.ID, level, end, msg, sql.NullString{})
				log.Printf("neighbor: center=%s neighbor=%s level=%s rain=%.3f (no phones)", center.ID, nb.ID, level, rain)
				continue
			}
			sendToPhones(ctx, scli, center.Name, rain, level, end, targetPhones, msg, atype, center.ID)
			log.Printf("neighbor: center=%s neighbor=%s level=%s rain=%.3f phones=%d", center.ID, nb.ID, level, rain, len(targetPhones))
		}
	}
}

func recordAlert(ctx context.Context, alertType, stationID, level string, issuedAt time.Time, message string, phone sql.NullString) {
	_, err := data.InsertAlert(ctx, data.AlertRecord{
		AlertType: alertType,
		StationID: stationID,
		Level:     level,
		IssuedAt:  issuedAt,
		Message:   message,
		SMSPhone:  phone,
	})
	if err != nil {
		log.Printf("alert insert failed station=%s type=%s level=%s: %v", stationID, alertType, level, err)
	}
}

type recipients struct {
	red []string
	yel []string
}

func (r recipients) forLevel(level string) []string {
	if level == levelRed {
		return r.red
	}
	if level == levelYellow {
		return r.yel
	}
	return nil
}

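// loadRecipients maps sms_recipients.alert_level onto the two lists: level >= 1 receives
// red alerts, level >= 2 additionally receives yellow alerts.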
func loadRecipients(ctx context.Context) recipients {
	list, err := data.ListEnabledSMSRecipients(ctx)
	if err != nil {
		log.Printf("sms: load recipients failed: %v", err)
		return recipients{}
	}
	var res recipients
	for _, r := range list {
		if r.AlertLevel >= 1 {
			res.red = append(res.red, r.Phone)
		}
		if r.AlertLevel >= 2 {
			res.yel = append(res.yel, r.Phone)
		}
	}
	return res
}

func sendToPhones(ctx context.Context, scli *sms.Client, stationName string, value float64, level string, issuedAt time.Time, phones []string, message string, alertType string, stationID string) {
	if scli == nil {
		return
	}
	name := ":" + stationName + ","
	content := format3(value) + " mm"
	alertText := "【大礼村】暴雨"
	if level == levelRed {
		alertText += "红色预警"
	} else {
		alertText += "黄色预警"
	}
	for _, ph := range phones {
		if err := scli.Send(ctx, name, content, alertText, "", []string{ph}); err != nil {
			log.Printf("sms: send failed phone=%s station=%s level=%s: %v", ph, stationID, level, err)
			continue
		}
		recordAlert(ctx, alertType, stationID, level, issuedAt, message, sql.NullString{String: ph, Valid: true})
		log.Printf("sms: sent phone=%s station=%s level=%s", ph, stationID, level)
	}
}

func format3(v float64) string {
	s := fmt.Sprintf("%.3f", v)
	s = strings.TrimRight(s, "0")
	s = strings.TrimRight(s, ".")
	if s == "" {
		return "0"
	}
	return s
}

func mustShanghai() *time.Location {
	loc, _ := time.LoadLocation("Asia/Shanghai")
	if loc == nil {
		loc = time.FixedZone("CST", 8*3600)
	}
	return loc
}

func levelLabel(level string) string {
	if level == levelRed {
		return "红色"
	}
	return "黄色"
}

func listFixedStations(ctx context.Context) []data.StationInfo {
	ids := []string{"RS485-002964", "RS485-002A39", "RS485-0029CB"}
	sts, err := data.ListStationsByIDs(ctx, ids)
	if err != nil || len(sts) == 0 {
		var out []data.StationInfo
		for _, id := range ids {
			out = append(out, data.StationInfo{ID: id, Name: id})
		}
		return out
	}
	// keep the result ordered the same way as ids
	order := make(map[string]int)
	for i, id := range ids {
		order[id] = i
	}
	sort.Slice(sts, func(i, j int) bool { return order[sts[i].ID] < order[sts[j].ID] })
	return sts
}

func filterStations(in []data.StationInfo, filter string) []data.StationInfo {
	f := strings.TrimSpace(filter)
	if f == "" {
		return in
	}
	parts := strings.Split(f, ",")
	m := make(map[string]struct{}, len(parts))
	for _, p := range parts {
		p = strings.TrimSpace(p)
		if p != "" {
			m[p] = struct{}{}
		}
	}
	if len(m) == 0 {
		return in
	}
	var out []data.StationInfo
	for _, st := range in {
		if _, ok := m[st.ID]; ok {
			out = append(out, st)
		}
	}
	return out
}

func toRad(d float64) float64 { return d * math.Pi / 180 }
func toDeg(r float64) float64 { return r * 180 / math.Pi }

func haversine(lat1, lon1, lat2, lon2 float64) float64 {
	const R = 6371000.0
	dLat := toRad(lat2 - lat1)
	dLon := toRad(lon2 - lon1)
	a := math.Sin(dLat/2)*math.Sin(dLat/2) + math.Cos(toRad(lat1))*math.Cos(toRad(lat2))*math.Sin(dLon/2)*math.Sin(dLon/2)
	c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return R * c
}

func bearingDeg(lat1, lon1, lat2, lon2 float64) float64 {
	φ1 := toRad(lat1)
	φ2 := toRad(lat2)
	Δλ := toRad(lon2 - lon1)
	y := math.Sin(Δλ) * math.Cos(φ2)
	x := math.Cos(φ1)*math.Sin(φ2) - math.Sin(φ1)*math.Cos(φ2)*math.Cos(Δλ)
	brg := toDeg(math.Atan2(y, x))
	if brg < 0 {
		brg += 360
	}
	return brg
}

// angDiff returns the absolute angular difference between two bearings, in [0, 180].
func angDiff(a, b float64) float64 {
	d := math.Mod(a-b+540, 360) - 180
	return math.Abs(d)
}
226
core/cmd/service-mqtt-publisher/main.go
Normal file
@ -0,0 +1,226 @@
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"
	"math"
	"time"

	mqtt "github.com/eclipse/paho.mqtt.golang"

	"weatherstation/core/internal/data"
)

const (
	// fixed device/station mapping
	deviceID  = "Z866"
	stationID = "RS485-002A6E"

	// MQTT
	brokerURL = "wss://broker.emqx.io:8084/mqtt"
	clientID  = "core-publisher-Z866"
	username  = "1"
	password  = "1"
	topic     = "$dp"
)

type hwsPayload struct {
	Type   string  `json:"type"`
	Device string  `json:"device"`
	Dm     float64 `json:"Dm"`
	Pa     float64 `json:"Pa"`
	Rc     float64 `json:"Rc"`
	Sm     float64 `json:"Sm"`
	Ta     float64 `json:"Ta"`
	Ua     float64 `json:"Ua"`
	Time   int64   `json:"time"`
}

func main() {
	// initialize the MQTT client
	opts := mqtt.NewClientOptions().AddBroker(brokerURL)
	opts.SetClientID(clientID)
	opts.SetUsername(username)
	opts.SetPassword(password)
	opts.SetProtocolVersion(4)
	opts.SetKeepAlive(60 * time.Second)
	opts.SetAutoReconnect(true)
	opts.SetCleanSession(true)

	cli := mqtt.NewClient(opts)
	if tok := cli.Connect(); tok.Wait() && tok.Error() != nil {
		log.Fatalf("MQTT 连接失败: %v", tok.Error())
	}
	defer cli.Disconnect(250)

	// 5-minute observation publishing task
	go alignAndRun5m(func(tickEnd time.Time) { publishOnce(cli, tickEnd) })
	// hourly (at minute 10) forecast publishing task
	go alignAndRunHour10(func(tick time.Time) { publishPredict(cli, tick) })

	select {}
}

func alignAndRun5m(fn func(tickEnd time.Time)) {
	now := time.Now()
	next := now.Truncate(5 * time.Minute).Add(5 * time.Minute)
	time.Sleep(time.Until(next))
	for {
		tickEnd := time.Now().Truncate(5 * time.Minute)
		fn(tickEnd)
		time.Sleep(5 * time.Minute)
	}
}

func alignAndRunHour10(fn func(tick time.Time)) {
	// compute the next "top of the hour + 10 minutes"
	now := time.Now()
	base := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 10, 0, 0, now.Location())
	var next time.Time
	if now.After(base) {
		next = base.Add(1 * time.Hour)
	} else {
		next = base
	}
	time.Sleep(time.Until(next))
	for {
		tick := time.Now().Truncate(time.Minute)
		fn(tick)
		// run once per hour
		time.Sleep(1 * time.Hour)
	}
}

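// publishOnce averages the Dm/Sm/Ta/Ua/Pa fields over the last 5-minute window, adds the
// day's cumulative rain (Rc), and publishes the "hws" payload to topic "$dp" with QoS 1.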
func publishOnce(cli mqtt.Client, tickEnd time.Time) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	start := tickEnd.Add(-5 * time.Minute)
	// aggregate the window
	agg, err := data.WindowAverages(ctx, stationID, start, tickEnd)
	if err != nil {
		log.Printf("聚合失败: %v", err)
		return
	}
	// cumulative rain for the current day
	rc, err := data.DailyRainSinceMidnight(ctx, stationID, tickEnd)
	if err != nil {
		log.Printf("降雨查询失败: %v", err)
		return
	}

	// handle missing values: fall back to 0 when there is no data (<0.001 counts as invalid)
	dm := 0.0
	if agg.Dm.Valid {
		dm = agg.Dm.Float64
	}
	sm := 0.0
	if agg.Sm.Valid {
		sm = agg.Sm.Float64
	}
	ta := 0.0
	if agg.Ta.Valid {
		ta = agg.Ta.Float64
	}
	ua := 0.0
	if agg.Ua.Valid {
		ua = agg.Ua.Float64
	}
	pa := 0.0
	if agg.Pa.Valid {
		pa = agg.Pa.Float64
	}

	// rounding: wind direction to an integer, everything else to two decimals
	round2 := func(v float64) float64 { return math.Round(v*100) / 100 }
	dmInt := math.Round(dm)

	payload := hwsPayload{
		Type:   "hws",
		Device: deviceID,
		Dm:     dmInt,
		Pa:     round2(pa),
		Rc:     round2(rc),
		Sm:     round2(sm),
		Ta:     round2(ta),
		Ua:     round2(ua),
		Time:   tickEnd.UnixMilli(),
	}
	b, _ := json.Marshal(payload)

	tok := cli.Publish(topic, 1, false, b)
	tok.Wait()
	if tok.Error() != nil {
		log.Printf("发布失败: %v", tok.Error())
		return
	}
	log.Printf("发布成功 %s: %s", topic, string(b))
}

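// For illustration, a published observation message looks roughly like this (values invented):
// {"type":"hws","device":"Z866","Dm":173,"Pa":1002.4,"Rc":3.2,"Sm":2.75,"Ta":27.1,"Ua":82.5,"time":1722500400000}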
// forecast publishing: at minute 10 of every hour, select rows from forecast_hourly
// whose issued_at falls on the top of the hour
func publishPredict(cli mqtt.Client, tick time.Time) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// resolve issued_at at the top of the hour in CST
	loc, _ := time.LoadLocation("Asia/Shanghai")
	if loc == nil {
		loc = time.FixedZone("CST", 8*3600)
	}
	now := tick.In(loc)
	issued := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, loc)

	const forecastStation = "RS485-002A6E"
	const provider = "imdroid_mix"
	points, err := data.ForecastRainAtIssued(ctx, forecastStation, provider, issued)
	if err != nil {
		log.Printf("预测查询失败: %v", err)
		return
	}
	if len(points) == 0 {
		log.Printf("预测无数据 issued_at=%s", issued.Format("2006-01-02 15:04:05"))
		return
	}

	// build the payload
	type predictItem struct {
		PredictTime     int64  `json:"predict_time"`
		PredictRainfall string `json:"predict_rainfall"`
	}
	var items []predictItem
	for _, p := range points {
		rf := float64(p.RainMMx1000) / 1000.0
		items = append(items, predictItem{
			PredictTime:     p.ForecastTime.UnixMilli(),
			PredictRainfall: format3(rf),
		})
	}

	payload := struct {
		Type   string        `json:"type"`
		Device string        `json:"device"`
		Time   int64         `json:"time"`
		Data   []predictItem `json:"data"`
	}{
		Type:   "predict",
		Device: deviceID,
		Time:   now.UnixMilli(), // current time (top of the hour + 10 minutes)
		Data:   items,
	}
	b, _ := json.Marshal(payload)
	tok := cli.Publish(topic, 1, false, b)
	tok.Wait()
	if tok.Error() != nil {
		log.Printf("发布预测失败: %v", tok.Error())
		return
	}
	log.Printf("发布预测成功 %s: issued_at=%s, items=%d", topic, issued.Format("2006-01-02 15:04:05"), len(items))
}

func format3(v float64) string {
	// keep three decimals (as a string)
	s := fmt.Sprintf("%.3f", v)
	return s
}
365
core/cmd/sms-send/main.go
Normal file
@ -0,0 +1,365 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/config"
|
||||
"weatherstation/core/internal/data"
|
||||
"weatherstation/core/internal/sms"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Usage:
|
||||
// CORE_SMS_AK, CORE_SMS_SK, CORE_SMS_SIGN, CORE_SMS_TPL, optional CORE_SMS_ENDPOINT
|
||||
// go run ./core/cmd/sms-send --to 17308264374 --msg "Hello Yarnom" --name device-ids --time 2025-01-01 12:00
|
||||
// go run ./core/cmd/sms-send -> hourly check mode (runs at minute 10 of each hour)
|
||||
var to, msg, name, tm string
|
||||
var once bool
|
||||
var testMode bool
|
||||
var testLevel int
|
||||
// test2: manual station+rain, auto decide level and send
|
||||
var test2 bool
|
||||
var station string
|
||||
var rain float64
|
||||
flag.StringVar(&to, "to", "", "comma-separated phone numbers")
|
||||
flag.StringVar(&msg, "msg", "", "message content (for ${content}, recommend numeric value)")
|
||||
flag.StringVar(&name, "name", "", "device IDs/name field for template")
|
||||
flag.StringVar(&tm, "time", "", "time field for template (unused if empty)")
|
||||
var alert string
|
||||
flag.StringVar(&alert, "alert", "", "alert text for ${alert}")
|
||||
flag.BoolVar(&once, "once", false, "run one check immediately (auto mode)")
|
||||
flag.BoolVar(&testMode, "test", false, "run in test mode (ignore thresholds)")
|
||||
flag.IntVar(&testLevel, "level", 1, "test target alert level (1=大雨-only, 2=中雨+大雨)")
|
||||
flag.BoolVar(&test2, "test2", false, "manual test by station+rain; decide yellow/red and send to recipients by alert level")
|
||||
flag.StringVar(&station, "station", "", "station name for template ${name}")
|
||||
flag.Float64Var(&rain, "rain", 0, "rainfall in mm (single hour)")
|
||||
flag.Parse()
|
||||
|
||||
cfg := config.Load()
|
||||
scli, err := sms.New(sms.Config{
|
||||
AccessKeyID: strings.TrimSpace(cfg.SMS.AccessKeyID),
|
||||
AccessKeySecret: strings.TrimSpace(cfg.SMS.AccessKeySecret),
|
||||
SignName: strings.TrimSpace(cfg.SMS.SignName),
|
||||
TemplateCode: strings.TrimSpace(cfg.SMS.TemplateCode),
|
||||
Endpoint: strings.TrimSpace(cfg.SMS.Endpoint),
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Manual send mode when --to and --msg are provided
|
||||
if to != "" && msg != "" {
|
||||
if tm == "" {
|
||||
tm = ""
|
||||
}
|
||||
if name == "" {
|
||||
name = ""
|
||||
}
|
||||
// Manual mode: allow --alert (recommended for new template)
|
||||
phones := strings.Split(to, ",")
|
||||
if err := scli.Send(context.Background(), name, msg, alert, tm, phones); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
log.Println("sms: sent OK")
|
||||
return
|
||||
}
|
||||
|
||||
// Test mode: ignore thresholds, send to recipients of given level, append (测试)
|
||||
if testMode {
|
||||
runTestCheck(scli, testLevel)
|
||||
return
|
||||
}
|
||||
|
||||
// Test2 mode: user-provided station name and rain (mm); do not read forecast DB
|
||||
if test2 {
|
||||
runTest2(scli, station, rain)
|
||||
return
|
||||
}
|
||||
|
||||
// Auto mode: runs at minute 10 of each hour
|
||||
checkFn := func(tick time.Time) { runHourlyCheck(scli, tick) }
|
||||
if once {
|
||||
checkFn(time.Now())
|
||||
return
|
||||
}
|
||||
alignAndRunHour10(checkFn)
|
||||
}
|
||||
|
||||
// alignAndRunHour10 runs fn at minute 10 of each hour.
|
||||
func alignAndRunHour10(fn func(tick time.Time)) {
|
||||
now := time.Now()
|
||||
base := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 10, 0, 0, now.Location())
|
||||
var next time.Time
|
||||
if now.After(base) {
|
||||
next = base.Add(1 * time.Hour)
|
||||
} else {
|
||||
next = base
|
||||
}
|
||||
time.Sleep(time.Until(next))
|
||||
for {
|
||||
tick := time.Now().Truncate(time.Minute)
|
||||
fn(tick)
|
||||
time.Sleep(1 * time.Hour)
|
||||
}
|
||||
}
|
||||
|
||||
func runHourlyCheck(scli *sms.Client, tick time.Time) {
|
||||
// fixed provider and station list
|
||||
provider := "imdroid_mix"
|
||||
stationIDs := []string{"RS485-0029CB", "RS485-002A39", "RS485-002964"}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// load enabled recipients
|
||||
recipients, err := data.ListEnabledSMSRecipients(ctx)
|
||||
if err != nil {
|
||||
log.Printf("sms: load recipients failed: %v", err)
|
||||
return
|
||||
}
|
||||
if len(recipients) == 0 {
|
||||
log.Printf("sms: no enabled recipients, skip")
|
||||
return
|
||||
}
|
||||
// alert_level: 1 = heavy rain only, 2 = moderate + heavy rain
|
||||
var heavyPhones, moderatePhones []string
|
||||
for _, r := range recipients {
|
||||
if r.AlertLevel >= 1 {
|
||||
heavyPhones = append(heavyPhones, r.Phone)
|
||||
}
|
||||
if r.AlertLevel >= 2 {
|
||||
moderatePhones = append(moderatePhones, r.Phone)
|
||||
}
|
||||
}
|
||||
if len(heavyPhones) == 0 && len(moderatePhones) == 0 {
|
||||
log.Printf("sms: no recipients by level, skip")
|
||||
return
|
||||
}
|
||||
|
||||
// resolve issued_at at the top of the hour in CST
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
now := tick.In(loc)
|
||||
issued := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, loc)
|
||||
|
||||
// three-hour window: hour+1, hour+2, hour+3
|
||||
next1 := issued.Add(1 * time.Hour)
|
||||
next3 := issued.Add(3 * time.Hour)
|
||||
|
||||
// iterate stations and evaluate the single-hour thresholds for the next three hours (red > yellow)
|
||||
for _, sid := range stationIDs {
|
||||
points, err := data.ForecastRainAtIssued(ctx, sid, provider, issued)
|
||||
if err != nil {
|
||||
log.Printf("sms: forecast query failed station=%s: %v", sid, err)
|
||||
continue
|
||||
}
|
||||
stName, err := data.GetStationName(ctx, sid)
|
||||
if err != nil {
|
||||
stName = ""
|
||||
}
|
||||
if strings.TrimSpace(stName) == "" {
|
||||
stName = sid
|
||||
}
|
||||
var redMaxX1000 int64
|
||||
var yellowMaxX1000 int64
|
||||
for _, p := range points {
|
||||
if !p.ForecastTime.Before(next1) && !p.ForecastTime.After(next3) {
|
||||
v := int64(p.RainMMx1000)
|
||||
if v >= 8000 {
|
||||
if v > redMaxX1000 {
|
||||
redMaxX1000 = v
|
||||
}
|
||||
} else if v >= 4000 {
|
||||
if v > yellowMaxX1000 {
|
||||
yellowMaxX1000 = v
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// threshold decision (single hour): any >= 8 mm -> red; otherwise any in [4,8) mm -> yellow; otherwise no alert
|
||||
if redMaxX1000 > 0 {
|
||||
if len(heavyPhones) > 0 {
|
||||
// template parameter format: time: "YYYY-MM-DD HH:MM," name: ":<station name>," content: " <mm> mm (heavy rain)"
|
||||
name := ":" + stName + ","
|
||||
// new template fields: content = numeric value, alert = fixed text, time may be empty (a comma-suffixed time string is still passed here for compatibility)
|
||||
content := format3(float64(redMaxX1000)/1000.0) + " mm"
|
||||
alert := "【大礼村】暴雨红色预警"
|
||||
tm := "" // ${time} 不用了
|
||||
if err := scli.Send(ctx, name, content, alert, tm, heavyPhones); err != nil {
|
||||
log.Printf("sms: send heavy failed station=%s: %v", sid, err)
|
||||
} else {
|
||||
log.Printf("sms: sent HEAVY (红色) station=%s max=%.3fmm to=%d", sid, float64(redMaxX1000)/1000.0, len(heavyPhones))
|
||||
}
|
||||
}
|
||||
} else if yellowMaxX1000 > 0 {
|
||||
if len(moderatePhones) > 0 {
|
||||
name := ":" + stName + ","
|
||||
content := format3(float64(yellowMaxX1000)/1000.0) + " mm"
|
||||
alert := "【大礼村】暴雨黄色预警"
|
||||
tm := ""
|
||||
if err := scli.Send(ctx, name, content, alert, tm, moderatePhones); err != nil {
|
||||
log.Printf("sms: send moderate failed station=%s: %v", sid, err)
|
||||
} else {
|
||||
log.Printf("sms: sent MODERATE (黄色) station=%s max=%.3fmm to=%d", sid, float64(yellowMaxX1000)/1000.0, len(moderatePhones))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Printf("sms: no alert station=%s", sid)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// content now only carries numeric rain value; helpers removed.
|
||||
|
||||
func format3(v float64) string {
|
||||
s := fmt.Sprintf("%.3f", v)
|
||||
s = strings.TrimRight(s, "0")
|
||||
s = strings.TrimRight(s, ".")
|
||||
if s == "" {
|
||||
return "0"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// runTestCheck sends messages regardless of thresholds to recipients at given alert level.
|
||||
func runTestCheck(scli *sms.Client, level int) {
|
||||
provider := "imdroid_mix"
|
||||
stationIDs := []string{"RS485-0029CB", "RS485-002A39", "RS485-002964"}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// Load recipients (enabled) and filter by exact level
|
||||
recipients, err := data.ListEnabledSMSRecipients(ctx)
|
||||
if err != nil {
|
||||
log.Printf("sms test: load recipients failed: %v", err)
|
||||
return
|
||||
}
|
||||
var phones []string
|
||||
for _, r := range recipients {
|
||||
if r.AlertLevel == level {
|
||||
phones = append(phones, r.Phone)
|
||||
}
|
||||
}
|
||||
if len(phones) == 0 {
|
||||
log.Printf("sms test: no recipients at level=%d", level)
|
||||
return
|
||||
}
|
||||
|
||||
// time and window
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
now := time.Now().In(loc)
|
||||
issued := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, loc)
|
||||
next1 := issued.Add(1 * time.Hour)
|
||||
next3 := issued.Add(3 * time.Hour)
|
||||
|
||||
// Iterate stations
|
||||
for _, sid := range stationIDs {
|
||||
points, err := data.ForecastRainAtIssued(ctx, sid, provider, issued)
|
||||
if err != nil {
|
||||
log.Printf("sms test: forecast query failed station=%s: %v", sid, err)
|
||||
continue
|
||||
}
|
||||
stName, err := data.GetStationName(ctx, sid)
|
||||
if err != nil {
|
||||
stName = ""
|
||||
}
|
||||
if strings.TrimSpace(stName) == "" {
|
||||
stName = sid
|
||||
}
|
||||
var sumX1000 int64
|
||||
for _, p := range points {
|
||||
if !p.ForecastTime.Before(next1) && !p.ForecastTime.After(next3) {
|
||||
sumX1000 += int64(p.RainMMx1000)
|
||||
}
|
||||
}
|
||||
name := ":" + stName + ","
|
||||
// Test mode: content = numeric value; alert = red alert text + (test)
|
||||
content := format3(float64(sumX1000)/1000.0) + " mm"
|
||||
alert := "【大礼村】暴雨红色预警(测试)"
|
||||
tm := ""
|
||||
if err := scli.Send(ctx, name, content, alert, tm, phones); err != nil {
|
||||
log.Printf("sms test: send failed station=%s: %v", sid, err)
|
||||
} else {
|
||||
log.Printf("sms test: sent station=%s sum=%.3fmm level=%d to=%d", sid, float64(sumX1000)/1000.0, level, len(phones))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// runTest2 evaluates the provided rainfall and sends to recipients by alert level:
|
||||
// - red (>=8mm): send to level>=1 (both 1 and 2)
|
||||
// - yellow ([4,8)mm): send to level>=2 only
|
||||
// No DB read for forecast; only loads recipients list.
|
||||
func runTest2(scli *sms.Client, station string, rain float64) {
|
||||
if strings.TrimSpace(station) == "" {
|
||||
log.Printf("sms test2: station name required; use --station")
|
||||
return
|
||||
}
|
||||
if rain < 0 {
|
||||
log.Printf("sms test2: rain must be >= 0")
|
||||
return
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
recipients, err := data.ListEnabledSMSRecipients(ctx)
|
||||
if err != nil {
|
||||
log.Printf("sms test2: load recipients failed: %v", err)
|
||||
return
|
||||
}
|
||||
var heavyPhones, moderatePhones []string
|
||||
for _, r := range recipients {
|
||||
if r.AlertLevel >= 1 {
|
||||
heavyPhones = append(heavyPhones, r.Phone)
|
||||
}
|
||||
if r.AlertLevel >= 2 {
|
||||
moderatePhones = append(moderatePhones, r.Phone)
|
||||
}
|
||||
}
|
||||
if len(heavyPhones) == 0 && len(moderatePhones) == 0 {
|
||||
log.Printf("sms test2: no recipients, skip")
|
||||
return
|
||||
}
|
||||
|
||||
// Decide level by rain (mm)
|
||||
name := ":" + strings.TrimSpace(station) + ","
|
||||
content := format3(rain) + " mm"
|
||||
if rain >= 8.0 {
|
||||
if len(heavyPhones) == 0 {
|
||||
log.Printf("sms test2: red alert but no level>=1 recipients")
|
||||
return
|
||||
}
|
||||
alert := "【大礼村】暴雨红色预警"
|
||||
if err := scli.Send(ctx, name, content, alert, "", heavyPhones); err != nil {
|
||||
log.Printf("sms test2: send RED failed: %v", err)
|
||||
} else {
|
||||
log.Printf("sms test2: sent RED station=%s rain=%.3fmm to=%d", station, rain, len(heavyPhones))
|
||||
}
|
||||
return
|
||||
}
|
||||
if rain >= 4.0 {
|
||||
if len(moderatePhones) == 0 {
|
||||
log.Printf("sms test2: yellow alert but no level>=2 recipients")
|
||||
return
|
||||
}
|
||||
alert := "【大礼村】暴雨黄色预警"
|
||||
if err := scli.Send(ctx, name, content, alert, "", moderatePhones); err != nil {
|
||||
log.Printf("sms test2: send YELLOW failed: %v", err)
|
||||
} else {
|
||||
log.Printf("sms test2: sent YELLOW station=%s rain=%.3fmm to=%d", station, rain, len(moderatePhones))
|
||||
}
|
||||
return
|
||||
}
|
||||
log.Printf("sms test2: rain %.3fmm below yellow threshold, no alert", rain)
|
||||
}
|
||||
291
core/cmd/v5-export/main.go
Normal file
@ -0,0 +1,291 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/data"
|
||||
)
|
||||
|
||||
type v5Item struct {
|
||||
FT time.Time
|
||||
Rain float64
|
||||
}
|
||||
|
||||
type v5Result struct {
|
||||
Station string
|
||||
Issued time.Time
|
||||
Base [3]float64
|
||||
Actual float64
|
||||
Prev [3]float64
|
||||
Out [3]float64
|
||||
SQLRows []string
|
||||
Skipped bool
|
||||
SkipReason string
|
||||
}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
stationsCSV string
|
||||
startStr string
|
||||
endStr string
|
||||
sqlOut string
|
||||
logOut string
|
||||
tzName string
|
||||
baseProvider string
|
||||
outProvider string
|
||||
)
|
||||
|
||||
flag.StringVar(&stationsCSV, "stations", "", "逗号分隔的 station_id 列表,例如: RS485-000001,RS485-000002")
|
||||
flag.StringVar(&startStr, "start", "", "开始时间,格式: 2006-01-02 15:04 或 2006-01-02(按整点对齐)")
|
||||
flag.StringVar(&endStr, "end", "", "结束时间,格式: 2006-01-02 15:04 或 2006-01-02(不包含该时刻)")
|
||||
flag.StringVar(&sqlOut, "sql", "v5_output.sql", "输出 SQL 文件路径")
|
||||
flag.StringVar(&logOut, "log", "v5_output.log", "输出日志文件路径")
|
||||
flag.StringVar(&tzName, "tz", "Asia/Shanghai", "时区,例如 Asia/Shanghai")
|
||||
flag.StringVar(&baseProvider, "base", "imdroid_mix", "基础预报源 provider")
|
||||
flag.StringVar(&outProvider, "out", "imdroid_V5", "输出预报源 provider")
|
||||
flag.Parse()
|
||||
|
||||
if stationsCSV == "" || startStr == "" || endStr == "" {
|
||||
fmt.Println("用法示例: v5-export --stations RS485-002A6E --start '2024-08-01 00:00' --end '2024-08-02 00:00' --sql out.sql --log out.log")
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
// logger: stdout + file
|
||||
if err := os.MkdirAll(filepath.Dir(sqlOut), 0755); err != nil && filepath.Dir(sqlOut) != "." {
|
||||
log.Fatalf("create sql dir: %v", err)
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Dir(logOut), 0755); err != nil && filepath.Dir(logOut) != "." {
|
||||
log.Fatalf("create log dir: %v", err)
|
||||
}
|
||||
lf, err := os.Create(logOut)
|
||||
if err != nil {
|
||||
log.Fatalf("open log file: %v", err)
|
||||
}
|
||||
defer lf.Close()
|
||||
mw := io.MultiWriter(os.Stdout, lf)
|
||||
logger := log.New(mw, "", log.LstdFlags)
|
||||
|
||||
sf, err := os.Create(sqlOut)
|
||||
if err != nil {
|
||||
logger.Fatalf("open sql file: %v", err)
|
||||
}
|
||||
defer sf.Close()
|
||||
|
||||
loc, _ := time.LoadLocation(tzName)
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
parseTime := func(s string) (time.Time, error) {
|
||||
layouts := []string{"2006-01-02 15:04", "2006-01-02 15", "2006-01-02"}
|
||||
var lastErr error
|
||||
for _, ly := range layouts {
|
||||
t, err := time.ParseInLocation(ly, s, loc)
|
||||
if err == nil {
|
||||
return t, nil
|
||||
}
|
||||
lastErr = err
|
||||
}
|
||||
return time.Time{}, lastErr
|
||||
}
|
||||
start, err := parseTime(startStr)
|
||||
if err != nil {
|
||||
logger.Fatalf("parse start: %v", err)
|
||||
}
|
||||
end, err := parseTime(endStr)
|
||||
if err != nil {
|
||||
logger.Fatalf("parse end: %v", err)
|
||||
}
|
||||
start = start.Truncate(time.Hour)
|
||||
end = end.Truncate(time.Hour)
|
||||
if !end.After(start) {
|
||||
logger.Fatalf("end 必须大于 start")
|
||||
}
|
||||
|
||||
stations := splitStations(stationsCSV)
|
||||
ctx := context.Background()
|
||||
|
||||
// write the file header
|
||||
fmt.Fprintf(sf, "-- V5 Export generated at %s\n", time.Now().Format(time.RFC3339))
|
||||
fmt.Fprintf(sf, "BEGIN;\n")
|
||||
|
||||
for _, st := range stations {
|
||||
logger.Printf("处理站点 %s: %s → %s", st, start.Format("2006-01-02 15:04"), end.Format("2006-01-02 15:04"))
|
||||
for t := start; t.Before(end); t = t.Add(1 * time.Hour) {
|
||||
res := computeV5(ctx, st, t, baseProvider, outProvider, loc)
|
||||
if res.Skipped {
|
||||
logger.Printf("skip station=%s issued=%s: %s", st, t.Format("2006-01-02 15:04"), res.SkipReason)
|
||||
continue
|
||||
}
|
||||
// log line
|
||||
logger.Printf("V5 station=%s issued=%s base=[%.3f,%.3f,%.3f] actual=%.3f prev=[%.3f,%.3f,%.3f] out=[%.3f,%.3f,%.3f]",
|
||||
st, t.Format("2006-01-02 15:04"),
|
||||
res.Base[0], res.Base[1], res.Base[2], res.Actual,
|
||||
res.Prev[0], res.Prev[1], res.Prev[2],
|
||||
res.Out[0], res.Out[1], res.Out[2])
|
||||
// SQL
|
||||
for _, row := range res.SQLRows {
|
||||
fmt.Fprintln(sf, row)
|
||||
}
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(sf, "COMMIT;\n")
|
||||
logger.Printf("完成,SQL 已写入: %s ,日志: %s", sqlOut, logOut)
|
||||
}
|
||||
|
||||
func splitStations(s string) []string {
|
||||
parts := strings.Split(s, ",")
|
||||
out := make([]string, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if p != "" {
|
||||
out = append(out, p)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func computeV5(ctx context.Context, stationID string, issued time.Time, baseProvider, outProvider string, loc *time.Location) v5Result {
|
||||
res := v5Result{Station: stationID, Issued: issued}
|
||||
|
||||
// base issued in this bucket
|
||||
baseIssued, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, issued)
|
||||
if err != nil || !ok {
|
||||
res.Skipped, res.SkipReason = true, fmt.Sprintf("base issued missing: %v ok=%v", err, ok)
|
||||
return res
|
||||
}
|
||||
basePoints, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, baseIssued)
|
||||
if err != nil || len(basePoints) < 3 {
|
||||
res.Skipped, res.SkipReason = true, fmt.Sprintf("base points insufficient: %v len=%d", err, len(basePoints))
|
||||
return res
|
||||
}
|
||||
|
||||
// target times
|
||||
ft1 := issued.Add(1 * time.Hour)
|
||||
ft2 := issued.Add(2 * time.Hour)
|
||||
ft3 := issued.Add(3 * time.Hour)
|
||||
base1 := pickRain(basePoints, ft1)
|
||||
base2 := pickRain(basePoints, ft2)
|
||||
base3 := pickRain(basePoints, ft3)
|
||||
res.Base = [3]float64{base1, base2, base3}
|
||||
|
||||
// actual just-finished hour
|
||||
actual, okA, err := data.FetchActualHourlyRain(ctx, stationID, issued.Add(-time.Hour), issued)
|
||||
if err != nil || !okA {
|
||||
res.Skipped, res.SkipReason = true, fmt.Sprintf("actual missing: %v ok=%v", err, okA)
|
||||
return res
|
||||
}
|
||||
res.Actual = actual
|
||||
|
||||
// previous preds aligned to same validation time (ft1)
|
||||
p1, e1 := pickPrevPredict(ctx, stationID, baseProvider, issued.Add(-1*time.Hour), 1, ft1)
|
||||
if e1 != nil {
|
||||
res.Skipped, res.SkipReason = true, e1.Error()
|
||||
return res
|
||||
}
|
||||
p2, e2 := pickPrevPredict(ctx, stationID, baseProvider, issued.Add(-2*time.Hour), 2, ft1)
|
||||
if e2 != nil {
|
||||
res.Skipped, res.SkipReason = true, e2.Error()
|
||||
return res
|
||||
}
|
||||
p3, e3 := pickPrevPredict(ctx, stationID, baseProvider, issued.Add(-3*time.Hour), 3, ft1)
|
||||
if e3 != nil {
|
||||
res.Skipped, res.SkipReason = true, e3.Error()
|
||||
return res
|
||||
}
|
||||
res.Prev = [3]float64{p1, p2, p3}
|
||||
|
||||
r1 := actual - p1
|
||||
r2 := actual - p2
|
||||
r3 := actual - p3
|
||||
// fall back to the baseline for any lead whose corrected value goes negative
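// Each candidate adds its residual scaled by 1/lead (×1.0 for +1 h, ×0.5 for +2 h,
// ×1/3 for +3 h), so older-lead corrections are damped. Illustration with invented
// numbers: base2 = 1.5 mm and r2 = -0.6 mm give cand2 = 1.5 + 0.5*(-0.6) = 1.2 mm.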
|
||||
cand1 := base1 + 1.0*r1
|
||||
cand2 := base2 + 0.5*r2
|
||||
cand3 := base3 + (1.0/3.0)*r3
|
||||
var out1, out2, out3 float64
|
||||
if cand1 < 0 {
|
||||
out1 = base1
|
||||
} else {
|
||||
out1 = cand1
|
||||
}
|
||||
if cand2 < 0 {
|
||||
out2 = base2
|
||||
} else {
|
||||
out2 = cand2
|
||||
}
|
||||
if cand3 < 0 {
|
||||
out3 = base3
|
||||
} else {
|
||||
out3 = cand3
|
||||
}
|
||||
res.Out = [3]float64{out1, out2, out3}
|
||||
|
||||
rows := make([]string, 0, 3)
|
||||
rows = append(rows, insertRainSQL(stationID, outProvider, issued, ft1, toX1000(out1)))
|
||||
rows = append(rows, insertRainSQL(stationID, outProvider, issued, ft2, toX1000(out2)))
|
||||
rows = append(rows, insertRainSQL(stationID, outProvider, issued, ft3, toX1000(out3)))
|
||||
res.SQLRows = rows
|
||||
return res
|
||||
}
|
||||
|
||||
func pickPrevPredict(ctx context.Context, stationID, provider string, prevBucket time.Time, lead int, validFT time.Time) (float64, error) {
|
||||
iss, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, provider, prevBucket)
|
||||
if err != nil || !ok {
|
||||
return 0, fmt.Errorf("prev issued missing bucket=%s: %v ok=%v", prevBucket.Format("2006-01-02 15:04"), err, ok)
|
||||
}
|
||||
pts, err := data.ForecastRainAtIssued(ctx, stationID, provider, iss)
|
||||
if err != nil || len(pts) < lead {
|
||||
return 0, fmt.Errorf("prev points insufficient lead=%d: %v len=%d", lead, err, len(pts))
|
||||
}
|
||||
if v := pickRain(pts, validFT); v >= 0 {
|
||||
return v, nil
|
||||
}
|
||||
switch lead {
|
||||
case 1:
|
||||
return toMM(pts[0].RainMMx1000), nil
|
||||
case 2:
|
||||
if len(pts) >= 2 {
|
||||
return toMM(pts[1].RainMMx1000), nil
|
||||
}
|
||||
case 3:
|
||||
if len(pts) >= 3 {
|
||||
return toMM(pts[2].RainMMx1000), nil
|
||||
}
|
||||
}
|
||||
return 0, fmt.Errorf("prev choose failed lead=%d", lead)
|
||||
}
|
||||
|
||||
func pickRain(points []data.PredictPoint, ft time.Time) float64 {
|
||||
for _, p := range points {
|
||||
if p.ForecastTime.Equal(ft) {
|
||||
return toMM(p.RainMMx1000)
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func toMM(vx1000 int32) float64 { return float64(vx1000) / 1000.0 }
|
||||
func toX1000(mm float64) int32 { return int32(mm*1000 + 0.5) }
|
||||
func clamp0(v float64) float64 {
|
||||
if v < 0 {
|
||||
return 0
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func insertRainSQL(stationID, provider string, issued, ft time.Time, rainX1000 int32) string {
|
||||
// use RFC3339, which Postgres timestamptz parsing accepts
|
||||
return fmt.Sprintf(
|
||||
"INSERT INTO forecast_hourly (station_id, provider, issued_at, forecast_time, rain_mm_x1000) VALUES ('%s','%s','%s','%s',%d) ON CONFLICT (station_id, provider, issued_at, forecast_time) DO UPDATE SET rain_mm_x1000=EXCLUDED.rain_mm_x1000;",
|
||||
escapeSQL(stationID), provider, issued.Format(time.RFC3339), ft.Format(time.RFC3339), rainX1000,
|
||||
)
|
||||
}
|
||||
|
||||
func escapeSQL(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
271
core/cmd/v5-model/main.go
Normal file
@ -0,0 +1,271 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/data"
|
||||
)
|
||||
|
||||
const (
|
||||
baseProvider = "imdroid_mix"
|
||||
outProvider = "imdroid_V5"
|
||||
)
|
||||
|
||||
func main() {
|
||||
var stationsCSV, issuedStr, startStr, endStr, tzName string
|
||||
flag.StringVar(&stationsCSV, "stations", "", "逗号分隔的 station_id 列表;为空则自动扫描有基线的站点")
|
||||
flag.StringVar(&issuedStr, "issued", "", "指定 issued 时间(整点),格式: 2006-01-02 15:00;为空用当前整点")
|
||||
flag.StringVar(&startStr, "start", "", "开始时间(整点),格式: 2006-01-02 15:00;与 --end 一起使用,end 为开区间")
|
||||
flag.StringVar(&endStr, "end", "", "结束时间(整点),格式: 2006-01-02 15:00;与 --start 一起使用,end 为开区间")
|
||||
flag.StringVar(&tzName, "tz", "Asia/Shanghai", "时区,例如 Asia/Shanghai")
|
||||
flag.Parse()
|
||||
|
||||
ctx := context.Background()
|
||||
loc, _ := time.LoadLocation(tzName)
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
|
||||
parse := func(s string) (time.Time, error) {
|
||||
var t time.Time
|
||||
var err error
|
||||
for _, ly := range []string{"2006-01-02 15:04", "2006-01-02 15", "2006-01-02"} {
|
||||
t, err = time.ParseInLocation(ly, s, loc)
|
||||
if err == nil {
|
||||
return t.Truncate(time.Hour), nil
|
||||
}
|
||||
}
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
// Determine mode: single issued or range
|
||||
if strings.TrimSpace(startStr) != "" && strings.TrimSpace(endStr) != "" {
|
||||
start, err := parse(startStr)
|
||||
if err != nil {
|
||||
log.Fatalf("无法解析 start: %v", err)
|
||||
}
|
||||
end, err := parse(endStr)
|
||||
if err != nil {
|
||||
log.Fatalf("无法解析 end: %v", err)
|
||||
}
|
||||
if !end.After(start) {
|
||||
log.Fatalf("end 必须大于 start")
|
||||
}
|
||||
for t := start; t.Before(end); t = t.Add(time.Hour) {
|
||||
var stations []string
|
||||
if strings.TrimSpace(stationsCSV) != "" {
|
||||
stations = splitStations(stationsCSV)
|
||||
} else {
|
||||
var err error
|
||||
stations, err = listStationsWithBase(ctx, baseProvider, t)
|
||||
if err != nil {
|
||||
log.Fatalf("list stations failed: %v", err)
|
||||
}
|
||||
}
|
||||
if len(stations) == 0 {
|
||||
log.Printf("no stations to process for issued=%s", t.Format("2006-01-02 15:04:05"))
|
||||
continue
|
||||
}
|
||||
for _, st := range stations {
|
||||
if err := runForStation(ctx, st, t); err != nil {
|
||||
log.Printf("V5 station=%s issued=%s error: %v", st, t.Format("2006-01-02 15:04:05"), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Single issued
|
||||
var issued time.Time
|
||||
if strings.TrimSpace(issuedStr) != "" {
|
||||
var err error
|
||||
issued, err = parse(issuedStr)
|
||||
if err != nil || issued.IsZero() {
|
||||
log.Fatalf("无法解析 issued: %v", err)
|
||||
}
|
||||
} else {
|
||||
issued = time.Now().In(loc).Truncate(time.Hour)
|
||||
}
|
||||
var stations []string
|
||||
if strings.TrimSpace(stationsCSV) != "" {
|
||||
stations = splitStations(stationsCSV)
|
||||
} else {
|
||||
var err error
|
||||
stations, err = listStationsWithBase(ctx, baseProvider, issued)
|
||||
if err != nil {
|
||||
log.Fatalf("list stations failed: %v", err)
|
||||
}
|
||||
}
|
||||
if len(stations) == 0 {
|
||||
log.Printf("no stations to process for issued=%s", issued.Format("2006-01-02 15:04:05"))
|
||||
return
|
||||
}
|
||||
for _, st := range stations {
|
||||
if err := runForStation(ctx, st, issued); err != nil {
|
||||
log.Printf("V5 station=%s error: %v", st, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func listStationsWithBase(ctx context.Context, provider string, issued time.Time) ([]string, error) {
|
||||
const q = `
|
||||
SELECT DISTINCT station_id
|
||||
FROM forecast_hourly
|
||||
WHERE provider=$1 AND issued_at >= $2 AND issued_at < $2 + interval '1 hour'`
|
||||
rows, err := data.DB().QueryContext(ctx, q, provider, issued)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var out []string
|
||||
for rows.Next() {
|
||||
var id string
|
||||
if err := rows.Scan(&id); err == nil {
|
||||
out = append(out, id)
|
||||
}
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func splitStations(s string) []string {
|
||||
parts := strings.Split(s, ",")
|
||||
out := make([]string, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if p != "" {
|
||||
out = append(out, p)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func runForStation(ctx context.Context, stationID string, issued time.Time) error {
|
||||
// resolve the base provider's issued_at within the current issued bucket (take the latest one)
|
||||
baseIssued, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, issued)
|
||||
if err != nil || !ok {
|
||||
return fmt.Errorf("resolve base issued failed: %v ok=%v", err, ok)
|
||||
}
|
||||
basePoints, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, baseIssued)
|
||||
if err != nil || len(basePoints) < 3 {
|
||||
return fmt.Errorf("load base points failed: %v len=%d", err, len(basePoints))
|
||||
}
|
||||
|
||||
// forecast valid times
|
||||
ft1 := issued.Add(1 * time.Hour)
|
||||
ft2 := issued.Add(2 * time.Hour)
|
||||
ft3 := issued.Add(3 * time.Hour)
|
||||
|
||||
base1, base2, base3 := pickRain(basePoints, ft1), pickRain(basePoints, ft2), pickRain(basePoints, ft3)
|
||||
|
||||
// compute the bias for the three horizons:
// r1 = actual[issued-1, issued) - (the +1 forecast issued at issued-1)
// r2 = actual[issued-1, issued) - (the +2 forecast issued at issued-2)
// r3 = actual[issued-1, issued) - (the +3 forecast issued at issued-3)
|
||||
actual, okA, err := data.FetchActualHourlyRain(ctx, stationID, issued.Add(-time.Hour), issued)
|
||||
if err != nil || !okA {
|
||||
return fmt.Errorf("actual not ready: %v ok=%v", err, okA)
|
||||
}
|
||||
|
||||
p1, err := pickPrevPredict(ctx, stationID, issued.Add(-1*time.Hour), 1, issued)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
p2, err := pickPrevPredict(ctx, stationID, issued.Add(-2*time.Hour), 2, issued)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
p3, err := pickPrevPredict(ctx, stationID, issued.Add(-3*time.Hour), 3, issued)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1 := actual - p1
|
||||
r2 := actual - p2
|
||||
r3 := actual - p3
|
||||
|
||||
// fall back to the baseline for any lead whose corrected value goes negative
|
||||
cand1 := base1 + 1.0*r1
|
||||
cand2 := base2 + 0.5*r2
|
||||
cand3 := base3 + (1.0/3.0)*r3
|
||||
var out1, out2, out3 float64
|
||||
if cand1 < 0 {
|
||||
out1 = base1
|
||||
} else {
|
||||
out1 = cand1
|
||||
}
|
||||
if cand2 < 0 {
|
||||
out2 = base2
|
||||
} else {
|
||||
out2 = cand2
|
||||
}
|
||||
if cand3 < 0 {
|
||||
out3 = base3
|
||||
} else {
|
||||
out3 = cand3
|
||||
}
|
||||
|
||||
items := []data.UpsertRainItem{
|
||||
{ForecastTime: ft1, RainMMx1000: toX1000(out1)},
|
||||
{ForecastTime: ft2, RainMMx1000: toX1000(out2)},
|
||||
{ForecastTime: ft3, RainMMx1000: toX1000(out3)},
|
||||
}
|
||||
if err := data.UpsertForecastRain(ctx, stationID, outProvider, issued, items); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Printf("V5 %s issued=%s base=[%.3f,%.3f,%.3f] actual=%.3f prev=[%.3f,%.3f,%.3f] out=[%.3f,%.3f,%.3f]",
|
||||
stationID, issued.Format("2006-01-02 15:04:05"),
|
||||
base1, base2, base3, actual, p1, p2, p3, out1, out2, out3,
|
||||
)
|
||||
return nil
|
||||
}
|
||||
|
||||
func pickPrevPredict(ctx context.Context, stationID string, prevBucket time.Time, lead int, validFT time.Time) (float64, error) {
|
||||
iss, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, prevBucket)
|
||||
if err != nil || !ok {
|
||||
return 0, fmt.Errorf("resolve prev issued fail bucket=%s: %v ok=%v", prevBucket, err, ok)
|
||||
}
|
||||
pts, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, iss)
|
||||
if err != nil || len(pts) < lead {
|
||||
return 0, fmt.Errorf("load prev points fail lead=%d: %v len=%d", lead, err, len(pts))
|
||||
}
|
||||
// match validFT exactly; as a fallback, take the lead-th point when no exact match exists
|
||||
if v := pickRain(pts, validFT); v >= 0 {
|
||||
return v, nil
|
||||
}
|
||||
switch lead {
|
||||
case 1:
|
||||
return toMM(pts[0].RainMMx1000), nil
|
||||
case 2:
|
||||
if len(pts) >= 2 {
|
||||
return toMM(pts[1].RainMMx1000), nil
|
||||
}
|
||||
case 3:
|
||||
if len(pts) >= 3 {
|
||||
return toMM(pts[2].RainMMx1000), nil
|
||||
}
|
||||
}
|
||||
return 0, fmt.Errorf("insufficient points for lead=%d", lead)
|
||||
}
|
||||
|
||||
func pickRain(points []data.PredictPoint, ft time.Time) float64 {
|
||||
for _, p := range points {
|
||||
if p.ForecastTime.Equal(ft) {
|
||||
return toMM(p.RainMMx1000)
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func toMM(vx1000 int32) float64 { return float64(vx1000) / 1000.0 }
|
||||
func toX1000(mm float64) int32 { return int32(mm*1000 + 0.5) }
|
||||
func clamp0(v float64) float64 {
|
||||
if v < 0 {
|
||||
return 0
|
||||
}
|
||||
return v
|
||||
}
|
||||
288
core/cmd/v6-export/main.go
Normal file
288
core/cmd/v6-export/main.go
Normal file
@ -0,0 +1,288 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/data"
|
||||
)
|
||||
|
||||
// V6 导出工具:
|
||||
// - 以 imdroid_mix 为基线 b_t(+k)
|
||||
// - 残差 e_t(+k) 优先使用“上一轮 V6 的预测误差”,冷启动时退回使用 mix 的历史预测误差
|
||||
// - out(+1) = max(0, base1 + 1.0*e1)
|
||||
// - out(+2) = max(0, base2 + 0.5*e2)
|
||||
// - out(+3) = max(0, base3 + (1/3)*e3)
|
||||
// - 仅生成 SQL 与日志,不写库
|
||||
|
||||
const (
|
||||
outProvider = "imdroid_V6"
|
||||
)
|
||||
|
||||
type v6Out struct {
|
||||
FT time.Time
|
||||
Rain float64
|
||||
}
|
||||
|
||||
func main() {
|
||||
var stationsCSV, startStr, endStr, sqlOut, logOut, tzName, baseProvider string
|
||||
flag.StringVar(&stationsCSV, "stations", "", "逗号分隔的 station_id 列表,例如: RS485-000001,RS485-000002")
|
||||
flag.StringVar(&startStr, "start", "", "开始时间,格式: 2006-01-02 15:00 或 2006-01-02(按整点对齐)")
|
||||
flag.StringVar(&endStr, "end", "", "结束时间,格式: 2006-01-02 15:00 或 2006-01-02(不包含该时刻)")
|
||||
flag.StringVar(&sqlOut, "sql", "v6_output.sql", "输出 SQL 文件路径")
|
||||
flag.StringVar(&logOut, "log", "v6_output.log", "输出日志文件路径")
|
||||
flag.StringVar(&tzName, "tz", "Asia/Shanghai", "时区,例如 Asia/Shanghai")
|
||||
flag.StringVar(&baseProvider, "base", "imdroid_mix", "基础预报源 provider(例如: imdroid_mix, caiyun, open-meteo)")
|
||||
flag.Parse()
|
||||
|
||||
if stationsCSV == "" || startStr == "" || endStr == "" {
|
||||
fmt.Println("用法: v6-export --stations RS485-XXXXXX --start '2024-08-01 00:00' --end '2024-08-02 00:00' --sql out.sql --log out.log")
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(filepath.Dir(sqlOut), 0755); err != nil && filepath.Dir(sqlOut) != "." {
|
||||
log.Fatalf("create sql dir: %v", err)
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Dir(logOut), 0755); err != nil && filepath.Dir(logOut) != "." {
|
||||
log.Fatalf("create log dir: %v", err)
|
||||
}
|
||||
lf, err := os.Create(logOut)
|
||||
if err != nil {
|
||||
log.Fatalf("open log file: %v", err)
|
||||
}
|
||||
defer lf.Close()
|
||||
logger := log.New(io.MultiWriter(os.Stdout, lf), "", log.LstdFlags)
|
||||
|
||||
sf, err := os.Create(sqlOut)
|
||||
if err != nil {
|
||||
logger.Fatalf("open sql file: %v", err)
|
||||
}
|
||||
defer sf.Close()
|
||||
fmt.Fprintf(sf, "-- V6 Export generated at %s\nBEGIN;\n", time.Now().Format(time.RFC3339))
|
||||
|
||||
loc, _ := time.LoadLocation(tzName)
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
parse := func(s string) (time.Time, error) {
|
||||
for _, ly := range []string{"2006-01-02 15:04", "2006-01-02 15", "2006-01-02"} {
|
||||
if t, err := time.ParseInLocation(ly, s, loc); err == nil {
|
||||
return t, nil
|
||||
}
|
||||
}
|
||||
return time.Time{}, fmt.Errorf("invalid time: %s", s)
|
||||
}
|
||||
start, err := parse(startStr)
|
||||
if err != nil {
|
||||
logger.Fatalf("parse start: %v", err)
|
||||
}
|
||||
end, err := parse(endStr)
|
||||
if err != nil {
|
||||
logger.Fatalf("parse end: %v", err)
|
||||
}
|
||||
start = start.Truncate(time.Hour)
|
||||
end = end.Truncate(time.Hour)
|
||||
if !end.After(start) {
|
||||
logger.Fatalf("end 必须大于 start")
|
||||
}
|
||||
|
||||
stations := splitStations(stationsCSV)
|
||||
ctx := context.Background()
|
||||
|
||||
for _, st := range stations {
|
||||
logger.Printf("V6 导出 站点=%s 窗口=%s→%s", st, start.Format("2006-01-02 15:04"), end.Format("2006-01-02 15:04"))
|
||||
// 维护一个“按验证时刻”的 V6 预测缓存:key=forecast_time,value=预测雨量
|
||||
v6AtTime := make(map[time.Time]float64)
|
||||
|
||||
for t := start; t.Before(end); t = t.Add(time.Hour) {
|
||||
res := computeV6AtHour(ctx, st, t, baseProvider, v6AtTime, logger)
|
||||
if res.skipped {
|
||||
logger.Printf("skip station=%s issued=%s: %s", st, t.Format("2006-01-02 15:04"), res.reason)
|
||||
continue
|
||||
}
|
||||
// 写 SQL
|
||||
for _, row := range res.sqlRows {
|
||||
fmt.Fprintln(sf, row)
|
||||
}
|
||||
// 更新缓存:将本次的 +1/+2/+3 结果写入对应的验证时刻键
|
||||
v6AtTime[t.Add(1*time.Hour)] = res.out[0]
|
||||
v6AtTime[t.Add(2*time.Hour)] = res.out[1]
|
||||
v6AtTime[t.Add(3*time.Hour)] = res.out[2]
|
||||
|
||||
logger.Printf("V6 %s issued=%s base=[%.3f,%.3f,%.3f] actual=%.3f prev=[%.3f,%.3f,%.3f] out=[%.3f,%.3f,%.3f] src=[%s,%s,%s]",
|
||||
st, t.Format("2006-01-02 15:04"), res.base[0], res.base[1], res.base[2], res.actual,
|
||||
res.prev[0], res.prev[1], res.prev[2], res.out[0], res.out[1], res.out[2],
|
||||
res.src[0], res.src[1], res.src[2])
|
||||
}
|
||||
}
|
||||
fmt.Fprintln(sf, "COMMIT;")
|
||||
logger.Printf("完成,SQL: %s 日志: %s", sqlOut, logOut)
|
||||
}
|
||||
|
||||
type v6Result struct {
|
||||
base [3]float64
|
||||
prev [3]float64
|
||||
src [3]string // 使用的前一预测来源:V6 或 mix
|
||||
out [3]float64
|
||||
actual float64
|
||||
sqlRows []string
|
||||
skipped bool
|
||||
reason string
|
||||
}
|
||||
|
||||
func computeV6AtHour(ctx context.Context, stationID string, issued time.Time, baseProvider string, v6AtTime map[time.Time]float64, logger *log.Logger) v6Result {
|
||||
var res v6Result
|
||||
|
||||
// 读取基线:当期小时桶内 mix 最新 issued 的 +1/+2/+3
|
||||
baseIssued, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, issued)
|
||||
if err != nil || !ok {
|
||||
res.skipped, res.reason = true, fmt.Sprintf("base issued missing: %v ok=%v", err, ok)
|
||||
return res
|
||||
}
|
||||
pts, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, baseIssued)
|
||||
if err != nil || len(pts) < 3 {
|
||||
res.skipped, res.reason = true, fmt.Sprintf("base points insufficient: %v len=%d", err, len(pts))
|
||||
return res
|
||||
}
|
||||
ft1, ft2, ft3 := issued.Add(time.Hour), issued.Add(2*time.Hour), issued.Add(3*time.Hour)
|
||||
base1, base2, base3 := pickRain(pts, ft1), pickRain(pts, ft2), pickRain(pts, ft3)
|
||||
res.base = [3]float64{base1, base2, base3}
|
||||
|
||||
// 实况:刚结束一小时 [t-1,t)
|
||||
actual, okA, err := data.FetchActualHourlyRain(ctx, stationID, issued.Add(-time.Hour), issued)
|
||||
if err != nil || !okA {
|
||||
res.skipped, res.reason = true, fmt.Sprintf("actual missing: %v ok=%v", err, okA)
|
||||
return res
|
||||
}
|
||||
res.actual = actual
|
||||
|
||||
// 前一预测(优先 V6 缓存,否则退回 mix 历史)
|
||||
// +1:需要 (t-1) 发布、验证时刻 t 的预测值
|
||||
vPrev1, src1, ok1 := prevForValidation(ctx, stationID, issued, 1, baseProvider, v6AtTime)
|
||||
vPrev2, src2, ok2 := prevForValidation(ctx, stationID, issued, 2, baseProvider, v6AtTime)
|
||||
vPrev3, src3, ok3 := prevForValidation(ctx, stationID, issued, 3, baseProvider, v6AtTime)
|
||||
if !(ok1 && ok2 && ok3) {
|
||||
// 若冷启动,允许个别 lead 不可用时跳过;也可以只输出可用的 lead,这里采取全量可用才输出
|
||||
res.skipped, res.reason = true, fmt.Sprintf("prev missing leads: h1=%v h2=%v h3=%v", ok1, ok2, ok3)
|
||||
return res
|
||||
}
|
||||
res.prev = [3]float64{vPrev1, vPrev2, vPrev3}
|
||||
res.src = [3]string{src1, src2, src3}
|
||||
|
||||
// 残差与输出
|
||||
e1 := actual - vPrev1
|
||||
e2 := actual - vPrev2
|
||||
e3 := actual - vPrev3
|
||||
cand1 := base1 + 1.0*e1
|
||||
cand2 := base2 + 0.5*e2
|
||||
cand3 := base3 + (1.0/3.0)*e3
|
||||
var out1, out2, out3 float64
|
||||
if cand1 < 0 {
|
||||
out1 = base1
|
||||
} else {
|
||||
out1 = cand1
|
||||
}
|
||||
if cand2 < 0 {
|
||||
out2 = base2
|
||||
} else {
|
||||
out2 = cand2
|
||||
}
|
||||
if cand3 < 0 {
|
||||
out3 = base3
|
||||
} else {
|
||||
out3 = cand3
|
||||
}
|
||||
res.out = [3]float64{out1, out2, out3}
|
||||
|
||||
// 生成 SQL(仅雨量 upsert)
|
||||
rows := make([]string, 0, 3)
|
||||
rows = append(rows, insertRainSQL(stationID, outProvider, issued, ft1, toX1000(out1)))
|
||||
rows = append(rows, insertRainSQL(stationID, outProvider, issued, ft2, toX1000(out2)))
|
||||
rows = append(rows, insertRainSQL(stationID, outProvider, issued, ft3, toX1000(out3)))
|
||||
res.sqlRows = rows
|
||||
return res
|
||||
}
|
||||
|
||||
// prevForValidation 返回用于“验证时刻=issued+0h”的上一预测:优先使用 V6 的缓存;如无则退回 mix 的历史。
|
||||
func prevForValidation(ctx context.Context, stationID string, issued time.Time, lead int, baseProvider string, v6AtTime map[time.Time]float64) (float64, string, bool) {
|
||||
// 需要的验证时刻
|
||||
vt := issued // 验证在 t
|
||||
// 先看 V6 缓存:我们在前面会把 V6 的结果按 forecast_time 存入 map
|
||||
if v, ok := v6AtTime[vt]; ok {
|
||||
return v, "V6", true
|
||||
}
|
||||
// 否则退回 mix 历史:在 (t-lead) 的小时桶内,取最新 issued 的 +lead
|
||||
prevBucket := issued.Add(-time.Duration(lead) * time.Hour)
|
||||
iss, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, prevBucket)
|
||||
if err != nil || !ok {
|
||||
return 0, "", false
|
||||
}
|
||||
pts, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, iss)
|
||||
if err != nil || len(pts) < lead {
|
||||
return 0, "", false
|
||||
}
|
||||
// 直接用验证时刻 vt 精确匹配
|
||||
if v := pickRain(pts, vt); v >= 0 {
|
||||
return v, baseProvider, true
|
||||
}
|
||||
// 或退回位置索引
|
||||
switch lead {
|
||||
case 1:
|
||||
return toMM(pts[0].RainMMx1000), baseProvider, true
|
||||
case 2:
|
||||
if len(pts) >= 2 {
|
||||
return toMM(pts[1].RainMMx1000), baseProvider, true
|
||||
}
|
||||
case 3:
|
||||
if len(pts) >= 3 {
|
||||
return toMM(pts[2].RainMMx1000), baseProvider, true
|
||||
}
|
||||
}
|
||||
return 0, "", false
|
||||
}
|
||||
|
||||
func splitStations(s string) []string {
|
||||
parts := strings.Split(s, ",")
|
||||
out := make([]string, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if p != "" {
|
||||
out = append(out, p)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func pickRain(points []data.PredictPoint, ft time.Time) float64 {
|
||||
for _, p := range points {
|
||||
if p.ForecastTime.Equal(ft) {
|
||||
return toMM(p.RainMMx1000)
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func toMM(vx1000 int32) float64 { return float64(vx1000) / 1000.0 }
|
||||
func toX1000(mm float64) int32 { return int32(mm*1000 + 0.5) }
|
||||
func clamp0(v float64) float64 {
|
||||
if v < 0 {
|
||||
return 0
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func insertRainSQL(stationID, provider string, issued, ft time.Time, rainX1000 int32) string {
|
||||
return fmt.Sprintf(
|
||||
"INSERT INTO forecast_hourly (station_id, provider, issued_at, forecast_time, rain_mm_x1000) VALUES ('%s','%s','%s','%s',%d) ON CONFLICT (station_id, provider, issued_at, forecast_time) DO UPDATE SET rain_mm_x1000=EXCLUDED.rain_mm_x1000;",
|
||||
escapeSQL(stationID), provider, issued.Format(time.RFC3339), ft.Format(time.RFC3339), rainX1000,
|
||||
)
|
||||
}
|
||||
|
||||
func escapeSQL(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
261
core/cmd/v6-model/main.go
Normal file
261
core/cmd/v6-model/main.go
Normal file
@ -0,0 +1,261 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/core/internal/data"
|
||||
)
|
||||
|
||||
const (
|
||||
v6OutProvider = "imdroid_V6"
|
||||
)
|
||||
|
||||
func main() {
|
||||
var stationsCSV, issuedStr, startStr, endStr, tzName, baseProvider string
|
||||
flag.StringVar(&stationsCSV, "stations", "", "逗号分隔的 station_id 列表;为空则自动扫描有基线的站点")
|
||||
flag.StringVar(&issuedStr, "issued", "", "指定 issued 时间(整点),格式: 2006-01-02 15:00;为空用当前整点")
|
||||
flag.StringVar(&startStr, "start", "", "开始时间(整点),格式: 2006-01-02 15:00;与 --end 一起使用,end 为开区间")
|
||||
flag.StringVar(&endStr, "end", "", "结束时间(整点),格式: 2006-01-02 15:00;与 --start 一起使用,end 为开区间")
|
||||
flag.StringVar(&tzName, "tz", "Asia/Shanghai", "时区,例如 Asia/Shanghai")
|
||||
flag.StringVar(&baseProvider, "base", "imdroid_mix", "基础预报源 provider(例如: imdroid_mix, caiyun, open-meteo)")
|
||||
flag.Parse()
|
||||
|
||||
ctx := context.Background()
|
||||
loc, _ := time.LoadLocation(tzName)
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
|
||||
parse := func(s string) (time.Time, error) {
|
||||
var t time.Time
|
||||
var err error
|
||||
for _, ly := range []string{"2006-01-02 15:04", "2006-01-02 15", "2006-01-02"} {
|
||||
t, err = time.ParseInLocation(ly, s, loc)
|
||||
if err == nil {
|
||||
return t.Truncate(time.Hour), nil
|
||||
}
|
||||
}
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
if strings.TrimSpace(startStr) != "" && strings.TrimSpace(endStr) != "" {
|
||||
start, err := parse(startStr)
|
||||
if err != nil {
|
||||
log.Fatalf("无法解析 start: %v", err)
|
||||
}
|
||||
end, err := parse(endStr)
|
||||
if err != nil {
|
||||
log.Fatalf("无法解析 end: %v", err)
|
||||
}
|
||||
if !end.After(start) {
|
||||
log.Fatalf("end 必须大于 start")
|
||||
}
|
||||
for t := start; t.Before(end); t = t.Add(time.Hour) {
|
||||
var stations []string
|
||||
if strings.TrimSpace(stationsCSV) != "" {
|
||||
stations = splitStations(stationsCSV)
|
||||
} else {
|
||||
var err error
|
||||
stations, err = listStationsWithBase(ctx, baseProvider, t)
|
||||
if err != nil {
|
||||
log.Fatalf("list stations failed: %v", err)
|
||||
}
|
||||
}
|
||||
if len(stations) == 0 {
|
||||
log.Printf("no stations to process for issued=%s", t.Format("2006-01-02 15:04:05"))
|
||||
continue
|
||||
}
|
||||
for _, st := range stations {
|
||||
if err := runV6ForStation(ctx, st, t, baseProvider); err != nil {
|
||||
log.Printf("V6 station=%s issued=%s error: %v", st, t.Format("2006-01-02 15:04:05"), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
var issued time.Time
|
||||
if strings.TrimSpace(issuedStr) != "" {
|
||||
var err error
|
||||
issued, err = parse(issuedStr)
|
||||
if err != nil || issued.IsZero() {
|
||||
log.Fatalf("无法解析 issued: %v", err)
|
||||
}
|
||||
} else {
|
||||
issued = time.Now().In(loc).Truncate(time.Hour)
|
||||
}
|
||||
var stations []string
|
||||
if strings.TrimSpace(stationsCSV) != "" {
|
||||
stations = splitStations(stationsCSV)
|
||||
} else {
|
||||
var err error
|
||||
stations, err = listStationsWithBase(ctx, baseProvider, issued)
|
||||
if err != nil {
|
||||
log.Fatalf("list stations failed: %v", err)
|
||||
}
|
||||
}
|
||||
if len(stations) == 0 {
|
||||
log.Printf("no stations to process for issued=%s", issued.Format("2006-01-02 15:04:05"))
|
||||
return
|
||||
}
|
||||
for _, st := range stations {
|
||||
if err := runV6ForStation(ctx, st, issued, baseProvider); err != nil {
|
||||
log.Printf("V6 station=%s error: %v", st, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func splitStations(s string) []string {
|
||||
parts := strings.Split(s, ",")
|
||||
out := make([]string, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if p != "" {
|
||||
out = append(out, p)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// listStationsWithBase 与 v5 共用逻辑,通过 forecast_hourly 检索该 issued 桶内有基线的站点
|
||||
func listStationsWithBase(ctx context.Context, provider string, issued time.Time) ([]string, error) {
|
||||
const q = `SELECT DISTINCT station_id FROM forecast_hourly WHERE provider=$1 AND issued_at >= $2 AND issued_at < $2 + interval '1 hour'`
|
||||
rows, err := data.DB().QueryContext(ctx, q, provider, issued)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var out []string
|
||||
for rows.Next() {
|
||||
var id string
|
||||
if err := rows.Scan(&id); err == nil {
|
||||
out = append(out, id)
|
||||
}
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func runV6ForStation(ctx context.Context, stationID string, issued time.Time, baseProvider string) error {
|
||||
// 基线:当期小时桶内 mix 最新 issued
|
||||
baseIssued, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, issued)
|
||||
if err != nil || !ok {
|
||||
return fmt.Errorf("base issued missing: %v ok=%v", err, ok)
|
||||
}
|
||||
pts, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, baseIssued)
|
||||
if err != nil || len(pts) < 3 {
|
||||
return fmt.Errorf("base points insufficient: %v len=%d", err, len(pts))
|
||||
}
|
||||
ft1, ft2, ft3 := issued.Add(time.Hour), issued.Add(2*time.Hour), issued.Add(3*time.Hour)
|
||||
base1, base2, base3 := pickRain(pts, ft1), pickRain(pts, ft2), pickRain(pts, ft3)
|
||||
|
||||
// 实况
|
||||
actual, okA, err := data.FetchActualHourlyRain(ctx, stationID, issued.Add(-time.Hour), issued)
|
||||
if err != nil || !okA {
|
||||
return fmt.Errorf("actual missing: %v ok=%v", err, okA)
|
||||
}
|
||||
|
||||
// 残差:优先 V6 历史,否则回退 mix 历史
|
||||
vPrev1, ok1 := prevV6OrMix(ctx, stationID, issued, 1, baseProvider)
|
||||
vPrev2, ok2 := prevV6OrMix(ctx, stationID, issued, 2, baseProvider)
|
||||
vPrev3, ok3 := prevV6OrMix(ctx, stationID, issued, 3, baseProvider)
|
||||
if !(ok1 && ok2 && ok3) {
|
||||
return fmt.Errorf("prev missing leads: h1=%v h2=%v h3=%v", ok1, ok2, ok3)
|
||||
}
|
||||
|
||||
e1 := actual - vPrev1
|
||||
e2 := actual - vPrev2
|
||||
e3 := actual - vPrev3
|
||||
cand1 := base1 + 1.0*e1
|
||||
cand2 := base2 + 0.5*e2
|
||||
cand3 := base3 + (1.0/3.0)*e3
|
||||
var out1, out2, out3 float64
|
||||
if cand1 < 0 {
|
||||
out1 = base1
|
||||
} else {
|
||||
out1 = cand1
|
||||
}
|
||||
if cand2 < 0 {
|
||||
out2 = base2
|
||||
} else {
|
||||
out2 = cand2
|
||||
}
|
||||
if cand3 < 0 {
|
||||
out3 = base3
|
||||
} else {
|
||||
out3 = cand3
|
||||
}
|
||||
|
||||
items := []data.UpsertRainItem{
|
||||
{ForecastTime: ft1, RainMMx1000: toX1000(out1)},
|
||||
{ForecastTime: ft2, RainMMx1000: toX1000(out2)},
|
||||
{ForecastTime: ft3, RainMMx1000: toX1000(out3)},
|
||||
}
|
||||
if err := data.UpsertForecastRain(ctx, stationID, v6OutProvider, issued, items); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Printf("V6 %s issued=%s base=[%.3f,%.3f,%.3f] actual=%.3f prev=[%.3f,%.3f,%.3f] out=[%.3f,%.3f,%.3f]",
|
||||
stationID, issued.Format("2006-01-02 15:04:05"), base1, base2, base3, actual, vPrev1, vPrev2, vPrev3, out1, out2, out3)
|
||||
return nil
|
||||
}
|
||||
|
||||
func prevV6OrMix(ctx context.Context, stationID string, issued time.Time, lead int, baseProvider string) (float64, bool) {
|
||||
// 验证时刻
|
||||
vt := issued
|
||||
// 先找 V6 历史:在 (t-lead) 桶内找 v6 的 issued,取 +lead @ vt
|
||||
if iss, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, v6OutProvider, issued.Add(-time.Duration(lead)*time.Hour)); err == nil && ok {
|
||||
if pts, err := data.ForecastRainAtIssued(ctx, stationID, v6OutProvider, iss); err == nil && len(pts) >= lead {
|
||||
if v := pickRain(pts, vt); v >= 0 {
|
||||
return v, true
|
||||
}
|
||||
switch lead {
|
||||
case 1:
|
||||
return toMM(pts[0].RainMMx1000), true
|
||||
case 2:
|
||||
if len(pts) >= 2 {
|
||||
return toMM(pts[1].RainMMx1000), true
|
||||
}
|
||||
case 3:
|
||||
if len(pts) >= 3 {
|
||||
return toMM(pts[2].RainMMx1000), true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// 退回 mix 历史
|
||||
if iss, ok, err := data.ResolveIssuedAtInBucket(ctx, stationID, baseProvider, issued.Add(-time.Duration(lead)*time.Hour)); err == nil && ok {
|
||||
if pts, err := data.ForecastRainAtIssued(ctx, stationID, baseProvider, iss); err == nil && len(pts) >= lead {
|
||||
if v := pickRain(pts, vt); v >= 0 {
|
||||
return v, true
|
||||
}
|
||||
switch lead {
|
||||
case 1:
|
||||
return toMM(pts[0].RainMMx1000), true
|
||||
case 2:
|
||||
if len(pts) >= 2 {
|
||||
return toMM(pts[1].RainMMx1000), true
|
||||
}
|
||||
case 3:
|
||||
if len(pts) >= 3 {
|
||||
return toMM(pts[2].RainMMx1000), true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
func pickRain(points []data.PredictPoint, ft time.Time) float64 {
|
||||
for _, p := range points {
|
||||
if p.ForecastTime.Equal(ft) {
|
||||
return toMM(p.RainMMx1000)
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
func toMM(vx1000 int32) float64 { return float64(vx1000) / 1000.0 }
|
||||
func toX1000(mm float64) int32 { return int32(mm*1000 + 0.5) }
|
||||
667
core/cmd/weather_data_export/main.go
Normal file
667
core/cmd/weather_data_export/main.go
Normal file
@ -0,0 +1,667 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"math"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"weatherstation/internal/config"
|
||||
"weatherstation/internal/database"
|
||||
)
|
||||
|
||||
// This command maintains a rolling CSV export at weather_data_export/main.csv
|
||||
// holding the last 48 hours of 10-minute bucket data, functionally mirroring
|
||||
// internal/tools/exporter.go without reusing it directly.
|
||||
|
||||
const (
|
||||
outBaseDir = "weather_data_export"
|
||||
mainCSVName = "main.csv"
|
||||
historyDir = "history"
|
||||
csvHeader = "latitude,longitude,station_id,station_name,date_time,elevation,pressure,temperature,dewpoint,wind_speed,wind_direction,relative_humidity,ztd,pwv"
|
||||
bucketMin = 10
|
||||
windowHours = 48
|
||||
)
|
||||
|
||||
type options struct {
|
||||
// If true and CAIYUN_TOKEN provided, override wind fields from Caiyun realtime API.
|
||||
overrideWind bool
|
||||
caiyunToken string
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Load config to initialize DB connections used by internal/database
|
||||
_ = config.GetConfig()
|
||||
pg := database.GetDB()
|
||||
my := database.GetMySQL() // may be nil if not configured; functions handle nil
|
||||
_ = my
|
||||
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
|
||||
// Options from env
|
||||
opts := options{
|
||||
overrideWind: isTruthy(os.Getenv("EXPORT_OVERRIDE_WIND")),
|
||||
caiyunToken: getenvDefault("CAIYUN_TOKEN", ""),
|
||||
}
|
||||
|
||||
// Ensure directories and header
|
||||
base := outBaseDir
|
||||
mainPath := filepath.Join(base, mainCSVName)
|
||||
histPath := filepath.Join(base, historyDir)
|
||||
mustMkdirAll(base)
|
||||
mustMkdirAll(histPath)
|
||||
ensureFileWithHeader(mainPath)
|
||||
|
||||
// On startup, backfill the last 48 hours (10-minute buckets) and enforce retention
|
||||
now := time.Now().In(loc)
|
||||
lastEnd := alignToPrevBucketEnd(now, bucketMin)
|
||||
firstStart := lastEnd.Add(-windowHours * time.Hour)
|
||||
// Iterate buckets: [firstStart, lastEnd] stepping 10 minutes
|
||||
for b := firstStart; !b.After(lastEnd); b = b.Add(bucketMin * time.Minute) {
|
||||
bucketStart := b
|
||||
bucketEnd := b.Add(bucketMin * time.Minute)
|
||||
if err := exportBucket(context.Background(), pg, my, loc, opts, bucketStart, bucketEnd, mainPath); err != nil {
|
||||
log.Printf("startup export bucket %s-%s failed: %v", tf(bucketStart), tf(bucketEnd), err)
|
||||
}
|
||||
}
|
||||
if err := enforceRetention(mainPath, histPath, loc, windowHours); err != nil {
|
||||
log.Printf("startup retention failed: %v", err)
|
||||
}
|
||||
|
||||
// Scheduler loop: every 10 minutes aligned to next bucket end + 10s
|
||||
for {
|
||||
now = time.Now().In(loc)
|
||||
next := alignToNextBucketEnd(now, bucketMin).Add(10 * time.Second)
|
||||
sleep := time.Until(next)
|
||||
if sleep > 0 {
|
||||
time.Sleep(sleep)
|
||||
}
|
||||
|
||||
// Current bucket is (prevEnd-10m, prevEnd]
|
||||
cur := time.Now().In(loc)
|
||||
bucketEnd := alignToPrevBucketEnd(cur, bucketMin)
|
||||
bucketStart := bucketEnd.Add(-bucketMin * time.Minute)
|
||||
|
||||
if err := exportBucket(context.Background(), pg, my, loc, opts, bucketStart, bucketEnd, mainPath); err != nil {
|
||||
log.Printf("export bucket %s-%s failed: %v", tf(bucketStart), tf(bucketEnd), err)
|
||||
}
|
||||
if err := enforceRetention(mainPath, histPath, loc, windowHours); err != nil {
|
||||
log.Printf("retention failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// exportBucket renders one 10-minute bucket and appends to mainPath after removing
|
||||
// any existing lines for that bucket to keep idempotence.
|
||||
func exportBucket(ctx context.Context, pg, my *sql.DB, loc *time.Location, opts options, bucketStart, bucketEnd time.Time, mainPath string) error {
|
||||
// Remove any existing lines for this bucket from mainPath first
|
||||
if err := removeBucketFromMain(mainPath, loc, bucketEnd); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// First: WH65LP (10-min aggregated table)
|
||||
rows, err := pg.QueryContext(ctx, `
|
||||
SELECT
|
||||
s.latitude,
|
||||
s.longitude,
|
||||
s.device_id,
|
||||
s.altitude,
|
||||
r.pressure_hpa_x100,
|
||||
r.temp_c_x100,
|
||||
r.wind_speed_ms_x1000,
|
||||
r.wind_dir_deg,
|
||||
r.humidity_pct,
|
||||
r.bucket_start,
|
||||
s.station_id
|
||||
FROM stations s
|
||||
JOIN rs485_weather_10min r ON r.station_id = s.station_id AND r.bucket_start = $1
|
||||
WHERE s.device_type = 'WH65LP'
|
||||
AND s.latitude IS NOT NULL AND s.longitude IS NOT NULL
|
||||
AND s.latitude <> 0 AND s.longitude <> 0
|
||||
ORDER BY s.station_id`, bucketStart)
|
||||
if err != nil {
|
||||
return fmt.Errorf("query bucket rows failed: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// Append rows
|
||||
f, err := os.OpenFile(mainPath, os.O_WRONLY|os.O_APPEND, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
w := bufio.NewWriter(f)
|
||||
defer w.Flush()
|
||||
|
||||
dateTimeStr := bucketEnd.In(loc).Format("2006-01-02 15:04:05")
|
||||
for rows.Next() {
|
||||
var (
|
||||
lat, lon, elev sql.NullFloat64
|
||||
deviceID string
|
||||
pX100, tX100 sql.NullInt64
|
||||
wsX1000 sql.NullInt64
|
||||
wdDeg sql.NullInt64
|
||||
rh sql.NullInt64
|
||||
bucketStartTS time.Time
|
||||
stationID string
|
||||
)
|
||||
if err := rows.Scan(&lat, &lon, &deviceID, &elev, &pX100, &tX100, &wsX1000, &wdDeg, &rh, &bucketStartTS, &stationID); err != nil {
|
||||
log.Printf("scan row failed: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
var pressureStr, tempStr, wsStr, wdStr, rhStr string
|
||||
if pX100.Valid {
|
||||
pressureStr = fmtFloat(float64(pX100.Int64)/100.0, 2)
|
||||
}
|
||||
if tX100.Valid {
|
||||
tempStr = fmtFloat(float64(tX100.Int64)/100.0, 2)
|
||||
}
|
||||
if wsX1000.Valid {
|
||||
wsStr = fmtFloat(float64(wsX1000.Int64)/1000.0, 3)
|
||||
}
|
||||
if wdDeg.Valid {
|
||||
wdStr = fmtFloat(float64(wdDeg.Int64), 0)
|
||||
}
|
||||
if rh.Valid {
|
||||
rhStr = fmtFloat(float64(rh.Int64), 0)
|
||||
}
|
||||
|
||||
// Optional: override wind from Caiyun realtime
|
||||
if opts.overrideWind && opts.caiyunToken != "" && lat.Valid && lon.Valid {
|
||||
if spd, dir, ok := fetchCaiyunRealtimeWind(ctx, opts.caiyunToken, lat.Float64, lon.Float64); ok {
|
||||
wsStr = fmtFloat(spd, 3)
|
||||
wdStr = fmtFloat(dir, 0)
|
||||
}
|
||||
}
|
||||
|
||||
// ZTD lookup from MySQL within ±5 minutes around bucketEnd
|
||||
ztdStr := lookupZTD(ctx, my, deviceID, bucketEnd)
|
||||
|
||||
// Build CSV line: use device_id as station_id, station_name empty, dewpoint/pwv empty
|
||||
var b strings.Builder
|
||||
b.WriteString(fmtNullFloat(lat))
|
||||
b.WriteByte(',')
|
||||
b.WriteString(fmtNullFloat(lon))
|
||||
b.WriteByte(',')
|
||||
b.WriteString(deviceID)
|
||||
b.WriteByte(',')
|
||||
b.WriteByte(',') // station_name
|
||||
b.WriteString(dateTimeStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(fmtNullFloat(elev))
|
||||
b.WriteByte(',')
|
||||
b.WriteString(pressureStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(tempStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteByte(',') // dewpoint
|
||||
b.WriteString(wsStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(wdStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(rhStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(ztdStr)
|
||||
b.WriteByte(',') // pwv
|
||||
b.WriteByte('\n')
|
||||
|
||||
if _, err := w.WriteString(b.String()); err != nil {
|
||||
log.Printf("write csv failed: %v", err)
|
||||
}
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Second: RADAR stations -> latest radar_weather by station_alias
|
||||
if err := exportRadarStations(ctx, pg, loc, bucketEnd, mainPath); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// exportRadarStations appends rows for device_type='RADAR' using latest radar_weather by alias.
|
||||
func exportRadarStations(ctx context.Context, pg *sql.DB, loc *time.Location, bucketEnd time.Time, mainPath string) error {
|
||||
// Load RADAR stations
|
||||
stRows, err := pg.QueryContext(ctx, `
|
||||
SELECT name, latitude, longitude, altitude, station_alias
|
||||
FROM stations
|
||||
WHERE device_type = 'RADAR'
|
||||
AND latitude IS NOT NULL AND longitude IS NOT NULL
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("query RADAR stations failed: %w", err)
|
||||
}
|
||||
defer stRows.Close()
|
||||
|
||||
f, err := os.OpenFile(mainPath, os.O_WRONLY|os.O_APPEND, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
w := bufio.NewWriter(f)
|
||||
defer w.Flush()
|
||||
|
||||
dateTimeStr := bucketEnd.In(loc).Format("2006-01-02 15:04:05")
|
||||
for stRows.Next() {
|
||||
var (
|
||||
name string
|
||||
lat, lon, elev sql.NullFloat64
|
||||
alias sql.NullString
|
||||
)
|
||||
if err := stRows.Scan(&name, &lat, &lon, &elev, &alias); err != nil {
|
||||
log.Printf("scan RADAR station failed: %v", err)
|
||||
continue
|
||||
}
|
||||
if !alias.Valid || strings.TrimSpace(alias.String) == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Latest radar_weather for this alias, not later than bucketEnd
|
||||
var (
|
||||
rwTemp, rwHum, rwWS, rwWD, rwP sql.NullFloat64
|
||||
rwDT time.Time
|
||||
)
|
||||
err := pg.QueryRowContext(ctx, `
|
||||
SELECT temperature, humidity, wind_speed, wind_direction, pressure, dt
|
||||
FROM radar_weather
|
||||
WHERE alias = $1 AND dt <= $2
|
||||
ORDER BY dt DESC
|
||||
LIMIT 1
|
||||
`, alias.String, bucketEnd).Scan(&rwTemp, &rwHum, &rwWS, &rwWD, &rwP, &rwDT)
|
||||
if err != nil {
|
||||
if !errors.Is(err, sql.ErrNoRows) {
|
||||
log.Printf("query radar_weather failed: alias=%s err=%v", alias.String, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Map fields
|
||||
pressureStr := ""
|
||||
if rwP.Valid {
|
||||
// Convert: DB value / 100.0
|
||||
pressureStr = fmtFloat(rwP.Float64/100.0, 2)
|
||||
}
|
||||
tempStr := ""
|
||||
if rwTemp.Valid {
|
||||
tempStr = fmtFloat(rwTemp.Float64, -1)
|
||||
}
|
||||
wsStr := ""
|
||||
if rwWS.Valid {
|
||||
wsStr = fmtFloat(rwWS.Float64, 3)
|
||||
}
|
||||
wdStr := ""
|
||||
if rwWD.Valid {
|
||||
wdStr = fmtFloat(rwWD.Float64, 0)
|
||||
}
|
||||
rhStr := ""
|
||||
if rwHum.Valid {
|
||||
rhStr = fmtFloat(rwHum.Float64, -1)
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString(fmtNullFloat(lat))
|
||||
b.WriteByte(',')
|
||||
b.WriteString(fmtNullFloat(lon))
|
||||
b.WriteByte(',')
|
||||
// station_id = stations.name
|
||||
b.WriteString(name)
|
||||
b.WriteByte(',')
|
||||
// station_name = stations.name
|
||||
b.WriteString(name)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(dateTimeStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(fmtNullFloat(elev))
|
||||
b.WriteByte(',')
|
||||
b.WriteString(pressureStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(tempStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteByte(',') // dewpoint blank
|
||||
b.WriteString(wsStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(wdStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteString(rhStr)
|
||||
b.WriteByte(',')
|
||||
b.WriteByte(',') // ztd blank
|
||||
b.WriteByte(',') // pwv blank
|
||||
b.WriteByte('\n')
|
||||
|
||||
if _, err := w.WriteString(b.String()); err != nil {
|
||||
log.Printf("write RADAR csv failed: %v", err)
|
||||
}
|
||||
}
|
||||
return stRows.Err()
|
||||
}
|
||||
|
||||
func lookupZTD(ctx context.Context, my *sql.DB, deviceID string, bucketEnd time.Time) string {
|
||||
if my == nil {
|
||||
return ""
|
||||
}
|
||||
var ztd sql.NullFloat64
|
||||
var ts time.Time
|
||||
err := my.QueryRowContext(ctx, `
|
||||
SELECT ztd, timestamp FROM rtk_data
|
||||
WHERE station_id = ?
|
||||
AND ABS(TIMESTAMPDIFF(MINUTE, timestamp, ?)) <= 5
|
||||
LIMIT 1
|
||||
`, deviceID, bucketEnd).Scan(&ztd, &ts)
|
||||
if err != nil {
|
||||
if !errors.Is(err, sql.ErrNoRows) {
|
||||
log.Printf("lookup ZTD failed: station=%s err=%v", deviceID, err)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
if !ztd.Valid {
|
||||
return ""
|
||||
}
|
||||
// Exported as ztd*100 (to match existing exporter behavior)
|
||||
return fmtFloat(ztd.Float64*100.0, -1)
|
||||
}
|
||||
|
||||
// fetchCaiyunRealtimeWind returns speed (m/s) and direction (deg)
|
||||
func fetchCaiyunRealtimeWind(ctx context.Context, token string, lat, lon float64) (float64, float64, bool) {
|
||||
type realtimeResp struct {
|
||||
Status string `json:"status"`
|
||||
Unit string `json:"unit"`
|
||||
Result struct {
|
||||
Realtime struct {
|
||||
Status string `json:"status"`
|
||||
Wind struct {
|
||||
Speed float64 `json:"speed"`
|
||||
Direction float64 `json:"direction"`
|
||||
} `json:"wind"`
|
||||
} `json:"realtime"`
|
||||
} `json:"result"`
|
||||
}
|
||||
url := fmt.Sprintf("https://api.caiyunapp.com/v2.6/%s/%f,%f/realtime?unit=SI", token, lon, lat)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return 0, 0, false
|
||||
}
|
||||
client := &http.Client{Timeout: 10 * time.Second}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return 0, 0, false
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode/100 != 2 {
|
||||
return 0, 0, false
|
||||
}
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return 0, 0, false
|
||||
}
|
||||
var data realtimeResp
|
||||
if err := json.Unmarshal(body, &data); err != nil {
|
||||
return 0, 0, false
|
||||
}
|
||||
if strings.ToLower(data.Status) != "ok" || strings.ToLower(data.Result.Realtime.Status) != "ok" {
|
||||
return 0, 0, false
|
||||
}
|
||||
spd := data.Result.Realtime.Wind.Speed
|
||||
dirRad := data.Result.Realtime.Wind.Direction
|
||||
dirDeg := dirRad * 180.0 / math.Pi
|
||||
for dirDeg < 0 {
|
||||
dirDeg += 360
|
||||
}
|
||||
for dirDeg >= 360 {
|
||||
dirDeg -= 360
|
||||
}
|
||||
return spd, dirDeg, true
|
||||
}
|
||||
|
||||
// enforceRetention keeps only rows with date_time >= now-keepHours in mainPath,
|
||||
// moving older rows to history files grouped by UTC date (CSV header ensured).
|
||||
func enforceRetention(mainPath, histDir string, loc *time.Location, keepHours int) error {
|
||||
cutoff := time.Now().In(loc).Add(-time.Duration(keepHours) * time.Hour)
|
||||
|
||||
f, err := os.Open(mainPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
r := csv.NewReader(f)
|
||||
r.FieldsPerRecord = -1
|
||||
|
||||
// Read header
|
||||
records, err := r.ReadAll()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(records) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Prepare buffers
|
||||
header := records[0]
|
||||
var keep [][]string
|
||||
keep = append(keep, header)
|
||||
|
||||
// History writers cache per day
|
||||
writers := map[string]*csv.Writer{}
|
||||
files := map[string]*os.File{}
|
||||
ensureWriter := func(day string) (*csv.Writer, error) {
|
||||
if w, ok := writers[day]; ok {
|
||||
return w, nil
|
||||
}
|
||||
// history file path
|
||||
histPath := filepath.Join(histDir, fmt.Sprintf("weather_data_%s.csv", day))
|
||||
needHeader := ensureFileWithHeader(histPath)
|
||||
hf, err := os.OpenFile(histPath, os.O_WRONLY|os.O_APPEND, 0o644)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hw := csv.NewWriter(hf)
|
||||
if needHeader {
|
||||
if err := hw.Write(header); err != nil {
|
||||
_ = hf.Close()
|
||||
return nil, err
|
||||
}
|
||||
hw.Flush()
|
||||
}
|
||||
writers[day] = hw
|
||||
files[day] = hf
|
||||
return hw, nil
|
||||
}
|
||||
|
||||
for i := 1; i < len(records); i++ {
|
||||
rec := records[i]
|
||||
if len(rec) < 5 {
|
||||
continue
|
||||
}
|
||||
dtStr := strings.TrimSpace(rec[4])
|
||||
dt, err := time.ParseInLocation("2006-01-02 15:04:05", dtStr, loc)
|
||||
if err != nil {
|
||||
// keep malformed lines to avoid data loss
|
||||
keep = append(keep, rec)
|
||||
continue
|
||||
}
|
||||
if !dt.Before(cutoff) {
|
||||
keep = append(keep, rec)
|
||||
continue
|
||||
}
|
||||
// Move to history file by UTC day of dt
|
||||
day := dt.UTC().Format("2006-01-02")
|
||||
w, err := ensureWriter(day)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.Write(rec); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// Flush & close writers
|
||||
for _, w := range writers {
|
||||
w.Flush()
|
||||
}
|
||||
for _, f := range files {
|
||||
_ = f.Close()
|
||||
}
|
||||
|
||||
// Rewrite main.csv with kept rows
|
||||
tmp := mainPath + ".part"
|
||||
outf, err := os.OpenFile(tmp, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cw := csv.NewWriter(outf)
|
||||
if err := cw.WriteAll(keep); err != nil {
|
||||
_ = outf.Close()
|
||||
return err
|
||||
}
|
||||
cw.Flush()
|
||||
if err := outf.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.Rename(tmp, mainPath)
|
||||
}
|
||||
|
||||
// removeBucketFromMain removes all rows with date_time == bucketEnd from main CSV.
|
||||
func removeBucketFromMain(mainPath string, loc *time.Location, bucketEnd time.Time) error {
|
||||
dtStr := bucketEnd.In(loc).Format("2006-01-02 15:04:05")
|
||||
// Fast path: if file small, rewrite; otherwise stream
|
||||
in, err := os.Open(mainPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer in.Close()
|
||||
|
||||
r := csv.NewReader(in)
|
||||
r.FieldsPerRecord = -1
|
||||
recs, err := r.ReadAll()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(recs) == 0 {
|
||||
return nil
|
||||
}
|
||||
out := make([][]string, 0, len(recs))
|
||||
out = append(out, recs[0]) // header
|
||||
for i := 1; i < len(recs); i++ {
|
||||
rec := recs[i]
|
||||
if len(rec) < 5 {
|
||||
out = append(out, rec)
|
||||
continue
|
||||
}
|
||||
if strings.TrimSpace(rec[4]) == dtStr {
|
||||
continue // drop
|
||||
}
|
||||
out = append(out, rec)
|
||||
}
|
||||
tmp := mainPath + ".part"
|
||||
outf, err := os.OpenFile(tmp, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cw := csv.NewWriter(outf)
|
||||
if err := cw.WriteAll(out); err != nil {
|
||||
_ = outf.Close()
|
||||
return err
|
||||
}
|
||||
cw.Flush()
|
||||
if err := outf.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.Rename(tmp, mainPath)
|
||||
}
|
||||
|
||||
func ensureFileWithHeader(path string) bool {
|
||||
if _, err := os.Stat(path); err == nil {
|
||||
return false
|
||||
}
|
||||
_ = os.MkdirAll(filepath.Dir(path), 0o755)
|
||||
f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, 0o644)
|
||||
if err != nil {
|
||||
log.Printf("create csv failed: %v", err)
|
||||
return false
|
||||
}
|
||||
// Write header
|
||||
if _, err := f.WriteString(csvHeader + "\n"); err != nil {
|
||||
_ = f.Close()
|
||||
return false
|
||||
}
|
||||
_ = f.Close()
|
||||
return true
|
||||
}
|
||||
|
||||
func alignToNextBucketEnd(t time.Time, minutes int) time.Time {
|
||||
m := t.Minute()
|
||||
next := (m/minutes + 1) * minutes
|
||||
dt := time.Duration(next-m) * time.Minute
|
||||
return t.Truncate(time.Minute).Add(dt).Truncate(time.Minute)
|
||||
}
|
||||
|
||||
func alignToPrevBucketEnd(t time.Time, minutes int) time.Time {
|
||||
m := t.Minute()
|
||||
prev := (m / minutes) * minutes
|
||||
return t.Truncate(time.Minute).Add(time.Duration(prev-m) * time.Minute)
|
||||
}
|
||||
|
||||
func fmtNullFloat(v sql.NullFloat64) string {
|
||||
if v.Valid {
|
||||
return fmtFloat(v.Float64, -1)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// fmtFloat: prec < 0 -> trim trailing zeros
|
||||
func fmtFloat(fv float64, prec int) string {
|
||||
if prec >= 0 {
|
||||
return fmt.Sprintf("%.*f", prec, fv)
|
||||
}
|
||||
s := fmt.Sprintf("%.10f", fv)
|
||||
s = strings.TrimRight(s, "0")
|
||||
s = strings.TrimRight(s, ".")
|
||||
if s == "-0" {
|
||||
s = "0"
|
||||
}
|
||||
if s == "" || s == "-" || s == "+" || s == "." {
|
||||
return "0"
|
||||
}
|
||||
if math.Abs(fv) < 1e-9 {
|
||||
return "0"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func isTruthy(s string) bool {
|
||||
switch strings.ToLower(strings.TrimSpace(s)) {
|
||||
case "1", "true", "yes", "on":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func getenvDefault(key, def string) string {
|
||||
if v := os.Getenv(key); v != "" {
|
||||
return v
|
||||
}
|
||||
return def
|
||||
}
|
||||
|
||||
func mustMkdirAll(dir string) {
|
||||
if err := os.MkdirAll(dir, 0o755); err != nil {
|
||||
log.Fatalf("mkdir %s: %v", dir, err)
|
||||
}
|
||||
}
|
||||
|
||||
func tf(t time.Time) string { return t.Format("2006-01-02 15:04:05") }
|
||||
4
core/frontend/.gitignore
vendored
Normal file
4
core/frontend/.gitignore
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
node_modules/
|
||||
dist/
|
||||
.angular/
|
||||
|
||||
133
core/frontend/angular.json
Normal file
133
core/frontend/angular.json
Normal file
@ -0,0 +1,133 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/angular-cli",
|
||||
"version": 1,
|
||||
"newProjectRoot": "projects",
|
||||
"projects": {
|
||||
"weatherstation-ui": {
|
||||
"projectType": "application",
|
||||
"root": "",
|
||||
"sourceRoot": "src",
|
||||
"architect": {
|
||||
"build": {
|
||||
"builder": "@angular-devkit/build-angular:browser",
|
||||
"options": {
|
||||
"outputPath": "dist/ui",
|
||||
"index": "src/index.html",
|
||||
"main": "src/main.ts",
|
||||
"polyfills": [
|
||||
"src/polyfills.ts"
|
||||
],
|
||||
"tsConfig": "tsconfig.app.json",
|
||||
"assets": [
|
||||
{
|
||||
"glob": "**/*",
|
||||
"input": "../../static",
|
||||
"output": "static"
|
||||
}
|
||||
],
|
||||
"styles": [
|
||||
"src/styles.css"
|
||||
],
|
||||
"scripts": [],
|
||||
"baseHref": "/ui/"
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"optimization": true,
|
||||
"outputHashing": "all",
|
||||
"sourceMap": false,
|
||||
"namedChunks": false,
|
||||
"extractLicenses": true
|
||||
},
|
||||
"development": {
|
||||
"buildOptimizer": false,
|
||||
"optimization": false,
|
||||
"vendorChunk": true,
|
||||
"extractLicenses": false,
|
||||
"sourceMap": true
|
||||
}
|
||||
},
|
||||
"defaultConfiguration": "production"
|
||||
},
|
||||
"serve": {
|
||||
"builder": "@angular-devkit/build-angular:dev-server",
|
||||
"options": {
|
||||
"browserTarget": "weatherstation-ui:build"
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"browserTarget": "weatherstation-ui:build:production"
|
||||
},
|
||||
"development": {
|
||||
"browserTarget": "weatherstation-ui:build:development"
|
||||
}
|
||||
},
|
||||
"defaultConfiguration": "development"
|
||||
}
|
||||
}
|
||||
},
|
||||
"weatherstation-bigscreen": {
|
||||
"projectType": "application",
|
||||
"root": "bigscreen",
|
||||
"sourceRoot": "bigscreen/src",
|
||||
"architect": {
|
||||
"build": {
|
||||
"builder": "@angular-devkit/build-angular:browser",
|
||||
"options": {
|
||||
"outputPath": "dist/bigscreen",
|
||||
"index": "bigscreen/src/index.html",
|
||||
"main": "bigscreen/src/main.ts",
|
||||
"polyfills": [
|
||||
"bigscreen/src/polyfills.ts"
|
||||
],
|
||||
"tsConfig": "bigscreen/tsconfig.app.json",
|
||||
"assets": [
|
||||
{
|
||||
"glob": "**/*",
|
||||
"input": "../../static",
|
||||
"output": "static"
|
||||
}
|
||||
],
|
||||
"styles": [
|
||||
"bigscreen/src/styles.css"
|
||||
],
|
||||
"scripts": [],
|
||||
"baseHref": "/bigscreen/"
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"optimization": true,
|
||||
"outputHashing": "all",
|
||||
"sourceMap": false,
|
||||
"namedChunks": false,
|
||||
"extractLicenses": true
|
||||
},
|
||||
"development": {
|
||||
"buildOptimizer": false,
|
||||
"optimization": false,
|
||||
"vendorChunk": true,
|
||||
"extractLicenses": false,
|
||||
"sourceMap": true
|
||||
}
|
||||
},
|
||||
"defaultConfiguration": "production"
|
||||
},
|
||||
"serve": {
|
||||
"builder": "@angular-devkit/build-angular:dev-server",
|
||||
"options": {
|
||||
"browserTarget": "weatherstation-bigscreen:build"
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"browserTarget": "weatherstation-bigscreen:build:production"
|
||||
},
|
||||
"development": {
|
||||
"browserTarget": "weatherstation-bigscreen:build:development"
|
||||
}
|
||||
},
|
||||
"defaultConfiguration": "development"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
90
core/frontend/bigscreen/src/app/app.component.css
Normal file
90
core/frontend/bigscreen/src/app/app.component.css
Normal file
@ -0,0 +1,90 @@
|
||||
:host {
|
||||
display: block;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel .chart-container {
|
||||
background: transparent !important;
|
||||
border: none !important;
|
||||
padding: 0 !important;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel .station-info-title {
|
||||
justify-content: flex-end;
|
||||
display: flex;
|
||||
margin-bottom: 0.25rem !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel .accuracy-panel {
|
||||
display: inline-flex !important;
|
||||
gap: 0.75rem;
|
||||
font-size: 0.8125rem !important;
|
||||
color: #a9bfe6 !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel .accuracy-panel .label {
|
||||
color: #90a7d4 !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel .accuracy-panel .value {
|
||||
color: #ffffff !important;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel .chart-wrapper {
|
||||
height: 100% !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep chart-panel canvas {
|
||||
width: 100% !important;
|
||||
height: 100% !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel .table-container {
|
||||
margin-top: 0 !important;
|
||||
border-radius: 0 !important;
|
||||
border: none !important;
|
||||
background: transparent !important;
|
||||
color: var(--color-fg) !important;
|
||||
display: flex !important;
|
||||
flex-direction: column !important;
|
||||
height: 100% !important;
|
||||
overflow: hidden !important;
|
||||
padding: 0 !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel .table-container > div:first-child {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel .table-container > div:last-child {
|
||||
flex: 1 !important;
|
||||
height: 100% !important;
|
||||
min-height: 0 !important;
|
||||
overflow-y: auto !important;
|
||||
overflow-x: auto !important;
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel table {
|
||||
color: var(--color-fg) !important;
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel thead th {
|
||||
border: none !important;
|
||||
background: var(--table-head-bg) !important;
|
||||
color: var(--table-head-fg) !important;
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.08) !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel tbody td {
|
||||
border: none !important;
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.08) !important;
|
||||
color: #dfe9ff !important;
|
||||
}
|
||||
|
||||
:host ::ng-deep table-panel tbody tr:nth-child(even) {
|
||||
background: var(--table-row-alt) !important;
|
||||
}
|
||||
140
core/frontend/bigscreen/src/app/app.component.html
Normal file
140
core/frontend/bigscreen/src/app/app.component.html
Normal file
@ -0,0 +1,140 @@
|
||||
<div
|
||||
class="screen"
|
||||
[class.alert-on]="alertLevel === 'orange'"
|
||||
[class.alert-red]="alertLevel === 'red'"
|
||||
>
|
||||
<div class="info">
|
||||
<div class="info-panel">
|
||||
<div class="info-title">防控区信息</div>
|
||||
<div class="info-content">
|
||||
<dl class="info-dl">
|
||||
<dt>防控区名称</dt><dd>大礼村委会次重点防控区</dd>
|
||||
<dt>防控区编号</dt><dd>ZJ-DL-FK05</dd>
|
||||
<dt>防控区概况</dt><dd class="break">面积1.14km²,包含斜坡单元2个,地质灾害隐患点6处,重点巡查区2个,风险点2处,威胁272人。</dd>
|
||||
</dl>
|
||||
<dl class="info-people">
|
||||
<dt>网格责任人</dt><dd class="contact"><span class="name">刘江平</span><span class="phone">15586362528</span></dd>
|
||||
<dt>网格管理员</dt><dd class="contact"><span class="name">余泽蛟</span><span class="phone">19371312586</span></dd>
|
||||
<dt>网格专管员</dt><dd class="contact"><span class="name">邴香举</span><span class="phone">13997659222</span></dd>
|
||||
<dt>网格协管员</dt><dd class="contact"><span class="name">韩超</span><span class="phone">15172881786</span></dd>
|
||||
<dt>防控区巡查员</dt><dd class="contact"><span class="name">吴大华</span><span class="phone">18572355263</span></dd>
|
||||
</dl>
|
||||
</div>
|
||||
<div class="info-banner">地灾防控 人人有责</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="left">
|
||||
<div class="chart-container" id="chartContainer">
|
||||
<div id="stationInfoTitle" class="station-info-title">{{ selectedTitle }}</div>
|
||||
<div class="chart-wrapper">
|
||||
<chart-panel [history]="history" [forecast]="forecast" [legendMode]="legendMode" [showAccuracy]="false"></chart-panel>
|
||||
</div>
|
||||
</div>
|
||||
<div class="map-container" id="mapContainer">
|
||||
<div id="map"></div>
|
||||
<div
|
||||
id="tileValueTooltip"
|
||||
class="map-tooltip"
|
||||
style="position:absolute;pointer-events:none;z-index:1003;display:none;background:rgba(0,0,0,0.65);color:#fff;font-size:12px;padding:4px 6px;border-radius:4px;"
|
||||
></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="right">
|
||||
<div class="controls">
|
||||
<label for="stationInput">站点:</label>
|
||||
<input
|
||||
id="stationInput"
|
||||
type="text"
|
||||
placeholder=""
|
||||
[(ngModel)]="hexId"
|
||||
style="width:7.5rem;"
|
||||
/>
|
||||
|
||||
<label for="interval">粒度:</label>
|
||||
<select id="interval" [(ngModel)]="interval">
|
||||
<option value="raw">原始(16s)</option>
|
||||
<option value="10min">10分钟</option>
|
||||
<option value="30min">30分钟</option>
|
||||
<option value="1hour">1小时</option>
|
||||
</select>
|
||||
|
||||
<label for="forecastProvider">预报源:</label>
|
||||
<select id="forecastProvider" [(ngModel)]="provider">
|
||||
<option value="">不显示预报</option>
|
||||
<option value="imdroid_V6">V6</option>
|
||||
<option value="imdroid_V5">V5</option>
|
||||
<option value="imdroid_mix">V4</option>
|
||||
<option value="open-meteo">V3</option>
|
||||
<option value="caiyun">V2</option>
|
||||
<option value="imdroid">V1</option>
|
||||
</select>
|
||||
|
||||
<label for="startDate">开始:</label>
|
||||
<input type="datetime-local" id="startDate" [(ngModel)]="start" />
|
||||
|
||||
<label for="endDate">结束:</label>
|
||||
<input type="datetime-local" id="endDate" [(ngModel)]="end" />
|
||||
|
||||
<button type="button" (click)="query()">查询</button>
|
||||
|
||||
<label for="tileProduct">叠加:</label>
|
||||
<select id="tileProduct" [(ngModel)]="tileProduct" (change)="onProductChange()">
|
||||
<option value="none">不显示</option>
|
||||
<option value="rain">1h 实际降雨</option>
|
||||
<option value="radar">水汽含量</option>
|
||||
</select>
|
||||
|
||||
<button type="button" (click)="prevTile()">上一时次</button>
|
||||
<span id="tileCountInfo">{{ tileCountInfo }}</span>
|
||||
<button type="button" (click)="nextTile()">下一时次</button>
|
||||
|
||||
<label for="tileTimeSelect">时间:</label>
|
||||
<select
|
||||
id="tileTimeSelect"
|
||||
style="min-width:12rem"
|
||||
[(ngModel)]="tileDt"
|
||||
(ngModelChange)="renderTilesAt($event)"
|
||||
>
|
||||
<option [ngValue]="''">请选择时间</option>
|
||||
<option *ngFor="let t of tileTimes" [ngValue]="t">{{ t }}</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div
|
||||
id="summaryPanel"
|
||||
class="summary-panel"
|
||||
[class.alert-on]="alertLevel === 'orange'"
|
||||
[class.alert-red]="alertLevel === 'red'"
|
||||
>
|
||||
<div id="futureRainSummary" class="summary-title">{{ futureRainText }}</div>
|
||||
<div id="pastAccuracySummary" class="summary-sub">{{ pastAccuracyText }}</div>
|
||||
<div id="heavyPerfSummary" class="summary-sub" *ngIf="heavyPerfText">{{ heavyPerfText }}</div>
|
||||
</div>
|
||||
|
||||
<div class="table-container" id="tableContainer">
|
||||
<div class="table-scroll">
|
||||
<table-panel
|
||||
[history]="history"
|
||||
[forecast]="forecast"
|
||||
[showPastForecast]="false"
|
||||
[endDate]="end"></table-panel>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="loading-overlay" *ngIf="isLoading">
|
||||
<div class="spinner"></div>
|
||||
</div>
|
||||
|
||||
<div *ngIf="isKmlDialogOpen" class="fixed inset-0 flex items-center justify-center bg-black bg-opacity-50" style="z-index:2000;">
|
||||
<div class="max-w-2xl bg-white text-gray-800 rounded-lg shadow-2xl border border-gray-200 overflow-hidden" style="max-height:80vh;">
|
||||
<div class="px-4 py-2 flex items-center justify-between border-b border-gray-200">
|
||||
<div class="font-semibold">{{kmlInfoTitle}}</div>
|
||||
<button class="text-sm text-gray-500 hover:text-gray-700" (click)="closeKmlPopup()">关闭</button>
|
||||
</div>
|
||||
<div class="px-4 py-3 text-sm leading-6 overflow-auto" style="max-height:calc(80vh - 48px);">
|
||||
<div [innerHTML]="kmlInfoHtml"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
869
core/frontend/bigscreen/src/app/app.component.ts
Normal file
869
core/frontend/bigscreen/src/app/app.component.ts
Normal file
@ -0,0 +1,869 @@
|
||||
import { AfterViewInit, Component, OnDestroy, OnInit } from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { FormsModule } from '@angular/forms';
|
||||
|
||||
import { ApiService, ForecastPoint, WeatherPoint } from '../../../src/app/api.service';
|
||||
import { ChartPanelComponent } from '../../../src/app/chart-panel.component';
|
||||
import { TablePanelComponent } from '../../../src/app/table-panel.component';
|
||||
|
||||
type Station = {
|
||||
station_id: string;
|
||||
decimal_id?: string;
|
||||
latitude?: number;
|
||||
longitude?: number;
|
||||
location?: string;
|
||||
device_type?: string;
|
||||
last_update?: string;
|
||||
name?: string;
|
||||
station_alias?: string;
|
||||
};
|
||||
|
||||
type AlertLevel = 'none' | 'orange' | 'red';
|
||||
|
||||
@Component({
|
||||
selector: 'app-root',
|
||||
standalone: true,
|
||||
imports: [CommonModule, FormsModule, ChartPanelComponent, TablePanelComponent],
|
||||
templateUrl: './app.component.html',
|
||||
styleUrls: ['./app.component.css'],
|
||||
})
|
||||
export class BigscreenAppComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
constructor(private api: ApiService) {}
|
||||
|
||||
private readonly DEFAULT_STATION_HEX = '002A39';
|
||||
private readonly DEFAULT_TITLE = '第七台气象站(宾阳县细塘村东南约112米)';
|
||||
|
||||
onlineDevices = 0;
|
||||
serverTime = '';
|
||||
stations: Station[] = [];
|
||||
|
||||
hexId = this.DEFAULT_STATION_HEX;
|
||||
interval: 'raw' | '10min' | '30min' | '1hour' = '1hour';
|
||||
provider = 'imdroid_mix';
|
||||
start = '';
|
||||
end = '';
|
||||
legendMode = 'combo_standard';
|
||||
tileProduct: 'none' | 'radar' | 'rain' = 'radar';
|
||||
|
||||
history: WeatherPoint[] = [];
|
||||
forecast: ForecastPoint[] = [];
|
||||
|
||||
isLoading = false;
|
||||
|
||||
// Summary + presentation state
|
||||
selectedLocation = '';
|
||||
selectedTitle = this.DEFAULT_TITLE;
|
||||
futureRainText = '未来1~3小时降雨 -- 毫米';
|
||||
pastAccuracyText = '过去预报准确率 +1h: -- +2h: -- +3h: --';
|
||||
heavyPerfText = '';
|
||||
alertLevel: AlertLevel = 'none';
|
||||
|
||||
tileTimes: string[] = [];
|
||||
tileIndex = -1;
|
||||
tileDt = '';
|
||||
tileZ = 7;
|
||||
tileY = 40;
|
||||
tileX = 102;
|
||||
|
||||
private map: any;
|
||||
private layers: any = {};
|
||||
private stationSource: any;
|
||||
private clusterSource: any;
|
||||
private stationLayer: any;
|
||||
private clusterLayer: any;
|
||||
private kmlLayer: any;
|
||||
private readonly CLUSTER_THRESHOLD = 10;
|
||||
private tileOverlayGroup: any;
|
||||
private tileLastList: any[] = [];
|
||||
private mapReady = false;
|
||||
private stationsReady = false;
|
||||
private initialQueryScheduled = false;
|
||||
// KML popup state
|
||||
isKmlDialogOpen = false;
|
||||
kmlInfoTitle = '';
|
||||
kmlInfoHtml = '';
|
||||
|
||||
async ngOnInit() {
|
||||
this.applyChartDefaults();
|
||||
await Promise.all([this.loadStatus(), this.loadStations()]);
|
||||
this.initializeTimeRange();
|
||||
this.ensureDefaultTitle();
|
||||
this.tryInitialQuery();
|
||||
}
|
||||
|
||||
ngAfterViewInit() {
|
||||
this.initMap();
|
||||
this.reloadTileTimesAndShow();
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
this.setBodyAlertClass('none');
|
||||
}
|
||||
|
||||
get tileCountInfo(): string {
|
||||
if (!this.tileTimes.length || this.tileIndex < 0) {
|
||||
return '共0条,第0条';
|
||||
}
|
||||
return `共${this.tileTimes.length}条,第${this.tileIndex + 1}条`;
|
||||
}
|
||||
|
||||
async query(auto = false) {
|
||||
const hex = this.hexId.trim().toUpperCase();
|
||||
if (!hex) return;
|
||||
const sid = this.makeStationIdFromHex(hex);
|
||||
if (!sid) return;
|
||||
this.hexId = hex;
|
||||
|
||||
const toFmt = (s: string) => s.replace('T', ' ') + ':00';
|
||||
const from = toFmt(this.start);
|
||||
const to = toFmt(this.end);
|
||||
|
||||
this.isLoading = true;
|
||||
try {
|
||||
const [history, forecast] = await Promise.all([
|
||||
this.api.getHistory(hex, from, to, this.interval),
|
||||
this.provider ? this.api.getForecast(sid, from, to, this.provider, 3) : Promise.resolve([]),
|
||||
]);
|
||||
this.history = history;
|
||||
this.forecast = forecast;
|
||||
} finally {
|
||||
this.isLoading = false;
|
||||
}
|
||||
|
||||
const station = this.stations.find((s) => s.station_id === sid);
|
||||
this.selectedLocation = station?.location || '';
|
||||
const titleName = station?.name || station?.station_alias || station?.station_id || '';
|
||||
this.selectedTitle = titleName ? `${titleName}${this.selectedLocation ? `(${this.selectedLocation})` : ''}` : this.selectedLocation;
|
||||
if (!this.selectedTitle) {
|
||||
this.ensureDefaultTitle();
|
||||
}
|
||||
|
||||
this.focusOnStation(station);
|
||||
this.updateSummaryPanel();
|
||||
this.reloadTileTimesAndShow();
|
||||
if (!auto) {
|
||||
this.scrollToTop();
|
||||
}
|
||||
}
|
||||
|
||||
onProductChange() {
|
||||
this.reloadTileTimesAndShow();
|
||||
}
|
||||
|
||||
async reloadTileTimesAndShow() {
|
||||
if (this.tileProduct === 'none') {
|
||||
this.clearTileOverlays();
|
||||
this.tileTimes = [];
|
||||
this.tileDt = '';
|
||||
this.tileIndex = -1;
|
||||
return;
|
||||
}
|
||||
await this.loadTileTimes(this.tileProduct);
|
||||
}
|
||||
|
||||
prevTile() {
|
||||
if (!this.tileTimes.length) return;
|
||||
if (this.tileIndex < this.tileTimes.length - 1) {
|
||||
this.tileIndex += 1;
|
||||
this.tileDt = this.tileTimes[this.tileIndex];
|
||||
this.renderTilesAt(this.tileDt);
|
||||
}
|
||||
}
|
||||
|
||||
nextTile() {
|
||||
if (!this.tileTimes.length) return;
|
||||
if (this.tileIndex > 0) {
|
||||
this.tileIndex -= 1;
|
||||
this.tileDt = this.tileTimes[this.tileIndex];
|
||||
this.renderTilesAt(this.tileDt);
|
||||
}
|
||||
}
|
||||
|
||||
private async loadStatus() {
|
||||
const s = await this.api.getStatus();
|
||||
if (s) {
|
||||
this.onlineDevices = s.online_devices || 0;
|
||||
this.serverTime = s.server_time || '';
|
||||
}
|
||||
}
|
||||
|
||||
private async loadStations() {
|
||||
this.stations = await this.api.getStations();
|
||||
this.updateStationsOnMap();
|
||||
try {
|
||||
const expected = this.makeStationIdFromHex(this.hexId || this.DEFAULT_STATION_HEX);
|
||||
const exists = !!this.stations.find((s) => s.station_id === expected);
|
||||
if (!exists) {
|
||||
const fallback = this.stations.find((s) => s.station_id && s.station_id.length >= 6);
|
||||
if (fallback) {
|
||||
const hex = fallback.station_id.slice(-6).toUpperCase();
|
||||
if (hex) this.hexId = hex;
|
||||
if (fallback.location) this.selectedTitle = fallback.location;
|
||||
}
|
||||
}
|
||||
} catch {}
|
||||
this.stationsReady = true;
|
||||
this.tryInitialQuery();
|
||||
}
|
||||
|
||||
private ensureDefaultTitle() {
|
||||
if (!this.selectedTitle) {
|
||||
this.selectedTitle = this.DEFAULT_TITLE;
|
||||
}
|
||||
if (!this.hexId) {
|
||||
this.hexId = this.DEFAULT_STATION_HEX;
|
||||
}
|
||||
this.hexId = (this.hexId || '').toUpperCase();
|
||||
}
|
||||
|
||||
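// Schedule the first automatic query once the map, the station list, the station
// hex ID and the time range are all ready; the guard flag is reset if that query fails.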
private tryInitialQuery() {
|
||||
if (this.initialQueryScheduled) return;
|
||||
if (!this.mapReady || !this.stationsReady) return;
|
||||
if (!this.hexId || !this.start || !this.end) return;
|
||||
this.initialQueryScheduled = true;
|
||||
setTimeout(() => {
|
||||
this.query(true).catch(() => {
|
||||
this.initialQueryScheduled = false;
|
||||
});
|
||||
}, 300);
|
||||
}
|
||||
|
||||
private initializeTimeRange() {
|
||||
const now = new Date();
|
||||
const pad = (n: number) => String(n).padStart(2, '0');
|
||||
const toLocal = (d: Date) =>
|
||||
`${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}T${pad(d.getHours())}:${pad(d.getMinutes())}`;
|
||||
const end = new Date(now.getTime() + 4 * 3600 * 1000);
|
||||
const start = new Date(now.getTime() - 24 * 3600 * 1000);
|
||||
this.end = toLocal(end);
|
||||
this.start = toLocal(start);
|
||||
}
|
||||
|
||||
private applyChartDefaults() {
|
||||
try {
|
||||
const Chart = (window as any).Chart;
|
||||
if (Chart && Chart.defaults) {
|
||||
Chart.defaults.color = '#d5e3ff';
|
||||
Chart.defaults.borderColor = 'rgba(213,227,255,0.25)';
|
||||
if (Chart.defaults.scale && Chart.defaults.scale.grid) {
|
||||
Chart.defaults.scale.grid.color = 'rgba(213,227,255,0.12)';
|
||||
Chart.defaults.scale.ticks = Chart.defaults.scale.ticks || {};
|
||||
Chart.defaults.scale.ticks.color = '#cbd9ff';
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
/* noop */
|
||||
}
|
||||
}
|
||||
|
||||
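// Normalize a user-entered hex ID into a station ID: strip non-hex characters,
// left-pad to 6 digits, keep the last 6, and prefix with "RS485-".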
private makeStationIdFromHex(hexRaw: string): string | null {
|
||||
if (!hexRaw) return null;
|
||||
const hex = String(hexRaw).toUpperCase().replace(/[^0-9A-F]/g, '').padStart(6, '0').slice(-6);
|
||||
if (!hex) return null;
|
||||
return `RS485-${hex}`;
|
||||
}
|
||||
|
||||
private initMap() {
|
||||
const ol: any = (window as any).ol;
|
||||
if (!ol) return;
|
||||
const tk = this.getTiandituKey();
|
||||
const mkLayer = (url: string) =>
|
||||
new ol.layer.Tile({
|
||||
source: new ol.source.XYZ({ url }),
|
||||
});
|
||||
this.layers = {
|
||||
satellite: new ol.layer.Group({
|
||||
layers: [
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/img_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=img&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/cia_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cia&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
],
|
||||
}),
|
||||
vector: new ol.layer.Group({
|
||||
layers: [
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/vec_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=vec&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/cva_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cva&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
],
|
||||
visible: false,
|
||||
}),
|
||||
terrain: new ol.layer.Group({
|
||||
layers: [
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/ter_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=ter&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/cta_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cta&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
],
|
||||
visible: false,
|
||||
}),
|
||||
hybrid: new ol.layer.Group({
|
||||
layers: [
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/img_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=img&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
mkLayer(
|
||||
`https://t{0-7}.tianditu.gov.cn/cia_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cia&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`,
|
||||
),
|
||||
],
|
||||
visible: false,
|
||||
}),
|
||||
};
|
||||
|
||||
this.stationSource = new ol.source.Vector();
|
||||
this.clusterSource = new ol.source.Cluster({ distance: 60, minDistance: 20, source: this.stationSource });
|
||||
this.clusterLayer = new ol.layer.Vector({ source: this.clusterSource, style: (f: any) => this.createClusterStyle(f) });
|
||||
this.stationLayer = new ol.layer.Vector({ source: this.stationSource, visible: false, style: (f: any) => this.createStationStyle(f) });
|
||||
this.tileOverlayGroup = new ol.layer.Group({ layers: [], zIndex: 999, visible: true });
|
||||
|
||||
// KML overlay
|
||||
try {
|
||||
const kmlSource = new ol.source.Vector({
|
||||
url: '/static/kml/selected_polygons.kml',
|
||||
format: new ol.format.KML({ extractStyles: true }),
|
||||
});
|
||||
this.kmlLayer = new ol.layer.Vector({ source: kmlSource, zIndex: 800, visible: true });
|
||||
} catch {}
|
||||
|
||||
this.map = new ol.Map({
|
||||
target: 'map',
|
||||
layers: [this.layers.satellite, this.layers.vector, this.layers.terrain, this.layers.hybrid, this.kmlLayer, this.tileOverlayGroup, this.clusterLayer, this.stationLayer],
|
||||
view: new ol.View({ center: ol.proj.fromLonLat([108, 35]), zoom: 5, minZoom: 3, maxZoom: 18 }),
|
||||
});
|
||||
|
||||
this.map.getView().on('change:resolution', () => {
|
||||
const z = this.map.getView().getZoom();
|
||||
this.updateClusterDistance(z);
|
||||
this.updateLayerVisibility(z);
|
||||
});
|
||||
|
||||
this.map.on('singleclick', async (evt: any) => {
|
||||
const olAny: any = (window as any).ol;
|
||||
const features = this.map.getFeaturesAtPixel(evt.pixel, { layerFilter: (l: any) => l === this.stationLayer || l === this.clusterLayer });
|
||||
if (!features || features.length === 0) return;
|
||||
const feature = features[0];
|
||||
const subFeatures = feature.get('features');
|
||||
if (Array.isArray(subFeatures) && subFeatures.length > 0) {
|
||||
const extent = olAny.extent.createEmpty();
|
||||
subFeatures.forEach((sf: any) => {
|
||||
olAny.extent.extend(extent, sf.getGeometry().getExtent());
|
||||
});
|
||||
this.map.getView().fit(extent, { duration: 350, padding: [40, 40, 40, 40], maxZoom: 14 });
|
||||
return;
|
||||
}
|
||||
const hex = feature.get('stationHex');
|
||||
if (!hex) return;
|
||||
this.hexId = hex;
|
||||
await this.query();
|
||||
});
|
||||
|
||||
this.map.on('pointermove', (evt: any) => {
|
||||
const features = this.map.getFeaturesAtPixel(evt.pixel, { layerFilter: (l: any) => l === this.stationLayer || l === this.clusterLayer || l === this.kmlLayer });
|
||||
const el = this.map.getTargetElement();
|
||||
if (el) el.style.cursor = features && features.length > 0 ? 'pointer' : '';
|
||||
this.showTileTooltip(evt);
|
||||
});
|
||||
|
||||
// KML click -> open details dialog (only when not clicking stations/clusters)
|
||||
this.map.on('singleclick', (evt: any) => {
|
||||
try {
|
||||
const stHits = this.map.getFeaturesAtPixel(evt.pixel, { layerFilter: (l: any) => l === this.stationLayer || l === this.clusterLayer });
|
||||
if (stHits && stHits.length > 0) return;
|
||||
} catch {}
|
||||
try {
|
||||
let found = false;
|
||||
this.map.forEachFeatureAtPixel(
|
||||
evt.pixel,
|
||||
(f: any, layer: any) => {
|
||||
if (this.kmlLayer && layer === this.kmlLayer) {
|
||||
this.openKmlPopup(f);
|
||||
found = true;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
{ layerFilter: (l: any) => l === this.kmlLayer, hitTolerance: 6 },
|
||||
);
|
||||
if (found) return;
|
||||
} catch {}
|
||||
});
|
||||
|
||||
if (this.stations?.length) this.updateStationsOnMap();
|
||||
this.mapReady = true;
|
||||
this.tryInitialQuery();
|
||||
}
|
||||
|
||||
private openKmlPopup(feature: any) {
|
||||
try {
|
||||
const name = feature?.get ? feature.get('name') || '' : '';
|
||||
let desc = feature?.get ? feature.get('description') || '' : '';
|
||||
try {
|
||||
desc = String(desc);
|
||||
desc = desc.replace(/^<!\[CDATA\[/, '').replace(/\]\]>$/, '');
|
||||
const ta = document.createElement('textarea');
|
||||
ta.innerHTML = desc;
|
||||
desc = ta.value;
|
||||
} catch {}
|
||||
this.kmlInfoTitle = String(name || '详情');
|
||||
this.kmlInfoHtml = String(desc || '');
|
||||
this.isKmlDialogOpen = true;
|
||||
} catch {}
|
||||
}
|
||||
|
||||
closeKmlPopup() {
|
||||
this.isKmlDialogOpen = false;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private getTiandituKey(): string {
|
||||
const anyWin = window as any;
|
||||
return anyWin.TIANDITU_KEY || '0c260b8a094a4e0bc507808812cefdac';
|
||||
}
|
||||
|
||||
private updateClusterDistance(zoom: number) {
|
||||
if (!this.clusterSource) return;
|
||||
const distance = zoom < this.CLUSTER_THRESHOLD ? 60 : 20;
|
||||
this.clusterSource.setDistance(distance);
|
||||
}
|
||||
|
||||
private updateLayerVisibility(zoom: number) {
|
||||
if (!this.clusterLayer || !this.stationLayer) return;
|
||||
const showCluster = zoom < this.CLUSTER_THRESHOLD;
|
||||
this.clusterLayer.setVisible(showCluster);
|
||||
this.stationLayer.setVisible(!showCluster);
|
||||
}
|
||||
|
||||
private markerIcon(isOnline: boolean) {
|
||||
const ol: any = (window as any).ol;
|
||||
const src = isOnline ? '/static/images/marker-online.svg' : '/static/images/marker-offline.svg';
|
||||
return new ol.style.Icon({ src, anchor: [0.5, 1], anchorXUnits: 'fraction', anchorYUnits: 'fraction', scale: 0.9 });
|
||||
}
|
||||
|
||||
private createStationStyle(feature: any) {
|
||||
const ol: any = (window as any).ol;
|
||||
const last = feature.get('lastUpdate');
|
||||
const online = last ? new Date(last).getTime() > Date.now() - 5 * 60 * 1000 : false;
|
||||
const location = feature.get('location') || '';
|
||||
return new ol.style.Style({
|
||||
image: this.markerIcon(online),
|
||||
text: location
|
||||
? new ol.style.Text({
|
||||
text: location,
|
||||
offsetY: -28,
|
||||
fill: new ol.style.Fill({ color: '#111' }),
|
||||
stroke: new ol.style.Stroke({ color: '#fff', width: 3 }),
|
||||
font: '12px sans-serif',
|
||||
})
|
||||
: undefined,
|
||||
});
|
||||
}
|
||||
|
||||
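// Below the CLUSTER_THRESHOLD zoom, clusters with more than one station render as a
// numbered circle and single-station clusters as a small dot (colored by a 5-minute
// online window); at closer zooms each station falls back to its marker icon.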
private createClusterStyle(feature: any) {
|
||||
const ol: any = (window as any).ol;
|
||||
const features = feature.get('features') || [];
|
||||
const size = features.length;
|
||||
const zoom = this.map.getView().getZoom();
|
||||
if (zoom < this.CLUSTER_THRESHOLD) {
|
||||
if (size > 1) {
|
||||
const radius = Math.min(16 + size * 0.8, 32);
|
||||
const fontSize = Math.min(11 + size / 12, 16);
|
||||
return new ol.style.Style({
|
||||
image: new ol.style.Circle({
|
||||
radius,
|
||||
fill: new ol.style.Fill({ color: 'rgba(0,123,255,0.8)' }),
|
||||
stroke: new ol.style.Stroke({ color: '#fff', width: 2 }),
|
||||
}),
|
||||
text: new ol.style.Text({
|
||||
text: String(size),
|
||||
fill: new ol.style.Fill({ color: '#fff' }),
|
||||
font: `bold ${fontSize}px Arial`,
|
||||
offsetY: 1,
|
||||
}),
|
||||
});
|
||||
} else {
|
||||
const f0 = features[0];
|
||||
const last = f0?.get('lastUpdate');
|
||||
const online = last ? new Date(last).getTime() > Date.now() - 5 * 60 * 1000 : false;
|
||||
const color = online ? 'rgba(0,123,255,0.8)' : 'rgba(108,117,125,0.8)';
|
||||
return new ol.style.Style({
|
||||
image: new ol.style.Circle({
|
||||
radius: 6,
|
||||
fill: new ol.style.Fill({ color }),
|
||||
stroke: new ol.style.Stroke({ color: '#fff', width: 2 }),
|
||||
}),
|
||||
});
|
||||
}
|
||||
}
|
||||
const f0 = features[0];
|
||||
const last = f0?.get('lastUpdate');
|
||||
const online = last ? new Date(last).getTime() > Date.now() - 5 * 60 * 1000 : false;
|
||||
return new ol.style.Style({
|
||||
image: this.markerIcon(online),
|
||||
});
|
||||
}
|
||||
|
||||
private updateStationsOnMap() {
|
||||
const ol: any = (window as any).ol;
|
||||
if (!ol || !this.stationSource) return;
|
||||
const features = (this.stations || []).map((s) => {
|
||||
if (typeof s.longitude !== 'number' || typeof s.latitude !== 'number') return null;
|
||||
const stationId = s.station_id || '';
|
||||
if (!stationId) return null;
|
||||
const hex = stationId ? stationId.slice(-6).toUpperCase() : '';
|
||||
if (!hex) return null;
|
||||
const f = new ol.Feature({ geometry: new ol.geom.Point(ol.proj.fromLonLat([s.longitude, s.latitude])) });
|
||||
f.set('stationId', stationId);
|
||||
f.set('stationHex', hex);
|
||||
f.set('location', s.location || '');
|
||||
f.set('lastUpdate', s.last_update || '');
|
||||
return f;
|
||||
});
|
||||
this.stationSource.clear();
|
||||
features.filter(Boolean).forEach((f: any) => this.stationSource.addFeature(f));
|
||||
}
|
||||
|
||||
private async loadTileTimes(product: 'radar' | 'rain') {
|
||||
try {
|
||||
const params = new URLSearchParams({ z: String(this.tileZ), y: String(this.tileY), x: String(this.tileX) });
|
||||
const toFmt = (s: string) => s.replace('T', ' ') + ':00';
|
||||
if (this.start && this.end) {
|
||||
params.set('from', toFmt(this.start));
|
||||
params.set('to', toFmt(this.end));
|
||||
} else {
|
||||
params.set('limit', '60');
|
||||
}
|
||||
const path = product === 'rain' ? '/api/rain/times' : '/api/radar/times';
|
||||
const r = await fetch(`${path}?${params.toString()}`);
|
||||
if (!r.ok) return;
|
||||
const j = await r.json();
|
||||
this.tileTimes = j.times || [];
|
||||
this.tileIndex = 0;
|
||||
this.tileDt = this.tileTimes[0] || '';
|
||||
if (this.tileDt) await this.renderTilesAt(this.tileDt);
|
||||
} catch {
|
||||
this.tileTimes = [];
|
||||
this.tileIndex = -1;
|
||||
this.tileDt = '';
|
||||
}
|
||||
}
|
||||
|
||||
async renderTilesAt(dt: string) {
|
||||
if (!dt) {
|
||||
this.clearTileOverlays();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const params = new URLSearchParams({ z: String(this.tileZ), dt: dt });
|
||||
const path = this.tileProduct === 'rain' ? '/api/rain/tiles_at' : '/api/radar/tiles_at';
|
||||
const r = await fetch(`${path}?${params.toString()}`);
|
||||
if (!r.ok) {
|
||||
this.clearTileOverlays();
|
||||
return;
|
||||
}
|
||||
const j = await r.json();
|
||||
const tiles = Array.isArray(j.tiles) ? j.tiles : [];
|
||||
if (tiles.length === 0) {
|
||||
this.clearTileOverlays();
|
||||
return;
|
||||
}
|
||||
await this.renderTilesOnMap(this.tileProduct, tiles);
|
||||
const idx = this.tileTimes.findIndex((t) => t === dt);
|
||||
if (idx >= 0) {
|
||||
this.tileIndex = idx;
|
||||
}
|
||||
} catch {
|
||||
this.clearTileOverlays();
|
||||
}
|
||||
}
|
||||
|
||||
private clearTileOverlays() {
|
||||
if (!this.tileOverlayGroup) return;
|
||||
const coll = this.tileOverlayGroup.getLayers();
|
||||
if (coll) coll.clear();
|
||||
this.tileLastList = [];
|
||||
}
|
||||
|
||||
private addImageOverlayFromCanvas(canvas: HTMLCanvasElement, extent4326: [number, number, number, number]) {
|
||||
const ol: any = (window as any).ol;
|
||||
if (!ol || !this.map) return;
|
||||
const proj = this.map.getView().getProjection();
|
||||
const extentProj = ol.proj.transformExtent(extent4326, 'EPSG:4326', proj);
|
||||
const src = new ol.source.ImageStatic({ url: canvas.toDataURL('image/png'), imageExtent: extentProj, projection: proj });
|
||||
const layer = new ol.layer.Image({ source: src, opacity: 0.8, visible: true });
|
||||
this.tileOverlayGroup.getLayers().push(layer);
|
||||
}
|
||||
|
||||
private async renderTilesOnMap(product: 'none' | 'radar' | 'rain', tiles: any[]) {
|
||||
this.clearTileOverlays();
|
||||
const lastList: any[] = [];
|
||||
for (const t of tiles) {
|
||||
const w = t.width,
|
||||
h = t.height;
|
||||
if (!w || !h || !t.values) continue;
|
||||
const canvas = document.createElement('canvas');
|
||||
canvas.width = w;
|
||||
canvas.height = h;
|
||||
const ctx = canvas.getContext('2d')!;
|
||||
const img = ctx.createImageData(w, h);
|
||||
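// Color lookup tables for the overlays: radar reflectivity is binned in 5 dBZ steps
// (clamped to 0-75 dBZ) and rainfall is bucketed by the edges below (mm); rows are
// flipped vertically before being drawn onto the canvas.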
const radarColors = [
|
||||
[0, 0, 255],
|
||||
[0, 191, 255],
|
||||
[0, 255, 255],
|
||||
[127, 255, 212],
|
||||
[124, 252, 0],
|
||||
[173, 255, 47],
|
||||
[255, 255, 0],
|
||||
[255, 215, 0],
|
||||
[255, 165, 0],
|
||||
[255, 140, 0],
|
||||
[255, 69, 0],
|
||||
[255, 0, 0],
|
||||
[220, 20, 60],
|
||||
[199, 21, 133],
|
||||
[139, 0, 139],
|
||||
];
|
||||
const rainEdges = [0, 5, 7.5, 10, 12.5, 15, 17.5, 20, 25, 30, 40, 50, 75, 100, Infinity];
|
||||
const rainColors = [
|
||||
[126, 212, 121],
|
||||
[126, 212, 121],
|
||||
[110, 200, 109],
|
||||
[97, 169, 97],
|
||||
[81, 148, 76],
|
||||
[90, 158, 112],
|
||||
[143, 194, 254],
|
||||
[92, 134, 245],
|
||||
[66, 87, 240],
|
||||
[45, 48, 214],
|
||||
[26, 15, 166],
|
||||
[63, 22, 145],
|
||||
[191, 70, 148],
|
||||
[213, 1, 146],
|
||||
[213, 1, 146],
|
||||
];
|
||||
for (let row = 0; row < h; row++) {
|
||||
const srcRow = t.values[row] as (number | null)[];
|
||||
const dstRow = h - 1 - row;
|
||||
for (let col = 0; col < w; col++) {
|
||||
const v = srcRow[col];
|
||||
const off = (dstRow * w + col) * 4;
|
||||
if (v == null || v === 0) {
|
||||
img.data[off + 3] = 0;
|
||||
continue;
|
||||
}
|
||||
if (product === 'rain') {
|
||||
let idx = 0;
|
||||
while (idx < rainEdges.length - 1 && !(v >= rainEdges[idx] && v < rainEdges[idx + 1])) idx++;
|
||||
const c = rainColors[Math.min(idx, rainColors.length - 1)];
|
||||
img.data[off] = c[0];
|
||||
img.data[off + 1] = c[1];
|
||||
img.data[off + 2] = c[2];
|
||||
img.data[off + 3] = 220;
|
||||
} else {
|
||||
let bin = Math.floor(Math.max(0, Math.min(75, v)) / 5);
|
||||
if (bin >= radarColors.length) bin = radarColors.length - 1;
|
||||
const c = radarColors[bin];
|
||||
img.data[off] = c[0];
|
||||
img.data[off + 1] = c[1];
|
||||
img.data[off + 2] = c[2];
|
||||
img.data[off + 3] = 220;
|
||||
}
|
||||
}
|
||||
}
|
||||
ctx.putImageData(img, 0, 0);
|
||||
this.addImageOverlayFromCanvas(canvas, [t.west, t.south, t.east, t.north]);
|
||||
lastList.push({
|
||||
product,
|
||||
meta: { west: t.west, south: t.south, east: t.east, north: t.north, width: w, height: h },
|
||||
values: t.values,
|
||||
});
|
||||
}
|
||||
this.tileLastList = lastList;
|
||||
}
|
||||
|
||||
private focusOnStation(station?: Station) {
|
||||
const ol: any = (window as any).ol;
|
||||
if (!station || !ol || typeof station.longitude !== 'number' || typeof station.latitude !== 'number' || !this.map) return;
|
||||
this.map.getView().animate({
|
||||
center: ol.proj.fromLonLat([station.longitude, station.latitude]),
|
||||
zoom: 11,
|
||||
duration: 400,
|
||||
});
|
||||
setTimeout(() => {
|
||||
try {
|
||||
this.map.updateSize();
|
||||
} catch {
|
||||
/* noop */
|
||||
}
|
||||
}, 300);
|
||||
}
|
||||
|
||||
private scrollToTop() {
|
||||
const el = document.querySelector('.screen');
|
||||
if (el && 'scrollIntoView' in el) {
|
||||
try {
|
||||
(el as HTMLElement).scrollIntoView({ behavior: 'smooth', block: 'start' });
|
||||
} catch {
|
||||
/* noop */
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
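// Convert the pointer position to lon/lat, find the overlay tile that covers it and
// read the value of the underlying grid cell; the tooltip shows "<value> mm" for rain
// or "<value> dBZ" for radar, offset 12px from the cursor.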
private showTileTooltip(evt: any) {
|
||||
const tip = document.getElementById('tileValueTooltip');
|
||||
if (!tip || !this.map || !this.tileLastList || this.tileLastList.length === 0) {
|
||||
if (tip) tip.style.display = 'none';
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const coord = this.map.getEventCoordinate(evt.originalEvent);
|
||||
const lonlat = (window as any).ol.proj.transform(coord, this.map.getView().getProjection(), 'EPSG:4326');
|
||||
const lon = lonlat[0],
|
||||
lat = lonlat[1];
|
||||
let value: number | null = null;
|
||||
let unit = '';
|
||||
for (const it of this.tileLastList) {
|
||||
const { west, south, east, north, width, height } = it.meta;
|
||||
if (lon < west || lon > east || lat < south || lat > north) continue;
|
||||
const px = Math.floor((lon - west) / ((east - west) / width));
|
||||
const py = Math.floor((lat - south) / ((north - south) / height));
|
||||
if (px < 0 || px >= width || py < 0 || py >= height) continue;
|
||||
const v = it.values?.[py]?.[px];
|
||||
if (v != null) {
|
||||
value = Number(v);
|
||||
unit = it.product === 'rain' ? 'mm' : 'dBZ';
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (value == null) {
|
||||
tip.style.display = 'none';
|
||||
return;
|
||||
}
|
||||
tip.textContent = `${value.toFixed(1)} ${unit}`;
|
||||
const px = evt.pixel[0] + 12;
|
||||
const py = evt.pixel[1] + 12;
|
||||
tip.style.left = `${px}px`;
|
||||
tip.style.top = `${py}px`;
|
||||
tip.style.display = 'block';
|
||||
} catch {
|
||||
if (tip) tip.style.display = 'none';
|
||||
}
|
||||
}
|
||||
|
||||
private updateSummaryPanel() {
|
||||
const historyData = Array.isArray(this.history) ? this.history : [];
|
||||
const forecastData = Array.isArray(this.forecast) ? this.forecast : [];
|
||||
|
||||
const fmt = (n: number | null | undefined) => {
|
||||
if (n == null || isNaN(Number(n))) return '--';
|
||||
return Number(n).toFixed(1);
|
||||
};
|
||||
const pad2 = (n: number) => String(n).padStart(2, '0');
|
||||
const fmtDT = (d: Date) => `${d.getFullYear()}-${pad2(d.getMonth() + 1)}-${pad2(d.getDate())} ${pad2(d.getHours())}:00:00`;
|
||||
const ceilHour = (d: Date) => {
|
||||
const t = new Date(d);
|
||||
if (t.getMinutes() || t.getSeconds() || t.getMilliseconds()) {
|
||||
t.setHours(t.getHours() + 1);
|
||||
}
|
||||
t.setMinutes(0, 0, 0);
|
||||
return t;
|
||||
};
|
||||
|
||||
const now = new Date();
|
||||
const t1 = ceilHour(now);
|
||||
const t2 = new Date(t1.getTime() + 1 * 3600 * 1000);
|
||||
const t3 = new Date(t1.getTime() + 2 * 3600 * 1000);
|
||||
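// For a given timestamp, prefer the forecast with the shortest lead time
// (missing lead_hours sorts last) and return its rainfall value.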
const pickBestAt = (dtStr: string) => {
|
||||
const cand = forecastData.filter((x) => x && x.date_time === dtStr && x.rainfall != null);
|
||||
if (!cand.length) return null;
|
||||
cand.sort((a, b) => (a.lead_hours ?? 99) - (b.lead_hours ?? 99));
|
||||
return cand[0].rainfall ?? null;
|
||||
};
|
||||
|
||||
const r1 = pickBestAt(fmtDT(t1));
|
||||
const r2 = pickBestAt(fmtDT(t2));
|
||||
const r3 = pickBestAt(fmtDT(t3));
|
||||
const safe = (v: number | null) => (v != null ? Number(v) : 0);
|
||||
const futureSum = safe(r1) + safe(r2) + safe(r3);
|
||||
this.futureRainText = `未来1~3小时降雨 ${fmt(futureSum)} 毫米`;
|
||||
|
||||
let level: AlertLevel = 'none';
|
||||
// Threshold buckets: [0,4) light rain; [4,8) moderate rain (orange alert); [8, +∞) heavy rain (red alert)
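// Example: a 1-3h sum of 2.0 mm raises no alert, 5.5 mm raises orange, 9.0 mm raises red.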
|
||||
if (futureSum >= 8) {
|
||||
level = 'red';
|
||||
} else if (futureSum >= 4 && futureSum < 8) {
|
||||
level = 'orange';
|
||||
}
|
||||
this.alertLevel = level;
|
||||
this.setBodyAlertClass(level);
|
||||
|
||||
const bucketOf = (mm: any): number | null => {
|
||||
if (mm == null || isNaN(Number(mm))) return null;
|
||||
const v = Math.max(0, Number(mm));
|
||||
if (v === 0) return 0; // 0
|
||||
if (v > 0 && v < 4) return 1; // (0,4)
|
||||
if (v >= 4 && v < 8) return 2; // [4,8)
|
||||
return 3; // [8, +∞)
|
||||
};
|
||||
|
||||
const rainActual = new Map<string, number | null>();
|
||||
historyData.forEach((it) => {
|
||||
if (it && it.date_time) rainActual.set(it.date_time, it.rainfall ?? null);
|
||||
});
|
||||
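// Hindcast accuracy per lead time: a forecast counts as correct when its rainfall falls
// into the same bucket (0 / (0,4) / [4,8) / >=8 mm) as the observation at the same
// timestamp; the displayed percentage is correct/total.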
const tally = (lead: number) => {
|
||||
let correct = 0;
|
||||
let total = 0;
|
||||
forecastData.forEach((f) => {
|
||||
if (f.lead_hours !== lead) return;
|
||||
const a = rainActual.get(f.date_time);
|
||||
if (a == null) return;
|
||||
const ba = bucketOf(a);
|
||||
const bf = bucketOf(f.rainfall);
|
||||
if (ba == null || bf == null) return;
|
||||
total += 1;
|
||||
if (ba === bf) correct += 1;
|
||||
});
|
||||
return { correct, total };
|
||||
};
|
||||
const rH1 = tally(1);
|
||||
const rH2 = tally(2);
|
||||
const rH3 = tally(3);
|
||||
const pct = (r: { correct: number; total: number }) => (r.total > 0 ? `${((r.correct / r.total) * 100).toFixed(1)}%` : '--');
|
||||
this.pastAccuracyText = `过去预报准确率 +1h: ${pct(rH1)} +2h: ${pct(rH2)} +3h: ${pct(rH3)}`;
|
||||
|
||||
// Asynchronously refresh the heavy-rain forecast performance for the past 30 days
|
||||
this.refreshHeavyPerf().catch(()=>{});
|
||||
}
|
||||
|
||||
private async refreshHeavyPerf() {
|
||||
try {
|
||||
const sid = this.makeStationIdFromHex(this.hexId || this.DEFAULT_STATION_HEX);
|
||||
if (!sid) return;
|
||||
const days = 30;
|
||||
const now = new Date();
|
||||
const since = new Date(now.getTime() - days * 24 * 3600 * 1000);
|
||||
try { console.log(`[HeavyPerf] days=${days} window: ${since.toISOString()} ~ ${now.toISOString()}`); } catch {}
|
||||
const perf = await this.api.getForecastPerf(sid, days, this.provider || '');
|
||||
if (!perf) { this.heavyPerfText = ''; return; }
|
||||
const avg = (perf.avg_lead_hours ?? 0).toFixed(1);
|
||||
this.heavyPerfText = `过去${days}天大雨${perf.total_heavy}次,成功预报${perf.success_count}次,平均提前${avg}小时。`;
|
||||
} catch {
|
||||
this.heavyPerfText = '';
|
||||
}
|
||||
}
|
||||
|
||||
private setBodyAlertClass(level: AlertLevel) {
|
||||
const body = document.body;
|
||||
if (!body) return;
|
||||
body.classList.toggle('alert-mode', level !== 'none');
|
||||
body.classList.toggle('alert-mode-red', level === 'red');
|
||||
body.classList.toggle('alert-mode-orange', level === 'orange');
|
||||
}
|
||||
}
|
||||
core/frontend/bigscreen/src/index.html (new file, 16 lines)
@@ -0,0 +1,16 @@
<!doctype html>
<html lang="zh-CN">
<head>
  <meta charset="utf-8" />
  <title>北斗气象站</title>
  <base href="/bigscreen/" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <link rel="stylesheet" href="/static/css/ol.css" />
  <link rel="stylesheet" href="/static/css/tailwind.min.css" />
</head>
<body>
  <app-root>Loading...</app-root>
  <script src="/static/js/chart.js"></script>
  <script src="/static/js/ol.js"></script>
</body>
</html>
core/frontend/bigscreen/src/main.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
import { bootstrapApplication } from '@angular/platform-browser';
import { BigscreenAppComponent } from './app/app.component';

bootstrapApplication(BigscreenAppComponent).catch((err) => console.error(err));
core/frontend/bigscreen/src/polyfills.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
// Angular default change detection requires Zone.js.
import 'zone.js';
core/frontend/bigscreen/src/styles.css (new file, 528 lines)
@@ -0,0 +1,528 @@
|
||||
:root {
|
||||
--color-bg: #0b1e39;
|
||||
--color-fg: #e6eefc;
|
||||
--color-fg-muted: #dbe7ff;
|
||||
--panel-bg: rgba(255, 255, 255, 0.04);
|
||||
--panel-border: rgba(255, 255, 255, 0.12);
|
||||
--table-head-bg: rgba(20, 45, 80, 0.98);
|
||||
--table-head-fg: #e6f0ff;
|
||||
--table-row-alt: rgba(255, 255, 255, 0.03);
|
||||
--orange-bg-1: rgba(255, 171, 64, 0.18);
|
||||
--orange-bg-2: rgba(255, 171, 64, 0.1);
|
||||
--orange-border: rgba(230, 120, 20, 0.6);
|
||||
--orange-shadow-1: rgba(255, 159, 64, 0.35);
|
||||
--orange-shadow-2: rgba(255, 159, 64, 0.1);
|
||||
--orange-shadow-3: rgba(255, 159, 64, 0.25);
|
||||
--orange-fg: #2b1900;
|
||||
--orange-fg-sub: #4a2b00;
|
||||
--space-1: 0.25rem;
|
||||
--space-2: 0.5rem;
|
||||
--space-3: 0.75rem;
|
||||
--space-4: 1rem;
|
||||
--radius-1: 0.25rem;
|
||||
--radius-2: 0.375rem;
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
html {
|
||||
font-size: clamp(14px, 0.95vw, 18px);
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
background: var(--color-bg);
|
||||
color: var(--color-fg);
|
||||
font-family: 'Segoe UI', Arial, sans-serif;
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
body.alert-mode {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
body.alert-mode::before {
|
||||
content: '';
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
pointer-events: none;
|
||||
background: radial-gradient(
|
||||
circle at 50% 50%,
|
||||
rgba(255, 159, 64, 0) 45%,
|
||||
rgba(255, 159, 64, 0.12) 70%,
|
||||
rgba(255, 159, 64, 0.2) 100%
|
||||
);
|
||||
opacity: 0.16;
|
||||
animation: alert-vignette 3s ease-in-out infinite;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
body.alert-mode-red::before {
|
||||
background: radial-gradient(
|
||||
circle at 50% 50%,
|
||||
rgba(255, 107, 107, 0) 45%,
|
||||
rgba(255, 107, 107, 0.12) 70%,
|
||||
rgba(255, 107, 107, 0.2) 100%
|
||||
);
|
||||
}
|
||||
|
||||
@keyframes alert-vignette {
|
||||
0%,
|
||||
100% {
|
||||
opacity: 0.12;
|
||||
}
|
||||
50% {
|
||||
opacity: 0.25;
|
||||
}
|
||||
}
|
||||
|
||||
.screen {
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
display: grid;
|
||||
/* Three-column layout: 2/8 info (left), 3/8 left (middle), 3/8 right (right) */
|
||||
grid-template-columns: 2fr 3fr 3fr;
|
||||
gap: var(--space-2);
|
||||
padding: var(--space-2);
|
||||
}
|
||||
|
||||
.screen.alert-on {
|
||||
box-shadow: inset 0 0 20rem var(--orange-shadow-3), inset 0 0 20rem var(--orange-shadow-1);
|
||||
animation: screenGlowOrange 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.screen.alert-red {
|
||||
box-shadow: inset 0 0 20rem rgba(255, 107, 107, 0.25), inset 0 0 20rem rgba(255, 107, 107, 0.35);
|
||||
animation: screenGlowRed 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
@keyframes screenGlowOrange {
|
||||
0%,
|
||||
100% {
|
||||
box-shadow: inset 0 0 20rem var(--orange-shadow-3), inset 0 0 20rem var(--orange-shadow-1);
|
||||
}
|
||||
50% {
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes screenGlowRed {
|
||||
0%,
|
||||
100% {
|
||||
box-shadow: inset 0 0 20rem rgba(255, 107, 107, 0.25), inset 0 0 20rem rgba(255, 107, 107, 0.35);
|
||||
}
|
||||
50% {
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
.left,
|
||||
.info,
|
||||
.right {
|
||||
min-width: 0;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
/* Column order for the three columns: info -> 1, left -> 2, right -> 3 */
|
||||
.info { grid-column: 1; }
|
||||
.left { grid-column: 2; }
|
||||
.right { grid-column: 3; }
|
||||
|
||||
.info {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.info-panel {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
background: var(--panel-bg);
|
||||
border: 1px solid var(--panel-border);
|
||||
border-radius: var(--radius-2);
|
||||
padding: var(--space-3);
|
||||
color: var(--color-fg);
|
||||
line-height: 1.6;
|
||||
/* The panel itself scrolls; the banner is no longer pinned to the bottom */
|
||||
overflow: auto;
|
||||
/* Fill the full column height, and be at least as tall as the viewport */
|
||||
flex: 1 1 auto;
|
||||
height: 100%;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.info-title {
|
||||
font-weight: 700;
|
||||
/* Responsive font size for large screens (one step smaller) */
|
||||
font-size: clamp(1.05rem, 1.2vw, 1.6rem);
|
||||
margin-bottom: var(--space-2);
|
||||
color: #dfe9ff;
|
||||
}
|
||||
|
||||
.info-content {
|
||||
/* Responsive font size for large screens (one step smaller) */
|
||||
font-size: clamp(0.95rem, 1vw, 1.35rem);
|
||||
color: #cbd9ff;
|
||||
white-space: normal;
|
||||
line-height: 1.7;
|
||||
/* Natural flow: sits right after the banner and does not stretch to the panel bottom */
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
|
||||
/* Layout and spacing tweaks for the structured info list */
|
||||
.info-content .info-dl {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr;
|
||||
column-gap: 0.8em;
|
||||
row-gap: 0.6em;
|
||||
margin: 0;
|
||||
}
|
||||
.info-content dt {
|
||||
margin: 0;
|
||||
font-weight: 600;
|
||||
color: #dfe9ff;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.info-content dd {
|
||||
margin: 0;
|
||||
color: #cbd9ff;
|
||||
}
|
||||
.info-content dd.break {
|
||||
padding-bottom: 0.5em;
|
||||
margin-bottom: 0.4em;
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.12);
|
||||
}
|
||||
|
||||
/* Personnel info, three-column alignment: role | name | phone */
|
||||
.info-content .info-people {
|
||||
display: grid;
|
||||
/* Two columns: role | contact box (name - connector - phone) */
|
||||
grid-template-columns: auto 1fr;
|
||||
column-gap: 0.6em;
|
||||
row-gap: 0.45em;
|
||||
margin: 0.6em 0 0 0;
|
||||
align-items: center;
|
||||
}
|
||||
.info-content .info-people dt {
|
||||
margin: 0;
|
||||
font-weight: 600;
|
||||
color: #dfe9ff;
|
||||
white-space: nowrap;
|
||||
text-align: left; /* role aligned left */
|
||||
}
|
||||
.info-content .info-people dd {
|
||||
margin: 0;
|
||||
}
|
||||
.info-content .info-people dd.contact {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
gap: 0.6em; /* spacing between name and phone */
|
||||
}
|
||||
.info-content .info-people dd.contact .name {
|
||||
color: #cbd9ff;
|
||||
white-space: nowrap;
|
||||
/* the name takes all remaining width except the phone */
|
||||
flex: 1 1 auto;
|
||||
min-width: 0;
|
||||
text-align: left;
|
||||
border: 1px solid rgba(255, 255, 255, 0.22);
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 0.5rem;
|
||||
padding: 0.15em 0.55em;
|
||||
}
|
||||
.info-content .info-people dd.contact .phone {
|
||||
color: #cbd9ff;
|
||||
white-space: nowrap;
|
||||
font-variant-numeric: tabular-nums;
|
||||
letter-spacing: 0.01em;
|
||||
border: 1px solid rgba(255, 255, 255, 0.22);
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 0.5rem;
|
||||
padding: 0.15em 0.55em;
|
||||
flex: 0 0 auto; /* the phone keeps its own width */
|
||||
}
|
||||
|
||||
/* Bottom red notice banner */
|
||||
.info-banner {
|
||||
margin-top: var(--space-3);
|
||||
padding: 0.9em 1.1em; /* keep some inner padding */
|
||||
text-align: center;
|
||||
font-weight: 800;
|
||||
letter-spacing: 0.06em;
|
||||
border-radius: 0.6rem;
|
||||
color: #ff4d4f; /* red text */
|
||||
background: transparent; /* no red background */
|
||||
border: 2px solid #ff4d4f; /* thicker red border */
|
||||
/* Responsive font size */
|
||||
font-size: clamp(0.95rem, 1.15vw, 1.35rem);
|
||||
}
|
||||
|
||||
.left {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.chart-container {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
background: var(--panel-bg);
|
||||
border: 1px solid var(--panel-border);
|
||||
border-radius: var(--radius-1);
|
||||
padding: var(--space-2);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.chart-wrapper {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.station-info-title {
|
||||
font-size: 1rem;
|
||||
margin-bottom: var(--space-2);
|
||||
color: #d5e3ff;
|
||||
text-align: center;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.map-container {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
position: relative;
|
||||
background: var(--panel-bg);
|
||||
border: 1px solid var(--panel-border);
|
||||
border-radius: var(--radius-1);
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
#map {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.right {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.controls {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
gap: var(--space-2);
|
||||
background: var(--panel-bg);
|
||||
border: 1px solid var(--panel-border);
|
||||
border-radius: var(--radius-1);
|
||||
padding: calc(var(--space-1) + 0.125rem) var(--space-2);
|
||||
color: var(--color-fg-muted);
|
||||
}
|
||||
|
||||
.controls label {
|
||||
font-size: 0.8125rem;
|
||||
color: #bfd0ef;
|
||||
}
|
||||
|
||||
.controls input,
|
||||
.controls select,
|
||||
.controls button {
|
||||
padding: var(--space-1) var(--space-2);
|
||||
font-size: 0.8125rem;
|
||||
border-radius: 0.1875rem;
|
||||
border: 1px solid rgba(255, 255, 255, 0.18);
|
||||
background: rgba(255, 255, 255, 0.06);
|
||||
color: var(--color-fg);
|
||||
}
|
||||
|
||||
.controls input::placeholder {
|
||||
color: #98b1e0;
|
||||
}
|
||||
|
||||
.controls select option {
|
||||
background: var(--color-bg);
|
||||
color: var(--color-fg);
|
||||
}
|
||||
|
||||
.controls button {
|
||||
background: linear-gradient(180deg, rgba(255, 255, 255, 0.14), rgba(255, 255, 255, 0.02));
|
||||
cursor: pointer;
|
||||
border-color: rgba(255, 255, 255, 0.18);
|
||||
}
|
||||
|
||||
.controls button:hover {
|
||||
background: linear-gradient(180deg, rgba(255, 255, 255, 0.24), rgba(255, 255, 255, 0.05));
|
||||
}
|
||||
|
||||
.summary-panel {
|
||||
background: var(--panel-bg);
|
||||
border: 1px solid var(--panel-border);
|
||||
border-radius: var(--radius-2);
|
||||
padding: var(--space-3);
|
||||
color: #dfe9ff;
|
||||
line-height: 1.6;
|
||||
box-shadow: none;
|
||||
transition: all 0.4s ease;
|
||||
}
|
||||
|
||||
.summary-panel.alert-on {
|
||||
background: linear-gradient(180deg, var(--orange-bg-1), var(--orange-bg-2));
|
||||
border: 1px solid var(--orange-border);
|
||||
box-shadow: 0 6px 24px var(--orange-shadow-1), 0 2px 10px var(--orange-shadow-3), inset 0 1px 0 var(--orange-shadow-2);
|
||||
color: var(--orange-fg);
|
||||
animation: panelGlowOrange 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.summary-panel.alert-red {
|
||||
background: linear-gradient(180deg, rgba(255, 107, 107, 0.22), rgba(255, 107, 107, 0.12));
|
||||
border: 1px solid rgba(200, 40, 40, 0.6);
|
||||
box-shadow: 0 6px 24px rgba(255, 107, 107, 0.35), 0 2px 10px rgba(255, 107, 107, 0.25), inset 0 1px 0 rgba(255, 107, 107, 0.12);
|
||||
color: #3a0b0b;
|
||||
animation: panelGlowRed 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.summary-title {
|
||||
font-weight: 700;
|
||||
font-size: 1rem;
|
||||
margin-bottom: var(--space-1);
|
||||
}
|
||||
|
||||
.summary-panel.alert-on .summary-title,
|
||||
.summary-panel.alert-red .summary-title {
|
||||
font-size: 1.7rem;
|
||||
}
|
||||
|
||||
.summary-sub {
|
||||
font-size: 0.875rem;
|
||||
color: #bcd0ff;
|
||||
}
|
||||
|
||||
.table-container {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
border: 1px solid var(--panel-border);
|
||||
border-radius: var(--radius-1);
|
||||
background: var(--panel-bg);
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.table-scroll {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
overflow-y: auto;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.loading-overlay {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
z-index: 3000;
|
||||
background: rgba(0, 0, 0, 0.35);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.spinner {
|
||||
width: 48px;
|
||||
height: 48px;
|
||||
border-radius: 50%;
|
||||
border: 4px solid rgba(255, 255, 255, 0.25);
|
||||
border-top-color: #4f9cff;
|
||||
animation: spin 1s linear infinite;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
to {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes panelGlowOrange {
|
||||
0%,
|
||||
100% {
|
||||
background: linear-gradient(180deg, var(--orange-bg-1), var(--orange-bg-2));
|
||||
border-color: var(--orange-border);
|
||||
box-shadow: 0 6px 24px var(--orange-shadow-1), 0 2px 10px var(--orange-shadow-3), inset 0 1px 0 var(--orange-shadow-2);
|
||||
}
|
||||
50% {
|
||||
background: var(--color-bg);
|
||||
border-color: var(--panel-border);
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes panelGlowRed {
|
||||
0%,
|
||||
100% {
|
||||
background: linear-gradient(180deg, rgba(255, 107, 107, 0.22), rgba(255, 107, 107, 0.12));
|
||||
border-color: rgba(200, 40, 40, 0.6);
|
||||
box-shadow: 0 6px 24px rgba(255, 107, 107, 0.35), 0 2px 10px rgba(255, 107, 107, 0.25), inset 0 1px 0 rgba(255, 107, 107, 0.12);
|
||||
}
|
||||
50% {
|
||||
background: var(--color-bg);
|
||||
border-color: var(--panel-border);
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
/* KML modal backdrop: full-viewport centered container */
|
||||
.kml-modal-backdrop {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
z-index: 9999;
|
||||
display: grid;
|
||||
place-items: center;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
|
||||
.kml-modal-panel {
|
||||
width: min(92vw, 720px);
|
||||
max-height: 80vh;
|
||||
overflow: hidden;
|
||||
border: 1px solid rgba(213, 227, 255, 0.18);
|
||||
background: #0b1220;
|
||||
color: #d5e3ff;
|
||||
border-radius: 0.5rem;
|
||||
box-shadow: 0 20px 60px rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.kml-modal-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 0.5rem 1rem;
|
||||
border-bottom: 1px solid rgba(213, 227, 255, 0.18);
|
||||
}
|
||||
|
||||
.kml-modal-body {
|
||||
padding: 0.75rem 1rem;
|
||||
font-size: 0.9375rem;
|
||||
line-height: 1.6;
|
||||
max-height: calc(80vh - 48px);
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
/* Unify table and link styles inside KML description content for readability */
/* Keep the original KML styles; do not override table/link colors here */
|
||||
|
||||
@media (prefers-reduced-motion: reduce) {
|
||||
* {
|
||||
animation: none !important;
|
||||
}
|
||||
}
|
||||
core/frontend/bigscreen/tsconfig.app.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "outDir": "./out-tsc/app",
    "types": []
  },
  "files": [
    "src/main.ts",
    "src/polyfills.ts"
  ],
  "include": [
    "src/**/*.d.ts"
  ]
}
core/frontend/package-lock.json (generated, new file, 12103 lines)
File diff suppressed because it is too large
core/frontend/package.json (new file, 32 lines)
@@ -0,0 +1,32 @@
{
  "name": "weatherstation-ui",
  "version": "0.0.1",
  "private": true,
  "scripts": {
    "start": "ng serve weatherstation-ui --proxy-config proxy.conf.json",
    "start:bigscreen": "ng serve weatherstation-bigscreen --proxy-config proxy.conf.json",
    "build": "npm run build:ui && npm run build:bigscreen",
    "build:ui": "ng build weatherstation-ui --configuration production --base-href /ui/",
    "build:bigscreen": "ng build weatherstation-bigscreen --configuration production --base-href /bigscreen/",
    "dev": "ng serve weatherstation-ui",
    "test": "ng test"
  },
  "dependencies": {
    "@angular/animations": "^17.3.0",
    "@angular/common": "^17.3.0",
    "@angular/compiler": "^17.3.0",
    "@angular/core": "^17.3.0",
    "@angular/forms": "^17.3.0",
    "@angular/platform-browser": "^17.3.0",
    "@angular/platform-browser-dynamic": "^17.3.0",
    "rxjs": "^7.8.1",
    "tslib": "^2.6.2",
    "zone.js": "^0.14.2"
  },
  "devDependencies": {
    "@angular-devkit/build-angular": "^17.3.0",
    "@angular/cli": "^17.3.0",
    "@angular/compiler-cli": "^17.3.0",
    "typescript": "~5.3.3"
  }
}
core/frontend/proxy.conf.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "/api": {
    "target": "http://localhost:10003",
    "secure": false,
    "changeOrigin": true,
    "logLevel": "info"
  },
  "/static": {
    "target": "http://localhost:10003",
    "secure": false,
    "changeOrigin": true,
    "logLevel": "info"
  }
}
core/frontend/src/app.component.html (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
<app-header #hdr [onlineDevices]="onlineDevices" [stations]="stations" (selectStation)="onSelectStation($event)"></app-header>
|
||||
|
||||
<div class="border-b" style="border-color: #ddd;margin-bottom: 1em;">
|
||||
<div class="content-narrow mx-auto" style="max-width:1200px;">
|
||||
<h1 class="text-xl md:text-3xl font-semibold px-12 my-5" style="margin:0.8em 0; ">英卓气象站</h1>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="content-narrow mx-auto px-12" style="max-width:1200px; margin-bottom:3em;">
|
||||
<div *ngIf="isLoading" style="position:fixed;inset:0;z-index:2000;background:rgba(0,0,0,0.2);display:flex;align-items:center;justify-content:center;">
|
||||
<div class="animate-spin" style="height:40px;width:40px;border:4px solid #e5e7eb;border-top-color:#2563eb;border-radius:9999px;"></div>
|
||||
</div>
|
||||
<div class="system-info bg-gray-100 p-3 mb-5 rounded text-sm border" style="border-color:#ddd;">
|
||||
<strong>在线设备: </strong> {{onlineDevices}} 个 |
|
||||
<strong>总设备: </strong>
|
||||
<a href="#" class="text-blue-600 hover:text-blue-700 underline-offset-2" (click)="$event.preventDefault(); hdr.open()">{{ wh65lpCount }} 个</a>
|
||||
</div>
|
||||
|
||||
<div class="bg-white border rounded p-3 mb-4" style="border-color:#ddd;">
|
||||
<div class="flex flex-wrap items-center gap-3 mb-3">
|
||||
<label class="text-sm text-gray-600">站点编号</label>
|
||||
<input class="px-2 py-1 border rounded w-32 font-mono text-sm" [(ngModel)]="decimalId" placeholder="" />
|
||||
|
||||
<label class="text-sm text-gray-600">地图类型</label>
|
||||
<select class="px-2 py-1 border rounded text-sm" [(ngModel)]="mapType" (change)="switchLayer(mapType)">
|
||||
<option value="satellite">卫星图</option>
|
||||
<option value="vector">矢量图</option>
|
||||
<option value="terrain">地形图</option>
|
||||
<option value="hybrid">混合地形图</option>
|
||||
</select>
|
||||
|
||||
|
||||
<label class="text-sm text-gray-600">预报源</label>
|
||||
<select class="px-2 py-1 border rounded text-sm" [(ngModel)]="provider">
|
||||
<option value="">不显示预报</option>
|
||||
<option value="imdroid_V6">英卓 V6</option>
|
||||
<option value="imdroid_V5">英卓 V5</option>
|
||||
<option value="imdroid_mix">英卓 V4</option>
|
||||
<option value="open-meteo">英卓 V3</option>
|
||||
<option value="caiyun">英卓 V2</option>
|
||||
<option value="imdroid">英卓 V1</option>
|
||||
</select>
|
||||
|
||||
<label class="text-sm text-gray-600">图例</label>
|
||||
<select class="px-2 py-1 border rounded text-sm" [(ngModel)]="legendMode">
|
||||
<option value="combo_standard">综合</option>
|
||||
<option value="verify_all">全部对比</option>
|
||||
<option value="temp_compare">温度对比</option>
|
||||
<option value="hum_compare">湿度对比</option>
|
||||
<option value="rain_all">降水(+1/+2/+3h)</option>
|
||||
<option value="pressure_compare">气压对比</option>
|
||||
<option value="wind_compare">风速对比</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-wrap items-center gap-3 mb-3">
|
||||
<label class="text-sm text-gray-600">数据粒度</label>
|
||||
<select class="px-2 py-1 border rounded text-sm" [(ngModel)]="interval">
|
||||
<option value="raw">原始(16s)</option>
|
||||
<option value="10min">10分钟</option>
|
||||
<option value="30min">30分钟</option>
|
||||
<option value="1hour">1小时</option>
|
||||
</select>
|
||||
|
||||
<label class="text-sm text-gray-600">开始</label>
|
||||
<input type="datetime-local" class="px-2 py-1 border rounded text-sm" [(ngModel)]="start" />
|
||||
<label class="text-sm text-gray-600">结束</label>
|
||||
<input type="datetime-local" class="px-2 py-1 border rounded text-sm" [(ngModel)]="end" />
|
||||
<button class="bg-blue-600 text-white px-3 py-1 rounded text-sm" (click)="query()">查看历史数据</button>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-wrap items-center gap-3">
|
||||
<label class="text-sm text-gray-600">叠加显示</label>
|
||||
<select class="px-2 py-1 border rounded text-sm" [(ngModel)]="tileProduct" (change)="onProductChange()">
|
||||
<option value="none">不显示</option>
|
||||
<option value="radar">水汽含量</option>
|
||||
<option value="radar_detail">水汽含量(详细)</option>
|
||||
<option value="rain">1h 实际降雨</option>
|
||||
</select>
|
||||
|
||||
<label class="text-sm text-gray-600">时间</label>
|
||||
<select class="px-2 py-1 border rounded text-sm min-w-[220px]" [(ngModel)]="tileDt" (change)="renderTilesAt(tileDt)">
|
||||
<option [ngValue]="''">请选择时间</option>
|
||||
<option *ngFor="let t of tileTimes" [ngValue]="t">{{t}}</option>
|
||||
</select>
|
||||
<button class="px-2 py-1 text-sm border rounded bg-white" (click)="prevTile()">上一时次</button>
|
||||
<span class="text-xs text-gray-800">共{{tileTimes.length}}条,第{{tileIndex>=0? (tileIndex+1):0}}条</span>
|
||||
<button class="px-2 py-1 text-sm border rounded bg-white" (click)="nextTile()">下一时次</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="mapContainer" class="rounded border mb-4" [ngClass]="{ 'collapsed': isMapCollapsed }" [style.borderColor]="'#ddd'" style="position:relative; overflow:hidden;" [style.height]="isMapCollapsed ? '38vh' : '60vh'">
|
||||
<div id="map" style="width:100%; height:100%;"></div>
|
||||
<button class="map-toggle-btn bg-blue-600 hover:bg-blue-700 text-white" style="position:absolute;top:10px;right:10px;z-index:1001;border-radius:4px;padding:5px 10px;font-size:12px;font-weight:bold;" (click)="toggleMap()">{{ isMapCollapsed ? '展开地图' : '折叠地图' }}</button>
|
||||
<div id="regionStats" style="position:absolute;top:50px;right:10px;z-index:1002;display:none;">
|
||||
<div style="background:#ffffff; border:1px solid #ddd; border-radius:4px; padding:8px 10px; font-size:12px; color:#111; box-shadow:0 2px 6px rgba(0,0,0,0.08); min-width: 160px;">
|
||||
<div style="font-weight:700; margin-bottom:6px;">统计</div>
|
||||
<div style="margin-bottom:4px;">
|
||||
<span style="display:inline-block; width:64px;">风向</span>
|
||||
<span id="statWindDir">--</span>
|
||||
</div>
|
||||
<div style="margin-bottom:4px;">
|
||||
<span style="display:inline-block; width:64px;">风速</span>
|
||||
<span id="statWindSpd">--</span>
|
||||
</div>
|
||||
<div><span style="display:inline-block; width:64px;">≥30 dBZ</span> <span id="statDbz30">0</span></div>
|
||||
<div><span style="display:inline-block; width:64px;">≥35 dBZ</span> <span id="statDbz35">0</span></div>
|
||||
<div><span style="display:inline-block; width:64px;">≥40 dBZ</span> <span id="statDbz40">0</span></div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="tileValueTooltip" style="position:absolute;pointer-events:none;z-index:1003;display:none;background:rgba(0,0,0,0.65);color:#fff;font-size:12px;padding:4px 6px;border-radius:4px;"></div>
|
||||
</div>
|
||||
|
||||
<div *ngIf="isKmlDialogOpen" class="fixed inset-0 z-50" style="background:rgba(0,0,0,0.45);display:flex;align-items:center;justify-content:center;">
|
||||
<div class="bg-white rounded shadow-lg" style="max-height: 80vh; overflow: hidden; border: 1px solid #ddd;">
|
||||
<div class="px-4 py-2 border-b flex items-center justify-between" style="border-color:#eee;">
|
||||
<div class="font-semibold text-base">{{kmlInfoTitle}}</div>
|
||||
<button class="text-sm text-gray-600 hover:text-gray-900" (click)="closeKmlPopup()">关闭</button>
|
||||
</div>
|
||||
<div class="px-4 py-3 text-sm leading-6" style="overflow:auto; max-height: calc(80vh - 48px);" [innerHTML]="kmlInfoHtml"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div *ngIf="showPanels" id="chartSection" class="border rounded p-3 mb-4" style="border-color:#ddd;">
|
||||
<div class="font-bold mb-5 mt-5 text-center">{{ selectedTitle }}</div>
|
||||
<chart-panel [history]="history" [forecast]="forecast" [legendMode]="legendMode"></chart-panel>
|
||||
</div>
|
||||
<table-panel *ngIf="showPanels" [history]="history" [forecast]="forecast" [showPastForecast]="showPastForecast" [endDate]="end"></table-panel>
|
||||
</div>
|
||||
|
||||
<div style="height:3vh;"></div>
|
||||
core/frontend/src/app/api.service.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
|
||||
export type WeatherPoint = {
|
||||
date_time: string;
|
||||
temperature?: number;
|
||||
humidity?: number;
|
||||
pressure?: number;
|
||||
wind_speed?: number;
|
||||
wind_direction?: number;
|
||||
rainfall?: number;
|
||||
rain_total?: number;
|
||||
light?: number;
|
||||
uv?: number;
|
||||
};
|
||||
|
||||
export type ForecastPoint = WeatherPoint & {
|
||||
provider?: string;
|
||||
issued_at?: string;
|
||||
precip_prob?: number;
|
||||
lead_hours?: number;
|
||||
};
|
||||
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class ApiService {
|
||||
async getStatus(): Promise<{ online_devices: number; server_time: string } | null> {
|
||||
try { const r = await fetch('/api/system/status'); return await r.json(); } catch { return null; }
|
||||
}
|
||||
|
||||
async getStations(): Promise<any[]> {
|
||||
try { const r = await fetch('/api/stations'); return await r.json(); } catch { return []; }
|
||||
}
|
||||
|
||||
async getHistory(hexId: string, from: string, to: string, interval: string): Promise<WeatherPoint[]> {
|
||||
const params = new URLSearchParams({ hex_id: hexId, start_time: from, end_time: to, interval });
|
||||
const r = await fetch(`/api/data?${params.toString()}`);
|
||||
if (!r.ok) return [];
|
||||
return await r.json();
|
||||
}
|
||||
|
||||
async getForecast(stationId: string, from: string, to: string, provider = '', versions = 1): Promise<ForecastPoint[]> {
|
||||
const params = new URLSearchParams({ station_id: stationId, from, to, provider, versions: String(versions) });
|
||||
const r = await fetch(`/api/forecast?${params.toString()}`);
|
||||
if (!r.ok) return [];
|
||||
return await r.json();
|
||||
}
|
||||
|
||||
async getForecastPerf(stationId: string, days = 30, provider = ''): Promise<{ total_heavy: number; success_count: number; avg_lead_hours: number } | null> {
|
||||
try {
|
||||
const params = new URLSearchParams({ station_id: stationId, days: String(days) });
|
||||
if (provider) params.set('provider', provider);
|
||||
const r = await fetch(`/api/forecast/perf?${params.toString()}`);
|
||||
if (!r.ok) return null;
|
||||
return await r.json();
|
||||
} catch { return null; }
|
||||
}
|
||||
}
|
||||
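A minimal usage sketch of the ApiService defined above, e.g. loading the last day of observations plus up to three forecast versions for one station. The function name, import path, hex id and timestamps are placeholders, not part of the change set; in the app itself the service is injected via Angular DI rather than constructed directly.

```ts
// Hypothetical caller; ApiService, getHistory and getForecast are defined above.
import { ApiService } from './app/api.service';

async function loadStationData() {
  const api = new ApiService(); // direct construction for brevity; the app uses DI

  // Observation API takes the 6-char hex id; forecast API takes the full RS485-prefixed id.
  const history = await api.getHistory('AB12EF', '2024-06-01 00:00:00', '2024-06-02 00:00:00', '1hour');
  const forecast = await api.getForecast('RS485-AB12EF', '2024-06-01 00:00:00', '2024-06-02 00:00:00', 'imdroid_mix', 3);

  console.log(`history points: ${history.length}, forecast points: ${forecast.length}`);
}

loadStationData();
```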
12
core/frontend/src/app/chart-panel.component.html
Normal file
@@ -0,0 +1,12 @@
|
||||
<div class="chart-container show">
|
||||
<div class="station-info-title" style="text-align:right;margin-bottom:8px" *ngIf="showAccuracy">
|
||||
<div #accPanel id="accuracyPanel" class="accuracy-panel" style="display:none;font-size:12px;color:#374151;white-space:nowrap;">
|
||||
<span class="item"><span class="label">+1h:</span><span #accH1 id="accH1" class="value">--</span></span>
|
||||
<span class="item" style="margin-left:8px"><span class="label"> +2h:</span><span #accH2 id="accH2" class="value">--</span></span>
|
||||
<span class="item" style="margin-left:8px"><span class="label"> +3h:</span><span #accH3 id="accH3" class="value">--</span></span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="chart-wrapper" style="height:500px">
|
||||
<canvas #canvas id="combinedChart"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
217
core/frontend/src/app/chart-panel.component.ts
Normal file
@@ -0,0 +1,217 @@
|
||||
import { Component, ElementRef, Input, OnChanges, SimpleChanges, ViewChild } from '@angular/core';
|
||||
import type { ForecastPoint, WeatherPoint } from './api.service';
|
||||
|
||||
@Component({
|
||||
selector: 'chart-panel',
|
||||
standalone: true,
|
||||
templateUrl: './chart-panel.component.html'
|
||||
})
|
||||
export class ChartPanelComponent implements OnChanges {
|
||||
@Input() history: WeatherPoint[] = [];
|
||||
@Input() forecast: ForecastPoint[] = [];
|
||||
@Input() legendMode: string = 'combo_standard';
|
||||
@Input() showAccuracy: boolean = true;
|
||||
|
||||
@ViewChild('canvas', { static: true }) canvas!: ElementRef<HTMLCanvasElement>;
|
||||
@ViewChild('accPanel', { static: true }) accPanel!: ElementRef<HTMLDivElement>;
|
||||
@ViewChild('accH1', { static: true }) accH1!: ElementRef<HTMLSpanElement>;
|
||||
@ViewChild('accH2', { static: true }) accH2!: ElementRef<HTMLSpanElement>;
|
||||
@ViewChild('accH3', { static: true }) accH3!: ElementRef<HTMLSpanElement>;
|
||||
|
||||
private chart: any;
|
||||
|
||||
ngOnChanges(changes: SimpleChanges): void {
|
||||
this.render();
|
||||
}
|
||||
|
||||
private render() {
|
||||
const historyData = Array.isArray(this.history) ? this.history : [];
|
||||
const forecastData = Array.isArray(this.forecast) ? this.forecast : [];
|
||||
if (historyData.length === 0 && forecastData.length === 0) { this.destroy(); return; }
|
||||
|
||||
const labels = [...new Set([
|
||||
...historyData.map(x => x.date_time),
|
||||
...forecastData.map(x => x.date_time)
|
||||
])].sort();
|
||||
|
||||
const pickSeries = (arr: any[], key: string) => labels.map(l => {
|
||||
const it = arr.find(d => d.date_time === l); return it ? (it as any)[key] ?? null : null;
|
||||
});
|
||||
|
||||
const hTemp = pickSeries(historyData, 'temperature');
|
||||
const hHum = pickSeries(historyData, 'humidity');
|
||||
const hPres = pickSeries(historyData, 'pressure');
|
||||
const hWind = pickSeries(historyData, 'wind_speed');
|
||||
const hRain = pickSeries(historyData, 'rainfall');
|
||||
const hRainTotal = pickSeries(historyData, 'rain_total');
|
||||
|
||||
const byTime = new Map<string, any>();
|
||||
forecastData.forEach(fp => {
|
||||
const t = fp.date_time; if (!byTime.has(t)) byTime.set(t, {});
|
||||
const lead = typeof fp.lead_hours === 'number' ? fp.lead_hours : null;
|
||||
if (lead !== null && lead >= 0 && lead <= 3) {
|
||||
const bucket = byTime.get(t);
|
||||
if (bucket[lead] == null) bucket[lead] = fp; // assume DESC issued_at
|
||||
}
|
||||
});
|
||||
|
||||
const getAtLead = (l: string, field: string, lead: number) => {
|
||||
const b = byTime.get(l); if (!b || !b[lead]) return null; const v = b[lead][field as keyof ForecastPoint];
|
||||
return v == null ? null : (v as number);
|
||||
};
|
||||
|
||||
const rainH1 = labels.map(l => getAtLead(l, 'rainfall', 1));
|
||||
const rainH2 = labels.map(l => getAtLead(l, 'rainfall', 2));
|
||||
const rainH3 = labels.map(l => getAtLead(l, 'rainfall', 3));
|
||||
|
||||
const pickNearest = (l: string, field: keyof ForecastPoint) => {
|
||||
const b = byTime.get(l); if (!b) return null;
|
||||
for (const k of [0,1,2,3]) { if (b[k] && b[k][field] != null) return b[k][field] as number; }
|
||||
return null;
|
||||
};
|
||||
const fTempN = labels.map(l => pickNearest(l, 'temperature'));
|
||||
const fHumN = labels.map(l => pickNearest(l, 'humidity'));
|
||||
const fPresN = labels.map(l => pickNearest(l, 'pressure'));
|
||||
const fWindN = labels.map(l => pickNearest(l, 'wind_speed'));
|
||||
|
||||
this.destroy();
|
||||
// @ts-ignore
|
||||
const Chart = (window as any).Chart;
|
||||
if (!Chart || !this.canvas?.nativeElement) return;
|
||||
|
||||
const datasets: any[] = [
|
||||
{ label: '温度 (°C) - 实测', seriesKey: 'temp_actual', data: hTemp, borderColor: 'rgb(255,99,132)', backgroundColor: 'rgba(255,99,132,0.1)', yAxisID: 'y-temperature', tension: 0.4, spanGaps: false },
|
||||
{ label: '湿度 (%) - 实测', seriesKey: 'hum_actual', data: hHum, borderColor: 'rgb(54,162,235)', backgroundColor: 'rgba(54,162,235,0.1)', yAxisID: 'y-humidity', tension: 0.4, hidden: true, spanGaps: false },
|
||||
{ label: '大气压 (hPa) - 实测', seriesKey: 'pressure_actual', data: hPres, borderColor: 'rgb(153,102,255)', backgroundColor: 'rgba(153,102,255,0.1)', yAxisID: 'y-pressure', tension: 0.4, hidden: true, spanGaps: false },
|
||||
{ label: '风速 (m/s) - 实测', seriesKey: 'wind_actual', data: hWind, borderColor: 'rgb(75,192,192)', backgroundColor: 'rgba(75,192,192,0.1)', yAxisID: 'y-wind', tension: 0.4, hidden: true, spanGaps: false },
|
||||
{ label: '雨量 (mm) - 实测', seriesKey: 'rain_actual', data: hRain, type: 'bar', backgroundColor: 'rgba(54,162,235,0.6)', borderColor: 'rgb(54,162,235)', yAxisID: 'y-rainfall' },
|
||||
{ label: '累计雨量 (mm) - 实测', seriesKey: 'rain_total', data: hRainTotal, borderColor: 'rgb(75,192,192)', backgroundColor: 'rgba(75,192,192,0.1)', yAxisID: 'y-rainfall', tension: 0.2, spanGaps: false, pointRadius: 0, hidden: true }
|
||||
];
|
||||
if (forecastData.length > 0) {
|
||||
datasets.push(
|
||||
{ label: '雨量 (mm) - 预报 (+1h)', seriesKey: 'rain_fcst_h1', data: rainH1, type: 'bar', backgroundColor: 'rgba(255,99,71,0.55)', borderColor: 'rgb(255,99,71)', yAxisID: 'y-rainfall' },
|
||||
{ label: '雨量 (mm) - 预报 (+2h)', seriesKey: 'rain_fcst_h2', data: rainH2, type: 'bar', backgroundColor: 'rgba(255,205,86,0.55)', borderColor: 'rgb(255,205,86)', yAxisID: 'y-rainfall' },
|
||||
{ label: '雨量 (mm) - 预报 (+3h)', seriesKey: 'rain_fcst_h3', data: rainH3, type: 'bar', backgroundColor: 'rgba(76,175,80,0.55)', borderColor: 'rgb(76,175,80)', yAxisID: 'y-rainfall' },
|
||||
{ label: '温度 (°C) - 预报', seriesKey: 'temp_fcst', data: fTempN, borderColor: 'rgb(255,159,64)', backgroundColor: 'rgba(255,159,64,0.1)', borderDash: [5,5], yAxisID: 'y-temperature', tension: 0.4, spanGaps: false, hidden: true },
|
||||
{ label: '湿度 (%) - 预报', seriesKey: 'hum_fcst', data: fHumN, borderColor: 'rgb(54,162,235)', backgroundColor: 'rgba(54,162,235,0.1)', borderDash: [5,5], yAxisID: 'y-humidity', tension: 0.4, hidden: true, spanGaps: false },
|
||||
{ label: '大气压 (hPa) - 预报', seriesKey: 'pressure_fcst', data: fPresN, borderColor: 'rgb(153,102,255)', backgroundColor: 'rgba(153,102,255,0.1)', borderDash: [5,5], yAxisID: 'y-pressure', tension: 0.4, hidden: true, spanGaps: false },
|
||||
{ label: '风速 (m/s) - 预报', seriesKey: 'wind_fcst', data: fWindN, borderColor: 'rgb(75,192,192)', backgroundColor: 'rgba(75,192,192,0.1)', borderDash: [5,5], yAxisID: 'y-wind', tension: 0.4, hidden: true, spanGaps: false }
|
||||
);
|
||||
}
|
||||
|
||||
const totalLabels = labels.length;
|
||||
const tickStep = Math.max(1, Math.ceil(totalLabels / 10));
|
||||
this.chart = new Chart(this.canvas.nativeElement.getContext('2d'), {
|
||||
type: 'line',
|
||||
data: { labels, datasets },
|
||||
options: {
|
||||
responsive: true, maintainAspectRatio: false,
|
||||
interaction: { mode: 'index', intersect: false },
|
||||
layout: { padding: { top: 12, right: 16, bottom: 12, left: 16 } },
|
||||
plugins: { legend: { display: true, position: 'top', align: 'center', labels: { padding: 16 } } },
|
||||
scales: {
|
||||
x: { type: 'category', ticks: {
|
||||
autoSkip: false, maxRotation: 0, minRotation: 0,
|
||||
callback: function(value: any, index: number) {
|
||||
if (index % tickStep !== 0) return '';
|
||||
const labels = (this as any).chart?.data?.labels || [];
|
||||
const raw = labels[index] || '';
|
||||
return (typeof raw === 'string' && raw.length >= 13) ? raw.substring(5, 13) : raw;
|
||||
}
|
||||
}, grid: { display: true, drawOnChartArea: true } },
|
||||
'y-temperature': { type: 'linear', display: true, position: 'right', title: { display: true, text: '温度 (°C)' } },
|
||||
'y-humidity': { type: 'linear', display: true, position: 'right', title: { display: true, text: '湿度 (%)' }, grid: { drawOnChartArea: false }, min: 0, max: 100 },
|
||||
'y-pressure': { type: 'linear', display: true, position: 'left', title: { display: true, text: '大气压 (hPa)' }, grid: { drawOnChartArea: false } },
|
||||
'y-wind': { type: 'linear', display: true, position: 'left', title: { display: true, text: '风速 (m/s)' }, grid: { drawOnChartArea: false }, beginAtZero: true },
|
||||
'y-rainfall': { type: 'linear', display: true, position: 'right', title: { display: true, text: '雨量 (mm)' }, grid: { drawOnChartArea: false }, beginAtZero: true }
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
this.applyLegendMode(this.legendMode);
|
||||
this.updateAccuracyPanel(labels, hRain, rainH1, rainH2, rainH3);
|
||||
}
|
||||
|
||||
private applyLegendMode(mode: string) {
|
||||
if (!this.chart) return;
|
||||
const map = new Map<string, any>();
|
||||
this.chart.data.datasets.forEach((ds: any) => { if (ds.seriesKey) map.set(ds.seriesKey, ds); });
|
||||
const setVisible = (keys: string[]) => {
|
||||
const all = ['temp_actual','hum_actual','rain_actual','rain_total','temp_fcst','hum_fcst','pressure_actual','pressure_fcst','wind_actual','wind_fcst','rain_fcst_h1','rain_fcst_h2','rain_fcst_h3'];
|
||||
all.forEach(k => { const ds = map.get(k); if (ds) ds.hidden = true; });
|
||||
keys.forEach(k => { const ds = map.get(k); if (ds) ds.hidden = false; });
|
||||
};
|
||||
switch (mode) {
|
||||
case 'verify_all': setVisible(['temp_actual','temp_fcst','hum_actual','hum_fcst','pressure_actual','pressure_fcst','wind_actual','wind_fcst','rain_actual','rain_fcst_h1','rain_fcst_h2','rain_fcst_h3']); break;
|
||||
case 'temp_compare': setVisible(['temp_actual','temp_fcst']); break;
|
||||
case 'hum_compare': setVisible(['hum_actual','hum_fcst']); break;
|
||||
case 'rain_all': setVisible(['rain_actual','rain_fcst_h1','rain_fcst_h2','rain_fcst_h3']); break;
|
||||
case 'pressure_compare': setVisible(['pressure_actual','pressure_fcst']); break;
|
||||
case 'wind_compare': setVisible(['wind_actual','wind_fcst']); break;
|
||||
case 'combo_standard':
|
||||
default: setVisible(['temp_actual','temp_fcst','rain_actual','rain_fcst_h1','rain_fcst_h2','rain_fcst_h3']);
|
||||
}
|
||||
this.chart.update();
|
||||
}
|
||||
|
||||
private updateAccuracyPanel(labels: string[], historyRain: (number|null)[], h1: (number|null)[], h2: (number|null)[], h3: (number|null)[]) {
|
||||
const panel = this.accPanel?.nativeElement; if (!panel) return;
|
||||
if (!this.showAccuracy) { panel.style.display = 'none'; return; }
|
||||
const isNum = (v:any)=> v!=null && !isNaN(Number(v));
|
||||
const usedIdx = historyRain.map((v, idx) => ({v, idx})).filter(x => isNum(x.v)).map(x => x.idx);
|
||||
const total = usedIdx.length;
|
||||
if (total === 0 || labels.length === 0) { panel.style.display = 'none'; return; }
|
||||
|
||||
// Buckets: 0mm, (0,4), [4,8), [8, +∞)
|
||||
const bucketOf = (mm: any): number|null => {
|
||||
if (!isNum(mm)) return null;
|
||||
const v = Math.max(0, Number(mm));
|
||||
if (v === 0) return 0; // 0
|
||||
if (v > 0 && v < 4) return 1; // (0,4)
|
||||
if (v >= 4 && v < 8) return 2; // [4,8)
|
||||
return 3; // [8, +∞)
|
||||
};
|
||||
const nameOf = (b:number|null) => b===0 ? '0' : b===1 ? '(0,4)' : b===2 ? '[4,8)' : b===3 ? '≥8' : '--';
|
||||
|
||||
const calcFor = (arr: (number|null)[]) => {
|
||||
let correct=0;
|
||||
usedIdx.forEach(i=>{ const ba=bucketOf(historyRain[i]); const bf=bucketOf(arr[i]); if(ba!==null && bf!==null && ba===bf) correct++; });
|
||||
return {correct,total};
|
||||
};
|
||||
const fmtPct = (n: number) => `${n.toFixed(1)}%`;
|
||||
|
||||
// Detailed console logs for validation
|
||||
try {
|
||||
console.groupCollapsed('[准确率] (+1h/+2h/+3h)');
|
||||
console.log('总小时(实测):', total);
|
||||
usedIdx.forEach(i => {
|
||||
const label = labels[i];
|
||||
const a = historyRain[i];
|
||||
const bA = bucketOf(a);
|
||||
const f1 = h1[i]; const b1 = bucketOf(f1);
|
||||
const f2 = h2[i]; const b2 = bucketOf(f2);
|
||||
const f3 = h3[i]; const b3 = bucketOf(f3);
|
||||
const m1 = (bA!==null && b1!==null && bA===b1) ? 'true' : 'false';
|
||||
const m2 = (bA!==null && b2!==null && bA===b2) ? 'true' : 'false';
|
||||
const m3 = (bA!==null && b3!==null && bA===b3) ? 'true' : 'false';
|
||||
const fv = (v:any)=> (v==null||isNaN(Number(v)))? 'NULL' : Number(v).toFixed(2);
|
||||
console.log(`${label} | 实测 ${fv(a)} (${nameOf(bA)}) | +1h ${fv(f1)} (${nameOf(b1)}) ${m1} | +2h ${fv(f2)} (${nameOf(b2)}) ${m2} | +3h ${fv(f3)} (${nameOf(b3)}) ${m3}`);
|
||||
});
|
||||
} catch {}
|
||||
|
||||
const r1 = calcFor(h1), r2 = calcFor(h2), r3 = calcFor(h3);
|
||||
try {
|
||||
console.log(`命中/总 (+1h): ${r1.correct}/${r1.total} => ${fmtPct((r1.correct/total)*100)}`);
|
||||
console.log(`命中/总 (+2h): ${r2.correct}/${r2.total} => ${fmtPct((r2.correct/total)*100)}`);
|
||||
console.log(`命中/总 (+3h): ${r3.correct}/${r3.total} => ${fmtPct((r3.correct/total)*100)}`);
|
||||
console.groupEnd?.();
|
||||
} catch {}
|
||||
|
||||
this.accH1.nativeElement.textContent = total>0 ? fmtPct((r1.correct/total)*100) : '--';
|
||||
this.accH2.nativeElement.textContent = total>0 ? fmtPct((r2.correct/total)*100) : '--';
|
||||
this.accH3.nativeElement.textContent = total>0 ? fmtPct((r3.correct/total)*100) : '--';
|
||||
panel.style.display = 'block';
|
||||
}
|
||||
|
||||
destroy() { if (this.chart) { this.chart.destroy(); this.chart = null; } }
|
||||
}
|
||||
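The accuracy panel above classifies hourly rainfall into four buckets (0, (0,4), [4,8), ≥8 mm) and counts how often the forecast lands in the same bucket as the observation. A standalone sketch of that bucket/hit-rate logic follows; the free functions `bucketOf`/`hitRate` mirror the component's private helpers and are illustrative, not part of the component API.

```ts
// Mirrors bucketOf()/calcFor() in ChartPanelComponent above.
type Mm = number | null;

function bucketOf(mm: Mm): number | null {
  if (mm == null || isNaN(Number(mm))) return null;
  const v = Math.max(0, Number(mm));
  if (v === 0) return 0;   // 0 mm
  if (v < 4) return 1;     // (0, 4)
  if (v < 8) return 2;     // [4, 8)
  return 3;                // [8, +inf)
}

// Hit rate: share of observed hours whose forecast falls in the same bucket.
function hitRate(observed: Mm[], forecast: Mm[]): number {
  const used = observed.map((v, i) => ({ v, i })).filter(x => bucketOf(x.v) !== null);
  if (used.length === 0) return NaN;
  const hits = used.filter(({ v, i }) => {
    const bf = bucketOf(forecast[i]);
    return bf !== null && bf === bucketOf(v);
  }).length;
  return (hits / used.length) * 100;
}

// Example: 3 observed hours vs +1h forecasts — two bucket matches out of three => "66.7".
console.log(hitRate([0, 5.2, 1.1], [0, 3.9, 0.5]).toFixed(1));
```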
24
core/frontend/src/app/header.component.html
Normal file
@@ -0,0 +1,24 @@
|
||||
<div class="device-modal" *ngIf="modalOpen" (click)="close()" style="position:fixed;inset:0;background:rgba(0,0,0,.3);display:flex;align-items:center;justify-content:center;z-index:2000">
|
||||
<div class="device-modal-content" (click)="$event.stopPropagation()" style="background:#fff;width:80%;max-width:640px;border-radius:8px;box-shadow:0 10px 25px rgba(0,0,0,.15);display:flex;flex-direction:column;max-height:70vh;">
|
||||
<div class="device-list-header" style="padding:12px 16px;border-bottom:1px solid #eee;display:flex;justify-content:space-between;align-items:center;">
|
||||
<div class="text-sm">设备列表</div>
|
||||
<button (click)="close()" class="text-xl leading-none">×</button>
|
||||
</div>
|
||||
<div class="device-list" style="flex:1;overflow:auto;">
|
||||
<button class="device-item" *ngFor="let s of pageItems" (click)="choose(s)" style="padding:10px 16px;border-bottom:1px solid #f5f5f5;display:flex;justify-content:space-between;align-items:center;width:100%;text-align:left;">
|
||||
<div>
|
||||
<div class="text-sm" *ngIf="s.location">{{s.location}}</div>
|
||||
<div class="text-xs text-gray-500">设备编号:{{s.station_id}}</div>
|
||||
</div>
|
||||
<div class="text-xs" [class.text-green-600]="isOnline(s)" [class.text-gray-500]="!isOnline(s)">{{ isOnline(s) ? '在线' : '离线' }}</div>
|
||||
</button>
|
||||
</div>
|
||||
<div class="device-list-footer" style="padding:10px;border-top:1px solid #eee;display:flex;justify-content:space-between;align-items:center;">
|
||||
<div class="pagination text-xs text-gray-600">第 {{currentPage}} 页,共 {{totalPages}} 页</div>
|
||||
<div class="flex gap-2">
|
||||
<button class="px-2 py-1 border rounded" (click)="prev()" [disabled]="currentPage<=1">上一页</button>
|
||||
<button class="px-2 py-1 border rounded" (click)="next()" [disabled]="currentPage>=totalPages">下一页</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
52
core/frontend/src/app/header.component.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { Component, EventEmitter, Input, Output } from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
|
||||
type Station = { station_id: string; device_type?: string; name?: string; station_alias?: string; location?: string; last_update?: string };
|
||||
|
||||
@Component({
|
||||
selector: 'app-header',
|
||||
standalone: true,
|
||||
imports: [CommonModule],
|
||||
templateUrl: './header.component.html'
|
||||
})
|
||||
export class HeaderComponent {
|
||||
@Input() onlineDevices = 0;
|
||||
@Input() stations: Station[] = [];
|
||||
@Output() selectStation = new EventEmitter<{ stationId: string, hex: string }>();
|
||||
|
||||
modalOpen = false;
|
||||
currentPage = 1;
|
||||
itemsPerPage = 10;
|
||||
|
||||
get wh65Count(): number { return (this.stations||[]).filter(s => s.device_type === 'WH65LP').length; }
|
||||
get totalPages(): number { return Math.max(1, Math.ceil(this.stations.length / this.itemsPerPage)); }
|
||||
get pageItems(): Station[] {
|
||||
const start = (this.currentPage - 1) * this.itemsPerPage;
|
||||
return this.stations.slice(start, start + this.itemsPerPage);
|
||||
}
|
||||
|
||||
open() { this.modalOpen = true; this.currentPage = 1; }
|
||||
close() { this.modalOpen = false; }
|
||||
prev() { if (this.currentPage > 1) this.currentPage--; }
|
||||
next() { if (this.currentPage < this.totalPages) this.currentPage++; }
|
||||
|
||||
hexSuffix(s: Station): string {
|
||||
const id = s.station_id || '';
|
||||
if (id.length <= 6) return id.toUpperCase();
|
||||
return id.slice(-6).toUpperCase();
|
||||
}
|
||||
|
||||
isOnline(s: Station): boolean {
|
||||
const lu = (s as any).last_update as string | undefined;
|
||||
if (!lu) return false;
|
||||
const t = new Date(lu.replace(' ', 'T')).getTime();
|
||||
if (isNaN(t)) return false;
|
||||
return Date.now() - t <= 5 * 60 * 1000;
|
||||
}
|
||||
|
||||
choose(s: Station) {
|
||||
const hex = this.hexSuffix(s);
|
||||
if (hex) this.selectStation.emit({ stationId: s.station_id, hex });
|
||||
this.close();
|
||||
}
|
||||
}
|
||||
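When a device is chosen, the header emits only the last six hex characters of its id, and main.ts (shown later in this diff) rebuilds the full id by prefixing `RS485-`. A small sketch of that round trip; the sample id is fabricated and the standalone functions simply mirror `hexSuffix()` above and `makeStationIdFromHex()` below.

```ts
// Mirrors HeaderComponent.hexSuffix() and AppComponent.makeStationIdFromHex().
function hexSuffix(stationId: string): string {
  const id = stationId || '';
  return (id.length <= 6 ? id : id.slice(-6)).toUpperCase();
}

function makeStationIdFromHex(hexRaw: string): string | null {
  if (!hexRaw) return null;
  const hex = String(hexRaw).toUpperCase().replace(/[^0-9A-F]/g, '').padStart(6, '0').slice(-6);
  return hex ? `RS485-${hex}` : null;
}

// Round trip with a made-up device id.
const hex = hexSuffix('RS485-ab12ef');   // "AB12EF"
console.log(makeStationIdFromHex(hex));  // "RS485-AB12EF"
```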
36
core/frontend/src/app/table-panel.component.html
Normal file
@@ -0,0 +1,36 @@
|
||||
<div class="table-container show" style="margin-top:16px; border:1px solid #ddd; border-radius:4px; background:#fff;">
|
||||
<div style="padding:8px 12px; font-size:12px; color:#666; display:flex; align-items:center; gap:8px;">
|
||||
<label style="display:flex; align-items:center; gap:6px;">
|
||||
<input type="checkbox" [(ngModel)]="showPastForecast" (ngModelChange)="build()"> 显示历史预报
|
||||
</label>
|
||||
</div>
|
||||
<div style="overflow-x:auto;">
|
||||
<table class="min-w-full text-sm text-center" style="width:100%; border-collapse:collapse;">
|
||||
<thead>
|
||||
<tr>
|
||||
<th *ngFor="let h of headers" style="border:1px solid #ddd; padding:12px 8px; background:#f8f9fa; font-weight:bold;">{{h}}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr *ngFor="let r of rows" [style.background-color]="r.isForecast ? 'rgba(255,165,0,0.08)' : ''">
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">
|
||||
<div>
|
||||
{{ r.date_time }}
|
||||
<span *ngIf="r.isForecast" class="text-xs text-orange-500">[预报]</span>
|
||||
</div>
|
||||
<div *ngIf="r.isForecast" class="text-xs text-gray-500">(发布: {{ r.issued_hhmm || '-' }}{{ r.lead_hours!=null ? ' +' + r.lead_hours + 'h' : '' }})</div>
|
||||
</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.temperature}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.humidity}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.pressure}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.wind_speed}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.wind_direction}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.rainfall}}</td>
|
||||
<td *ngIf="headers.includes('降水概率 (%)')" style="border:1px solid #ddd; padding:12px 8px;">{{r.precip_prob ?? '-'}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.light}}</td>
|
||||
<td style="border:1px solid #ddd; padding:12px 8px;">{{r.uv}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
66
core/frontend/src/app/table-panel.component.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { Component, Input, OnChanges } from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import type { ForecastPoint, WeatherPoint } from './api.service';
|
||||
|
||||
import { FormsModule } from '@angular/forms';
|
||||
|
||||
@Component({
|
||||
selector: 'table-panel',
|
||||
standalone: true,
|
||||
imports: [CommonModule, FormsModule],
|
||||
templateUrl: './table-panel.component.html'
|
||||
})
|
||||
export class TablePanelComponent implements OnChanges {
|
||||
@Input() history: WeatherPoint[] = [];
|
||||
@Input() forecast: ForecastPoint[] = [];
|
||||
@Input() showPastForecast = false;
|
||||
@Input() endDate: string = '';
|
||||
|
||||
headers: string[] = ['时间','温度 (°C)','湿度 (%)','气压 (hPa)','风速 (m/s)','风向 (°)','雨量 (mm)'];
|
||||
rows: any[] = [];
|
||||
hasForecast = false;
|
||||
|
||||
ngOnChanges(): void { this.build(); }
|
||||
|
||||
build() {
|
||||
const endTs = this.endDate ? new Date(this.endDate).getTime() : Date.now();
|
||||
const future3h = endTs + 3*60*60*1000;
|
||||
const filteredFcst = (Array.isArray(this.forecast)?this.forecast:[]).filter(it => {
|
||||
const t = new Date(it.date_time.replace(' ', 'T')).getTime();
|
||||
const isFuture3h = t > endTs && t <= future3h;
|
||||
const isPast = t <= endTs;
|
||||
return isFuture3h || (this.showPastForecast && isPast);
|
||||
}).map(it => ({...it, __source: '预报'}));
|
||||
|
||||
const hist = (Array.isArray(this.history)?this.history:[]).map(it => ({...it, __source: '实测'}));
|
||||
const all: any[] = [...hist as any[], ...filteredFcst as any[]];
|
||||
this.hasForecast = filteredFcst.length > 0;
|
||||
this.headers = ['时间','温度 (°C)','湿度 (%)','气压 (hPa)','风速 (m/s)','风向 (°)','雨量 (mm)'];
|
||||
if (this.hasForecast) this.headers = [...this.headers, '降水概率 (%)'];
|
||||
this.headers = [...this.headers, '光照 (lux)','紫外线'];
|
||||
|
||||
const fmt = (v: any, d: number) => (v===null || v===undefined || v==='' || isNaN(Number(v))) ? '' : Number(v).toFixed(d);
|
||||
this.rows = all.sort((a,b)=> new Date(b.date_time).getTime() - new Date(a.date_time).getTime()).map(row => ({
|
||||
date_time: row.date_time,
|
||||
isForecast: row.__source==='预报',
|
||||
issued_at: (row as any).issued_at,
|
||||
issued_hhmm: (()=>{
|
||||
const ia = (row as any).issued_at as string | undefined;
|
||||
if (!ia) return null;
|
||||
// expected format: YYYY-MM-DD HH:MM:SS
|
||||
const t = ia.includes(' ') ? ia.split(' ')[1] : ia;
|
||||
return t ? t.substring(0,5) : null;
|
||||
})(),
|
||||
lead_hours: (row as any).lead_hours,
|
||||
temperature: fmt(row.temperature,1),
|
||||
humidity: fmt(row.humidity,1),
|
||||
pressure: fmt(row.pressure,1),
|
||||
wind_speed: fmt(row.wind_speed,1),
|
||||
wind_direction: fmt(row.wind_direction,0),
|
||||
rainfall: fmt(row.rainfall,2),
|
||||
precip_prob: (row.__source==='预报' && (row as any).precip_prob!=null) ? (row as any).precip_prob : null,
|
||||
light: fmt(row.light,0),
|
||||
uv: fmt(row.uv,1)
|
||||
}));
|
||||
}
|
||||
}
|
||||
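`build()` above keeps forecast rows that fall within three hours after the selected end time, plus past forecasts only when the checkbox is ticked. A compact sketch of that time-window filter with fabricated timestamps; `filterForecast` is an illustrative standalone version of the same rule, not an export of the component.

```ts
// Same window rule as TablePanelComponent.build() above.
type Fcst = { date_time: string };

function filterForecast(rows: Fcst[], endDate: string, showPast: boolean): Fcst[] {
  const endTs = new Date(endDate.replace(' ', 'T')).getTime();
  const future3h = endTs + 3 * 60 * 60 * 1000;
  return rows.filter(r => {
    const t = new Date(r.date_time.replace(' ', 'T')).getTime();
    const isFuture3h = t > endTs && t <= future3h;
    return isFuture3h || (showPast && t <= endTs);
  });
}

// With end = 12:00, only rows in (12:00, 15:00] survive unless past forecasts are requested.
const rows = ['2024-06-01 10:00:00', '2024-06-01 13:00:00', '2024-06-01 16:00:00']
  .map(date_time => ({ date_time }));
console.log(filterForecast(rows, '2024-06-01 12:00:00', false).map(r => r.date_time)); // ["2024-06-01 13:00:00"]
console.log(filterForecast(rows, '2024-06-01 12:00:00', true).length);                 // 2
```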
16
core/frontend/src/index.html
Normal file
@@ -0,0 +1,16 @@
|
||||
<!doctype html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>英卓气象站</title>
|
||||
<base href="/ui/">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="stylesheet" href="/static/css/ol.css">
|
||||
<link rel="stylesheet" href="/static/css/tailwind.min.css">
|
||||
</head>
|
||||
<body>
|
||||
<app-root>Loading...</app-root>
|
||||
<script src="/static/js/chart.js"></script>
|
||||
<script src="/static/js/ol.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
665
core/frontend/src/main.ts
Normal file
@@ -0,0 +1,665 @@
|
||||
import { bootstrapApplication } from '@angular/platform-browser';
|
||||
import { Component, OnInit, AfterViewInit } from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { FormsModule } from '@angular/forms';
|
||||
import { ApiService, ForecastPoint, WeatherPoint } from './app/api.service';
|
||||
import { ChartPanelComponent } from './app/chart-panel.component';
|
||||
import { TablePanelComponent } from './app/table-panel.component';
|
||||
import { HeaderComponent } from './app/header.component';
|
||||
|
||||
type Station = {
|
||||
station_id: string;
|
||||
decimal_id?: string;
|
||||
latitude?: number;
|
||||
longitude?: number;
|
||||
location?: string;
|
||||
device_type?: string;
|
||||
last_update?: string;
|
||||
name?: string;
|
||||
station_alias?: string;
|
||||
};
|
||||
|
||||
@Component({
|
||||
selector: 'app-root',
|
||||
standalone: true,
|
||||
imports: [CommonModule, FormsModule, ChartPanelComponent, TablePanelComponent, HeaderComponent],
|
||||
templateUrl: './app.component.html',
|
||||
})
|
||||
export class AppComponent implements OnInit, AfterViewInit {
|
||||
constructor(private api: ApiService) {}
|
||||
|
||||
onlineDevices = 0;
|
||||
serverTime = '';
|
||||
stations: Station[] = [];
|
||||
mapType = 'satellite';
|
||||
|
||||
decimalId = '';
|
||||
interval = '1hour';
|
||||
start = '';
|
||||
end = '';
|
||||
// Default: 英卓 (Imdroid) V4 provider
|
||||
provider = 'imdroid_mix';
|
||||
legendMode = 'combo_standard';
|
||||
showPastForecast = false;
|
||||
showPanels = false;
|
||||
selectedLocation = '';
|
||||
selectedTitle = '';
|
||||
isLoading = false;
|
||||
|
||||
history: WeatherPoint[] = [];
|
||||
forecast: ForecastPoint[] = [];
|
||||
|
||||
private map: any;
|
||||
private layers: any = {};
|
||||
private stationSource: any;
|
||||
private clusterSource: any;
|
||||
private stationLayer: any;
|
||||
private clusterLayer: any;
|
||||
private kmlLayer: any;
|
||||
private kmlOverlay: any;
|
||||
private CLUSTER_THRESHOLD = 10;
|
||||
private tileOverlayGroup: any;
|
||||
private windOverlayLayer: any;
|
||||
private tileLastList: any[] = [];
|
||||
private refreshTimer: any;
|
||||
private mapEventsBound = false;
|
||||
tileTimes: string[] = [];
|
||||
tileIndex = -1;
|
||||
tileZ = 7; tileY = 40; tileX = 102;
|
||||
tileDt = '';
|
||||
tileProduct: 'none'|'radar'|'radar_detail'|'rain' = 'radar';
|
||||
isMapCollapsed = false;
|
||||
kmlInfoTitle = '';
|
||||
kmlInfoHtml = '';
|
||||
isKmlDialogOpen = false;
|
||||
|
||||
async ngOnInit() {
|
||||
await Promise.all([this.loadStatus(), this.loadStations()]);
|
||||
const now = new Date();
|
||||
const pad = (n:number)=> String(n).padStart(2,'0');
|
||||
const toLocal = (d: Date) => `${d.getFullYear()}-${pad(d.getMonth()+1)}-${pad(d.getDate())}T${pad(d.getHours())}:${pad(d.getMinutes())}`;
|
||||
const end = new Date(now.getTime() + 4*3600*1000); // 4 hours after the current time
|
||||
const start = new Date(now.getTime() - 24*3600*1000); // 24 hours before the current time
|
||||
this.end = toLocal(end);
|
||||
this.start = toLocal(start);
|
||||
|
||||
// Check and refresh data (status and stations) every 10 minutes, without reloading the page
|
||||
this.refreshTimer = setInterval(() => { this.refreshDataTick(); }, 10 * 60 * 1000);
|
||||
}
|
||||
|
||||
ngAfterViewInit() { this.initMap(); this.reloadTileTimesAndShow(); }
|
||||
|
||||
private async loadStatus() {
|
||||
const s = await this.api.getStatus();
|
||||
if (s) { this.onlineDevices = s.online_devices || 0; this.serverTime = s.server_time || ''; }
|
||||
}
|
||||
|
||||
private async loadStations() { this.stations = await this.api.getStations(); this.updateStationsOnMap(); }
|
||||
|
||||
private async refreshDataTick() {
|
||||
try {
|
||||
await Promise.all([this.loadStatus(), this.loadStations()]);
|
||||
// If a device is selected and the charts are shown, also refresh history and forecast data (without triggering scroll/animation)
|
||||
if (this.showPanels && this.decimalId) {
|
||||
await this.query(true);
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
private getTiandituKey(): string {
|
||||
const anyWin = (window as any);
|
||||
return anyWin.TIANDITU_KEY || '0c260b8a094a4e0bc507808812cefdac';
|
||||
}
|
||||
|
||||
get wh65lpCount(): number {
|
||||
return (this.stations || []).filter(s => s.device_type === 'WH65LP').length;
|
||||
}
|
||||
|
||||
private initMap() {
|
||||
const ol: any = (window as any).ol; if (!ol) return;
|
||||
const tk = this.getTiandituKey();
|
||||
const mkLayer = (url: string) => new ol.layer.Tile({ source: new ol.source.XYZ({ url }) });
|
||||
this.layers = {
|
||||
satellite: new ol.layer.Group({ layers: [
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/img_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=img&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`),
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/cia_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cia&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`)
|
||||
]}),
|
||||
vector: new ol.layer.Group({ layers: [
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/vec_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=vec&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`),
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/cva_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cva&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`)
|
||||
], visible: false}),
|
||||
terrain: new ol.layer.Group({ layers: [
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/ter_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=ter&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`),
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/cta_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cta&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`)
|
||||
], visible: false}),
|
||||
hybrid: new ol.layer.Group({ layers: [
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/img_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=img&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`),
|
||||
mkLayer(`https://t{0-7}.tianditu.gov.cn/cia_w/wmts?SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=cia&STYLE=default&TILEMATRIXSET=w&FORMAT=tiles&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}&tk=${tk}`)
|
||||
], visible: false})
|
||||
};
|
||||
|
||||
this.stationSource = new ol.source.Vector();
|
||||
this.clusterSource = new ol.source.Cluster({ distance: 60, minDistance: 20, source: this.stationSource });
|
||||
this.clusterLayer = new ol.layer.Vector({ source: this.clusterSource, style: (f:any)=> this.createClusterStyle(f) });
|
||||
this.stationLayer = new ol.layer.Vector({ source: this.stationSource, visible: false, style: (f:any)=> this.createStationStyle(f) });
|
||||
this.tileOverlayGroup = new ol.layer.Group({ layers: [], zIndex: 999, visible: true });
|
||||
this.windOverlayLayer = new ol.layer.Vector({ source: new ol.source.Vector(), zIndex: 1000, visible: true });
|
||||
|
||||
// Load KML overlay from /static/kml/selected_polygons.kml
|
||||
try {
|
||||
const kmlSource = new ol.source.Vector({
|
||||
url: '/static/kml/selected_polygons.kml',
|
||||
format: new ol.format.KML({ extractStyles: true })
|
||||
});
|
||||
this.kmlLayer = new ol.layer.Vector({ source: kmlSource, zIndex: 800, visible: true });
|
||||
} catch {}
|
||||
|
||||
this.map = new ol.Map({ target: 'map', layers: [
|
||||
this.layers.satellite,
|
||||
this.layers.vector,
|
||||
this.layers.terrain,
|
||||
this.layers.hybrid,
|
||||
this.kmlLayer,
|
||||
this.tileOverlayGroup,
|
||||
this.windOverlayLayer,
|
||||
this.clusterLayer,
|
||||
this.stationLayer
|
||||
], view: new ol.View({ center: ol.proj.fromLonLat([108, 35]), zoom: 5, minZoom: 3, maxZoom: 18 }) });
|
||||
|
||||
// KML details are shown in a page-level dialog with a full-screen overlay
|
||||
|
||||
this.map.getView().on('change:resolution', () => {
|
||||
const z = this.map.getView().getZoom();
|
||||
this.updateClusterDistance(z);
|
||||
this.updateLayerVisibility(z);
|
||||
});
|
||||
|
||||
if (this.stations?.length) this.updateStationsOnMap();
|
||||
}
|
||||
|
||||
openKmlPopup(feature: any, coordinate: any) {
|
||||
try {
|
||||
const name = feature?.get ? (feature.get('name') || '') : '';
|
||||
let desc = feature?.get ? (feature.get('description') || '') : '';
|
||||
// Cleanup KML-wrapped CDATA and decode HTML entities
|
||||
try {
|
||||
desc = String(desc);
|
||||
desc = desc.replace(/^<!\[CDATA\[/, '').replace(/\]\]>$/, '');
|
||||
const ta = document.createElement('textarea'); ta.innerHTML = desc; desc = ta.value;
|
||||
} catch {}
|
||||
this.kmlInfoTitle = String(name || '详情');
|
||||
this.kmlInfoHtml = String(desc || '');
|
||||
// Display via the page-level modal dialog
|
||||
this.isKmlDialogOpen = true;
|
||||
} catch {}
|
||||
}
|
||||
|
||||
closeKmlPopup() {
|
||||
try {
|
||||
this.isKmlDialogOpen = false;
|
||||
} catch {}
|
||||
}
|
||||
|
||||
switchLayer(layerType: string) {
|
||||
const layers = this.layers; if (!layers) return;
|
||||
Object.keys(layers).forEach(key => { if (layers[key].setVisible) layers[key].setVisible(key === layerType); });
|
||||
}
|
||||
|
||||
private updateClusterDistance(zoom: number) {
|
||||
if (!this.clusterSource) return;
|
||||
const distance = zoom < this.CLUSTER_THRESHOLD ? 60 : 20;
|
||||
this.clusterSource.setDistance(distance);
|
||||
}
|
||||
|
||||
private updateLayerVisibility(zoom: number) {
|
||||
if (!this.clusterLayer || !this.stationLayer) return;
|
||||
const showCluster = zoom < this.CLUSTER_THRESHOLD;
|
||||
this.clusterLayer.setVisible(showCluster);
|
||||
this.stationLayer.setVisible(!showCluster);
|
||||
}
|
||||
|
||||
private markerIcon(isOnline: boolean) {
|
||||
const ol: any = (window as any).ol;
|
||||
const src = isOnline ? '/static/images/marker-online.svg' : '/static/images/marker-offline.svg';
|
||||
return new ol.style.Icon({ src, anchor: [0.5,1], anchorXUnits: 'fraction', anchorYUnits: 'fraction', scale: 0.9 });
|
||||
}
|
||||
|
||||
private createStationStyle(feature: any) {
|
||||
const ol: any = (window as any).ol;
|
||||
const last = feature.get('lastUpdate');
|
||||
const online = last ? (new Date(last).getTime() > Date.now() - 5*60*1000) : false;
|
||||
const location = feature.get('location') || '';
|
||||
return new ol.style.Style({
|
||||
image: this.markerIcon(online),
|
||||
text: location ? new ol.style.Text({ text: location, offsetY: -28, fill: new ol.style.Fill({ color: '#111' }), stroke: new ol.style.Stroke({ color: '#fff', width: 3 }), font: '12px sans-serif' }) : undefined
|
||||
});
|
||||
}
|
||||
|
||||
private createClusterStyle(feature: any) {
|
||||
const ol: any = (window as any).ol;
|
||||
const features = feature.get('features') || [];
|
||||
const size = features.length;
|
||||
const zoom = this.map.getView().getZoom();
|
||||
if (zoom < this.CLUSTER_THRESHOLD) {
|
||||
if (size > 1) {
|
||||
const radius = Math.min(16 + size * 0.8, 32);
|
||||
const fontSize = Math.min(11 + size/12, 16);
|
||||
return new ol.style.Style({
|
||||
image: new ol.style.Circle({ radius, fill: new ol.style.Fill({ color: 'rgba(0,123,255,0.8)' }), stroke: new ol.style.Stroke({ color: '#fff', width: 2 }) }),
|
||||
text: new ol.style.Text({ text: String(size), fill: new ol.style.Fill({ color: '#fff' }), font: `bold ${fontSize}px Arial`, offsetY: 1 })
|
||||
});
|
||||
} else {
|
||||
const f0 = features[0];
|
||||
const last = f0?.get('lastUpdate');
|
||||
const online = last ? (new Date(last).getTime() > Date.now() - 5*60*1000) : false;
|
||||
const color = online ? 'rgba(0,123,255,0.8)' : 'rgba(108,117,125,0.8)';
|
||||
return new ol.style.Style({
|
||||
image: new ol.style.Circle({ radius: 6, fill: new ol.style.Fill({ color }), stroke: new ol.style.Stroke({ color: '#fff', width: 2 }) })
|
||||
});
|
||||
}
|
||||
}
|
||||
return this.createStationStyle(features[0]);
|
||||
}
|
||||
|
||||
private updateStationsOnMap() {
|
||||
const ol: any = (window as any).ol; if (!ol || !this.stationSource) return;
|
||||
this.stationSource.clear();
|
||||
(this.stations||[]).forEach(s => {
|
||||
if (typeof s.longitude !== 'number' || typeof s.latitude !== 'number') return;
|
||||
const f = new ol.Feature({ geometry: new ol.geom.Point(ol.proj.fromLonLat([s.longitude, s.latitude])), lastUpdate: (s as any).last_update || '', stationId: s.station_id, location: s.location || '' });
|
||||
this.stationSource.addFeature(f);
|
||||
});
|
||||
// click to select
|
||||
if (this.map && !this.mapEventsBound) {
|
||||
this.map.on('singleclick', async (evt:any) => {
|
||||
// First, try to hit-test KML features
|
||||
try {
|
||||
let handledKml = false;
|
||||
this.map.forEachFeatureAtPixel(evt.pixel, (f:any, layer:any) => {
|
||||
if (this.kmlLayer && layer === this.kmlLayer) {
|
||||
this.openKmlPopup(f, evt.coordinate);
|
||||
handledKml = true;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}, { layerFilter: (l:any)=> l===this.kmlLayer, hitTolerance: 6 });
|
||||
if (handledKml) return;
|
||||
} catch {}
|
||||
// Then handle station/cluster clicks
|
||||
const olAny: any = (window as any).ol;
|
||||
const features = this.map.getFeaturesAtPixel(evt.pixel, { layerFilter: (l:any)=> l===this.stationLayer || l===this.clusterLayer });
|
||||
if (!features || features.length===0) return;
|
||||
let f = features[0];
|
||||
const subs = f.get('features');
|
||||
if (subs && subs.length>0) {
|
||||
const view = this.map.getView();
|
||||
const ex = olAny.extent.createEmpty();
|
||||
for (const sf of subs) {
|
||||
olAny.extent.extend(ex, sf.getGeometry().getExtent());
|
||||
}
|
||||
view.fit(ex, { duration: 300, maxZoom: 14, padding: [40,40,40,40] });
|
||||
return;
|
||||
}
|
||||
const sid = f.get('stationId');
|
||||
if (!sid) return;
|
||||
const hex = String(sid).slice(-6).toUpperCase();
|
||||
this.decimalId = hex;
|
||||
await this.query();
|
||||
this.scrollToChart();
|
||||
});
|
||||
this.map.on('pointermove', (evt:any) => {
|
||||
const features = this.map.getFeaturesAtPixel(evt.pixel, { layerFilter: (l:any)=> l===this.stationLayer || l===this.clusterLayer || l===this.kmlLayer });
|
||||
const el = this.map.getTargetElement();
|
||||
if (el) el.style.cursor = (features && features.length>0) ? 'pointer' : '';
|
||||
this.showTileTooltip(evt);
|
||||
});
|
||||
this.mapEventsBound = true;
|
||||
}
|
||||
}
|
||||
|
||||
async loadTileTimes(product: 'radar'|'rain'|'radar_detail') {
|
||||
try {
|
||||
const params = new URLSearchParams({ z: String(this.tileZ), y: String(this.tileY), x: String(this.tileX) });
|
||||
// If start/end times are set, query by time range; otherwise fetch the most recent 'limit' entries
|
||||
const toFmt = (s: string) => s.replace('T',' ') + ':00';
|
||||
if (this.start && this.end) {
|
||||
params.set('from', toFmt(this.start));
|
||||
params.set('to', toFmt(this.end));
|
||||
} else {
|
||||
params.set('limit', '60');
|
||||
}
|
||||
const path = product==='rain' ? '/api/rain/times' : '/api/radar/times';
|
||||
const r = await fetch(`${path}?${params.toString()}`);
|
||||
if (!r.ok) return;
|
||||
const j = await r.json();
|
||||
this.tileTimes = j.times || [];
|
||||
this.tileIndex = 0;
|
||||
this.tileDt = this.tileTimes[0] || '';
|
||||
if (this.tileDt) await this.renderTilesAt(this.tileDt);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
async renderTilesAt(dt: string) {
|
||||
try {
|
||||
const params = new URLSearchParams({ z: String(this.tileZ), dt: dt });
|
||||
const isRain = this.tileProduct === 'rain';
|
||||
const path = isRain ? '/api/rain/tiles_at' : '/api/radar/tiles_at';
|
||||
const r = await fetch(`${path}?${params.toString()}`);
|
||||
if (!r.ok) { this.clearTileOverlays(); return; }
|
||||
const j = await r.json();
|
||||
const tiles = Array.isArray(j.tiles) ? j.tiles : [];
|
||||
if (tiles.length === 0) { this.clearTileOverlays(); this.clearWindOverlays(); return; }
|
||||
await this.renderTilesOnMap(isRain ? 'rain' : 'radar', tiles);
|
||||
// Sync tileIndex with the selected timestamp
|
||||
const idx = this.tileTimes.findIndex(t => t === dt);
|
||||
if (idx >= 0) { this.tileIndex = idx; this.tileDt = dt; }
|
||||
if (!isRain && this.tileProduct === 'radar_detail') {
|
||||
this.drawWindOverlays(dt);
|
||||
} else {
|
||||
this.clearWindOverlays();
|
||||
}
|
||||
} catch { this.clearTileOverlays(); }
|
||||
}
|
||||
|
||||
clearTileOverlays() {
|
||||
if (!this.tileOverlayGroup) return;
|
||||
const coll = this.tileOverlayGroup.getLayers();
|
||||
if (coll) coll.clear();
|
||||
}
|
||||
|
||||
clearWindOverlays() {
|
||||
const ol: any = (window as any).ol; if (!ol || !this.windOverlayLayer) return;
|
||||
const src = this.windOverlayLayer.getSource();
|
||||
if (src) src.clear();
|
||||
try { const box = document.getElementById('regionStats'); if (box) box.style.display='none'; } catch {}
|
||||
}
|
||||
|
||||
private getSelectedStation(): Station | undefined {
|
||||
try {
|
||||
const sid = this.makeStationIdFromHex(this.decimalId || '');
|
||||
if (!sid) return undefined;
|
||||
return this.stations.find(s => s.station_id === sid);
|
||||
} catch { return undefined; }
|
||||
}
|
||||
|
||||
private async drawWindOverlays(dtStr: string) {
|
||||
const ol: any = (window as any).ol; if (!ol || !this.map || !this.windOverlayLayer) return;
|
||||
const src = this.windOverlayLayer.getSource(); if (!src) return;
|
||||
src.clear();
|
||||
// Pick the center: prefer the currently selected station, otherwise the map center
|
||||
let centerLon = 108, centerLat = 35;
|
||||
const st = this.getSelectedStation();
|
||||
if (st && typeof st.longitude==='number' && typeof st.latitude==='number') { centerLon = st.longitude!; centerLat = st.latitude!; }
|
||||
else {
|
||||
try {
|
||||
const c = this.map.getView().getCenter();
|
||||
const lonlat = ol.proj.toLonLat(c, this.map.getView().getProjection());
|
||||
centerLon = lonlat[0]; centerLat = lonlat[1];
|
||||
} catch {}
|
||||
}
|
||||
// Ask the backend for the nearest radar_weather wind data for this lat/lon and time
|
||||
let windDir: number|null = null, windSpd: number|null = null;
|
||||
try {
|
||||
const params = new URLSearchParams({ lat: String(centerLat), lon: String(centerLon), dt: dtStr });
|
||||
const r = await fetch(`/api/radar/weather_nearest?${params.toString()}`);
|
||||
if (r.ok) {
|
||||
const j = await r.json();
|
||||
if (j && j.wind_direction != null) windDir = Number(j.wind_direction);
|
||||
if (j && j.wind_speed != null) windSpd = Number(j.wind_speed);
|
||||
if (isFinite(windDir as any)) windDir = ((windDir as number) % 360 + 360) % 360;
|
||||
if (!isFinite(windSpd as any)) windSpd = null;
|
||||
}
|
||||
} catch {}
|
||||
const proj = this.map.getView().getProjection();
|
||||
const center = ol.proj.fromLonLat([centerLon, centerLat], proj);
|
||||
|
||||
// 8 km circle (red dashed outline)
|
||||
try {
|
||||
const circle = new ol.geom.Circle(center, 8000);
|
||||
const cf = new ol.Feature({ geometry: circle });
|
||||
cf.setStyle(new ol.style.Style({ stroke: new ol.style.Stroke({ color: 'rgba(255,0,0,0.95)', width: 2, lineDash: [6,4] }), fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.03)' }) }));
|
||||
src.addFeature(cf);
|
||||
} catch {}
|
||||
|
||||
if (windDir != null && windSpd != null && windSpd > 0.01) {
|
||||
const bearingTo = windDir;
|
||||
const hours = 3;
|
||||
const radius = windSpd * 3600 * hours; // m
|
||||
const half = 25; // sector half-angle in degrees
|
||||
const pts: number[][] = [];
|
||||
pts.push(center);
|
||||
for (let a = -half; a <= half; a += 2.5) {
|
||||
const ang = (bearingTo + a) * Math.PI / 180;
|
||||
const dx = radius * Math.sin(ang);
|
||||
const dy = radius * Math.cos(ang);
|
||||
pts.push([center[0] + dx, center[1] + dy]);
|
||||
}
|
||||
pts.push(center);
|
||||
const poly = new ol.geom.Polygon([pts]);
|
||||
const pf = new ol.Feature({ geometry: poly });
|
||||
pf.setStyle(new ol.style.Style({ stroke: new ol.style.Stroke({ color: 'rgba(255,0,0,0.95)', width: 2, lineDash: [6,4] }), fill: new ol.style.Fill({ color: 'rgba(255,0,0,0.05)' }) }));
|
||||
src.addFeature(pf);
|
||||
|
||||
// Count strong-echo pixels inside the sector (based on the currently loaded radar tiles)
|
||||
try {
|
||||
const polyLonLat: [number,number][] = pts.map(p => {
|
||||
const lonlat = ol.proj.toLonLat(p, this.map.getView().getProjection());
|
||||
return [lonlat[0], lonlat[1]];
|
||||
});
|
||||
const counts = this.countDbzInPolygon(polyLonLat);
|
||||
this.updateRegionStats(counts, windDir, windSpd);
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
|
||||
private updateRegionStats(counts?: { ge30:number; ge35:number; ge40:number } | null, windDir?: number|null, windSpd?: number|null) {
|
||||
try {
|
||||
const box = document.getElementById('regionStats');
|
||||
const s30 = document.getElementById('statDbz30');
|
||||
const s35 = document.getElementById('statDbz35');
|
||||
const s40 = document.getElementById('statDbz40');
|
||||
const sDir = document.getElementById('statWindDir');
|
||||
const sSpd = document.getElementById('statWindSpd');
|
||||
if (!box || !s30 || !s35 || !s40 || !sDir || !sSpd) return;
|
||||
if (!counts) { box.style.display='none'; return; }
|
||||
s30.textContent = String(counts.ge30);
|
||||
s35.textContent = String(counts.ge35);
|
||||
s40.textContent = String(counts.ge40);
|
||||
if (windDir != null && isFinite(windDir)) { sDir.textContent = String(Math.round(windDir)); } else { sDir.textContent = '--'; }
|
||||
if (windSpd != null && isFinite(windSpd)) { sSpd.textContent = String(Math.round((windSpd as number)*10)/10); } else { sSpd.textContent = '--'; }
|
||||
box.style.display = 'block';
|
||||
} catch {}
|
||||
}
|
||||
|
||||
private countDbzInPolygon(poly: [number,number][]): { ge30:number; ge35:number; ge40:number } | null {
|
||||
// Requires radar tile data to already be loaded
|
||||
const tiles = (this.tileLastList || []).filter(it => it && it.product==='radar');
|
||||
if (!tiles.length) return null;
|
||||
const ptInPoly = (x:number,y:number, polygon:[number,number][]) => {
|
||||
// ray casting
|
||||
let inside = false;
|
||||
for (let i=0, j=polygon.length-1; i<polygon.length; j=i++) {
|
||||
const xi = polygon[i][0], yi = polygon[i][1];
|
||||
const xj = polygon[j][0], yj = polygon[j][1];
|
||||
const intersect = ((yi>y) !== (yj>y)) && (x < (xj - xi) * (y - yi) / ((yj - yi) || 1e-9) + xi);
|
||||
if (intersect) inside = !inside;
|
||||
}
|
||||
return inside;
|
||||
};
|
||||
let c30=0, c35=0, c40=0;
|
||||
for (const t of tiles) {
|
||||
const { west, south, east, north, width, height } = t.meta;
|
||||
const dlon = (east - west) / width;
|
||||
const dlat = (north - south) / height;
|
||||
const vals: (number|null)[][] = t.values || [];
|
||||
for (let row=0; row<height; row++) {
|
||||
const lat = south + (row + 0.5) * dlat;
|
||||
const srcRow = vals[row] as (number|null)[];
|
||||
if (!srcRow) continue;
|
||||
for (let col=0; col<width; col++) {
|
||||
const v = srcRow[col];
|
||||
if (v == null || v < 30) continue; // below 30 dBZ, skip the point-in-polygon test
|
||||
const lon = west + (col + 0.5) * dlon;
|
||||
if (!ptInPoly(lon, lat, poly)) continue;
|
||||
if (v >= 30) c30++;
|
||||
if (v >= 35) c35++;
|
||||
if (v >= 40) c40++;
|
||||
}
|
||||
}
|
||||
}
|
||||
return { ge30: c30, ge35: c35, ge40: c40 };
|
||||
}
|
||||
|
||||
addImageOverlayFromCanvas(canvas: HTMLCanvasElement, extent4326: [number,number,number,number]) {
|
||||
const ol: any = (window as any).ol; if (!ol || !this.map) return;
|
||||
const proj = this.map.getView().getProjection();
|
||||
const extentProj = ol.proj.transformExtent(extent4326, 'EPSG:4326', proj);
|
||||
const src = new ol.source.ImageStatic({ url: canvas.toDataURL('image/png'), imageExtent: extentProj, projection: proj });
|
||||
const layer = new ol.layer.Image({ source: src, opacity: 0.8, visible: true });
|
||||
this.tileOverlayGroup.getLayers().push(layer);
|
||||
}
|
||||
|
||||
async renderTilesOnMap(product: 'none'|'radar'|'rain', tiles: any[]) {
|
||||
this.clearTileOverlays();
|
||||
const lastList: any[] = [];
|
||||
for (const t of tiles) {
|
||||
const w = t.width, h = t.height; if (!w||!h||!t.values) continue;
|
||||
const canvas = document.createElement('canvas'); canvas.width=w; canvas.height=h;
|
||||
const ctx = canvas.getContext('2d')!; const img = ctx.createImageData(w,h);
|
||||
const radarColors = [[0,0,255],[0,191,255],[0,255,255],[127,255,212],[124,252,0],[173,255,47],[255,255,0],[255,215,0],[255,165,0],[255,140,0],[255,69,0],[255,0,0],[220,20,60],[199,21,133],[139,0,139]];
|
||||
const rainEdges = [0,5,7.5,10,12.5,15,17.5,20,25,30,40,50,75,100, Infinity];
|
||||
const rainColors = [
|
||||
[126,212,121], [126,212,121], [110,200,109], [97,169,97], [81,148,76], [90,158,112],
|
||||
[143,194,254], [92,134,245], [66,87,240], [45,48,214], [26,15,166], [63,22,145], [191,70,148], [213,1,146], [213,1,146]
|
||||
];
|
||||
for (let row=0; row<h; row++) {
|
||||
const srcRow = t.values[row] as (number|null)[];
|
||||
const dstRow = (h - 1 - row);
|
||||
for (let col=0; col<w; col++) {
|
||||
const v = srcRow[col];
|
||||
const off = (dstRow*w + col)*4;
|
||||
if (v==null || v===0) { img.data[off+3]=0; continue; }
|
||||
if (product==='rain') {
|
||||
let idx=0; while(idx<rainEdges.length-1 && !(v>=rainEdges[idx] && v<rainEdges[idx+1])) idx++;
|
||||
const c = rainColors[Math.min(idx, rainColors.length-1)]; img.data[off]=c[0]; img.data[off+1]=c[1]; img.data[off+2]=c[2]; img.data[off+3]=220;
|
||||
} else {
|
||||
let bin = Math.floor(Math.max(0, Math.min(75, v))/5); if (bin>=radarColors.length) bin=radarColors.length-1;
|
||||
const c = radarColors[bin]; img.data[off]=c[0]; img.data[off+1]=c[1]; img.data[off+2]=c[2]; img.data[off+3]=220;
|
||||
}
|
||||
}
|
||||
}
|
||||
ctx.putImageData(img,0,0);
|
||||
this.addImageOverlayFromCanvas(canvas, [t.west, t.south, t.east, t.north]);
|
||||
lastList.push({ product, meta: { west: t.west, south: t.south, east: t.east, north: t.north, width: w, height: h }, values: t.values });
|
||||
}
|
||||
this.tileLastList = lastList;
|
||||
}
|
||||
|
||||
onProductChange() { this.reloadTileTimesAndShow(); }
|
||||
async reloadTileTimesAndShow() {
|
||||
if (this.tileProduct==='none') { this.clearTileOverlays(); this.tileTimes=[]; this.tileDt=''; return; }
|
||||
await this.loadTileTimes(this.tileProduct);
|
||||
}
|
||||
|
||||
private makeStationIdFromHex(hexRaw: string): string | null {
|
||||
if (!hexRaw) return null;
|
||||
const hex = String(hexRaw).toUpperCase().replace(/[^0-9A-F]/g, '').padStart(6, '0').slice(-6);
|
||||
if (!hex) return null;
|
||||
return `RS485-${hex}`;
|
||||
}
|
||||
|
||||
async query(suppressUX: boolean = false) {
|
||||
const dec = this.decimalId.trim();
|
||||
if (!dec) return;
|
||||
const sid = this.makeStationIdFromHex(dec);
|
||||
if (!sid) return;
|
||||
const toFmt = (s: string) => s.replace('T',' ') + ':00';
|
||||
const from = toFmt(this.start);
|
||||
const to = toFmt(this.end);
|
||||
this.isLoading = true;
|
||||
try {
|
||||
[this.history, this.forecast] = await Promise.all([
|
||||
this.api.getHistory(dec, from, to, this.interval),
|
||||
this.provider ? this.api.getForecast(sid, from, to, this.provider, 3) : Promise.resolve([])
|
||||
]);
|
||||
} finally {
|
||||
this.isLoading = false;
|
||||
}
|
||||
this.showPanels = true;
|
||||
if (!suppressUX) this.isMapCollapsed = true;
|
||||
const st = this.stations.find(s => s.station_id === sid);
|
||||
const ol: any = (window as any).ol;
|
||||
if (!suppressUX) {
|
||||
if (st && ol && typeof st.longitude === 'number' && typeof st.latitude === 'number' && this.map) {
|
||||
this.map.getView().animate({ center: ol.proj.fromLonLat([st.longitude, st.latitude]), zoom: 11, duration: 400 });
|
||||
}
|
||||
}
|
||||
this.selectedLocation = (st && st.location) ? st.location : '';
|
||||
const titleName = st?.name || st?.station_alias || st?.station_id || '';
|
||||
this.selectedTitle = titleName ? `${titleName}${this.selectedLocation ? ` | ${this.selectedLocation}` : ''}` : (this.selectedLocation || '');
|
||||
this.reloadTileTimesAndShow();
|
||||
if (!suppressUX) {
|
||||
setTimeout(()=>{ try{ this.map.updateSize(); }catch{} }, 300);
|
||||
this.scrollToChart();
|
||||
}
|
||||
}
|
||||
|
||||
onSelectStation(ev: { stationId: string, hex: string }) {
|
||||
this.decimalId = ev.hex;
|
||||
this.query();
|
||||
}
|
||||
|
||||
prevTile() {
|
||||
if (!this.tileTimes || this.tileTimes.length===0) return;
|
||||
if (this.tileIndex < this.tileTimes.length-1) {
|
||||
this.tileIndex += 1;
|
||||
this.tileDt = this.tileTimes[this.tileIndex];
|
||||
this.renderTilesAt(this.tileDt);
|
||||
}
|
||||
}
|
||||
|
||||
nextTile() {
|
||||
if (!this.tileTimes || this.tileTimes.length===0) return;
|
||||
if (this.tileIndex > 0) {
|
||||
this.tileIndex -= 1;
|
||||
this.tileDt = this.tileTimes[this.tileIndex];
|
||||
this.renderTilesAt(this.tileDt);
|
||||
}
|
||||
}
|
||||
|
||||
toggleMap() {
|
||||
this.isMapCollapsed = !this.isMapCollapsed;
|
||||
setTimeout(()=>{ try{ this.map.updateSize(); }catch{} }, 300);
|
||||
}
|
||||
|
||||
private scrollToChart() {
|
||||
const el = document.getElementById('chartSection');
|
||||
if (el) { try { el.scrollIntoView({ behavior: 'smooth', block: 'start' }); } catch {} }
|
||||
}
|
||||
|
||||
private showTileTooltip(evt:any) {
|
||||
const tip = document.getElementById('tileValueTooltip');
|
||||
if (!tip || !this.map || !this.tileLastList || this.tileLastList.length===0) { if (tip) tip.style.display='none'; return; }
|
||||
try {
|
||||
const coord = this.map.getEventCoordinate(evt.originalEvent);
|
||||
const lonlat = (window as any).ol.proj.transform(coord, this.map.getView().getProjection(), 'EPSG:4326');
|
||||
const lon = lonlat[0], lat = lonlat[1];
|
||||
let value: number|null = null; let unit = '';
|
||||
for (const it of this.tileLastList) {
|
||||
const { west,south,east,north,width,height } = it.meta;
|
||||
if (lon < west || lon > east || lat < south || lat > north) continue;
|
||||
const px = Math.floor((lon - west) / ((east - west) / width));
|
||||
const py = Math.floor((lat - south) / ((north - south) / height));
|
||||
if (px < 0 || px >= width || py < 0 || py >= height) continue;
|
||||
const v = it.values?.[py]?.[px];
|
||||
if (v != null) { value = Number(v); unit = it.product==='rain' ? 'mm' : 'dBZ'; break; }
|
||||
}
|
||||
if (value == null) { tip.style.display='none'; return; }
|
||||
tip.textContent = `${value.toFixed(1)} ${unit}`;
|
||||
const px = evt.pixel[0] + 12; const py = evt.pixel[1] + 12;
|
||||
tip.style.left = `${px}px`; tip.style.top = `${py}px`; tip.style.display='block';
|
||||
} catch { if (tip) tip.style.display='none'; }
|
||||
}
|
||||
}
|
||||
|
||||
bootstrapApplication(AppComponent).catch(err => console.error(err));
|
||||
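`countDbzInPolygon()` above relies on a ray-casting point-in-polygon test to decide which radar pixels fall inside the downwind sector. The same test isolated as a small function, with a toy square polygon; the coordinates and the `pointInPolygon` name are illustrative only.

```ts
// Ray-casting test, as used by countDbzInPolygon() in main.ts above.
type LonLat = [number, number];

function pointInPolygon(x: number, y: number, polygon: LonLat[]): boolean {
  let inside = false;
  for (let i = 0, j = polygon.length - 1; i < polygon.length; j = i++) {
    const [xi, yi] = polygon[i];
    const [xj, yj] = polygon[j];
    // Count edges crossed by a horizontal ray from (x, y) towards +x.
    const intersects = ((yi > y) !== (yj > y)) &&
      (x < (xj - xi) * (y - yi) / ((yj - yi) || 1e-9) + xi);
    if (intersects) inside = !inside;
  }
  return inside;
}

// Unit square around the origin.
const square: LonLat[] = [[-1, -1], [1, -1], [1, 1], [-1, 1]];
console.log(pointInPolygon(0, 0, square)); // true
console.log(pointInPolygon(2, 0, square)); // false
```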
3
core/frontend/src/polyfills.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
// Angular zone support (default change detection)
|
||||
import 'zone.js';
|
||||
|
||||
4
core/frontend/src/styles.css
Normal file
@@ -0,0 +1,4 @@
|
||||
/* Global styles (optional) */
|
||||
html, body { height: 100%; margin: 0; }
|
||||
body { font-family: system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, Noto Sans; }
|
||||
|
||||
14
core/frontend/tsconfig.app.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./out-tsc/app",
|
||||
"types": []
|
||||
},
|
||||
"files": [
|
||||
"src/main.ts",
|
||||
"src/polyfills.ts"
|
||||
],
|
||||
"include": [
|
||||
"src/**/*.d.ts"
|
||||
]
|
||||
}
|
||||
27
core/frontend/tsconfig.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"compileOnSave": false,
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"outDir": "./out-tsc",
|
||||
"sourceMap": true,
|
||||
"declaration": false,
|
||||
"downlevelIteration": true,
|
||||
"experimentalDecorators": true,
|
||||
"module": "es2022",
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"target": "es2022",
|
||||
"useDefineForClassFields": false,
|
||||
"lib": [
|
||||
"es2022",
|
||||
"dom"
|
||||
],
|
||||
"strict": true,
|
||||
"noImplicitOverride": true,
|
||||
"noPropertyAccessFromIndexSignature": false,
|
||||
"noImplicitReturns": true,
|
||||
"noFallthroughCasesInSwitch": true
|
||||
}
|
||||
}
|
||||
|
||||
74
core/internal/auth/auth.go
Normal file
@@ -0,0 +1,74 @@
package auth

import (
    "crypto/hmac"
    "crypto/sha256"
    "encoding/hex"
    "errors"
    "fmt"
    "strconv"
    "strings"
    "time"

    "golang.org/x/crypto/bcrypt"
)

// HashPassword returns a bcrypt hash for the given plain text password.
func HashPassword(password string) (string, error) {
    if password == "" {
        return "", errors.New("empty password")
    }
    b, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
    if err != nil {
        return "", err
    }
    return string(b), nil
}

// CheckPassword verifies a bcrypt hash against the given plain text password.
func CheckPassword(hash, password string) bool {
    if hash == "" || password == "" {
        return false
    }
    return bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)) == nil
}

// MakeSessionToken builds a simple HMAC-signed token: username|expUnix|sigHex
func MakeSessionToken(username string, ttl time.Duration, secret []byte) (string, time.Time) {
    if ttl <= 0 {
        ttl = 24 * time.Hour
    }
    exp := time.Now().Add(ttl).UTC()
    payload := username + "|" + strconv.FormatInt(exp.Unix(), 10)
    mac := hmac.New(sha256.New, secret)
    mac.Write([]byte(payload))
    sig := hex.EncodeToString(mac.Sum(nil))
    return payload + "|" + sig, exp
}

// ParseSessionToken validates token and returns username if valid.
func ParseSessionToken(token string, secret []byte) (string, bool) {
    parts := strings.Split(token, "|")
    if len(parts) != 3 {
        return "", false
    }
    username := parts[0]
    expStr := parts[1]
    sigHex := parts[2]
    // recompute the signature and compare
    payload := fmt.Sprintf("%s|%s", username, expStr)
    mac := hmac.New(sha256.New, secret)
    mac.Write([]byte(payload))
    if hex.EncodeToString(mac.Sum(nil)) != sigHex {
        return "", false
    }
    // expiry check
    expUnix, err := strconv.ParseInt(expStr, 10, 64)
    if err != nil {
        return "", false
    }
    if time.Now().UTC().After(time.Unix(expUnix, 0)) {
        return "", false
    }
    return username, true
}
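For reference, a minimal round-trip of the token helpers above; a sketch assuming the module path seen in the server handlers, with placeholder username and secret:

package main

import (
    "fmt"
    "time"

    "weatherstation/core/internal/auth"
)

func main() {
    secret := []byte("change-me-dev") // in production this would come from CORE_AUTH_SECRET
    token, exp := auth.MakeSessionToken("admin", 24*time.Hour, secret)
    fmt.Println("token:", token, "expires:", exp)

    // Validation succeeds with the same secret and fails with a different one or after expiry.
    if user, ok := auth.ParseSessionToken(token, secret); ok {
        fmt.Println("valid session for:", user)
    }
    if _, ok := auth.ParseSessionToken(token, []byte("other-secret")); !ok {
        fmt.Println("rejected with wrong secret")
    }
}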
26
core/internal/auth/auth_test.go
Normal file
@@ -0,0 +1,26 @@
package auth

import (
    "os"
    "testing"
)

// TestGenerateHash helps generate and verify password hashes.
// Usage:
//
//	PW="your-password" go test ./core/internal/auth -run TestGenerateHash -v
func TestGenerateHash(t *testing.T) {
    pw := os.Getenv("PW")
    if pw == "" {
        t.Skip("set PW env to generate a hash")
        return
    }
    hash, err := HashPassword(pw)
    if err != nil {
        t.Fatalf("HashPassword error: %v", err)
    }
    if !CheckPassword(hash, pw) {
        t.Fatalf("CheckPassword failed for generated hash")
    }
    t.Logf("password hash: %s", hash)
}
150
core/internal/config/config.go
Normal file
@@ -0,0 +1,150 @@
package config

import (
    "fmt"
    "log"
    "os"
    "path/filepath"

    legacy "weatherstation/internal/config"

    "gopkg.in/yaml.v3"
)

type Config struct {
    Addr          string
    UIServeDir    string
    BigscreenDir  string
    TemplateDir   string
    StaticDir     string
    DevEnableCORS bool
    Legacy        *legacy.Config
    SMS           SmsConfig
    AuthSecret    string
}

// SmsConfig holds Aliyun SMS settings loaded from config/env.
type SmsConfig struct {
    AccessKeyID     string
    AccessKeySecret string
    SignName        string
    TemplateCode    string
    Endpoint        string // optional, default dysmsapi.aliyuncs.com
}

func Load() Config {
    lg := legacy.GetConfig()

    port := lg.Server.WebPort
    if port <= 0 {
        port = 8080
    }

    cfg := Config{
        Addr:          fmt.Sprintf(":%d", port),
        UIServeDir:    "core/frontend/dist/ui",
        BigscreenDir:  "core/frontend/dist/bigscreen",
        TemplateDir:   "templates",
        StaticDir:     "static",
        DevEnableCORS: true,
        Legacy:        lg,
        SMS:           SmsConfig{},
        AuthSecret:    "change-me-dev", // default dev secret; override with CORE_AUTH_SECRET
    }

    // Try to load SMS settings from YAML (same search order as legacy)
    type smsAliyun struct {
        AccessKeyID     string `yaml:"access_key_id"`
        AccessKeySecret string `yaml:"access_key_secret"`
        SignName        string `yaml:"sign_name"`
        TemplateCode    string `yaml:"template_code"`
        Endpoint        string `yaml:"endpoint"`
    }
    var smsFile struct {
        SMS struct {
            Enabled  bool      `yaml:"enabled"`
            Provider string    `yaml:"provider"`
            Aliyun   smsAliyun `yaml:"aliyun"`
        } `yaml:"sms"`
    }
    // Probe possible locations
    exePath, _ := os.Executable()
    exeDir := ""
    if exePath != "" {
        exeDir = filepath.Dir(exePath)
    }
    candidates := []string{
        filepath.Join(exeDir, "config.yaml"),
        filepath.Join(exeDir, "..", "config.yaml"),
        "config.yaml",
        "../config.yaml",
        "../../config.yaml",
        "/etc/weatherstation/config.yaml",
        filepath.Join(os.Getenv("HOME"), ".weatherstation", "config.yaml"),
    }
    for _, p := range candidates {
        if b, err := os.ReadFile(p); err == nil {
            if err := yaml.Unmarshal(b, &smsFile); err == nil {
                cfg.SMS.AccessKeyID = smsFile.SMS.Aliyun.AccessKeyID
                cfg.SMS.AccessKeySecret = smsFile.SMS.Aliyun.AccessKeySecret
                if v := smsFile.SMS.Aliyun.SignName; v != "" {
                    cfg.SMS.SignName = v
                }
                if v := smsFile.SMS.Aliyun.TemplateCode; v != "" {
                    cfg.SMS.TemplateCode = v
                }
                if v := smsFile.SMS.Aliyun.Endpoint; v != "" {
                    cfg.SMS.Endpoint = v
                }
            }
            break
        }
    }

    if v := os.Getenv("CORE_ADDR"); v != "" {
        cfg.Addr = v
    }
    if v := os.Getenv("CORE_UI_DIR"); v != "" {
        cfg.UIServeDir = v
    }
    if v := os.Getenv("CORE_BIGSCREEN_DIR"); v != "" {
        cfg.BigscreenDir = v
    }
    if v := os.Getenv("CORE_TEMPLATE_DIR"); v != "" {
        cfg.TemplateDir = v
    }
    if v := os.Getenv("CORE_STATIC_DIR"); v != "" {
        cfg.StaticDir = v
    }
    if v := os.Getenv("CORE_ENABLE_CORS"); v != "" {
        if v == "0" || v == "false" {
            cfg.DevEnableCORS = false
        } else {
            cfg.DevEnableCORS = true
        }
    }

    // Auth secret; do not log value
    if v := os.Getenv("CORE_AUTH_SECRET"); v != "" {
        cfg.AuthSecret = v
    }

    // SMS settings (do not log secrets)
    if v := os.Getenv("CORE_SMS_AK"); v != "" {
        cfg.SMS.AccessKeyID = v
    }
    if v := os.Getenv("CORE_SMS_SK"); v != "" {
        cfg.SMS.AccessKeySecret = v
    }
    if v := os.Getenv("CORE_SMS_SIGN"); v != "" {
        cfg.SMS.SignName = v
    }
    if v := os.Getenv("CORE_SMS_TPL"); v != "" {
        cfg.SMS.TemplateCode = v
    }
    if v := os.Getenv("CORE_SMS_ENDPOINT"); v != "" {
        cfg.SMS.Endpoint = v
    }

    log.Printf("config: addr=%s ui=%s bigscreen=%s tpl=%s static=%s cors=%v sms.sign=%v sms.tpl=%v auth.secret=%v", cfg.Addr, cfg.UIServeDir, cfg.BigscreenDir, cfg.TemplateDir, cfg.StaticDir, cfg.DevEnableCORS, cfg.SMS.SignName != "", cfg.SMS.TemplateCode != "", cfg.AuthSecret != "")
    return cfg
}
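The overrides in Load follow a simple "use the environment variable when set, otherwise keep the default" pattern; a compact sketch of the same idea, with illustrative helper names that are not part of the package:

package main

import (
    "fmt"
    "os"
)

// envOr returns the value of key when it is set and non-empty, otherwise def.
func envOr(key, def string) string {
    if v := os.Getenv(key); v != "" {
        return v
    }
    return def
}

// envBool mirrors the CORE_ENABLE_CORS handling above: "0" and "false" disable, any other value enables.
func envBool(key string, def bool) bool {
    v := os.Getenv(key)
    if v == "" {
        return def
    }
    return v != "0" && v != "false"
}

func main() {
    addr := envOr("CORE_ADDR", ":8080")
    cors := envBool("CORE_ENABLE_CORS", true)
    fmt.Println(addr, cors)
}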
41
core/internal/data/alerts.go
Normal file
@@ -0,0 +1,41 @@
package data

import (
    "context"
    "database/sql"
    "time"
)

type AlertRecord struct {
    AlertType string
    StationID string
    Level     string
    IssuedAt  time.Time
    Message   string
    SMSPhone  sql.NullString
}

// InsertAlert writes one alert row; returns inserted id or 0 when skipped by conflict.
func InsertAlert(ctx context.Context, ar AlertRecord) (int64, error) {
    const q = `
        INSERT INTO alerts (alert_type, station_id, level, issued_at, message, sms_phone)
        VALUES ($1,$2,$3,$4,$5,$6)
        ON CONFLICT DO NOTHING
        RETURNING id`
    var id int64
    err := DB().QueryRowContext(ctx, q,
        ar.AlertType,
        ar.StationID,
        ar.Level,
        ar.IssuedAt,
        ar.Message,
        ar.SMSPhone,
    ).Scan(&id)
    if err == sql.ErrNoRows {
        return 0, nil
    }
    if err != nil {
        return 0, err
    }
    return id, nil
}
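A usage sketch for InsertAlert; an id of 0 means the row was deduplicated by the ON CONFLICT clause. The alert type, level, and station id below are made-up values for illustration:

package main

import (
    "context"
    "database/sql"
    "fmt"
    "time"

    "weatherstation/core/internal/data"
)

func main() {
    ctx := context.Background()
    id, err := data.InsertAlert(ctx, data.AlertRecord{
        AlertType: "heavy_rain",
        StationID: "RS485-000001",
        Level:     "orange",
        IssuedAt:  time.Now().UTC(),
        Message:   "hourly rain >= 8mm expected",
        SMSPhone:  sql.NullString{}, // no SMS recipient recorded for this alert
    })
    if err != nil {
        panic(err)
    }
    if id == 0 {
        fmt.Println("duplicate alert, skipped by ON CONFLICT")
    } else {
        fmt.Println("inserted alert id:", id)
    }
}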
39
core/internal/data/db.go
Normal file
@@ -0,0 +1,39 @@
package data

import (
    "database/sql"
    "fmt"
    "sync"

    legacycfg "weatherstation/internal/config"

    _ "github.com/lib/pq"
)

var (
    dbOnce sync.Once
    dbInst *sql.DB
)

// DB returns a shared Postgres connection opened using legacy internal/config.
func DB() *sql.DB {
    dbOnce.Do(func() {
        cfg := legacycfg.GetConfig().Database
        connStr := fmt.Sprintf(
            "host=%s port=%d user=%s password=%s dbname=%s sslmode=%s",
            cfg.Host, cfg.Port, cfg.User, cfg.Password, cfg.DBName, cfg.SSLMode,
        )
        d, err := sql.Open("postgres", connStr)
        if err != nil {
            panic(fmt.Errorf("open DB failed: %w", err))
        }
        if err := d.Ping(); err != nil {
            panic(fmt.Errorf("ping DB failed: %w", err))
        }
        dbInst = d
    })
    if dbInst == nil {
        panic("database not initialized: check config and drivers")
    }
    return dbInst
}
85
core/internal/data/forecast.go
Normal file
@@ -0,0 +1,85 @@
package data

import (
    "context"
    "database/sql"
    "time"
)

type PredictPoint struct {
    ForecastTime time.Time
    RainMMx1000  int32
}

// ForecastRainAtIssued returns forecast hourly rows for a given station/provider at an exact issued_at time.
func ForecastRainAtIssued(ctx context.Context, stationID, provider string, issuedAt time.Time) ([]PredictPoint, error) {
    const q = `
        SELECT forecast_time, rain_mm_x1000
        FROM forecast_hourly
        WHERE station_id=$1 AND provider=$2 AND issued_at=$3
        ORDER BY forecast_time ASC`
    rows, err := DB().QueryContext(ctx, q, stationID, provider, issuedAt)
    if err != nil {
        return nil, err
    }
    defer rows.Close()
    var out []PredictPoint
    for rows.Next() {
        var p PredictPoint
        var rain sql.NullInt32
        if err := rows.Scan(&p.ForecastTime, &rain); err != nil {
            continue
        }
        if rain.Valid {
            p.RainMMx1000 = rain.Int32
        }
        out = append(out, p)
    }
    return out, nil
}

// ResolveIssuedAtInBucket finds the latest issued_at in [bucket, bucket+1h) for station/provider.
func ResolveIssuedAtInBucket(ctx context.Context, stationID, provider string, bucketHour time.Time) (time.Time, bool, error) {
    const q = `SELECT issued_at FROM forecast_hourly WHERE station_id=$1 AND provider=$2 AND issued_at >= $3 AND issued_at < $3 + interval '1 hour' ORDER BY issued_at DESC LIMIT 1`
    var t time.Time
    err := DB().QueryRowContext(ctx, q, stationID, provider, bucketHour).Scan(&t)
    if err == sql.ErrNoRows {
        return time.Time{}, false, nil
    }
    if err != nil {
        return time.Time{}, false, err
    }
    return t, true, nil
}

// UpsertRainItem is one rain-only forecast row to upsert.
type UpsertRainItem struct {
    ForecastTime time.Time
    RainMMx1000  int32
}

// UpsertForecastRain writes rain-only rows for a provider at issued_at, upserting by key.
// Only the rain_mm_x1000 column is set/updated; other columns remain NULL or unchanged.
func UpsertForecastRain(ctx context.Context, stationID, provider string, issuedAt time.Time, items []UpsertRainItem) error {
    if len(items) == 0 {
        return nil
    }
    const q = `
        INSERT INTO forecast_hourly (
            station_id, provider, issued_at, forecast_time, rain_mm_x1000
        ) VALUES ($1,$2,$3,$4,$5)
        ON CONFLICT (station_id, provider, issued_at, forecast_time) DO UPDATE SET
            rain_mm_x1000 = EXCLUDED.rain_mm_x1000`
    tx, err := DB().BeginTx(ctx, &sql.TxOptions{})
    if err != nil {
        return err
    }
    defer func() {
        _ = tx.Rollback()
    }()
    for _, it := range items {
        if _, err := tx.ExecContext(ctx, q, stationID, provider, issuedAt, it.ForecastTime, it.RainMMx1000); err != nil {
            return err
        }
    }
    return tx.Commit()
}
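The two read helpers are meant to be chained: resolve the concrete issued_at inside an hourly bucket first, then fetch the rows for that exact issue time. A sketch (the station id, provider, and bucket are placeholders):

package main

import (
    "context"
    "fmt"
    "time"

    "weatherstation/core/internal/data"
)

func main() {
    ctx := context.Background()
    bucket := time.Date(2024, 6, 1, 8, 0, 0, 0, time.UTC) // the hour bucket to inspect

    issuedAt, ok, err := data.ResolveIssuedAtInBucket(ctx, "RS485-000001", "imdroid_mix", bucket)
    if err != nil || !ok {
        fmt.Println("no forecast issued in that bucket")
        return
    }
    points, err := data.ForecastRainAtIssued(ctx, "RS485-000001", "imdroid_mix", issuedAt)
    if err != nil {
        panic(err)
    }
    for _, p := range points {
        fmt.Printf("%s  %.3f mm\n", p.ForecastTime.Format(time.RFC3339), float64(p.RainMMx1000)/1000.0)
    }
}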
292
core/internal/data/metrics.go
Normal file
@@ -0,0 +1,292 @@
|
||||
package data
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
type BinaryScores struct {
|
||||
N int64 `json:"n"`
|
||||
N11 int64 `json:"n11"` // forecast rain, observed rain (hit)
N01 int64 `json:"n01"` // forecast no rain, observed rain (miss)
N10 int64 `json:"n10"` // forecast rain, observed no rain (false alarm)
N00 int64 `json:"n00"` // forecast no rain, observed no rain (correct negative)
|
||||
CSI float64 `json:"csi"` // Threat Score / Critical Success Index
|
||||
POD float64 `json:"pod"` // Probability of Detection
|
||||
FAR float64 `json:"far"` // False Alarm Rate
|
||||
}
|
||||
|
||||
// ForecastBinaryScores computes contingency counts and TS/POD/FAR over [since, until) by hour.
|
||||
// provider may be empty; an empty provider defaults to imdroid_mix.
|
||||
func ForecastBinaryScores(stationID string, since, until time.Time, provider string, thresholdMM float64) (BinaryScores, error) {
|
||||
if provider == "" {
|
||||
provider = "imdroid_mix"
|
||||
}
|
||||
thrX1000 := int64(thresholdMM * 1000.0)
|
||||
const q = `
|
||||
WITH base AS (
|
||||
SELECT bucket_start, rain_10m_mm_x1000
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
), actual AS (
|
||||
SELECT date_trunc('hour', bucket_start) + interval '1 hour' AS hour,
|
||||
SUM(rain_10m_mm_x1000)::bigint AS rain_sum
|
||||
FROM base
|
||||
GROUP BY 1
|
||||
), fc AS (
|
||||
SELECT forecast_time,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
|
||||
COALESCE(rain_mm_x1000,0)::bigint AS rain_x1000
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $4 AND forecast_time >= $2 AND forecast_time < $3
|
||||
), pair AS (
|
||||
SELECT a.hour AS t,
|
||||
a.rain_sum AS actual_x1000,
|
||||
COALESCE(f.rain_x1000,0) AS pred_x1000
|
||||
FROM actual a
|
||||
LEFT JOIN (SELECT forecast_time, rain_x1000 FROM fc WHERE rn=1) f ON f.forecast_time = a.hour
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) AS n,
|
||||
SUM(CASE WHEN pred_x1000 > $5 AND actual_x1000 > $5 THEN 1 ELSE 0 END) AS n11,
|
||||
SUM(CASE WHEN pred_x1000 <= $5 AND actual_x1000 > $5 THEN 1 ELSE 0 END) AS n01,
|
||||
SUM(CASE WHEN pred_x1000 > $5 AND actual_x1000 <= $5 THEN 1 ELSE 0 END) AS n10,
|
||||
SUM(CASE WHEN pred_x1000 <= $5 AND actual_x1000 <= $5 THEN 1 ELSE 0 END) AS n00
|
||||
FROM pair`
|
||||
|
||||
row := DB().QueryRow(q, stationID, since, until, provider, thrX1000)
|
||||
var s BinaryScores
|
||||
if err := row.Scan(&s.N, &s.N11, &s.N01, &s.N10, &s.N00); err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return BinaryScores{}, nil
|
||||
}
|
||||
return BinaryScores{}, fmt.Errorf("query scores: %w", err)
|
||||
}
|
||||
h, m, f := float64(s.N11), float64(s.N01), float64(s.N10)
|
||||
if (h + m + f) > 0 {
|
||||
s.CSI = h / (h + m + f)
|
||||
}
|
||||
if (h + m) > 0 {
|
||||
s.POD = h / (h + m)
|
||||
}
|
||||
if (h + f) > 0 {
|
||||
s.FAR = f / (h + f)
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// ForecastBinaryScoresMixedOnlyForecast computes scores where a forecast exists for that hour (mixed latest), using INNER JOIN on forecast.
|
||||
func ForecastBinaryScoresMixedOnlyForecast(stationID string, since, until time.Time, provider string, thresholdMM float64) (BinaryScores, error) {
|
||||
if provider == "" {
|
||||
provider = "imdroid_mix"
|
||||
}
|
||||
thrX1000 := int64(thresholdMM * 1000.0)
|
||||
const q = `
|
||||
WITH base AS (
|
||||
SELECT bucket_start, rain_10m_mm_x1000
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
), actual AS (
|
||||
SELECT date_trunc('hour', bucket_start) + interval '1 hour' AS hour,
|
||||
SUM(rain_10m_mm_x1000)::bigint AS rain_sum
|
||||
FROM base
|
||||
GROUP BY 1
|
||||
), fc AS (
|
||||
SELECT forecast_time,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
|
||||
COALESCE(rain_mm_x1000,0)::bigint AS rain_x1000
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $4 AND forecast_time >= $2 AND forecast_time < $3
|
||||
), pair AS (
|
||||
SELECT a.hour AS t,
|
||||
a.rain_sum AS actual_x1000,
|
||||
f.rain_x1000 AS pred_x1000
|
||||
FROM actual a
|
||||
INNER JOIN (SELECT forecast_time, rain_x1000 FROM fc WHERE rn=1) f ON f.forecast_time = a.hour
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) AS n,
|
||||
SUM(CASE WHEN pred_x1000 > $5 AND actual_x1000 > $5 THEN 1 ELSE 0 END) AS n11,
|
||||
SUM(CASE WHEN pred_x1000 <= $5 AND actual_x1000 > $5 THEN 1 ELSE 0 END) AS n01,
|
||||
SUM(CASE WHEN pred_x1000 > $5 AND actual_x1000 <= $5 THEN 1 ELSE 0 END) AS n10,
|
||||
SUM(CASE WHEN pred_x1000 <= $5 AND actual_x1000 <= $5 THEN 1 ELSE 0 END) AS n00
|
||||
FROM pair`
|
||||
row := DB().QueryRow(q, stationID, since, until, provider, thrX1000)
|
||||
var s BinaryScores
|
||||
if err := row.Scan(&s.N, &s.N11, &s.N01, &s.N10, &s.N00); err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return BinaryScores{}, nil
|
||||
}
|
||||
return BinaryScores{}, fmt.Errorf("query scores(mixed): %w", err)
|
||||
}
|
||||
h, m, f := float64(s.N11), float64(s.N01), float64(s.N10)
|
||||
if (h + m + f) > 0 {
|
||||
s.CSI = h / (h + m + f)
|
||||
}
|
||||
if (h + m) > 0 {
|
||||
s.POD = h / (h + m)
|
||||
}
|
||||
if (h + f) > 0 {
|
||||
s.FAR = f / (h + f)
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// ForecastBinaryScoresLead computes scores for a fixed lead (1/2/3), counting only hours where that lead exists.
|
||||
func ForecastBinaryScoresLead(stationID string, since, until time.Time, provider string, thresholdMM float64, lead int) (BinaryScores, error) {
|
||||
if provider == "" {
|
||||
provider = "imdroid_mix"
|
||||
}
|
||||
thrX1000 := int64(thresholdMM * 1000.0)
|
||||
const q = `
|
||||
WITH base AS (
|
||||
SELECT bucket_start, rain_10m_mm_x1000
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
), actual AS (
|
||||
SELECT date_trunc('hour', bucket_start) + interval '1 hour' AS hour,
|
||||
SUM(rain_10m_mm_x1000)::bigint AS rain_sum
|
||||
FROM base
|
||||
GROUP BY 1
|
||||
), raw AS (
|
||||
SELECT forecast_time, issued_at,
|
||||
CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours,
|
||||
COALESCE(rain_mm_x1000,0)::bigint AS rain_x1000
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $4 AND forecast_time >= $2 AND forecast_time < $3
|
||||
), fc AS (
|
||||
SELECT forecast_time, rain_x1000,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn
|
||||
FROM raw WHERE lead_hours = $5
|
||||
), pair AS (
|
||||
SELECT a.hour AS t,
|
||||
a.rain_sum AS actual_x1000,
|
||||
f.rain_x1000 AS pred_x1000
|
||||
FROM actual a
|
||||
INNER JOIN (SELECT forecast_time, rain_x1000 FROM fc WHERE rn=1) f ON f.forecast_time = a.hour
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) AS n,
|
||||
SUM(CASE WHEN pred_x1000 > $6 AND actual_x1000 > $6 THEN 1 ELSE 0 END) AS n11,
|
||||
SUM(CASE WHEN pred_x1000 <= $6 AND actual_x1000 > $6 THEN 1 ELSE 0 END) AS n01,
|
||||
SUM(CASE WHEN pred_x1000 > $6 AND actual_x1000 <= $6 THEN 1 ELSE 0 END) AS n10,
|
||||
SUM(CASE WHEN pred_x1000 <= $6 AND actual_x1000 <= $6 THEN 1 ELSE 0 END) AS n00
|
||||
FROM pair`
|
||||
row := DB().QueryRow(q, stationID, since, until, provider, lead, thrX1000)
|
||||
var s BinaryScores
|
||||
if err := row.Scan(&s.N, &s.N11, &s.N01, &s.N10, &s.N00); err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return BinaryScores{}, nil
|
||||
}
|
||||
return BinaryScores{}, fmt.Errorf("query scores(lead): %w", err)
|
||||
}
|
||||
h, m, f := float64(s.N11), float64(s.N01), float64(s.N10)
|
||||
if (h + m + f) > 0 {
|
||||
s.CSI = h / (h + m + f)
|
||||
}
|
||||
if (h + m) > 0 {
|
||||
s.POD = h / (h + m)
|
||||
}
|
||||
if (h + f) > 0 {
|
||||
s.FAR = f / (h + f)
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// BinaryRow holds per-hour aligned actual/pred pair for detail view.
|
||||
type BinaryRow struct {
|
||||
T time.Time
|
||||
ActualMM float64
|
||||
PredMM float64
|
||||
}
|
||||
|
||||
// ForecastBinarySeriesMixed returns per-hour rows for hours with a forecast (mixed latest).
|
||||
func ForecastBinarySeriesMixed(stationID string, since, until time.Time, provider string) ([]BinaryRow, error) {
|
||||
if provider == "" {
|
||||
provider = "imdroid_mix"
|
||||
}
|
||||
const q = `
|
||||
WITH base AS (
|
||||
SELECT bucket_start, rain_10m_mm_x1000
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
), actual AS (
|
||||
SELECT date_trunc('hour', bucket_start) + interval '1 hour' AS hour,
|
||||
SUM(rain_10m_mm_x1000)::bigint AS rain_sum
|
||||
FROM base
|
||||
GROUP BY 1
|
||||
), fc AS (
|
||||
SELECT forecast_time,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
|
||||
COALESCE(rain_mm_x1000,0)::bigint AS rain_x1000
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $4 AND forecast_time >= $2 AND forecast_time < $3
|
||||
)
|
||||
SELECT a.hour, a.rain_sum, f.rain_x1000
|
||||
FROM actual a
|
||||
INNER JOIN (SELECT forecast_time, rain_x1000 FROM fc WHERE rn=1) f ON f.forecast_time = a.hour
|
||||
ORDER BY a.hour`
|
||||
rows, err := DB().Query(q, stationID, since, until, provider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var out []BinaryRow
|
||||
for rows.Next() {
|
||||
var t time.Time
|
||||
var ax, px int64
|
||||
if err := rows.Scan(&t, &ax, &px); err != nil {
|
||||
continue
|
||||
}
|
||||
out = append(out, BinaryRow{T: t, ActualMM: float64(ax) / 1000.0, PredMM: float64(px) / 1000.0})
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// ForecastBinarySeriesLead returns per-hour rows for a fixed lead (latest issued per hour for that lead only).
|
||||
func ForecastBinarySeriesLead(stationID string, since, until time.Time, provider string, lead int) ([]BinaryRow, error) {
|
||||
if provider == "" {
|
||||
provider = "imdroid_mix"
|
||||
}
|
||||
const q = `
|
||||
WITH base AS (
|
||||
SELECT bucket_start, rain_10m_mm_x1000
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
), actual AS (
|
||||
SELECT date_trunc('hour', bucket_start) + interval '1 hour' AS hour,
|
||||
SUM(rain_10m_mm_x1000)::bigint AS rain_sum
|
||||
FROM base
|
||||
GROUP BY 1
|
||||
), raw AS (
|
||||
SELECT forecast_time, issued_at,
|
||||
CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours,
|
||||
COALESCE(rain_mm_x1000,0)::bigint AS rain_x1000
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $4 AND forecast_time >= $2 AND forecast_time < $3
|
||||
), fc AS (
|
||||
SELECT forecast_time, rain_x1000,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn
|
||||
FROM raw WHERE lead_hours = $5
|
||||
)
|
||||
SELECT a.hour, a.rain_sum, f.rain_x1000
|
||||
FROM actual a
|
||||
INNER JOIN (SELECT forecast_time, rain_x1000 FROM fc WHERE rn=1) f ON f.forecast_time = a.hour
|
||||
ORDER BY a.hour`
|
||||
rows, err := DB().Query(q, stationID, since, until, provider, lead)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var out []BinaryRow
|
||||
for rows.Next() {
|
||||
var t time.Time
|
||||
var ax, px int64
|
||||
if err := rows.Scan(&t, &ax, &px); err != nil {
|
||||
continue
|
||||
}
|
||||
out = append(out, BinaryRow{T: t, ActualMM: float64(ax) / 1000.0, PredMM: float64(px) / 1000.0})
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
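The scores filled in above are the standard 2x2 contingency metrics: CSI = hits/(hits+misses+false alarms), POD = hits/(hits+misses), FAR = false alarms/(hits+false alarms). A tiny numeric check with made-up counts:

package main

import "fmt"

func main() {
    // Made-up contingency counts: h = hits (n11), m = misses (n01), f = false alarms (n10).
    h, m, f := 6.0, 2.0, 4.0
    csi := h / (h + m + f) // 6/12 = 0.50
    pod := h / (h + m)     // 6/8  = 0.75
    far := f / (h + f)     // 4/10 = 0.40
    fmt.Printf("CSI=%.2f POD=%.2f FAR=%.2f\n", csi, pod, far)
}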
45
core/internal/data/radar_weather.go
Normal file
@@ -0,0 +1,45 @@
package data

import (
    "database/sql"
    "time"
)

type RadarWeather struct {
    Alias         string
    Lat           float64
    Lon           float64
    DT            time.Time
    Temperature   sql.NullFloat64
    Humidity      sql.NullFloat64
    CloudRate     sql.NullFloat64
    Visibility    sql.NullFloat64
    DSWRF         sql.NullFloat64
    WindSpeed     sql.NullFloat64
    WindDirection sql.NullFloat64
    Pressure      sql.NullFloat64
}

// RadarWeatherNearest returns the nearest radar_weather row to (lat,lon) around dt within a window.
// It orders by absolute time difference, then by squared distance.
func RadarWeatherNearest(lat, lon float64, dt time.Time, window time.Duration) (*RadarWeather, error) {
    from := dt.Add(-window)
    to := dt.Add(window)
    const q = `
        SELECT alias, lat, lon, dt, temperature, humidity, cloudrate, visibility, dswrf,
               wind_speed, wind_direction, pressure
        FROM radar_weather
        WHERE dt BETWEEN $1 AND $2
        ORDER BY ABS(EXTRACT(EPOCH FROM (dt - $3))) ASC,
                 ((lat - $4)*(lat - $4) + (lon - $5)*(lon - $5)) ASC
        LIMIT 1`
    row := DB().QueryRow(q, from, to, dt, lat, lon)
    var rw RadarWeather
    if err := row.Scan(&rw.Alias, &rw.Lat, &rw.Lon, &rw.DT, &rw.Temperature, &rw.Humidity, &rw.CloudRate, &rw.Visibility, &rw.DSWRF, &rw.WindSpeed, &rw.WindDirection, &rw.Pressure); err != nil {
        if err == sql.ErrNoRows {
            return nil, nil
        }
        return nil, err
    }
    return &rw, nil
}
35
core/internal/data/rain.go
Normal file
@@ -0,0 +1,35 @@
package data

import (
    "context"
    "database/sql"
    "time"
)

// FetchActualHourlyRain sums rs485_weather_10min.rain_10m_mm_x1000 over [start,end) and returns mm.
func FetchActualHourlyRain(ctx context.Context, stationID string, start, end time.Time) (float64, bool, error) {
    const q = `SELECT SUM(rain_10m_mm_x1000) FROM rs485_weather_10min WHERE station_id=$1 AND bucket_start >= $2 AND bucket_start < $3`
    var sum sql.NullInt64
    err := DB().QueryRowContext(ctx, q, stationID, start, end).Scan(&sum)
    if err != nil {
        return 0, false, err
    }
    if !sum.Valid {
        return 0, false, nil
    }
    return float64(sum.Int64) / 1000.0, true, nil
}

// SumRainMM sums rain_10m_mm_x1000 over [start,end) and returns mm.
func SumRainMM(ctx context.Context, stationID string, start, end time.Time) (float64, bool, error) {
    const q = `SELECT SUM(rain_10m_mm_x1000) FROM rs485_weather_10min WHERE station_id=$1 AND bucket_start >= $2 AND bucket_start < $3`
    var sum sql.NullInt64
    err := DB().QueryRowContext(ctx, q, stationID, start, end).Scan(&sum)
    if err != nil {
        return 0, false, err
    }
    if !sum.Valid {
        return 0, false, nil
    }
    return float64(sum.Int64) / 1000.0, true, nil
}
84
core/internal/data/raw.go
Normal file
@@ -0,0 +1,84 @@
package data

import (
    "context"
    "database/sql"
    "math"
    "time"
)

type WindowAgg struct {
    Ta sql.NullFloat64
    Ua sql.NullFloat64
    Pa sql.NullFloat64
    Sm sql.NullFloat64
    Dm sql.NullFloat64
}

// WindowAverages computes averages on rs485_weather_data over the [start,end) window.
// Filters values < 0.001 as invalid. Wind direction is averaged vectorially.
func WindowAverages(ctx context.Context, stationID string, start, end time.Time) (WindowAgg, error) {
    const q = `
        SELECT
            AVG(temperature) AS ta,
            AVG(humidity)    AS ua,
            AVG(pressure)    AS pa,
            AVG(CASE WHEN wind_speed >= 0.001 THEN wind_speed END) AS sm,
            DEGREES(
                ATAN2(
                    AVG(CASE WHEN wind_speed >= 0.001 THEN SIN(RADIANS(wind_direction)) END),
                    AVG(CASE WHEN wind_speed >= 0.001 THEN COS(RADIANS(wind_direction)) END)
                )
            ) AS dm
        FROM rs485_weather_data
        WHERE station_id = $1 AND timestamp >= $2 AND timestamp < $3`
    var agg WindowAgg
    err := DB().QueryRowContext(ctx, q, stationID, start, end).Scan(
        &agg.Ta, &agg.Ua, &agg.Pa, &agg.Sm, &agg.Dm,
    )
    if err != nil {
        return WindowAgg{}, err
    }
    // Normalize Dm to [0,360)
    if agg.Dm.Valid {
        v := agg.Dm.Float64
        if v < 0 {
            v += 360.0
        }
        // handle NaN from no data
        if math.IsNaN(v) {
            agg.Dm.Valid = false
        } else {
            agg.Dm.Float64 = v
        }
    }
    return agg, nil
}

// DailyRainSinceMidnight computes current_day_rain = max(0, latest - baselineAtMidnight).
// If baseline is null, returns 0.
func DailyRainSinceMidnight(ctx context.Context, stationID string, now time.Time) (float64, error) {
    // Midnight in Asia/Shanghai
    loc, _ := time.LoadLocation("Asia/Shanghai")
    if loc == nil {
        loc = time.FixedZone("CST", 8*3600)
    }
    dayStart := time.Date(now.In(loc).Year(), now.In(loc).Month(), now.In(loc).Day(), 0, 0, 0, 0, loc)

    var baseline sql.NullFloat64
    const qBase = `SELECT rainfall FROM rs485_weather_data WHERE station_id=$1 AND timestamp <= $2 ORDER BY timestamp DESC LIMIT 1`
    _ = DB().QueryRowContext(ctx, qBase, stationID, dayStart).Scan(&baseline)

    var current sql.NullFloat64
    const qCur = `SELECT rainfall FROM rs485_weather_data WHERE station_id=$1 ORDER BY timestamp DESC LIMIT 1`
    _ = DB().QueryRowContext(ctx, qCur, stationID).Scan(&current)

    if !current.Valid || !baseline.Valid {
        return 0, nil
    }
    v := current.Float64 - baseline.Float64
    if v < 0 {
        v = 0
    }
    return v, nil
}
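The wind-direction average in WindowAverages is a vector (circular) mean: average the sin/cos components, then take atan2, which avoids the 359-degree/1-degree wrap-around problem of a plain arithmetic mean. The same computation in Go, as a quick sanity check:

package main

import (
    "fmt"
    "math"
)

// vectorMeanDeg returns the circular mean of directions in degrees, normalized to [0, 360).
func vectorMeanDeg(dirs []float64) float64 {
    var sinSum, cosSum float64
    for _, d := range dirs {
        r := d * math.Pi / 180.0
        sinSum += math.Sin(r)
        cosSum += math.Cos(r)
    }
    n := float64(len(dirs))
    deg := math.Atan2(sinSum/n, cosSum/n) * 180.0 / math.Pi
    if deg < 0 {
        deg += 360.0
    }
    return deg
}

func main() {
    // 350 deg and 10 deg straddle north; the vector mean is 0 deg, not the arithmetic 180 deg.
    fmt.Printf("%.1f\n", vectorMeanDeg([]float64{350, 10}))
}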
31
core/internal/data/sms.go
Normal file
@@ -0,0 +1,31 @@
package data

import (
    "context"
)

// SMSRecipient models an entry in sms_recipients.
type SMSRecipient struct {
    Phone      string
    Enabled    bool
    AlertLevel int
}

// ListEnabledSMSRecipients returns all enabled recipients.
func ListEnabledSMSRecipients(ctx context.Context) ([]SMSRecipient, error) {
    const q = `SELECT phone, enabled, alert_level FROM sms_recipients WHERE enabled = TRUE`
    rows, err := DB().QueryContext(ctx, q)
    if err != nil {
        return nil, err
    }
    defer rows.Close()
    var out []SMSRecipient
    for rows.Next() {
        var r SMSRecipient
        if err := rows.Scan(&r.Phone, &r.Enabled, &r.AlertLevel); err != nil {
            continue
        }
        out = append(out, r)
    }
    return out, nil
}
110
core/internal/data/station.go
Normal file
@@ -0,0 +1,110 @@
package data

import (
    "context"
    "database/sql"
    "fmt"
    "strings"
)

// GetStationName returns stations.name by station_id; empty string if not found/null.
func GetStationName(ctx context.Context, stationID string) (string, error) {
    const q = `SELECT COALESCE(name, '') FROM stations WHERE station_id = $1`
    var name sql.NullString
    err := DB().QueryRowContext(ctx, q, stationID).Scan(&name)
    if err != nil {
        if err == sql.ErrNoRows {
            return "", nil
        }
        return "", err
    }
    if name.Valid {
        return name.String, nil
    }
    return "", nil
}

// StationInfo contains minimal fields for alert checks.
type StationInfo struct {
    ID        string
    Name      string
    Location  string
    Latitude  float64
    Longitude float64
    Altitude  float64
}

// ListEligibleStations returns WH65LP stations with required non-empty fields.
func ListEligibleStations(ctx context.Context) ([]StationInfo, error) {
    const q = `
        SELECT
            station_id,
            COALESCE(NULLIF(BTRIM(name), ''), station_id) AS name,
            location,
            latitude::float8,
            longitude::float8,
            altitude::float8
        FROM stations
        WHERE
            device_type = 'WH65LP' AND
            name IS NOT NULL AND BTRIM(name) <> '' AND
            location IS NOT NULL AND BTRIM(location) <> '' AND
            latitude IS NOT NULL AND latitude <> 0 AND
            longitude IS NOT NULL AND longitude <> 0 AND
            altitude IS NOT NULL AND altitude <> 0`

    rows, err := DB().QueryContext(ctx, q)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var out []StationInfo
    for rows.Next() {
        var st StationInfo
        if err := rows.Scan(&st.ID, &st.Name, &st.Location, &st.Latitude, &st.Longitude, &st.Altitude); err != nil {
            continue
        }
        out = append(out, st)
    }
    return out, nil
}

// ListStationsByIDs returns stations by a given id list (ignores missing ones).
func ListStationsByIDs(ctx context.Context, ids []string) ([]StationInfo, error) {
    if len(ids) == 0 {
        return nil, nil
    }
    var placeholders []string
    args := make([]interface{}, 0, len(ids))
    for i, id := range ids {
        placeholders = append(placeholders, fmt.Sprintf("$%d", i+1))
        args = append(args, id)
    }
    q := fmt.Sprintf(`
        SELECT
            station_id,
            COALESCE(NULLIF(BTRIM(name), ''), station_id) AS name,
            location,
            latitude::float8,
            longitude::float8,
            altitude::float8
        FROM stations
        WHERE station_id IN (%s)`, strings.Join(placeholders, ","))

    rows, err := DB().QueryContext(ctx, q, args...)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var out []StationInfo
    for rows.Next() {
        var st StationInfo
        if err := rows.Scan(&st.ID, &st.Name, &st.Location, &st.Latitude, &st.Longitude, &st.Altitude); err != nil {
            continue
        }
        out = append(out, st)
    }
    return out, nil
}
330
core/internal/data/store.go
Normal file
@@ -0,0 +1,330 @@
|
||||
package data
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
"weatherstation/pkg/types"
|
||||
)
|
||||
|
||||
func OnlineDevices() int {
|
||||
const query = `SELECT COUNT(DISTINCT station_id) FROM rs485_weather_data WHERE timestamp > NOW() - INTERVAL '5 minutes'`
|
||||
var count int
|
||||
if err := DB().QueryRow(query).Scan(&count); err != nil {
|
||||
return 0
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func Stations() ([]types.Station, error) {
|
||||
const query = `
|
||||
SELECT s.station_id,
|
||||
COALESCE(s.station_alias, '') as station_alias,
|
||||
COALESCE(s.password, '') as station_name,
|
||||
'WH65LP' as device_type,
|
||||
COALESCE(MAX(r.timestamp), '1970-01-01'::timestamp) as last_update,
|
||||
COALESCE(s.latitude, 0) as latitude,
|
||||
COALESCE(s.longitude, 0) as longitude,
|
||||
COALESCE(s.name, '') as name,
|
||||
COALESCE(s.location, '') as location,
|
||||
COALESCE(s.z, 0) as z,
|
||||
COALESCE(s.y, 0) as y,
|
||||
COALESCE(s.x, 0) as x
|
||||
FROM stations s
|
||||
LEFT JOIN rs485_weather_data r ON s.station_id = r.station_id
|
||||
WHERE s.station_id LIKE 'RS485-%'
|
||||
GROUP BY s.station_id, s.station_alias, s.password, s.latitude, s.longitude, s.name, s.location, s.z, s.y, s.x, s.created_at
|
||||
ORDER BY (COALESCE(MAX(r.timestamp), '1970-01-01'::timestamp) > NOW() - INTERVAL '5 minutes') DESC,
|
||||
s.created_at DESC`
|
||||
rows, err := DB().Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var stations []types.Station
|
||||
for rows.Next() {
|
||||
var s types.Station
|
||||
var last time.Time
|
||||
if err := rows.Scan(&s.StationID, &s.StationAlias, &s.StationName, &s.DeviceType, &last, &s.Latitude, &s.Longitude, &s.Name, &s.Location, &s.Z, &s.Y, &s.X); err != nil {
|
||||
continue
|
||||
}
|
||||
s.LastUpdate = last.Format("2006-01-02 15:04:05")
|
||||
stations = append(stations, s)
|
||||
}
|
||||
return stations, nil
|
||||
}
|
||||
|
||||
func SeriesRaw(stationID string, start, end time.Time) ([]types.WeatherPoint, error) {
|
||||
const query = `
|
||||
SELECT
|
||||
to_char(timestamp, 'YYYY-MM-DD HH24:MI:SS') AS date_time,
|
||||
COALESCE(temperature, 0) AS temperature,
|
||||
COALESCE(humidity, 0) AS humidity,
|
||||
COALESCE(pressure, 0) AS pressure,
|
||||
COALESCE(wind_speed, 0) AS wind_speed,
|
||||
COALESCE(wind_direction, 0) AS wind_direction,
|
||||
COALESCE(rainfall, 0) AS rainfall,
|
||||
COALESCE(light, 0) AS light,
|
||||
COALESCE(uv, 0) AS uv,
|
||||
COALESCE(rainfall, 0) AS rain_total
|
||||
FROM rs485_weather_data
|
||||
WHERE station_id = $1 AND timestamp >= $2 AND timestamp <= $3
|
||||
ORDER BY timestamp`
|
||||
rows, err := DB().Query(query, stationID, start, end)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var points []types.WeatherPoint
|
||||
for rows.Next() {
|
||||
var p types.WeatherPoint
|
||||
if err := rows.Scan(&p.DateTime, &p.Temperature, &p.Humidity, &p.Pressure, &p.WindSpeed, &p.WindDir, &p.Rainfall, &p.Light, &p.UV, &p.RainTotal); err != nil {
|
||||
continue
|
||||
}
|
||||
points = append(points, p)
|
||||
}
|
||||
return points, nil
|
||||
}
|
||||
|
||||
func SeriesFrom10Min(stationID string, start, end time.Time, interval string) ([]types.WeatherPoint, error) {
|
||||
log.Printf("查询数据: stationID=%s, start=%v, end=%v, interval=%s", stationID, start.Format("2006-01-02 15:04:05"), end.Format("2006-01-02 15:04:05"), interval)
|
||||
var query string
|
||||
switch interval {
|
||||
case "10min":
|
||||
query = `
|
||||
SELECT
|
||||
to_char(bucket_start + interval '10 minutes', 'YYYY-MM-DD HH24:MI:SS') AS date_time,
|
||||
ROUND(temp_c_x100/100.0, 2) AS temperature,
|
||||
ROUND(humidity_pct::numeric, 2) AS humidity,
|
||||
ROUND(pressure_hpa_x100/100.0, 2) AS pressure,
|
||||
ROUND(wind_speed_ms_x1000/1000.0, 3) AS wind_speed,
|
||||
ROUND(wind_dir_deg::numeric, 2) AS wind_direction,
|
||||
ROUND(rain_10m_mm_x1000/1000.0, 3) AS rainfall,
|
||||
ROUND(solar_wm2_x100/100.0, 2) AS light,
|
||||
ROUND(uv_index::numeric, 2) AS uv,
|
||||
ROUND(rain_total_mm_x1000/1000.0, 3) AS rain_total
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start <= $3
|
||||
ORDER BY bucket_start + interval '10 minutes'`
|
||||
case "30min":
|
||||
query = buildAggFrom10MinQuery("30 minutes")
|
||||
default:
|
||||
query = buildAggFrom10MinQuery("1 hour")
|
||||
}
|
||||
rows, err := DB().Query(query, stationID, start, end)
|
||||
if err != nil {
|
||||
log.Printf("查询失败: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var points []types.WeatherPoint
|
||||
for rows.Next() {
|
||||
var p types.WeatherPoint
|
||||
if err := rows.Scan(&p.DateTime, &p.Temperature, &p.Humidity, &p.Pressure, &p.WindSpeed, &p.WindDir, &p.Rainfall, &p.Light, &p.UV, &p.RainTotal); err != nil {
|
||||
continue
|
||||
}
|
||||
points = append(points, p)
|
||||
}
|
||||
return points, nil
|
||||
}
|
||||
|
||||
func buildAggFrom10MinQuery(interval string) string {
|
||||
return `
|
||||
WITH base AS (
|
||||
SELECT * FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start <= $3
|
||||
), g AS (
|
||||
SELECT
|
||||
CASE '` + interval + `'
|
||||
WHEN '1 hour' THEN date_trunc('hour', bucket_start)
|
||||
WHEN '30 minutes' THEN
|
||||
date_trunc('hour', bucket_start) +
|
||||
CASE WHEN date_part('minute', bucket_start) >= 30
|
||||
THEN '30 minutes'::interval
|
||||
ELSE '0 minutes'::interval
|
||||
END
|
||||
END AS grp,
|
||||
SUM(temp_c_x100 * sample_count)::bigint AS w_temp,
|
||||
SUM(humidity_pct * sample_count)::bigint AS w_hum,
|
||||
SUM(pressure_hpa_x100 * sample_count)::bigint AS w_p,
|
||||
SUM(solar_wm2_x100 * sample_count)::bigint AS w_solar,
|
||||
SUM(uv_index * sample_count)::bigint AS w_uv,
|
||||
SUM(wind_speed_ms_x1000 * sample_count)::bigint AS w_ws,
|
||||
MAX(wind_gust_ms_x1000) AS gust_max,
|
||||
SUM(sin(radians(wind_dir_deg)) * sample_count)::double precision AS sin_sum,
|
||||
SUM(cos(radians(wind_dir_deg)) * sample_count)::double precision AS cos_sum,
|
||||
SUM(rain_10m_mm_x1000) AS rain_sum,
|
||||
SUM(sample_count) AS n_sum,
|
||||
MAX(rain_total_mm_x1000) AS rain_total_max
|
||||
FROM base
|
||||
GROUP BY 1
|
||||
)
|
||||
SELECT
|
||||
to_char(grp + '` + interval + `'::interval, 'YYYY-MM-DD HH24:MI:SS') AS date_time,
|
||||
ROUND((w_temp/NULLIF(n_sum,0))/100.0, 2) AS temperature,
|
||||
ROUND((w_hum/NULLIF(n_sum,0))::numeric, 2) AS humidity,
|
||||
ROUND((w_p/NULLIF(n_sum,0))/100.0, 2) AS pressure,
|
||||
ROUND((w_ws/NULLIF(n_sum,0))/1000.0, 3) AS wind_speed,
|
||||
ROUND((CASE WHEN degrees(atan2(sin_sum, cos_sum)) < 0
|
||||
THEN degrees(atan2(sin_sum, cos_sum)) + 360
|
||||
ELSE degrees(atan2(sin_sum, cos_sum)) END)::numeric, 2) AS wind_direction,
|
||||
ROUND((rain_sum/1000.0)::numeric, 3) AS rainfall,
|
||||
ROUND((w_solar/NULLIF(n_sum,0))/100.0, 2) AS light,
|
||||
ROUND((w_uv/NULLIF(n_sum,0))::numeric, 2) AS uv,
|
||||
ROUND((rain_total_max/1000.0)::numeric, 3) AS rain_total
|
||||
FROM g
|
||||
ORDER BY grp + '` + interval + `'::interval`
|
||||
}
|
||||
|
||||
func Forecast(stationID string, start, end time.Time, provider string, versions int) ([]types.ForecastPoint, error) {
|
||||
var query string
|
||||
var args []interface{}
|
||||
if versions <= 0 {
|
||||
versions = 1
|
||||
}
|
||||
if provider != "" {
|
||||
if provider == "open-meteo" {
|
||||
query = `
|
||||
WITH ranked AS (
|
||||
SELECT station_id, provider, issued_at, forecast_time,
|
||||
temp_c_x100, humidity_pct, wind_speed_ms_x1000, wind_gust_ms_x1000,
|
||||
wind_dir_deg, rain_mm_x1000, precip_prob_pct, uv_index, pressure_hpa_x100,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
|
||||
CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider IN ('open-meteo','open-meteo_historical')
|
||||
AND forecast_time BETWEEN $2 AND $3
|
||||
)
|
||||
SELECT to_char(forecast_time, 'YYYY-MM-DD HH24:MI:SS') as date_time,
|
||||
provider,
|
||||
to_char(issued_at, 'YYYY-MM-DD HH24:MI:SS') as issued_at,
|
||||
ROUND(temp_c_x100::numeric / 100.0, 2) as temperature,
|
||||
humidity_pct as humidity,
|
||||
ROUND(pressure_hpa_x100::numeric / 100.0, 2) as pressure,
|
||||
ROUND(wind_speed_ms_x1000::numeric / 1000.0, 2) as wind_speed,
|
||||
wind_dir_deg as wind_direction,
|
||||
ROUND(rain_mm_x1000::numeric / 1000.0, 3) as rainfall,
|
||||
precip_prob_pct as precip_prob,
|
||||
uv_index as uv,
|
||||
lead_hours
|
||||
FROM ranked WHERE rn <= $4
|
||||
ORDER BY forecast_time, issued_at DESC`
|
||||
args = []interface{}{stationID, start.Format("2006-01-02 15:04:05-07"), end.Format("2006-01-02 15:04:05-07"), versions}
|
||||
} else {
|
||||
query = `
|
||||
WITH ranked AS (
|
||||
SELECT station_id, provider, issued_at, forecast_time,
|
||||
temp_c_x100, humidity_pct, wind_speed_ms_x1000, wind_gust_ms_x1000,
|
||||
wind_dir_deg, rain_mm_x1000, precip_prob_pct, uv_index, pressure_hpa_x100,
|
||||
ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
|
||||
CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $2
|
||||
AND forecast_time BETWEEN $3 AND $4
|
||||
)
|
||||
SELECT to_char(forecast_time, 'YYYY-MM-DD HH24:MI:SS') as date_time,
|
||||
provider,
|
||||
to_char(issued_at, 'YYYY-MM-DD HH24:MI:SS') as issued_at,
|
||||
ROUND(temp_c_x100::numeric / 100.0, 2) as temperature,
|
||||
humidity_pct as humidity,
|
||||
ROUND(pressure_hpa_x100::numeric / 100.0, 2) as pressure,
|
||||
ROUND(wind_speed_ms_x1000::numeric / 1000.0, 2) as wind_speed,
|
||||
wind_dir_deg as wind_direction,
|
||||
ROUND(rain_mm_x1000::numeric / 1000.0, 3) as rainfall,
|
||||
precip_prob_pct as precip_prob,
|
||||
uv_index as uv,
|
||||
lead_hours
|
||||
FROM ranked WHERE rn <= $5
|
||||
ORDER BY forecast_time, issued_at DESC`
|
||||
args = []interface{}{stationID, provider, start.Format("2006-01-02 15:04:05-07"), end.Format("2006-01-02 15:04:05-07"), versions}
|
||||
}
|
||||
} else {
|
||||
query = `
|
||||
WITH ranked AS (
|
||||
SELECT station_id, provider, issued_at, forecast_time,
|
||||
temp_c_x100, humidity_pct, wind_speed_ms_x1000, wind_gust_ms_x1000,
|
||||
wind_dir_deg, rain_mm_x1000, precip_prob_pct, uv_index, pressure_hpa_x100,
|
||||
ROW_NUMBER() OVER (PARTITION BY provider, forecast_time ORDER BY issued_at DESC) AS rn,
|
||||
CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND forecast_time BETWEEN $2 AND $3
|
||||
)
|
||||
SELECT to_char(forecast_time, 'YYYY-MM-DD HH24:MI:SS') as date_time,
|
||||
provider,
|
||||
to_char(issued_at, 'YYYY-MM-DD HH24:MI:SS') as issued_at,
|
||||
ROUND(temp_c_x100::numeric / 100.0, 2) as temperature,
|
||||
humidity_pct as humidity,
|
||||
ROUND(pressure_hpa_x100::numeric / 100.0, 2) as pressure,
|
||||
ROUND(wind_speed_ms_x1000::numeric / 1000.0, 2) as wind_speed,
|
||||
wind_dir_deg as wind_direction,
|
||||
ROUND(rain_mm_x1000::numeric / 1000.0, 3) as rainfall,
|
||||
precip_prob_pct as precip_prob,
|
||||
uv_index as uv,
|
||||
lead_hours
|
||||
FROM ranked WHERE rn <= $4
|
||||
ORDER BY forecast_time, provider, issued_at DESC`
|
||||
args = []interface{}{stationID, start.Format("2006-01-02 15:04:05-07"), end.Format("2006-01-02 15:04:05-07"), versions}
|
||||
}
|
||||
rows, err := DB().Query(query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("查询预报数据失败: %v", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
var points []types.ForecastPoint
|
||||
for rows.Next() {
|
||||
var p types.ForecastPoint
|
||||
if err := rows.Scan(&p.DateTime, &p.Provider, &p.IssuedAt, &p.Temperature, &p.Humidity, &p.Pressure, &p.WindSpeed, &p.WindDir, &p.Rainfall, &p.PrecipProb, &p.UV, &p.LeadHours); err != nil {
|
||||
log.Printf("数据扫描错误: %v", err)
|
||||
continue
|
||||
}
|
||||
p.Source = "forecast"
|
||||
points = append(points, p)
|
||||
}
|
||||
return points, nil
|
||||
}
|
||||
|
||||
// HeavyRainPerf counts, over the past N days, the hours whose observed rainfall reached >= 8mm,
// how many of those hours had any +1/+2/+3 hour forecast of >= 8mm (successes),
// and the average lead time in hours among the successes (using the largest matching lead).
|
||||
func HeavyRainPerf(stationID string, since time.Time, provider string) (types.HeavyRainPerf, error) {
|
||||
// when provider is empty, default to imdroid_mix
|
||||
if provider == "" {
|
||||
provider = "imdroid_mix"
|
||||
}
|
||||
// up to the current hour
|
||||
now := time.Now()
|
||||
// SQL: aggregate the 10-minute table into hourly observations and keep the hours with >= 8mm;
// for those hours, compute lead_hours from forecast_hourly and pick the largest lead that also reaches >= 8mm.
|
||||
const q = `
|
||||
WITH actual AS (
|
||||
SELECT date_trunc('hour', bucket_start + interval '10 minutes') AS hour,
|
||||
SUM(rain_10m_mm_x1000) AS rain_sum
|
||||
FROM rs485_weather_10min
|
||||
WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start < $3
|
||||
GROUP BY 1
|
||||
), heavy AS (
|
||||
SELECT hour FROM actual WHERE rain_sum >= 8000
|
||||
), fc AS (
|
||||
SELECT forecast_time,
|
||||
CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours,
|
||||
rain_mm_x1000
|
||||
FROM forecast_hourly
|
||||
WHERE station_id = $1 AND provider = $4
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) AS total_heavy,
|
||||
COUNT(max_hit_lead) AS success_count,
|
||||
COALESCE(AVG(max_hit_lead::numeric), 0)::float8 AS avg_lead_hours
|
||||
FROM (
|
||||
SELECT h.hour,
|
||||
MAX(CASE WHEN f.lead_hours BETWEEN 1 AND 3 AND f.rain_mm_x1000 >= 8000 THEN f.lead_hours ELSE NULL END) AS max_hit_lead
|
||||
FROM heavy h
|
||||
LEFT JOIN fc f ON f.forecast_time = h.hour
|
||||
GROUP BY h.hour
|
||||
) s`
|
||||
var perf types.HeavyRainPerf
|
||||
err := DB().QueryRow(q, stationID, since, now, provider).Scan(&perf.TotalHeavy, &perf.SuccessCount, &perf.AvgLeadHours)
|
||||
if err != nil {
|
||||
return types.HeavyRainPerf{}, err
|
||||
}
|
||||
return perf, nil
|
||||
}
|
||||
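The 30-minute/1-hour rollup in buildAggFrom10MinQuery is a sample-count-weighted mean: each 10-minute bucket contributes value * sample_count, and the group total is divided by the summed sample_count. A small numeric illustration with invented values, mirroring the SQL's integer division before the final scaling:

package main

import "fmt"

func main() {
    // Three 10-minute buckets with temperatures (degC x100) and their sample counts.
    temps := []int64{2050, 2100, 2200} // 20.50, 21.00, 22.00 degC
    counts := []int64{10, 5, 20}

    var weighted, n int64
    for i := range temps {
        weighted += temps[i] * counts[i]
        n += counts[i]
    }
    // Same shape as the SQL: ROUND((w_temp/NULLIF(n_sum,0))/100.0, 2)
    fmt.Printf("weighted mean: %.2f degC\n", float64(weighted/n)/100.0) // 21.42
}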
32
core/internal/data/users.go
Normal file
@@ -0,0 +1,32 @@
package data

import (
    "database/sql"
    "errors"
    "time"
)

type User struct {
    Username  string
    Password  string // bcrypt hash
    CreatedAt time.Time
}

func GetUser(username string) (*User, error) {
    const q = `SELECT username, password, created_at FROM users WHERE username = $1`
    var u User
    err := DB().QueryRow(q, username).Scan(&u.Username, &u.Password, &u.CreatedAt)
    if err != nil {
        if errors.Is(err, sql.ErrNoRows) {
            return nil, nil
        }
        return nil, err
    }
    return &u, nil
}

func CreateUser(username, passwordHash string) error {
    const q = `INSERT INTO users (username, password, created_at) VALUES ($1, $2, NOW()) ON CONFLICT (username) DO NOTHING`
    _, err := DB().Exec(q, username, passwordHash)
    return err
}
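CreateUser stores the bcrypt hash produced by the auth package; a one-off seeding sketch, with placeholder username and password:

package main

import (
    "fmt"

    "weatherstation/core/internal/auth"
    "weatherstation/core/internal/data"
)

func main() {
    hash, err := auth.HashPassword("a-strong-password")
    if err != nil {
        panic(err)
    }
    // ON CONFLICT DO NOTHING makes this safe to run more than once.
    if err := data.CreateUser("admin", hash); err != nil {
        panic(err)
    }
    fmt.Println("user ensured")
}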
51
core/internal/server/auth_handlers.go
Normal file
@@ -0,0 +1,51 @@
package server

import (
    "net/http"
    "time"

    "github.com/gin-gonic/gin"
    "weatherstation/core/internal/auth"
    "weatherstation/core/internal/data"
)

func handleLogin(opts Options) gin.HandlerFunc {
    secret := []byte(opts.AuthSecret)
    return func(c *gin.Context) {
        username := c.PostForm("username")
        password := c.PostForm("password")
        if username == "" || password == "" {
            c.String(http.StatusBadRequest, "missing username or password")
            return
        }
        u, err := data.GetUser(username)
        if err != nil {
            c.String(http.StatusInternalServerError, "login error")
            return
        }
        if u == nil || !auth.CheckPassword(u.Password, password) {
            // simple failure
            c.String(http.StatusUnauthorized, "invalid credentials")
            return
        }
        token, exp := auth.MakeSessionToken(username, 24*time.Hour, secret)
        // set HttpOnly cookie; maxAge in seconds
        maxAge := int(time.Until(exp).Seconds())
        c.SetCookie("core_session", token, maxAge, "/", "", false, true)
        c.Redirect(http.StatusFound, "/bigscreen")
    }
}

func handleLogout(opts Options) gin.HandlerFunc {
    return func(c *gin.Context) {
        // expire cookie immediately
        c.SetCookie("core_session", "", -1, "/", "", false, true)
        c.Redirect(http.StatusFound, "/admin/login")
    }
}

// parseToken wraps auth.ParseSessionToken for local use.
func parseToken(token string, secret []byte) (string, bool) {
    return auth.ParseSessionToken(token, secret)
}
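handleLogin issues the core_session cookie; the route guard that consumes it is not part of this file, so the following is only a hedged sketch of how such a middleware could validate the cookie with parseToken (the requireSession name is hypothetical, not taken from the repository):

package server

import (
    "net/http"

    "github.com/gin-gonic/gin"
)

// requireSession is a hypothetical guard: it redirects to the login page unless
// the core_session cookie parses and verifies against the configured secret.
func requireSession(secret []byte) gin.HandlerFunc {
    return func(c *gin.Context) {
        token, err := c.Cookie("core_session")
        if err != nil || token == "" {
            c.Redirect(http.StatusFound, "/admin/login")
            c.Abort()
            return
        }
        username, ok := parseToken(token, secret)
        if !ok {
            c.Redirect(http.StatusFound, "/admin/login")
            c.Abort()
            return
        }
        c.Set("username", username)
        c.Next()
    }
}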
437
core/internal/server/handlers.go
Normal file
@@ -0,0 +1,437 @@
|
||||
package server
|
||||
|
||||
import (
    "fmt"
    "net/http"
    "strconv"
    "strings"
    "time"

    "github.com/gin-gonic/gin"

    "weatherstation/core/internal/data"
)
|
||||
|
||||
func handleHealth(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, gin.H{"status": "ok", "ts": time.Now().UTC().Format(time.RFC3339)})
|
||||
}
|
||||
|
||||
func handleSystemStatus(c *gin.Context) {
|
||||
online := data.OnlineDevices()
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"online_devices": online,
|
||||
"server_time": time.Now().Format("2006-01-02 15:04:05"),
|
||||
})
|
||||
}
|
||||
|
||||
func handleStations(c *gin.Context) {
|
||||
stations, err := data.Stations()
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("query stations failed: %v", err)})
|
||||
return
|
||||
}
|
||||
for i := range stations {
|
||||
if len(stations[i].StationID) > 6 {
|
||||
hexID := stations[i].StationID[len(stations[i].StationID)-6:]
|
||||
if decimalID, err := strconv.ParseInt(hexID, 16, 64); err == nil {
|
||||
stations[i].DecimalID = strconv.FormatInt(decimalID, 10)
|
||||
}
|
||||
}
|
||||
}
|
||||
c.JSON(http.StatusOK, stations)
|
||||
}
|
||||
|
||||
func handleData(c *gin.Context) {
|
||||
idParam := c.Query("hex_id")
|
||||
startTime := c.Query("start_time")
|
||||
endTime := c.Query("end_time")
|
||||
interval := c.DefaultQuery("interval", "1hour")
|
||||
|
||||
if idParam == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing hex_id"})
|
||||
return
|
||||
}
|
||||
upper := strings.ToUpper(idParam)
|
||||
var b strings.Builder
|
||||
for i := 0; i < len(upper); i++ {
|
||||
ch := upper[i]
|
||||
if (ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F') {
|
||||
b.WriteByte(ch)
|
||||
}
|
||||
}
|
||||
hex := b.String()
|
||||
if hex == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid hex_id"})
|
||||
return
|
||||
}
|
||||
if len(hex) < 6 {
|
||||
hex = strings.Repeat("0", 6-len(hex)) + hex
|
||||
} else if len(hex) > 6 {
|
||||
hex = hex[len(hex)-6:]
|
||||
}
|
||||
stationID := fmt.Sprintf("RS485-%s", hex)
|
||||
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
start, err := time.ParseInLocation("2006-01-02 15:04:05", startTime, loc)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid start_time"})
|
||||
return
|
||||
}
|
||||
end, err := time.ParseInLocation("2006-01-02 15:04:05", endTime, loc)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid end_time"})
|
||||
return
|
||||
}
|
||||
|
||||
var points interface{}
|
||||
if interval == "raw" {
|
||||
points, err = data.SeriesRaw(stationID, start, end)
|
||||
} else {
|
||||
points, err = data.SeriesFrom10Min(stationID, start, end, interval)
|
||||
}
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("query failed: %v", err)})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, points)
|
||||
}
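handleData is lenient about the hex_id it accepts: it uppercases the input, strips non-hex characters, pads or truncates to the last 6 hex digits, and prefixes RS485-. A tiny illustration of the same normalization (the helper name is illustrative; the handler additionally rejects inputs with no hex digits at all):

package main

import (
    "fmt"
    "strings"
)

// normalizeHexID mirrors the cleanup in handleData: keep hex digits, use the last 6, left-pad with zeros.
func normalizeHexID(id string) string {
    upper := strings.ToUpper(id)
    var b strings.Builder
    for i := 0; i < len(upper); i++ {
        ch := upper[i]
        if (ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F') {
            b.WriteByte(ch)
        }
    }
    hex := b.String()
    if len(hex) < 6 {
        hex = strings.Repeat("0", 6-len(hex)) + hex
    } else if len(hex) > 6 {
        hex = hex[len(hex)-6:]
    }
    return "RS485-" + hex
}

func main() {
    fmt.Println(normalizeHexID("0x1a2b"))     // RS485-001A2B
    fmt.Println(normalizeHexID("00ABCDEF12")) // RS485-CDEF12
}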
|
||||
|
||||
func handleForecast(c *gin.Context) {
|
||||
stationID := c.Query("station_id")
|
||||
from := c.Query("from")
|
||||
to := c.Query("to")
|
||||
provider := c.Query("provider")
|
||||
versionsStr := c.DefaultQuery("versions", "1")
|
||||
versions, _ := strconv.Atoi(versionsStr)
|
||||
if versions <= 0 {
|
||||
versions = 1
|
||||
}
|
||||
|
||||
if stationID == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing station_id"})
|
||||
return
|
||||
}
|
||||
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
|
||||
var start, end time.Time
|
||||
var err error
|
||||
if from == "" || to == "" {
|
||||
now := time.Now().In(loc)
|
||||
start = now.Truncate(time.Hour).Add(1 * time.Hour)
|
||||
end = start.Add(3 * time.Hour)
|
||||
} else {
|
||||
start, err = time.ParseInLocation("2006-01-02 15:04:05", from, loc)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid from"})
|
||||
return
|
||||
}
|
||||
end, err = time.ParseInLocation("2006-01-02 15:04:05", to, loc)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid to"})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
points, err := data.Forecast(stationID, start, end, provider, versions)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("query forecast failed: %v", err)})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, points)
|
||||
}
|
||||
|
||||
// handleForecastPerf reports heavy-rain forecast performance over the past N days.
|
||||
// GET /api/forecast/perf?station_id=RS485-XXXXXX&days=30&provider=imdroid_mix
|
||||
func handleForecastPerf(c *gin.Context) {
|
||||
stationID := c.Query("station_id")
|
||||
if stationID == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing station_id"})
|
||||
return
|
||||
}
|
||||
daysStr := c.DefaultQuery("days", "30")
|
||||
provider := c.Query("provider")
|
||||
days, _ := strconv.Atoi(daysStr)
|
||||
if days <= 0 || days > 365 {
|
||||
days = 30
|
||||
}
|
||||
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
now := time.Now().In(loc)
|
||||
since := now.AddDate(0, 0, -days)
|
||||
|
||||
perf, err := data.HeavyRainPerf(stationID, since, provider)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("query perf failed: %v", err)})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, perf)
|
||||
}
|
||||
|
||||
// handleTSPage renders a very simple HTML page to query TS metrics by station and compare providers (+1/+2/+3/mixed).
|
||||
// Route: GET /TS?station_id=RS485-XXXXXX&start=YYYY-MM-DD+HH:MM:SS&end=YYYY-MM-DD+HH:MM:SS&threshold=0.1
|
||||
func handleTSPage(c *gin.Context) {
|
||||
stationID := c.Query("station_id")
|
||||
startStr := c.Query("start")
|
||||
endStr := c.Query("end")
|
||||
thrStr := c.DefaultQuery("threshold", "0.1")
|
||||
providerSel := c.DefaultQuery("provider", "all")
|
||||
viewSel := c.DefaultQuery("view", "all")
|
||||
detailSel := c.DefaultQuery("detail", "none")
|
||||
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
now := time.Now().In(loc)
|
||||
// defaults: last 30 days
|
||||
start := now.AddDate(0, 0, -30).Truncate(time.Hour)
|
||||
end := now.Truncate(time.Hour)
|
||||
if strings.TrimSpace(startStr) != "" {
|
||||
if t, err := time.ParseInLocation("2006-01-02 15:04:05", startStr, loc); err == nil {
|
||||
start = t
|
||||
}
|
||||
}
|
||||
if strings.TrimSpace(endStr) != "" {
|
||||
if t, err := time.ParseInLocation("2006-01-02 15:04:05", endStr, loc); err == nil {
|
||||
end = t
|
||||
}
|
||||
}
|
||||
if !end.After(start) {
|
||||
end = start.Add(24 * time.Hour)
|
||||
}
|
||||
thr, _ := strconv.ParseFloat(thrStr, 64)
|
||||
|
||||
// Providers to compare or single
|
||||
allProviders := []string{"imdroid_V6", "imdroid_V5", "imdroid_mix", "caiyun", "open-meteo", "imdroid"}
|
||||
var providers []string
|
||||
if providerSel == "all" || strings.TrimSpace(providerSel) == "" {
|
||||
providers = allProviders
|
||||
} else {
|
||||
providers = []string{providerSel}
|
||||
}
|
||||
|
||||
// Build HTML
|
||||
var b strings.Builder
|
||||
b.WriteString("<html><head><meta charset=\"utf-8\"></head><body>")
|
||||
b.WriteString("<h2>TS</h2>")
|
||||
b.WriteString("<form method='GET' action='/TS'>")
|
||||
b.WriteString("站点ID: <input type='text' name='station_id' value='" + htmlEscape(stationID) + "' style='width:180px'> ")
|
||||
b.WriteString("开始: <input type='text' name='start' value='" + start.Format("2006-01-02 15:04:05") + "' style='width:180px'> ")
|
||||
b.WriteString("结束: <input type='text' name='end' value='" + end.Format("2006-01-02 15:04:05") + "' style='width:180px'> ")
|
||||
// The threshold is fixed at its default value; no input field is exposed on the page.
|
||||
// provider select
|
||||
b.WriteString("预报源: <select name='provider'>")
|
||||
opts := append([]string{"all"}, allProviders...)
|
||||
for _, p := range opts {
|
||||
sel := ""
|
||||
if p == providerSel {
|
||||
sel = " selected"
|
||||
}
|
||||
b.WriteString("<option value='" + htmlEscape(p) + "'" + sel + ">" + htmlEscape(p) + "</option>")
|
||||
}
|
||||
b.WriteString("</select> ")
|
||||
// forecast lead time (select a fixed lead or all three)
|
||||
b.WriteString("预报点: <select name='view'>")
|
||||
viewOpts := []struct {
|
||||
v string
|
||||
n string
|
||||
}{{"all", "所有(+1/+2/+3)"}, {"lead1", "+1h"}, {"lead2", "+2h"}, {"lead3", "+3h"}}
|
||||
for _, it := range viewOpts {
|
||||
sel := ""
|
||||
if it.v == viewSel {
|
||||
sel = " selected"
|
||||
}
|
||||
b.WriteString("<option value='" + it.v + "'" + sel + ">" + it.n + "</option>")
|
||||
}
|
||||
b.WriteString("</select> ")
|
||||
// detail select
|
||||
b.WriteString("详细: <select name='detail'>")
|
||||
detOpts := []struct {
|
||||
v string
|
||||
n string
|
||||
}{{"none", "不显示"}, {"brief", "简略"}, {"detailed", "详细"}}
|
||||
for _, it := range detOpts {
|
||||
sel := ""
|
||||
if it.v == detailSel {
|
||||
sel = " selected"
|
||||
}
|
||||
b.WriteString("<option value='" + it.v + "'" + sel + ">" + it.n + "</option>")
|
||||
}
|
||||
b.WriteString("</select> ")
|
||||
b.WriteString("<button type='submit'>搜索</button></form>")
|
||||
// Helper: common station IDs for quick input
|
||||
b.WriteString("<div style='margin:8px 0;'>站点:</div>")
|
||||
b.WriteString("<table border='1' cellpadding='6' cellspacing='0'>")
|
||||
b.WriteString("<tr><th>station_id</th><th>location</th></tr>")
|
||||
b.WriteString("<tr><td>RS485-002A44</td><td>第二台气象站(伶俐镇那车1号大桥西北约324米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-0029CD</td><td>第四台气象站(宾阳县宾州镇塘山2号大桥西北约293米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-0029CA</td><td>第五台气象站(宾阳县陈平镇和平3号大桥东北约197米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-0029C3</td><td>第七台气象站(宾阳县细塘村东南约112米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-002A30</td><td>第八台气象站(宾阳县松塘村西南约161米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-002964</td><td>兴山县一号气象站(湖北省宜昌市兴山县昭君镇贺家院子东北约484米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-002A39</td><td>兴山县二号气象站(湖北省宜昌市兴山县昭君镇黄家堑东北约244米)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-002A69</td><td>第一台气象站(宾阳县陈平镇大平村西南约325米)(V6使用彩云为基础数据)</td></tr>")
|
||||
b.WriteString("<tr><td>RS485-002A7B</td><td>第三台气象站(宾阳县细坡西南约263米)(V6使用彩云为基础数据)</td></tr>")
|
||||
b.WriteString("</table><hr/>")
|
||||
|
||||
if strings.TrimSpace(stationID) == "" {
|
||||
b.WriteString("</body></html>")
|
||||
c.Data(http.StatusOK, "text/html; charset=utf-8", []byte(b.String()))
|
||||
return
|
||||
}
|
||||
|
||||
// header + legend
|
||||
b.WriteString("<div>时间窗: " + start.Format("2006-01-02 15:04:05") + " ~ " + end.Format("2006-01-02 15:04:05") + "</div>")
|
||||
b.WriteString("<div style='margin:6px 0;color:#444;'>说明:n 为样本数;n11 为预报降雨且实况降雨(Hit);n01 为预报不降雨但实况降雨(漏报);n10 为预报降雨但实况不降雨(误报);n00 为预报与实况均不降雨。CSI 为临界成功指数,POD 为命中率,FAR 为误报率。</div>")
|
||||
b.WriteString("<table border='1' cellpadding='6' cellspacing='0'>")
|
||||
b.WriteString("<tr><th>Provider</th><th>预报点</th><th>n</th><th>n11</th><th>n01</th><th>n10</th><th>n00</th><th>CSI</th><th>POD</th><th>FAR</th></tr>")
|
||||
|
||||
addRow := func(pv, view string, s data.BinaryScores) {
|
||||
b.WriteString(fmt.Sprintf("<tr><td>%s</td><td>%s</td><td>%d</td><td>%d</td><td>%d</td><td>%d</td><td>%d</td><td>%.3f</td><td>%.3f</td><td>%.3f</td></tr>",
|
||||
htmlEscape(pv), htmlEscape(view), s.N, s.N11, s.N01, s.N10, s.N00, s.CSI, s.POD, s.FAR))
|
||||
}
|
||||
|
||||
var renderScores func(pv, view string)
|
||||
renderScores = func(pv, view string) {
|
||||
switch view {
|
||||
case "lead1":
|
||||
if s, err := data.ForecastBinaryScoresLead(stationID, start, end, pv, thr, 1); err == nil {
|
||||
addRow(pv, "+1h", s)
|
||||
} else {
|
||||
addRow(pv, "+1h", data.BinaryScores{})
|
||||
}
|
||||
case "lead2":
|
||||
if s, err := data.ForecastBinaryScoresLead(stationID, start, end, pv, thr, 2); err == nil {
|
||||
addRow(pv, "+2h", s)
|
||||
} else {
|
||||
addRow(pv, "+2h", data.BinaryScores{})
|
||||
}
|
||||
case "lead3":
|
||||
if s, err := data.ForecastBinaryScoresLead(stationID, start, end, pv, thr, 3); err == nil {
|
||||
addRow(pv, "+3h", s)
|
||||
} else {
|
||||
addRow(pv, "+3h", data.BinaryScores{})
|
||||
}
|
||||
case "all":
|
||||
renderScores(pv, "lead1")
|
||||
renderScores(pv, "lead2")
|
||||
renderScores(pv, "lead3")
|
||||
}
|
||||
}
|
||||
for _, pv := range providers {
|
||||
renderScores(pv, viewSel)
|
||||
}
|
||||
b.WriteString("</table>")
|
||||
|
||||
// detail view only when a single provider and single view selected
|
||||
if providerSel != "all" && viewSel != "all" && (detailSel == "brief" || detailSel == "detailed") {
|
||||
var rows []data.BinaryRow
|
||||
var err error
|
||||
switch viewSel {
|
||||
case "lead1":
|
||||
rows, err = data.ForecastBinarySeriesLead(stationID, start, end, providerSel, 1)
|
||||
case "lead2":
|
||||
rows, err = data.ForecastBinarySeriesLead(stationID, start, end, providerSel, 2)
|
||||
case "lead3":
|
||||
rows, err = data.ForecastBinarySeriesLead(stationID, start, end, providerSel, 3)
|
||||
}
|
||||
if err == nil {
|
||||
b.WriteString("<h3>明细 (" + htmlEscape(providerSel) + ", " + htmlEscape(viewSel) + ")</h3>")
|
||||
b.WriteString("<table border='1' cellpadding='6' cellspacing='0'>")
|
||||
b.WriteString("<tr><th>编号</th><th>时间</th><th>实况(mm)</th><th>预报(mm)</th><th>发生?</th><th>命中?</th></tr>")
|
||||
var n, n11, n01, n10, n00 int64
|
||||
idx := 0
|
||||
for _, r := range rows {
|
||||
actualEvt := r.ActualMM > thr
|
||||
predEvt := r.PredMM > thr
|
||||
if detailSel == "brief" && !(actualEvt || predEvt) {
|
||||
continue
|
||||
}
|
||||
idx++
|
||||
n++
|
||||
if predEvt && actualEvt {
|
||||
n11++
|
||||
} else if !predEvt && actualEvt {
|
||||
n01++
|
||||
} else if predEvt && !actualEvt {
|
||||
n10++
|
||||
} else {
|
||||
n00++
|
||||
}
|
||||
hit := "否"
|
||||
if predEvt && actualEvt {
|
||||
hit = "是"
|
||||
}
|
||||
b.WriteString(fmt.Sprintf("<tr><td>%d</td><td>%s</td><td>%.3f</td><td>%.3f</td><td>%s</td><td>%s</td></tr>", idx, r.T.Format("2006-01-02 15:04:05"), r.ActualMM, r.PredMM, boolCN(actualEvt), hit))
|
||||
}
|
||||
var csi, pod, far float64
|
||||
h, m, f := float64(n11), float64(n01), float64(n10)
|
||||
if (h + m + f) > 0 {
|
||||
csi = h / (h + m + f)
|
||||
}
|
||||
if (h + m) > 0 {
|
||||
pod = h / (h + m)
|
||||
}
|
||||
if (h + f) > 0 {
|
||||
far = f / (h + f)
|
||||
}
|
||||
b.WriteString("</table>")
|
||||
b.WriteString(fmt.Sprintf("<div>统计: n=%d n11=%d n01=%d n10=%d n00=%d | CSI=%.3f POD=%.3f FAR=%.3f</div>", n, n11, n01, n10, n00, csi, pod, far))
|
||||
}
|
||||
}
|
||||
b.WriteString("</body></html>")
|
||||
c.Data(http.StatusOK, "text/html; charset=utf-8", []byte(b.String()))
|
||||
}
|
||||
|
||||
func htmlEscape(s string) string {
|
||||
r := strings.NewReplacer("&", "&amp;", "<", "&lt;", ">", "&gt;", "\"", "&quot;", "'", "&#39;")
|
||||
return r.Replace(s)
|
||||
}
|
||||
|
||||
func boolCN(b bool) string {
|
||||
if b {
|
||||
return "是"
|
||||
}
|
||||
return "否"
|
||||
}
|
||||
|
||||
// handleForecastScores returns binary verification scores (TS/CSI, POD, FAR) for the past N days.
|
||||
// GET /api/forecast/scores?station_id=RS485-XXXXXX&days=30&provider=imdroid_mix&threshold=0
|
||||
func handleForecastScores(c *gin.Context) {
|
||||
stationID := c.Query("station_id")
|
||||
if stationID == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing station_id"})
|
||||
return
|
||||
}
|
||||
daysStr := c.DefaultQuery("days", "30")
|
||||
provider := c.Query("provider")
|
||||
thrStr := c.DefaultQuery("threshold", "0")
|
||||
days, _ := strconv.Atoi(daysStr)
|
||||
if days <= 0 || days > 365 {
|
||||
days = 30
|
||||
}
|
||||
thresholdMM, _ := strconv.ParseFloat(thrStr, 64)
|
||||
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
now := time.Now().In(loc)
|
||||
since := now.AddDate(0, 0, -days)
|
||||
|
||||
scores, err := data.ForecastBinaryScores(stationID, since, now, provider, thresholdMM)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("query scores failed: %v", err)})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, scores)
|
||||
}
|
||||
core/internal/server/radar_handlers.go (Normal file)
@@ -0,0 +1,172 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/binary"
|
||||
"net/http"
|
||||
"time"
|
||||
"weatherstation/core/internal/data"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type radarTileRecord struct {
|
||||
DT time.Time
|
||||
Z int
|
||||
Y int
|
||||
X int
|
||||
Width int
|
||||
Height int
|
||||
West float64
|
||||
South float64
|
||||
East float64
|
||||
North float64
|
||||
ResDeg float64
|
||||
Data []byte
|
||||
}
|
||||
|
||||
type radarTileResponse struct {
|
||||
DT string `json:"dt"`
|
||||
Z int `json:"z"`
|
||||
Y int `json:"y"`
|
||||
X int `json:"x"`
|
||||
Width int `json:"width"`
|
||||
Height int `json:"height"`
|
||||
West float64 `json:"west"`
|
||||
South float64 `json:"south"`
|
||||
East float64 `json:"east"`
|
||||
North float64 `json:"north"`
|
||||
ResDeg float64 `json:"res_deg"`
|
||||
Values [][]*float64 `json:"values"`
|
||||
}
|
||||
|
||||
func handleRadarTimes(c *gin.Context) {
|
||||
z := parseInt(c.Query("z"), 7)
|
||||
y := parseInt(c.Query("y"), 40)
|
||||
x := parseInt(c.Query("x"), 102)
|
||||
fromStr := c.Query("from")
|
||||
toStr := c.Query("to")
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
if fromStr != "" && toStr != "" {
|
||||
from, err1 := time.ParseInLocation("2006-01-02 15:04:05", fromStr, loc)
|
||||
to, err2 := time.ParseInLocation("2006-01-02 15:04:05", toStr, loc)
|
||||
if err1 != nil || err2 != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid time range"})
|
||||
return
|
||||
}
|
||||
const qRange = `SELECT dt FROM radar_tiles WHERE z=$1 AND y=$2 AND x=$3 AND dt BETWEEN $4 AND $5 ORDER BY dt DESC`
|
||||
rows, err = data.DB().Query(qRange, z, y, x, from, to)
|
||||
} else {
|
||||
limit := parseInt(c.Query("limit"), 48)
|
||||
const q = `SELECT dt FROM radar_tiles WHERE z=$1 AND y=$2 AND x=$3 ORDER BY dt DESC LIMIT $4`
|
||||
rows, err = data.DB().Query(q, z, y, x, limit)
|
||||
}
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "query failed"})
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
var times []string
|
||||
for rows.Next() {
|
||||
var dt time.Time
|
||||
if err := rows.Scan(&dt); err == nil {
|
||||
times = append(times, dt.In(loc).Format("2006-01-02 15:04:05"))
|
||||
}
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"times": times})
|
||||
}
|
||||
|
||||
func handleRadarTilesAt(c *gin.Context) {
|
||||
z := parseInt(c.Query("z"), 7)
|
||||
dtStr := c.Query("dt")
|
||||
if dtStr == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing dt"})
|
||||
return
|
||||
}
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
dt, err := time.ParseInLocation("2006-01-02 15:04:05", dtStr, loc)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid dt"})
|
||||
return
|
||||
}
|
||||
|
||||
const q = `SELECT dt,z,y,x,width,height,west,south,east,north,res_deg,data FROM radar_tiles WHERE z=$1 AND dt=$2 ORDER BY y,x`
|
||||
rows, qerr := data.DB().Query(q, z, dt)
|
||||
if qerr != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "db failed"})
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var tiles []radarTileResponse
|
||||
for rows.Next() {
|
||||
var r radarTileRecord
|
||||
if err := rows.Scan(&r.DT, &r.Z, &r.Y, &r.X, &r.Width, &r.Height, &r.West, &r.South, &r.East, &r.North, &r.ResDeg, &r.Data); err != nil {
|
||||
continue
|
||||
}
|
||||
w, h := r.Width, r.Height
|
||||
if w <= 0 || h <= 0 || len(r.Data) < w*h*2 {
|
||||
continue
|
||||
}
|
||||
vals := make([][]*float64, h)
|
||||
off := 0
|
||||
for row := 0; row < h; row++ {
|
||||
rowVals := make([]*float64, w)
|
||||
for col := 0; col < w; col++ {
|
||||
v := int16(binary.BigEndian.Uint16(r.Data[off : off+2]))
|
||||
off += 2
|
||||
if v >= 32766 {
|
||||
rowVals[col] = nil
|
||||
continue
|
||||
}
|
||||
dbz := float64(v) / 10.0
|
||||
if dbz < 0 {
|
||||
dbz = 0
|
||||
} else if dbz > 75 {
|
||||
dbz = 75
|
||||
}
|
||||
vv := dbz
|
||||
rowVals[col] = &vv
|
||||
}
|
||||
vals[row] = rowVals
|
||||
}
|
||||
tiles = append(tiles, radarTileResponse{DT: r.DT.In(loc).Format("2006-01-02 15:04:05"), Z: r.Z, Y: r.Y, X: r.X, Width: w, Height: h, West: r.West, South: r.South, East: r.East, North: r.North, ResDeg: r.ResDeg, Values: vals})
|
||||
}
|
||||
if len(tiles) == 0 {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "not found"})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"tiles": tiles})
|
||||
}
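As a companion to the decoding loop above, here is a hedged sketch of how a tile payload in this layout could be produced: two bytes per cell, big-endian, the physical value scaled by 10 and stored as int16, with a large sentinel for missing cells. The exact sentinel written by the real ingest side is not shown in this change; 32767 is assumed, which the decoder above treats as missing because it checks v >= 32766.

package main

import (
	"encoding/binary"
	"fmt"
)

// encodeTile packs a grid of optional values into the 2-bytes-per-cell,
// big-endian layout read by handleRadarTilesAt above.
func encodeTile(values [][]*float64) []byte {
	const missing = 32767 // assumption: the decoder treats any stored value >= 32766 as "no data"
	var buf []byte
	cell := make([]byte, 2)
	for _, row := range values {
		for _, v := range row {
			x := int16(missing)
			if v != nil {
				x = int16(*v*10 + 0.5) // physical value (dBZ or mm) scaled by 10
			}
			binary.BigEndian.PutUint16(cell, uint16(x))
			buf = append(buf, cell...)
		}
	}
	return buf
}

func main() {
	v := 12.3
	fmt.Printf("% x\n", encodeTile([][]*float64{{&v, nil}})) // prints: 00 7b 7f ff
}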
|
||||
|
||||
func parseInt(s string, def int) int {
|
||||
if s == "" {
|
||||
return def
|
||||
}
|
||||
n := 0
|
||||
sign := 1
|
||||
i := 0
|
||||
if s[0] == '-' || s[0] == '+' {
|
||||
if s[0] == '-' {
|
||||
sign = -1
|
||||
}
|
||||
i = 1
|
||||
}
|
||||
for ; i < len(s); i++ {
|
||||
ch := s[i]
|
||||
if ch < '0' || ch > '9' {
|
||||
return def
|
||||
}
|
||||
n = n*10 + int(ch-'0')
|
||||
}
|
||||
return sign * n
|
||||
}
|
||||
core/internal/server/radar_weather_handlers.go (Normal file)
@@ -0,0 +1,69 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"time"
|
||||
"weatherstation/core/internal/data"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
// GET /api/radar/weather_nearest?lat=..&lon=..&dt=YYYY-MM-DD HH:MM:SS
|
||||
func handleRadarWeatherNearest(c *gin.Context) {
|
||||
latStr := c.Query("lat")
|
||||
lonStr := c.Query("lon")
|
||||
if latStr == "" || lonStr == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing lat/lon"})
|
||||
return
|
||||
}
|
||||
lat, err1 := strconv.ParseFloat(latStr, 64)
|
||||
lon, err2 := strconv.ParseFloat(lonStr, 64)
|
||||
if err1 != nil || err2 != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid lat/lon"})
|
||||
return
|
||||
}
|
||||
dtStr := c.Query("dt")
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
dt := time.Now().In(loc)
|
||||
if dtStr != "" {
|
||||
if x, err := time.ParseInLocation("2006-01-02 15:04:05", dtStr, loc); err == nil {
|
||||
dt = x
|
||||
}
|
||||
}
|
||||
// search window +/- 6h
|
||||
rw, err := data.RadarWeatherNearest(lat, lon, dt, 6*time.Hour)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "query failed"})
|
||||
return
|
||||
}
|
||||
if rw == nil {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "not found"})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"alias": rw.Alias,
|
||||
"lat": rw.Lat,
|
||||
"lon": rw.Lon,
|
||||
"dt": rw.DT.In(loc).Format("2006-01-02 15:04:05"),
|
||||
"temperature": f64(rw.Temperature),
|
||||
"humidity": f64(rw.Humidity),
|
||||
"cloudrate": f64(rw.CloudRate),
|
||||
"visibility": f64(rw.Visibility),
|
||||
"dswrf": f64(rw.DSWRF),
|
||||
"wind_speed": f64(rw.WindSpeed),
|
||||
"wind_direction": f64(rw.WindDirection),
|
||||
"pressure": f64(rw.Pressure),
|
||||
})
|
||||
}
|
||||
|
||||
func f64(v sql.NullFloat64) interface{} {
|
||||
if v.Valid {
|
||||
return v.Float64
|
||||
}
|
||||
return nil
|
||||
}
|
||||
core/internal/server/rain_handlers.go (Normal file)
@@ -0,0 +1,152 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/binary"
|
||||
"net/http"
|
||||
"time"
|
||||
"weatherstation/core/internal/data"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type rainTileRecord struct {
|
||||
DT time.Time
|
||||
Z int
|
||||
Y int
|
||||
X int
|
||||
Width int
|
||||
Height int
|
||||
West float64
|
||||
South float64
|
||||
East float64
|
||||
North float64
|
||||
ResDeg float64
|
||||
Data []byte
|
||||
}
|
||||
|
||||
type rainTileResponse struct {
|
||||
DT string `json:"dt"`
|
||||
Z int `json:"z"`
|
||||
Y int `json:"y"`
|
||||
X int `json:"x"`
|
||||
Width int `json:"width"`
|
||||
Height int `json:"height"`
|
||||
West float64 `json:"west"`
|
||||
South float64 `json:"south"`
|
||||
East float64 `json:"east"`
|
||||
North float64 `json:"north"`
|
||||
ResDeg float64 `json:"res_deg"`
|
||||
Values [][]*float64 `json:"values"`
|
||||
}
|
||||
|
||||
func handleRainTimes(c *gin.Context) {
|
||||
z := parseInt(c.Query("z"), 7)
|
||||
y := parseInt(c.Query("y"), 40)
|
||||
x := parseInt(c.Query("x"), 102)
|
||||
fromStr := c.Query("from")
|
||||
toStr := c.Query("to")
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
|
||||
var rows *sql.Rows
|
||||
var err error
|
||||
if fromStr != "" && toStr != "" {
|
||||
from, err1 := time.ParseInLocation("2006-01-02 15:04:05", fromStr, loc)
|
||||
to, err2 := time.ParseInLocation("2006-01-02 15:04:05", toStr, loc)
|
||||
if err1 != nil || err2 != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid time range"})
|
||||
return
|
||||
}
|
||||
const qRange = `SELECT dt FROM rain_tiles WHERE z=$1 AND y=$2 AND x=$3 AND dt BETWEEN $4 AND $5 ORDER BY dt DESC`
|
||||
rows, err = data.DB().Query(qRange, z, y, x, from, to)
|
||||
} else {
|
||||
limit := parseInt(c.Query("limit"), 48)
|
||||
const q = `SELECT dt FROM rain_tiles WHERE z=$1 AND y=$2 AND x=$3 ORDER BY dt DESC LIMIT $4`
|
||||
rows, err = data.DB().Query(q, z, y, x, limit)
|
||||
}
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "query failed"})
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
var times []string
|
||||
for rows.Next() {
|
||||
var dt time.Time
|
||||
if err := rows.Scan(&dt); err == nil {
|
||||
times = append(times, dt.In(loc).Format("2006-01-02 15:04:05"))
|
||||
}
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"times": times})
|
||||
}
|
||||
|
||||
func handleRainTilesAt(c *gin.Context) {
|
||||
z := parseInt(c.Query("z"), 7)
|
||||
dtStr := c.Query("dt")
|
||||
if dtStr == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "missing dt"})
|
||||
return
|
||||
}
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
if loc == nil {
|
||||
loc = time.FixedZone("CST", 8*3600)
|
||||
}
|
||||
dt, err := time.ParseInLocation("2006-01-02 15:04:05", dtStr, loc)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid dt"})
|
||||
return
|
||||
}
|
||||
|
||||
const q = `SELECT dt,z,y,x,width,height,west,south,east,north,res_deg,data FROM rain_tiles WHERE z=$1 AND dt=$2 ORDER BY y,x`
|
||||
rows, qerr := data.DB().Query(q, z, dt)
|
||||
if qerr != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "db failed"})
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var tiles []rainTileResponse
|
||||
for rows.Next() {
|
||||
var r rainTileRecord
|
||||
if err := rows.Scan(&r.DT, &r.Z, &r.Y, &r.X, &r.Width, &r.Height, &r.West, &r.South, &r.East, &r.North, &r.ResDeg, &r.Data); err != nil {
|
||||
continue
|
||||
}
|
||||
w, h := r.Width, r.Height
|
||||
if w <= 0 || h <= 0 || len(r.Data) < w*h*2 {
|
||||
continue
|
||||
}
|
||||
vals := decodeRain(r.Data, w, h)
|
||||
tiles = append(tiles, rainTileResponse{DT: r.DT.In(loc).Format("2006-01-02 15:04:05"), Z: r.Z, Y: r.Y, X: r.X, Width: w, Height: h, West: r.West, South: r.South, East: r.East, North: r.North, ResDeg: r.ResDeg, Values: vals})
|
||||
}
|
||||
if len(tiles) == 0 {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "not found"})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"tiles": tiles})
|
||||
}
|
||||
|
||||
func decodeRain(buf []byte, w, h int) [][]*float64 {
|
||||
vals := make([][]*float64, h)
|
||||
off := 0
|
||||
for row := 0; row < h; row++ {
|
||||
rowVals := make([]*float64, w)
|
||||
for col := 0; col < w; col++ {
|
||||
v := int16(binary.BigEndian.Uint16(buf[off : off+2]))
|
||||
off += 2
|
||||
if v >= 32766 {
|
||||
rowVals[col] = nil
|
||||
continue
|
||||
}
|
||||
mm := float64(v) / 10.0
|
||||
if mm < 0 {
|
||||
mm = 0
|
||||
}
|
||||
vv := mm
|
||||
rowVals[col] = &vv
|
||||
}
|
||||
vals[row] = rowVals
|
||||
}
|
||||
return vals
|
||||
}
|
||||
core/internal/server/router.go (Normal file)
@@ -0,0 +1,164 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type Options struct {
|
||||
UIServeDir string
|
||||
BigscreenDir string
|
||||
TemplateDir string
|
||||
StaticDir string
|
||||
EnableCORS bool
|
||||
AuthSecret string
|
||||
}
|
||||
|
||||
func NewRouter(opts Options) *gin.Engine {
|
||||
r := gin.New()
|
||||
r.Use(gin.Logger())
|
||||
r.Use(gin.Recovery())
|
||||
|
||||
if opts.EnableCORS {
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
c.Writer.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, PATCH, DELETE, OPTIONS")
|
||||
c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
|
||||
if c.Request.Method == http.MethodOptions {
|
||||
c.AbortWithStatus(http.StatusNoContent)
|
||||
return
|
||||
}
|
||||
c.Next()
|
||||
})
|
||||
}
|
||||
|
||||
if strings.TrimSpace(opts.StaticDir) != "" {
|
||||
r.Static("/static", opts.StaticDir)
|
||||
}
|
||||
// Do not render legacy templates; keep core frontend under /ui
|
||||
|
||||
api := r.Group("/api")
|
||||
{
|
||||
api.GET("/health", handleHealth)
|
||||
api.GET("/system/status", handleSystemStatus)
|
||||
api.GET("/stations", handleStations)
|
||||
api.GET("/data", handleData)
|
||||
api.GET("/forecast", handleForecast)
|
||||
api.GET("/forecast/perf", handleForecastPerf)
|
||||
api.GET("/forecast/scores", handleForecastScores)
|
||||
api.GET("/radar/times", handleRadarTimes)
|
||||
api.GET("/radar/tiles_at", handleRadarTilesAt)
|
||||
api.GET("/radar/weather_nearest", handleRadarWeatherNearest)
|
||||
api.GET("/rain/times", handleRainTimes)
|
||||
api.GET("/rain/tiles_at", handleRainTilesAt)
|
||||
}
|
||||
|
||||
// Simple TS page (no CSS), at /TS
|
||||
r.GET("/TS", handleTSPage)
|
||||
|
||||
hasUI := strings.TrimSpace(opts.UIServeDir) != ""
|
||||
if hasUI {
|
||||
// Serve built Angular assets under /ui for static files
|
||||
r.Static("/ui", opts.UIServeDir)
|
||||
// Serve Angular index.html at root
|
||||
r.GET("/", func(c *gin.Context) {
|
||||
c.File(filepath.Join(opts.UIServeDir, "index.html"))
|
||||
})
|
||||
}
|
||||
|
||||
hasBigscreen := strings.TrimSpace(opts.BigscreenDir) != ""
|
||||
var bigscreenIndex string
|
||||
if hasBigscreen {
|
||||
bigscreenDir := filepath.Clean(opts.BigscreenDir)
|
||||
bigscreenIndex = filepath.Join(bigscreenDir, "index.html")
|
||||
// auth guard
|
||||
requireAuth := func(c *gin.Context) bool {
|
||||
if strings.TrimSpace(opts.AuthSecret) == "" {
|
||||
// no secret configured -> deny by default
|
||||
c.Redirect(http.StatusFound, "/admin/login")
|
||||
c.Abort()
|
||||
return false
|
||||
}
|
||||
cookie, err := c.Cookie("core_session")
|
||||
if err != nil || cookie == "" {
|
||||
c.Redirect(http.StatusFound, "/admin/login")
|
||||
c.Abort()
|
||||
return false
|
||||
}
|
||||
if _, ok := validateSession(cookie, opts.AuthSecret); !ok {
|
||||
c.Redirect(http.StatusFound, "/admin/login")
|
||||
c.Abort()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
serveBigscreenIndex := func(c *gin.Context) {
|
||||
if !requireAuth(c) {
|
||||
return
|
||||
}
|
||||
c.File(bigscreenIndex)
|
||||
}
|
||||
r.GET("/bigscreen", serveBigscreenIndex)
|
||||
r.GET("/bigscreen/*filepath", func(c *gin.Context) {
|
||||
if !requireAuth(c) {
|
||||
return
|
||||
}
|
||||
rel := strings.TrimPrefix(c.Param("filepath"), "/")
|
||||
if rel == "" {
|
||||
serveBigscreenIndex(c)
|
||||
return
|
||||
}
|
||||
full := filepath.Join(bigscreenDir, filepath.FromSlash(rel))
|
||||
if !strings.HasPrefix(full, bigscreenDir+string(os.PathSeparator)) && full != bigscreenDir {
|
||||
c.AbortWithStatus(http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if info, err := os.Stat(full); err == nil && !info.IsDir() {
|
||||
c.File(full)
|
||||
return
|
||||
}
|
||||
serveBigscreenIndex(c)
|
||||
})
|
||||
}
|
||||
|
||||
// Admin login routes
|
||||
r.GET("/admin/login", func(c *gin.Context) {
|
||||
if strings.TrimSpace(opts.TemplateDir) != "" {
|
||||
c.File(filepath.Join(opts.TemplateDir, "login.html"))
|
||||
return
|
||||
}
|
||||
c.String(http.StatusOK, "login page not configured")
|
||||
})
|
||||
r.POST("/admin/login", handleLogin(opts))
|
||||
r.GET("/admin/logout", handleLogout(opts))
|
||||
|
||||
// Optional SPA fallback: serve index.html for non-API, non-static routes
|
||||
r.NoRoute(func(c *gin.Context) {
|
||||
p := c.Request.URL.Path
|
||||
if strings.HasPrefix(p, "/api/") || strings.HasPrefix(p, "/static/") {
|
||||
c.AbortWithStatus(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
if hasBigscreen && strings.HasPrefix(p, "/bigscreen") {
|
||||
c.File(bigscreenIndex)
|
||||
return
|
||||
}
|
||||
if hasUI {
|
||||
c.File(filepath.Join(opts.UIServeDir, "index.html"))
|
||||
return
|
||||
}
|
||||
c.AbortWithStatus(http.StatusNotFound)
|
||||
})
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
// validateSession bridges to the auth package without importing it at the top of this file, avoiding circular-import concerns.
|
||||
func validateSession(token, secret string) (string, bool) {
|
||||
// local shim to avoid exposing auth from here
|
||||
return parseToken(token, []byte(secret))
|
||||
}
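A hedged sketch of how this router might be wired into a main program. The import path, directory layout and listen address below are assumptions and are not part of this change; only NewRouter and Options come from the file above.

package main

import (
	"log"

	"weatherstation/core/internal/server" // assumed import path, mirroring the data package used above
)

func main() {
	r := server.NewRouter(server.Options{
		UIServeDir:   "./ui/dist",   // assumed build output of the Angular frontend
		BigscreenDir: "./bigscreen", // assumed static bigscreen bundle
		TemplateDir:  "./templates", // holds login.html for /admin/login
		StaticDir:    "./static",
		EnableCORS:   true,
		AuthSecret:   "change-me", // must be non-empty for /bigscreen access to be granted
	})
	log.Fatal(r.Run(":8080")) // gin.Engine.Run starts the HTTP server
}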
|
||||
core/internal/sms/sms.go (Normal file)
@@ -0,0 +1,90 @@
|
||||
package sms
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
openapi "github.com/alibabacloud-go/darabonba-openapi/v2/client"
|
||||
dysmsapi20170525 "github.com/alibabacloud-go/dysmsapi-20170525/v3/client"
|
||||
tea "github.com/alibabacloud-go/tea/tea"
|
||||
)
|
||||
|
||||
// Config holds Aliyun SMS configuration.
|
||||
type Config struct {
|
||||
AccessKeyID string
|
||||
AccessKeySecret string
|
||||
SignName string
|
||||
TemplateCode string
|
||||
Endpoint string // default: dysmsapi.aliyuncs.com
|
||||
}
|
||||
|
||||
type Client struct {
|
||||
cfg Config
|
||||
client *dysmsapi20170525.Client
|
||||
}
|
||||
|
||||
// New creates an Aliyun SMS client. Does not send any request.
|
||||
func New(cfg Config) (*Client, error) {
|
||||
if strings.TrimSpace(cfg.AccessKeyID) == "" || strings.TrimSpace(cfg.AccessKeySecret) == "" {
|
||||
return nil, errors.New("sms: access key not configured")
|
||||
}
|
||||
if strings.TrimSpace(cfg.SignName) == "" || strings.TrimSpace(cfg.TemplateCode) == "" {
|
||||
return nil, errors.New("sms: signName/templateCode not configured")
|
||||
}
|
||||
if strings.TrimSpace(cfg.Endpoint) == "" {
|
||||
cfg.Endpoint = "dysmsapi.aliyuncs.com"
|
||||
}
|
||||
|
||||
oc := &openapi.Config{
|
||||
AccessKeyId: tea.String(cfg.AccessKeyID),
|
||||
AccessKeySecret: tea.String(cfg.AccessKeySecret),
|
||||
Endpoint: tea.String(cfg.Endpoint),
|
||||
}
|
||||
cli, err := dysmsapi20170525.NewClient(oc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Client{cfg: cfg, client: cli}, nil
|
||||
}
|
||||
|
||||
// TemplateData matches the Java version: time/name/content
|
||||
type TemplateData struct {
|
||||
Time string `json:"time"`
|
||||
Name string `json:"name"`
|
||||
Content string `json:"content"`
|
||||
Alert string `json:"alert"`
|
||||
}
|
||||
|
||||
// Send sends the template message to one or more phone numbers.
|
||||
// name/content/alert/msgTime map to template ${name}, ${content}, ${alert}, ${time}.
|
||||
func (c *Client) Send(ctx context.Context, name, content, alert, msgTime string, phones []string) error {
|
||||
if len(phones) == 0 {
|
||||
return errors.New("sms: empty phone list")
|
||||
}
|
||||
payload := TemplateData{Time: msgTime, Name: name, Content: content, Alert: alert}
|
||||
b, _ := json.Marshal(payload)
|
||||
param := string(b)
|
||||
|
||||
// Aliyun supports multiple comma-separated numbers, but keep batches small if needed.
|
||||
joined := strings.Join(phones, ",")
|
||||
req := &dysmsapi20170525.SendSmsRequest{
|
||||
PhoneNumbers: tea.String(joined),
|
||||
SignName: tea.String(c.cfg.SignName),
|
||||
TemplateCode: tea.String(c.cfg.TemplateCode),
|
||||
TemplateParam: tea.String(param),
|
||||
}
|
||||
|
||||
// Execute request
|
||||
resp, err := c.client.SendSms(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
code := tea.StringValue(resp.Body.Code)
|
||||
if strings.ToUpper(code) != "OK" {
|
||||
return fmt.Errorf("sms: send failed code=%s message=%s requestId=%s", code, tea.StringValue(resp.Body.Message), tea.StringValue(resp.Body.RequestId))
|
||||
}
|
||||
return nil
|
||||
}
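A hedged usage sketch for this client. The credentials, sign name, template code and phone number below are placeholders (assumptions); the argument order follows the Send signature above.

package main

import (
	"context"
	"log"

	"weatherstation/core/internal/sms" // assumed import path for the package above
)

func main() {
	cli, err := sms.New(sms.Config{
		AccessKeyID:     "LTAI-xxxxxxxx", // placeholder credentials
		AccessKeySecret: "xxxxxxxx",
		SignName:        "天气预警",          // must match an approved Aliyun sign name
		TemplateCode:    "SMS_000000000", // placeholder template code
	})
	if err != nil {
		log.Fatal(err)
	}
	// name, content, alert, time map to ${name}, ${content}, ${alert}, ${time}.
	err = cli.Send(context.Background(),
		"示例站点",
		"未来1小时预计强降雨",
		"红色预警",
		"2025-01-01 08:00:00",
		[]string{"13800000000"}) // placeholder recipient
	if err != nil {
		log.Fatal(err)
	}
}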
|
||||
db/migrations/02.sql (Normal file)
@@ -0,0 +1,7 @@
-- 02.sql: create simple users table
CREATE TABLE IF NOT EXISTS users (
    username TEXT PRIMARY KEY,
    password TEXT NOT NULL, -- bcrypt hash
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
db/migrations/03.sql (Normal file)
@@ -0,0 +1,9 @@
-- 03.sql: recipients for SMS alerts
-- Table to store phone numbers, enabled flag, and alert level (1=red only, 2=yellow+red)
-- PostgreSQL has no native unsigned int; use integer with CHECK constraints.
CREATE TABLE IF NOT EXISTS sms_recipients (
    phone TEXT PRIMARY KEY,
    enabled BOOLEAN NOT NULL DEFAULT TRUE,
    alert_level INTEGER NOT NULL DEFAULT 2 CHECK (alert_level >= 1)
);
db/migrations/04_sms_recipients_seed_20251125.sql (Normal file)
@@ -0,0 +1,28 @@
-- 04_sms_recipients_seed_20251125.sql: add/update specific SMS recipients
-- Sets enabled = FALSE and alert_level = 1 for listed phone numbers.
-- Idempotent via ON CONFLICT on primary key (phone).

INSERT INTO sms_recipients (phone, enabled, alert_level) VALUES
    ('13114458208', FALSE, 1),
    ('13986807953', FALSE, 1),
    ('13207210509', FALSE, 1),
    ('13886680872', FALSE, 1),
    ('13477172662', FALSE, 1),
    ('13177094329', FALSE, 1),
    ('13165617999', FALSE, 1),
    ('13217179901', FALSE, 1),
    ('18571017120', FALSE, 1),
    ('18674205345', FALSE, 1),
    ('18871769640', FALSE, 1),
    ('15587930225', FALSE, 1),
    ('13545715958', FALSE, 1),
    ('15629386907', FALSE, 1),
    ('15971633321', FALSE, 1),
    ('15671074991', FALSE, 1),
    ('18727254175', FALSE, 1),
    ('13477108587', FALSE, 1),
    ('15897521649', FALSE, 1)
ON CONFLICT (phone) DO UPDATE SET
    enabled = EXCLUDED.enabled,
    alert_level = EXCLUDED.alert_level;
db/migrations/05_alerts.sql (Normal file)
@@ -0,0 +1,23 @@
-- 05_alerts.sql: table for alerts/warnings
CREATE TABLE IF NOT EXISTS alerts (
    id BIGSERIAL PRIMARY KEY,
    alert_type TEXT NOT NULL,
    station_id VARCHAR(50) NOT NULL,
    level VARCHAR(10) NOT NULL CHECK (level IN ('yellow', 'red')),
    issued_at TIMESTAMPTZ NOT NULL,
    message TEXT,
    sms_phone TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- One record per alert event (no SMS stored)
CREATE UNIQUE INDEX IF NOT EXISTS alerts_uniq_event
    ON alerts (alert_type, station_id, issued_at, level)
    WHERE sms_phone IS NULL;

-- One record per phone recipient
CREATE UNIQUE INDEX IF NOT EXISTS alerts_uniq_phone
    ON alerts (alert_type, station_id, issued_at, level, sms_phone)
    WHERE sms_phone IS NOT NULL;

CREATE INDEX IF NOT EXISTS alerts_station_idx ON alerts (station_id);
db/migrations/20251013_add_forecast_weights_current.sql (Normal file)
@@ -0,0 +1,10 @@
-- Create snapshot table for per-station forecast fusion weights
CREATE TABLE IF NOT EXISTS forecast_weights_current (
    station_id TEXT PRIMARY KEY,
    w_open_meteo DOUBLE PRECISION NOT NULL,
    w_caiyun DOUBLE PRECISION NOT NULL,
    w_imdroid DOUBLE PRECISION NOT NULL,
    last_issued_at TIMESTAMPTZ NOT NULL,
    updated_at TIMESTAMPTZ NOT NULL
);
db/schema.sql (Normal file)
@@ -0,0 +1,566 @@
|
||||
--
|
||||
-- PostgreSQL database dump
|
||||
--
|
||||
|
||||
-- Dumped from database version 14.18 (Ubuntu 14.18-0ubuntu0.22.04.1)
|
||||
-- Dumped by pg_dump version 17.5
|
||||
|
||||
SET statement_timeout = 0;
|
||||
SET lock_timeout = 0;
|
||||
SET idle_in_transaction_session_timeout = 0;
|
||||
SET transaction_timeout = 0;
|
||||
SET client_encoding = 'UTF8';
|
||||
SET standard_conforming_strings = on;
|
||||
SELECT pg_catalog.set_config('search_path', '', false);
|
||||
SET check_function_bodies = false;
|
||||
SET xmloption = content;
|
||||
SET client_min_messages = warning;
|
||||
SET row_security = off;
|
||||
|
||||
--
|
||||
-- Name: public; Type: SCHEMA; Schema: -; Owner: -
|
||||
--
|
||||
|
||||
-- *not* creating schema, since initdb creates it
|
||||
|
||||
|
||||
SET default_tablespace = '';
|
||||
|
||||
SET default_table_access_method = heap;
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data; Type: TABLE; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE TABLE public.rs485_weather_data (
|
||||
id integer NOT NULL,
|
||||
station_id character varying(50) NOT NULL,
|
||||
"timestamp" timestamp with time zone NOT NULL,
|
||||
temperature double precision,
|
||||
humidity double precision,
|
||||
wind_speed double precision,
|
||||
wind_direction double precision,
|
||||
rainfall double precision,
|
||||
light double precision,
|
||||
uv double precision,
|
||||
pressure double precision,
|
||||
raw_data text
|
||||
);
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data_bak; Type: TABLE; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE TABLE public.rs485_weather_data_bak (
|
||||
id integer,
|
||||
station_id character varying(50),
|
||||
"timestamp" timestamp without time zone,
|
||||
temperature numeric(5,2),
|
||||
humidity numeric(5,2),
|
||||
wind_speed numeric(5,2),
|
||||
wind_direction numeric(5,2),
|
||||
rainfall numeric(5,2),
|
||||
light numeric(15,2),
|
||||
uv numeric(8,2),
|
||||
pressure numeric(7,2),
|
||||
raw_data text
|
||||
);
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE SEQUENCE public.rs485_weather_data_id_seq
|
||||
AS integer
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER SEQUENCE public.rs485_weather_data_id_seq OWNED BY public.rs485_weather_data.id;
|
||||
|
||||
|
||||
--
|
||||
-- Name: stations; Type: TABLE; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE TABLE public.stations (
|
||||
station_id character varying(50) NOT NULL,
|
||||
device_id character varying(50),
|
||||
password character varying(50) NOT NULL,
|
||||
name character varying(100),
|
||||
location character varying(100),
|
||||
latitude numeric(10,6),
|
||||
longitude numeric(10,6),
|
||||
altitude numeric(8,3),
|
||||
created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP,
|
||||
last_update timestamp with time zone,
|
||||
software_type character varying(100),
|
||||
device_type character varying(20) DEFAULT 'UNKNOWN'::character varying NOT NULL,
|
||||
CONSTRAINT check_device_type CHECK (((device_type)::text = ANY ((ARRAY['ECOWITT'::character varying, 'WH65LP'::character varying, 'UNKNOWN'::character varying])::text[])))
|
||||
);
|
||||
|
||||
|
||||
--
|
||||
-- Name: TABLE stations; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON TABLE public.stations IS '气象站设备信息表,存储设备的基本信息和认证信息';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN stations.device_type; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.stations.device_type IS 'ECOWITT: WIFI型, WH65LP: 485型';
|
||||
|
||||
|
||||
--
|
||||
-- Name: weather_data; Type: TABLE; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE TABLE public.weather_data (
|
||||
id integer NOT NULL,
|
||||
station_id character varying(50) NOT NULL,
|
||||
"timestamp" timestamp with time zone NOT NULL,
|
||||
temp_f integer,
|
||||
humidity integer,
|
||||
dewpoint_f integer,
|
||||
windchill_f integer,
|
||||
wind_dir integer,
|
||||
wind_speed_mph integer,
|
||||
wind_gust_mph integer,
|
||||
rain_in integer,
|
||||
daily_rain_in integer,
|
||||
weekly_rain_in integer,
|
||||
monthly_rain_in integer,
|
||||
yearly_rain_in integer,
|
||||
total_rain_in integer,
|
||||
solar_radiation integer,
|
||||
uv integer,
|
||||
indoor_temp_f integer,
|
||||
indoor_humidity integer,
|
||||
abs_barometer_in integer,
|
||||
barometer_in integer,
|
||||
low_battery boolean,
|
||||
raw_data text
|
||||
);
|
||||
|
||||
|
||||
--
|
||||
-- Name: TABLE weather_data; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON TABLE public.weather_data IS '气象站数据表,存储所有气象观测数据,数值型数据以整数形式存储,查询时需进行转换';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.id; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.id IS '自增主键ID';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.station_id; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.station_id IS '气象站ID,外键关联stations表';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data."timestamp"; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data."timestamp" IS '数据记录时间,使用UTC+8时区(中国标准时间)';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.temp_f; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.temp_f IS '室外温度,存储值=实际值×10,单位:华氏度,查询时需除以10,如768表示76.8°F';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.humidity; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.humidity IS '室外湿度,单位:百分比,如53表示53%';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.dewpoint_f; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.dewpoint_f IS '露点温度,存储值=实际值×10,单位:华氏度,查询时需除以10,如585表示58.5°F';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.windchill_f; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.windchill_f IS '风寒指数,存储值=实际值×10,单位:华氏度,查询时需除以10,如768表示76.8°F';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.wind_dir; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.wind_dir IS '风向,单位:角度(0-359),如44表示东北风(44°)';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.wind_speed_mph; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.wind_speed_mph IS '风速,存储值=实际值×100,单位:英里/小时,查询时需除以100,如100表示1.00mph';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.wind_gust_mph; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.wind_gust_mph IS '阵风速度,存储值=实际值×100,单位:英里/小时,查询时需除以100,如100表示1.00mph';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.rain_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.rain_in IS '当前降雨速率,存储值=实际值×1000,单位:英寸/小时,查询时需除以1000,如500表示0.500英寸/小时';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.daily_rain_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.daily_rain_in IS '日降雨量,存储值=实际值×1000,单位:英寸,查询时需除以1000,如500表示0.500英寸';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.weekly_rain_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.weekly_rain_in IS '周降雨量,存储值=实际值×1000,单位:英寸,查询时需除以1000,如500表示0.500英寸';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.monthly_rain_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.monthly_rain_in IS '月降雨量,存储值=实际值×1000,单位:英寸,查询时需除以1000,如79表示0.079英寸';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.yearly_rain_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.yearly_rain_in IS '年降雨量,存储值=实际值×1000,单位:英寸,查询时需除以1000,如79表示0.079英寸';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.total_rain_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.total_rain_in IS '总降雨量,存储值=实际值×1000,单位:英寸,查询时需除以1000,如79表示0.079英寸';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.solar_radiation; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.solar_radiation IS '太阳辐射,存储值=实际值×100,单位:W/m²,查询时需除以100,如172表示1.72W/m²';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.uv; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.uv IS '紫外线指数,整数值,如0表示无紫外线';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.indoor_temp_f; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.indoor_temp_f IS '室内温度,存储值=实际值×10,单位:华氏度,查询时需除以10,如837表示83.7°F';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.indoor_humidity; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.indoor_humidity IS '室内湿度,单位:百分比,如48表示48%';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.abs_barometer_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.abs_barometer_in IS '绝对气压,存储值=实际值×1000,单位:英寸汞柱,查询时需除以1000,如29320表示29.320英寸汞柱';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.barometer_in; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.barometer_in IS '相对气压,存储值=实际值×1000,单位:英寸汞柱,查询时需除以1000,如29805表示29.805英寸汞柱';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.low_battery; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.low_battery IS '低电量标志,布尔值,true表示电量低';
|
||||
|
||||
|
||||
--
|
||||
-- Name: COLUMN weather_data.raw_data; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON COLUMN public.weather_data.raw_data IS '原始数据字符串';
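The column comments above all follow the same fixed-point convention: the stored integer is the physical value multiplied by a per-column scale (10, 100 or 1000). A small hedged Go helper illustrating the read-side conversion, using the example values quoted in the comments:

package main

import "fmt"

// unscale converts a stored fixed-point integer back to its physical value
// using the per-column scale documented in the comments above.
func unscale(stored int, scale float64) float64 {
	return float64(stored) / scale
}

func main() {
	fmt.Println(unscale(768, 10))     // temp_f: 76.8 °F
	fmt.Println(unscale(100, 100))    // wind_speed_mph: 1.00 mph
	fmt.Println(unscale(29320, 1000)) // abs_barometer_in: 29.320 inHg
}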
|
||||
|
||||
|
||||
--
|
||||
-- Name: weather_data_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE SEQUENCE public.weather_data_id_seq
|
||||
AS integer
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
|
||||
--
|
||||
-- Name: weather_data_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER SEQUENCE public.weather_data_id_seq OWNED BY public.weather_data.id;
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data id; Type: DEFAULT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.rs485_weather_data ALTER COLUMN id SET DEFAULT nextval('public.rs485_weather_data_id_seq'::regclass);
|
||||
|
||||
|
||||
--
|
||||
-- Name: weather_data id; Type: DEFAULT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.weather_data ALTER COLUMN id SET DEFAULT nextval('public.weather_data_id_seq'::regclass);
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data rs485_udx; Type: CONSTRAINT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.rs485_weather_data
|
||||
ADD CONSTRAINT rs485_udx UNIQUE (station_id, "timestamp");
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data rs485_weather_data_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.rs485_weather_data
|
||||
ADD CONSTRAINT rs485_weather_data_pkey PRIMARY KEY (id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: stations stations_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.stations
|
||||
ADD CONSTRAINT stations_pkey PRIMARY KEY (station_id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: weather_data weather_data_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.weather_data
|
||||
ADD CONSTRAINT weather_data_pkey PRIMARY KEY (id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: idx_rwd_station_time; Type: INDEX; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE INDEX idx_rwd_station_time ON public.rs485_weather_data USING btree (station_id, "timestamp");
|
||||
|
||||
|
||||
--
|
||||
-- Name: idx_rwd_time; Type: INDEX; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE INDEX idx_rwd_time ON public.rs485_weather_data USING btree ("timestamp");
|
||||
|
||||
|
||||
--
|
||||
-- Name: idx_weather_data_station_timestamp; Type: INDEX; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE INDEX idx_weather_data_station_timestamp ON public.weather_data USING btree (station_id, "timestamp");
|
||||
|
||||
|
||||
--
|
||||
-- Name: INDEX idx_weather_data_station_timestamp; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON INDEX public.idx_weather_data_station_timestamp IS '气象站ID和时间戳的复合索引';
|
||||
|
||||
|
||||
--
|
||||
-- Name: idx_weather_data_timestamp; Type: INDEX; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
CREATE INDEX idx_weather_data_timestamp ON public.weather_data USING btree ("timestamp");
|
||||
|
||||
|
||||
--
|
||||
-- Name: INDEX idx_weather_data_timestamp; Type: COMMENT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
COMMENT ON INDEX public.idx_weather_data_timestamp IS '时间戳索引';
|
||||
|
||||
|
||||
--
|
||||
-- Name: rs485_weather_data rs485_weather_data_station_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.rs485_weather_data
|
||||
ADD CONSTRAINT rs485_weather_data_station_id_fkey FOREIGN KEY (station_id) REFERENCES public.stations(station_id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: weather_data weather_data_station_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.weather_data
|
||||
ADD CONSTRAINT weather_data_station_id_fkey FOREIGN KEY (station_id) REFERENCES public.stations(station_id);
|
||||
|
||||
|
||||
--
|
||||
-- PostgreSQL database dump complete
|
||||
--
|
||||
|
||||
|
||||
-- Name: rs485_weather_10min; Type: TABLE; Schema: public; Owner: -
|
||||
-- Purpose: 10-minute aggregation (long-term retention, scaled-integer storage)
|
||||
--
|
||||
CREATE TABLE IF NOT EXISTS public.rs485_weather_10min (
|
||||
id SERIAL PRIMARY KEY,
|
||||
station_id character varying(50) NOT NULL,
|
||||
"bucket_start" timestamp with time zone NOT NULL,
|
||||
temp_c_x100 integer,
|
||||
humidity_pct integer,
|
||||
wind_speed_ms_x1000 integer,
|
||||
wind_gust_ms_x1000 integer,
|
||||
wind_dir_deg integer,
|
||||
rain_10m_mm_x1000 integer,
|
||||
rain_total_mm_x1000 integer,
|
||||
solar_wm2_x100 integer,
|
||||
uv_index integer,
|
||||
pressure_hpa_x100 integer,
|
||||
sample_count integer DEFAULT 0 NOT NULL
|
||||
);
|
||||
|
||||
-- Constraints and indexes
|
||||
ALTER TABLE ONLY public.rs485_weather_10min
|
||||
ADD CONSTRAINT r10_udx UNIQUE (station_id, "bucket_start");
|
||||
|
||||
ALTER TABLE ONLY public.rs485_weather_10min
|
||||
ADD CONSTRAINT rs485_weather_10min_station_id_fkey FOREIGN KEY (station_id) REFERENCES public.stations(station_id);
|
||||
|
||||
CREATE INDEX idx_r10_station_time ON public.rs485_weather_10min USING btree (station_id, "bucket_start");
|
||||
|
||||
COMMENT ON TABLE public.rs485_weather_10min IS '10分钟聚合数据表,数值型以缩放整数存储(温度×100、风速×1000等)';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min."bucket_start" IS '10分钟桶开始时间(与CST对齐分桶,存储为timestamptz)';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.temp_c_x100 IS '10分钟平均温度,单位℃×100';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.humidity_pct IS '10分钟平均湿度,单位%';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.wind_speed_ms_x1000 IS '10分钟平均风速,单位m/s×1000';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.wind_gust_ms_x1000 IS '10分钟最大阵风,单位m/s×1000';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.wind_dir_deg IS '10分钟风向向量平均,单位度(0-359)';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.rain_10m_mm_x1000 IS '10分钟降雨量,按“带回绕正增量”计算,单位mm×1000';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.rain_total_mm_x1000 IS '桶末设备累计降雨(自开机起累加,0..FFFF回绕),单位mm×1000';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.solar_wm2_x100 IS '10分钟平均太阳辐射,单位W/m²×100';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.uv_index IS '10分钟平均紫外线指数';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.pressure_hpa_x100 IS '10分钟平均气压,单位hPa×100';
|
||||
COMMENT ON COLUMN public.rs485_weather_10min.sample_count IS '10分钟样本数量';
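The rain_10m_mm_x1000 comment above describes computing the 10-minute rainfall as a wrap-aware positive increment of the device's cumulative counter. A hedged Go sketch of that calculation; the 16-bit register width follows the 0..FFFF note above, and the units handled by the real aggregation job are an assumption.

package main

import "fmt"

// rainIncrement returns the non-negative increment between two consecutive
// cumulative readings of a counter that wraps around at "modulus"
// (0x10000 for the 0..FFFF device register described above).
func rainIncrement(prev, curr, modulus int64) int64 {
	d := curr - prev
	if d < 0 {
		d += modulus // the counter wrapped past its maximum
	}
	if d < 0 || d >= modulus {
		return 0 // guard against device resets or corrupt samples
	}
	return d
}

func main() {
	// Counter wrapped from 65530 back to 12: the bucket still gains 18 units.
	fmt.Println(rainIncrement(65530, 12, 0x10000))
}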
|
||||
|
||||
|
||||
--
|
||||
-- Name: forecast_hourly; Type: TABLE; Schema: public; Owner: -
|
||||
-- Purpose: hourly forecasts (versioned; issued_at is the time the forecast run was issued)
|
||||
--
|
||||
CREATE TABLE IF NOT EXISTS public.forecast_hourly (
|
||||
id SERIAL PRIMARY KEY,
|
||||
station_id character varying(50) NOT NULL,
|
||||
provider character varying(50) NOT NULL,
|
||||
issued_at timestamp with time zone NOT NULL,
|
||||
forecast_time timestamp with time zone NOT NULL,
|
||||
temp_c_x100 integer,
|
||||
humidity_pct integer,
|
||||
wind_speed_ms_x1000 integer,
|
||||
wind_gust_ms_x1000 integer,
|
||||
wind_dir_deg integer,
|
||||
rain_mm_x1000 integer,
|
||||
precip_prob_pct integer,
|
||||
uv_index integer,
|
||||
pressure_hpa_x100 integer
|
||||
);
|
||||
|
||||
-- Constraints and indexes
|
||||
ALTER TABLE ONLY public.forecast_hourly
|
||||
ADD CONSTRAINT forecast_hourly_udx UNIQUE (station_id, provider, issued_at, forecast_time);
|
||||
|
||||
ALTER TABLE ONLY public.forecast_hourly
|
||||
ADD CONSTRAINT forecast_hourly_station_id_fkey FOREIGN KEY (station_id) REFERENCES public.stations(station_id);
|
||||
|
||||
CREATE INDEX idx_fcast_station_time ON public.forecast_hourly USING btree (station_id, forecast_time);
|
||||
|
||||
-- Comments
|
||||
COMMENT ON TABLE public.forecast_hourly IS '小时级预报表,按issued_at版本化;要素使用缩放整数存储';
|
||||
COMMENT ON COLUMN public.forecast_hourly.issued_at IS '预报方案发布时间(版本时间)';
|
||||
COMMENT ON COLUMN public.forecast_hourly.forecast_time IS '目标小时时间戳';
|
||||
COMMENT ON COLUMN public.forecast_hourly.rain_mm_x1000 IS '该小时降雨量,单位mm×1000';
|
||||
|
||||
--
|
||||
-- Name: radar_weather; Type: TABLE; Schema: public; Owner: -
|
||||
-- Purpose: real-time weather at radar sites (Caiyun realtime), sampled every 10 minutes
|
||||
--
|
||||
CREATE TABLE IF NOT EXISTS public.radar_weather (
|
||||
id SERIAL PRIMARY KEY,
|
||||
alias TEXT NOT NULL,
|
||||
lat DOUBLE PRECISION NOT NULL,
|
||||
lon DOUBLE PRECISION NOT NULL,
|
||||
dt TIMESTAMPTZ NOT NULL,
|
||||
temperature DOUBLE PRECISION,
|
||||
humidity DOUBLE PRECISION,
|
||||
cloudrate DOUBLE PRECISION,
|
||||
visibility DOUBLE PRECISION,
|
||||
dswrf DOUBLE PRECISION,
|
||||
wind_speed DOUBLE PRECISION,
|
||||
wind_direction DOUBLE PRECISION,
|
||||
pressure DOUBLE PRECISION,
|
||||
created_at TIMESTAMPTZ DEFAULT now()
|
||||
);
|
||||
|
||||
-- Constraints and indexes
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS radar_weather_udx ON public.radar_weather(alias, dt);
|
||||
CREATE INDEX IF NOT EXISTS idx_radar_weather_dt ON public.radar_weather(dt);
|
||||
COMMENT ON TABLE public.radar_weather IS '雷达站实时气象数据表(彩云Realtime),按10分钟存档';
|
||||
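The unique index on (alias, dt) makes idempotent ingestion straightforward: re-polling the same 10-minute slot can update in place instead of creating duplicates. A hedged Go sketch of such an upsert (function and parameter names are illustrative and not taken from the diff; only two measurement columns shown):

package radarstore

import (
	"database/sql"
	"time"
)

// upsertRadarSample inserts one Caiyun realtime sample, relying on the
// (alias, dt) unique index so a repeated poll becomes an update, not a
// duplicate row.
func upsertRadarSample(db *sql.DB, alias string, lat, lon float64, dt time.Time, temperature, humidity float64) error {
	_, err := db.Exec(`
		INSERT INTO public.radar_weather (alias, lat, lon, dt, temperature, humidity)
		VALUES ($1, $2, $3, $4, $5, $6)
		ON CONFLICT (alias, dt) DO UPDATE
		SET temperature = EXCLUDED.temperature,
		    humidity    = EXCLUDED.humidity`,
		alias, lat, lon, dt, temperature, humidity)
	return err
}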
export/export.sh (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
#!/bin/bash
|
||||
|
||||
# 设置环境变量
|
||||
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||
export LANG=en_US.UTF-8
|
||||
export LC_ALL=en_US.UTF-8
|
||||
|
||||
# 设置PostgreSQL环境变量
|
||||
export PGPASSWORD="your-password-here"  # replace with the actual password
|
||||
|
||||
# 设置数据库连接参数
|
||||
DB_HOST="localhost"
|
||||
DB_PORT="5432"
|
||||
DB_NAME="weatherdb"
|
||||
DB_USER="yarnom"
|
||||
EXPORT_DIR="/home/yarnom/Archive/code/WeatherStation/exportData"
|
||||
|
||||
# 添加日志功能
|
||||
LOG_FILE="$EXPORT_DIR/export.log"
|
||||
|
||||
# 记录开始时间
|
||||
echo "=== 开始导出: $(date) ===" >> "$LOG_FILE"
|
||||
|
||||
# 确保导出目录存在
|
||||
mkdir -p "$EXPORT_DIR/data"
|
||||
|
||||
# 获取当前时间和10分钟前的时间
|
||||
CURRENT_DATE=$(date +"%Y-%m-%d")
|
||||
END_TIME=$(date +"%Y-%m-%d %H:%M:00")
|
||||
START_TIME=$(date -d "10 minutes ago" +"%Y-%m-%d %H:%M:00")
|
||||
|
||||
# 记录时间范围
|
||||
echo "导出时间范围: $START_TIME 到 $END_TIME" >> "$LOG_FILE"
|
||||
|
||||
# 设置当天的数据文件
|
||||
CURRENT_FILE="$EXPORT_DIR/data/weather_data_${CURRENT_DATE}.csv"
|
||||
LAST_EXPORT_TIME_FILE="$EXPORT_DIR/last_export_time"
|
||||
|
||||
# 检查是否需要创建新文件(新的一天)
|
||||
if [ -f "$LAST_EXPORT_TIME_FILE" ]; then
|
||||
LAST_DATE=$(head -n 1 "$LAST_EXPORT_TIME_FILE" | cut -d' ' -f1)
|
||||
if [ "$LAST_DATE" != "$CURRENT_DATE" ]; then
|
||||
# 新的一天,将昨天的文件压缩存档
|
||||
YESTERDAY=$(date -d "yesterday" +"%Y-%m-%d")
|
||||
if [ -f "$EXPORT_DIR/data/weather_data_${YESTERDAY}.csv" ]; then
|
||||
gzip "$EXPORT_DIR/data/weather_data_${YESTERDAY}.csv"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# 如果是新文件,创建表头
|
||||
if [ ! -f "$CURRENT_FILE" ]; then
|
||||
echo "创建新文件: $CURRENT_FILE" >> "$LOG_FILE"
|
||||
echo "latitude,longitude,station_id,station_name,date_time,elevation,pressure,temperature,dewpoint,wind_speed,wind_direction,relative_humidity,ztd,pwv" > "$CURRENT_FILE"
|
||||
fi
|
||||
|
||||
# 导出新数据并追加到当天的文件
|
||||
psql -h "$DB_HOST" -p "$DB_PORT" -d "$DB_NAME" -U "$DB_USER" -A -F "," -t -c "
|
||||
WITH avg_data AS (
|
||||
SELECT
|
||||
s.station_id,
|
||||
COALESCE(s.password, s.station_id) as output_station_id, -- 如果password为空则使用station_id
|
||||
'$END_TIME'::timestamp as date_time,
|
||||
-- 气压、温度取平均
|
||||
ROUND(AVG(r.pressure)::numeric, 2) as pressure,
|
||||
ROUND(AVG(r.temperature)::numeric, 2) as temperature,
|
||||
-- 风速取平均
|
||||
ROUND(AVG(r.wind_speed)::numeric, 2) as wind_speed,
|
||||
-- 风向使用矢量平均
|
||||
ROUND(DEGREES(ATAN2(
|
||||
AVG(SIN(RADIANS(r.wind_direction))),
|
||||
AVG(COS(RADIANS(r.wind_direction)))
|
||||
))::numeric + CASE
|
||||
WHEN DEGREES(ATAN2(
|
||||
AVG(SIN(RADIANS(r.wind_direction))),
|
||||
AVG(COS(RADIANS(r.wind_direction)))
|
||||
)) < 0 THEN 360
|
||||
ELSE 0
|
||||
END, 2) as wind_direction,
|
||||
-- 湿度取平均
|
||||
ROUND(AVG(r.humidity)::numeric, 2) as relative_humidity
|
||||
FROM stations s
|
||||
JOIN rs485_weather_data r ON s.station_id = r.station_id
|
||||
WHERE r.timestamp >= '$START_TIME' AND r.timestamp < '$END_TIME'
|
||||
GROUP BY s.station_id, s.password
|
||||
)
|
||||
SELECT
|
||||
'0', -- latitude
|
||||
'0', -- longitude
|
||||
output_station_id, -- station_id (使用password字段)
|
||||
'', -- station_name
|
||||
date_time, -- date_time
|
||||
'0', -- elevation
|
||||
COALESCE(pressure::text, '0'),
|
||||
COALESCE(temperature::text, '0'),
|
||||
'0', -- dewpoint
|
||||
COALESCE(wind_speed::text, '0'),
|
||||
COALESCE(wind_direction::text, '0'),
|
||||
COALESCE(relative_humidity::text, '0'),
|
||||
'', -- ztd
|
||||
'' -- pwv
|
||||
FROM avg_data
|
||||
ORDER BY output_station_id;" >> "$CURRENT_FILE" 2>> "$LOG_FILE"
|
||||
|
||||
# 检查psql执行状态
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "数据导出成功" >> "$LOG_FILE"
|
||||
else
|
||||
echo "数据导出失败" >> "$LOG_FILE"
|
||||
fi
|
||||
|
||||
# 更新最后导出时间
|
||||
echo "$END_TIME" > "$LAST_EXPORT_TIME_FILE"
|
||||
|
||||
# 记录结束时间
|
||||
echo "=== 结束导出: $(date) ===" >> "$LOG_FILE"
|
||||
echo "" >> "$LOG_FILE"
|
||||
|
||||
# 保持日志文件大小合理(保留最后1000行)
|
||||
tail -n 1000 "$LOG_FILE" > "${LOG_FILE}.tmp" && mv "${LOG_FILE}.tmp" "$LOG_FILE"
|
||||
|
||||
# 清除密码环境变量(安全考虑)
|
||||
unset PGPASSWORD
|
||||
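export.sh above averages wind direction as a vector: atan2 of the mean sine and cosine, shifted into 0–360 when negative. The same calculation in Go is handy as a cross-check of that SQL (a sketch, not part of the diff):

package windavg

import "math"

// vectorMeanDirection returns the circular mean of wind directions in degrees,
// normalized to [0, 360), matching the DEGREES(ATAN2(AVG(SIN), AVG(COS)))
// expression in the export script. It returns NaN for an empty slice.
func vectorMeanDirection(degs []float64) float64 {
	if len(degs) == 0 {
		return math.NaN()
	}
	var sumSin, sumCos float64
	for _, d := range degs {
		rad := d * math.Pi / 180
		sumSin += math.Sin(rad)
		sumCos += math.Cos(rad)
	}
	mean := math.Atan2(sumSin/float64(len(degs)), sumCos/float64(len(degs))) * 180 / math.Pi
	if mean < 0 {
		mean += 360
	}
	return mean
}

For example, averaging 350° and 10° this way yields 0°, where a plain arithmetic mean would give the misleading 180°.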
export/export_daily.sh (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
#!/bin/bash
|
||||
|
||||
# 设置环境变量
|
||||
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||
export LANG=en_US.UTF-8
|
||||
export LC_ALL=en_US.UTF-8
|
||||
|
||||
# 设置PostgreSQL环境变量
|
||||
export PGPASSWORD="root" # 替换为实际的密码
|
||||
|
||||
# 设置数据库连接参数
|
||||
DB_HOST="8.134.185.53"
|
||||
DB_PORT="5432"
|
||||
DB_NAME="weatherdb"
|
||||
DB_USER="yarnom"
|
||||
EXPORT_DIR="/home/yarnom/Archive/code/WeatherStation/exportData"
|
||||
|
||||
# 添加日志功能
|
||||
LOG_FILE="$EXPORT_DIR/export_daily.log"
|
||||
|
||||
# 检查是否提供了日期参数
|
||||
if [ $# -ne 1 ]; then
|
||||
echo "使用方法: $0 YYYY-MM-DD"
|
||||
echo "示例: $0 2024-02-20"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 获取输入的日期
|
||||
TARGET_DATE="$1"
|
||||
|
||||
# 验证日期格式
|
||||
if ! date -d "$TARGET_DATE" >/dev/null 2>&1; then
|
||||
echo "错误:无效的日期格式。请使用 YYYY-MM-DD 格式。"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 记录开始时间
|
||||
echo "=== 开始导出日期 $TARGET_DATE 的数据: $(date) ===" >> "$LOG_FILE"
|
||||
|
||||
# 确保导出目录存在
|
||||
mkdir -p "$EXPORT_DIR/daily_exports"
|
||||
|
||||
# 设置输出文件
|
||||
OUTPUT_FILE="$EXPORT_DIR/daily_exports/weather_data_${TARGET_DATE}.csv"
|
||||
|
||||
# 创建表头
|
||||
echo "latitude,longitude,station_id,station_name,date_time,elevation,pressure,temperature,dewpoint,wind_speed,wind_direction,relative_humidity,ztd,pwv" > "$OUTPUT_FILE"
|
||||
|
||||
# 导出数据
|
||||
psql -h "$DB_HOST" -p "$DB_PORT" -d "$DB_NAME" -U "$DB_USER" -A -F "," -t -c "
|
||||
WITH time_series AS (
|
||||
SELECT generate_series(
|
||||
'$TARGET_DATE 00:00:00'::timestamp,
|
||||
'$TARGET_DATE 23:59:59'::timestamp,
|
||||
'10 minutes'::interval
|
||||
) as interval_start
|
||||
),
|
||||
avg_data AS (
|
||||
SELECT
|
||||
s.station_id,
|
||||
COALESCE(s.password, s.station_id) as output_station_id,
|
||||
ts.interval_start as date_time,
|
||||
s.latitude,
|
||||
s.longitude,
|
||||
s.altitude as elevation,
|
||||
-- 气压、温度取平均
|
||||
ROUND(AVG(r.pressure)::numeric, 2) as pressure,
|
||||
ROUND(AVG(r.temperature)::numeric, 2) as temperature,
|
||||
-- 风速取平均
|
||||
ROUND(AVG(r.wind_speed)::numeric, 2) as wind_speed,
|
||||
-- 风向使用矢量平均
|
||||
ROUND(DEGREES(ATAN2(
|
||||
AVG(SIN(RADIANS(r.wind_direction))),
|
||||
AVG(COS(RADIANS(r.wind_direction)))
|
||||
))::numeric + CASE
|
||||
WHEN DEGREES(ATAN2(
|
||||
AVG(SIN(RADIANS(r.wind_direction))),
|
||||
AVG(COS(RADIANS(r.wind_direction)))
|
||||
)) < 0 THEN 360
|
||||
ELSE 0
|
||||
END, 2) as wind_direction,
|
||||
-- 湿度取平均
|
||||
ROUND(AVG(r.humidity)::numeric, 2) as relative_humidity
|
||||
FROM time_series ts
|
||||
CROSS JOIN stations s
|
||||
LEFT JOIN rs485_weather_data r ON s.station_id = r.station_id
|
||||
AND r.timestamp >= ts.interval_start
|
||||
AND r.timestamp < ts.interval_start + '10 minutes'::interval
|
||||
GROUP BY s.station_id, s.password, ts.interval_start, s.latitude, s.longitude, s.altitude
|
||||
)
|
||||
SELECT
|
||||
COALESCE(latitude::text, '0'), -- latitude
|
||||
COALESCE(longitude::text, '0'), -- longitude
|
||||
output_station_id, -- station_id
|
||||
'', -- station_name
|
||||
date_time, -- date_time
|
||||
COALESCE(elevation::text, '0'), -- elevation
|
||||
COALESCE(pressure::text, '0'), -- pressure
|
||||
COALESCE(temperature::text, '0'), -- temperature
|
||||
'0', -- dewpoint
|
||||
COALESCE(wind_speed::text, '0'), -- wind_speed
|
||||
COALESCE(wind_direction::text, '0'), -- wind_direction
|
||||
COALESCE(relative_humidity::text, '0'), -- relative_humidity
|
||||
'', -- ztd
|
||||
'' -- pwv
|
||||
FROM avg_data
|
||||
ORDER BY date_time, output_station_id;" >> "$OUTPUT_FILE" 2>> "$LOG_FILE"
|
||||
|
||||
# Check the psql exit status (saved so the summary block at the end can reuse it)
PSQL_STATUS=$?
if [ "$PSQL_STATUS" -eq 0 ]; then
|
||||
echo "数据导出成功到文件: $OUTPUT_FILE"
|
||||
echo "数据导出成功" >> "$LOG_FILE"
|
||||
else
|
||||
echo "数据导出失败"
|
||||
echo "数据导出失败" >> "$LOG_FILE"
|
||||
fi
|
||||
|
||||
# 记录结束时间
|
||||
echo "=== 结束导出: $(date) ===" >> "$LOG_FILE"
|
||||
echo "" >> "$LOG_FILE"
|
||||
|
||||
# 保持日志文件大小合理(保留最后1000行)
|
||||
tail -n 1000 "$LOG_FILE" > "${LOG_FILE}.tmp" && mv "${LOG_FILE}.tmp" "$LOG_FILE"
|
||||
|
||||
# 清除密码环境变量(安全考虑)
|
||||
unset PGPASSWORD
|
||||
|
||||
# If the export succeeded, print some summary statistics
# (use the saved psql status; $? here would reflect the preceding tail/unset commands, not psql)
if [ "$PSQL_STATUS" -eq 0 ]; then
|
||||
echo "统计信息:"
|
||||
echo "----------------------------------------"
|
||||
echo "总记录数:$(tail -n +2 "$OUTPUT_FILE" | wc -l)"
|
||||
echo "文件大小:$(du -h "$OUTPUT_FILE" | cut -f1)"
|
||||
echo "文件位置:$OUTPUT_FILE"
|
||||
fi
|
||||
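export_daily.sh interpolates $TARGET_DATE directly into the SQL text. When the same generate_series bucketing is driven from Go, the day boundaries can be bind parameters instead, which avoids quoting problems; a sketch under that assumption (constant and function names are illustrative):

package dailyexport

import (
	"database/sql"
	"time"
)

// dailyBucketsSQL reproduces the script's 10-minute bucketing for one day,
// but takes the day boundaries as bind parameters rather than pasting the
// date into the SQL string. Only one averaged column is shown.
const dailyBucketsSQL = `
WITH time_series AS (
    SELECT generate_series($1::timestamptz,
                           $2::timestamptz - interval '10 minutes',
                           '10 minutes'::interval) AS interval_start
)
SELECT s.station_id, ts.interval_start,
       ROUND(AVG(r.temperature)::numeric, 2) AS temperature
FROM time_series ts
CROSS JOIN stations s
LEFT JOIN rs485_weather_data r
       ON r.station_id = s.station_id
      AND r.timestamp >= ts.interval_start
      AND r.timestamp <  ts.interval_start + '10 minutes'::interval
GROUP BY s.station_id, ts.interval_start
ORDER BY ts.interval_start, s.station_id`

// QueryDailyBuckets runs the bucketed query for the 24 hours starting at dayStart.
func QueryDailyBuckets(db *sql.DB, dayStart time.Time) (*sql.Rows, error) {
	return db.Query(dailyBucketsSQL, dayStart, dayStart.Add(24*time.Hour))
}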
export/get.sh (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
#!/bin/bash
|
||||
|
||||
# 设置远程服务器信息
|
||||
REMOTE_USER="root"
|
||||
REMOTE_HOST="8.134.185.53"
|
||||
REMOTE_PORT="30001"
|
||||
REMOTE_PATH="/root/rain/weather-station/dataTransfer/data/"
|
||||
LOCAL_PATH="/home/imdroid/Build_WRF/cycling_data"  # local destination directory
|
||||
LOG_FILE="${LOCAL_PATH}/sync.log"
|
||||
|
||||
# 确保本地目录存在
|
||||
mkdir -p "$LOCAL_PATH"
|
||||
|
||||
# 记录开始时间
|
||||
echo "=== 开始同步: $(date) ===" >> "$LOG_FILE"
|
||||
|
||||
# Incremental sync with rsync
# -a: archive mode, preserve file attributes
# -v: verbose output
# -z: compress data in transit
# -t: preserve timestamps (already implied by -a)
# -P: show progress and allow resuming partial transfers
# -e: specify the ssh command and port
# --delete: remove files in the destination that no longer exist in the source
# --timeout=60: I/O timeout in seconds
# --bwlimit=1000: bandwidth limit (KB/s)
rsync -avzt \
|
||||
-P \
|
||||
-e "ssh -p ${REMOTE_PORT}" \
|
||||
--delete \
|
||||
--timeout=60 \
|
||||
--bwlimit=1000 \
|
||||
--include="*.csv" \
|
||||
--include="*.gz" \
|
||||
--exclude="*" \
|
||||
--log-file="$LOG_FILE" \
|
||||
"${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_PATH}" \
|
||||
"${LOCAL_PATH}/"
|
||||
|
||||
# Check the rsync exit status (captured immediately so later commands do not clobber $?)
RSYNC_STATUS=$?
if [ "$RSYNC_STATUS" -eq 0 ]; then
|
||||
echo "同步成功完成" >> "$LOG_FILE"
|
||||
|
||||
# 检查是否有新文件同步
|
||||
NEW_FILES=$(find "${LOCAL_PATH}" -type f -mmin -10 \( -name "*.csv" -o -name "*.gz" \))
|
||||
if [ ! -z "$NEW_FILES" ]; then
|
||||
echo "新同步的文件:" >> "$LOG_FILE"
|
||||
echo "$NEW_FILES" >> "$LOG_FILE"
|
||||
fi
|
||||
else
|
||||
echo "同步失败,错误代码: $?" >> "$LOG_FILE"
|
||||
# 可以在这里添加告警通知(如发送邮件)
|
||||
fi
|
||||
|
||||
# 记录同步时间
|
||||
echo "=== 结束同步: $(date) ===" >> "$LOG_FILE"
|
||||
echo "" >> "$LOG_FILE"
|
||||
|
||||
# 保持日志文件大小合理(保留最后1000行)
|
||||
tail -n 1000 "$LOG_FILE" > "${LOG_FILE}.tmp" && mv "${LOG_FILE}.tmp" "$LOG_FILE"
|
||||
go.mod (60 lines changed)
@@ -5,32 +5,54 @@ go 1.23.0
|
||||
toolchain go1.24.5
|
||||
|
||||
require (
|
||||
github.com/bytedance/sonic v1.13.3 // indirect
|
||||
github.com/bytedance/sonic/loader v0.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||
github.com/alibabacloud-go/darabonba-openapi/v2 v2.0.10
|
||||
github.com/alibabacloud-go/dysmsapi-20170525/v3 v3.0.6
|
||||
github.com/alibabacloud-go/tea v1.3.13
|
||||
github.com/eclipse/paho.mqtt.golang v1.4.3
|
||||
github.com/gin-gonic/gin v1.10.1
|
||||
github.com/go-sql-driver/mysql v1.8.1
|
||||
github.com/lib/pq v1.10.9
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.5 // indirect
|
||||
github.com/alibabacloud-go/debug v1.0.1 // indirect
|
||||
github.com/alibabacloud-go/endpoint-util v1.1.0 // indirect
|
||||
github.com/alibabacloud-go/openapi-util v0.1.0 // indirect
|
||||
github.com/alibabacloud-go/tea-utils v1.3.1 // indirect
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.7 // indirect
|
||||
github.com/alibabacloud-go/tea-xml v1.1.3 // indirect
|
||||
github.com/aliyun/credentials-go v1.3.10 // indirect
|
||||
github.com/bytedance/sonic v1.11.6 // indirect
|
||||
github.com/bytedance/sonic/loader v0.1.1 // indirect
|
||||
github.com/clbanning/mxj/v2 v2.7.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.4 // indirect
|
||||
github.com/cloudwego/iasm v0.2.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||
github.com/gin-contrib/sse v1.1.0 // indirect
|
||||
github.com/gin-gonic/gin v1.10.1 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.22.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/go-playground/validator/v10 v10.20.0 // indirect
|
||||
github.com/goccy/go-json v0.10.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.0 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/lib/pq v1.10.9 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
||||
github.com/tjfoc/gmsm v1.4.1 // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.3.0 // indirect
|
||||
golang.org/x/arch v0.19.0 // indirect
|
||||
golang.org/x/crypto v0.40.0 // indirect
|
||||
golang.org/x/net v0.42.0 // indirect
|
||||
golang.org/x/sys v0.34.0 // indirect
|
||||
golang.org/x/text v0.27.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||
golang.org/x/arch v0.8.0 // indirect
|
||||
golang.org/x/crypto v0.24.0 // indirect
|
||||
golang.org/x/net v0.26.0 // indirect
|
||||
golang.org/x/sync v0.7.0 // indirect
|
||||
golang.org/x/sys v0.21.0 // indirect
|
||||
golang.org/x/text v0.16.0 // indirect
|
||||
google.golang.org/protobuf v1.34.1 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
)
|
||||
|
||||
go.sum (317 lines changed)
@@ -1,35 +1,135 @@
|
||||
github.com/bytedance/sonic v1.13.3 h1:MS8gmaH16Gtirygw7jV91pDCN33NyMrPbN7qiYhEsF0=
|
||||
github.com/bytedance/sonic v1.13.3/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/alibabacloud-go/alibabacloud-gateway-pop v0.0.6 h1:eIf+iGJxdU4U9ypaUfbtOWCsZSbTb8AUHvyPrxu6mAA=
|
||||
github.com/alibabacloud-go/alibabacloud-gateway-pop v0.0.6/go.mod h1:4EUIoxs/do24zMOGGqYVWgw0s9NtiylnJglOeEB5UJo=
|
||||
github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.4/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc=
|
||||
github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.5 h1:zE8vH9C7JiZLNJJQ5OwjU9mSi4T9ef9u3BURT6LCLC8=
|
||||
github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.5/go.mod h1:tWnyE9AjF8J8qqLk645oUmVUnFybApTQWklQmi5tY6g=
|
||||
github.com/alibabacloud-go/darabonba-array v0.1.0 h1:vR8s7b1fWAQIjEjWnuF0JiKsCvclSRTfDzZHTYqfufY=
|
||||
github.com/alibabacloud-go/darabonba-array v0.1.0/go.mod h1:BLKxr0brnggqOJPqT09DFJ8g3fsDshapUD3C3aOEFaI=
|
||||
github.com/alibabacloud-go/darabonba-encode-util v0.0.2 h1:1uJGrbsGEVqWcWxrS9MyC2NG0Ax+GpOM5gtupki31XE=
|
||||
github.com/alibabacloud-go/darabonba-encode-util v0.0.2/go.mod h1:JiW9higWHYXm7F4PKuMgEUETNZasrDM6vqVr/Can7H8=
|
||||
github.com/alibabacloud-go/darabonba-map v0.0.2 h1:qvPnGB4+dJbJIxOOfawxzF3hzMnIpjmafa0qOTp6udc=
|
||||
github.com/alibabacloud-go/darabonba-map v0.0.2/go.mod h1:28AJaX8FOE/ym8OUFWga+MtEzBunJwQGceGQlvaPGPc=
|
||||
github.com/alibabacloud-go/darabonba-openapi/v2 v2.0.2/go.mod h1:5JHVmnHvGzR2wNdgaW1zDLQG8kOC4Uec8ubkMogW7OQ=
|
||||
github.com/alibabacloud-go/darabonba-openapi/v2 v2.0.10 h1:GEYkMApgpKEVDn6z12DcH1EGYpDYRB8JxsazM4Rywak=
|
||||
github.com/alibabacloud-go/darabonba-openapi/v2 v2.0.10/go.mod h1:26a14FGhZVELuz2cc2AolvW4RHmIO3/HRwsdHhaIPDE=
|
||||
github.com/alibabacloud-go/darabonba-signature-util v0.0.7 h1:UzCnKvsjPFzApvODDNEYqBHMFt1w98wC7FOo0InLyxg=
|
||||
github.com/alibabacloud-go/darabonba-signature-util v0.0.7/go.mod h1:oUzCYV2fcCH797xKdL6BDH8ADIHlzrtKVjeRtunBNTQ=
|
||||
github.com/alibabacloud-go/darabonba-string v1.0.2 h1:E714wms5ibdzCqGeYJ9JCFywE5nDyvIXIIQbZVFkkqo=
|
||||
github.com/alibabacloud-go/darabonba-string v1.0.2/go.mod h1:93cTfV3vuPhhEwGGpKKqhVW4jLe7tDpo3LUM0i0g6mA=
|
||||
github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68/go.mod h1:6pb/Qy8c+lqua8cFpEy7g39NRRqOWc3rOwAy8m5Y2BY=
|
||||
github.com/alibabacloud-go/debug v1.0.0/go.mod h1:8gfgZCCAC3+SCzjWtY053FrOcd4/qlH6IHTI4QyICOc=
|
||||
github.com/alibabacloud-go/debug v1.0.1 h1:MsW9SmUtbb1Fnt3ieC6NNZi6aEwrXfDksD4QA6GSbPg=
|
||||
github.com/alibabacloud-go/debug v1.0.1/go.mod h1:8gfgZCCAC3+SCzjWtY053FrOcd4/qlH6IHTI4QyICOc=
|
||||
github.com/alibabacloud-go/dysmsapi-20170525/v3 v3.0.6 h1:UTl97mt2qfavxveqCkaVg4tKaZUPzA9RKbFIRaIdtdg=
|
||||
github.com/alibabacloud-go/dysmsapi-20170525/v3 v3.0.6/go.mod h1:UWpcGrWwTbES9QW7OQ7xDffukMJ/l7lzioixIz8+lgY=
|
||||
github.com/alibabacloud-go/endpoint-util v1.1.0 h1:r/4D3VSw888XGaeNpP994zDUaxdgTSHBbVfZlzf6b5Q=
|
||||
github.com/alibabacloud-go/endpoint-util v1.1.0/go.mod h1:O5FuCALmCKs2Ff7JFJMudHs0I5EBgecXXxZRyswlEjE=
|
||||
github.com/alibabacloud-go/openapi-util v0.0.11/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws=
|
||||
github.com/alibabacloud-go/openapi-util v0.1.0 h1:0z75cIULkDrdEhkLWgi9tnLe+KhAFE/r5Pb3312/eAY=
|
||||
github.com/alibabacloud-go/openapi-util v0.1.0/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws=
|
||||
github.com/alibabacloud-go/tea v1.1.0/go.mod h1:IkGyUSX4Ba1V+k4pCtJUc6jDpZLFph9QMy2VUPTwukg=
|
||||
github.com/alibabacloud-go/tea v1.1.7/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4=
|
||||
github.com/alibabacloud-go/tea v1.1.8/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4=
|
||||
github.com/alibabacloud-go/tea v1.1.11/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4=
|
||||
github.com/alibabacloud-go/tea v1.1.17/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A=
|
||||
github.com/alibabacloud-go/tea v1.1.19/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A=
|
||||
github.com/alibabacloud-go/tea v1.1.20/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A=
|
||||
github.com/alibabacloud-go/tea v1.2.2/go.mod h1:CF3vOzEMAG+bR4WOql8gc2G9H3EkH3ZLAQdpmpXMgwk=
|
||||
github.com/alibabacloud-go/tea v1.3.13 h1:WhGy6LIXaMbBM6VBYcsDCz6K/TPsT1Ri2hPmmZffZ94=
|
||||
github.com/alibabacloud-go/tea v1.3.13/go.mod h1:A560v/JTQ1n5zklt2BEpurJzZTI8TUT+Psg2drWlxRg=
|
||||
github.com/alibabacloud-go/tea-utils v1.3.1 h1:iWQeRzRheqCMuiF3+XkfybB3kTgUXkXX+JMrqfLeB2I=
|
||||
github.com/alibabacloud-go/tea-utils v1.3.1/go.mod h1:EI/o33aBfj3hETm4RLiAxF/ThQdSngxrpF8rKUDJjPE=
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.0/go.mod h1:U5MTY10WwlquGPS34DOeomUGBB0gXbLueiq5Trwu0C4=
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.3/go.mod h1:sj1PbjPodAVTqGTA3olprfeeqqmwD0A5OQz94o9EuXQ=
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.5/go.mod h1:dL6vbUT35E4F4bFTHL845eUloqaerYBYPsdWR2/jhe4=
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.6/go.mod h1:qxn986l+q33J5VkialKMqT/TTs3E+U9MJpd001iWQ9I=
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.7 h1:WDx5qW3Xa5ZgJ1c8NfqJkF6w+AU5wB8835UdhPr6Ax0=
|
||||
github.com/alibabacloud-go/tea-utils/v2 v2.0.7/go.mod h1:qxn986l+q33J5VkialKMqT/TTs3E+U9MJpd001iWQ9I=
|
||||
github.com/alibabacloud-go/tea-xml v1.1.2/go.mod h1:Rq08vgCcCAjHyRi/M7xlHKUykZCEtyBy9+DPF6GgEu8=
|
||||
github.com/alibabacloud-go/tea-xml v1.1.3 h1:7LYnm+JbOq2B+T/B0fHC4Ies4/FofC4zHzYtqw7dgt0=
|
||||
github.com/alibabacloud-go/tea-xml v1.1.3/go.mod h1:Rq08vgCcCAjHyRi/M7xlHKUykZCEtyBy9+DPF6GgEu8=
|
||||
github.com/aliyun/credentials-go v1.1.2/go.mod h1:ozcZaMR5kLM7pwtCMEpVmQ242suV6qTJya2bDq4X1Tw=
|
||||
github.com/aliyun/credentials-go v1.3.1/go.mod h1:8jKYhQuDawt8x2+fusqa1Y6mPxemTsBEN04dgcAcYz0=
|
||||
github.com/aliyun/credentials-go v1.3.6/go.mod h1:1LxUuX7L5YrZUWzBrRyk0SwSdH4OmPrib8NVePL3fxM=
|
||||
github.com/aliyun/credentials-go v1.3.10 h1:45Xxrae/evfzQL9V10zL3xX31eqgLWEaIdCoPipOEQA=
|
||||
github.com/aliyun/credentials-go v1.3.10/go.mod h1:Jm6d+xIgwJVLVWT561vy67ZRP4lPTQxMbEYRuT2Ti1U=
|
||||
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
|
||||
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
|
||||
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
|
||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
||||
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
|
||||
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
||||
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
|
||||
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/clbanning/mxj/v2 v2.5.5/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s=
|
||||
github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME=
|
||||
github.com/clbanning/mxj/v2 v2.7.0/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
|
||||
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
|
||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
|
||||
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
|
||||
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
|
||||
github.com/eclipse/paho.mqtt.golang v1.4.3 h1:2kwcUGn8seMUfWndX0hGbvH8r7crgcJguQNCyp70xik=
|
||||
github.com/eclipse/paho.mqtt.golang v1.4.3/go.mod h1:CSYvoAlsMkhYOXh/oKyxa8EcBci6dVkLCbo5tTC1RIE=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||
github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
|
||||
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao=
|
||||
github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
|
||||
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
|
||||
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
|
||||
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
|
||||
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
|
||||
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
|
||||
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
|
||||
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
|
||||
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
@@ -39,40 +139,193 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
|
||||
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/tjfoc/gmsm v1.3.2/go.mod h1:HaUcFuY0auTiaHB9MHFGCPx5IaLhTUd2atbCFBQXn9w=
|
||||
github.com/tjfoc/gmsm v1.4.1 h1:aMe1GlZb+0bLjn+cKTPEvvn9oUEBlJitaZiiBwsbgho=
|
||||
github.com/tjfoc/gmsm v1.4.1/go.mod h1:j4INPkHWMrhJb38G+J6W4Tw0AbuN8Thu3PbdVYhVcTE=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
|
||||
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
|
||||
golang.org/x/arch v0.19.0 h1:LmbDQUodHThXE+htjrnmVD73M//D9GTH6wFZjyDkjyU=
|
||||
golang.org/x/arch v0.19.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
|
||||
golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=
|
||||
golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=
|
||||
golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
|
||||
golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
|
||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
|
||||
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201012173705-84dcc777aaee/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
|
||||
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI=
|
||||
golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20201010224723-4f7140c49acb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
||||
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
|
||||
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
|
||||
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
|
||||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
|
||||
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
|
||||
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
|
||||
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
|
||||
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
|
||||
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
|
||||
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||
|
||||
internal/config/config.go (new file, 147 lines)
@@ -0,0 +1,147 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type ServerConfig struct {
|
||||
WebPort int `yaml:"web_port"` // Gin Web服务器端口
|
||||
UDPPort int `yaml:"udp_port"` // UDP服务器端口
|
||||
}
|
||||
|
||||
type DatabaseConfig struct {
|
||||
Host string `yaml:"host"`
|
||||
Port int `yaml:"port"`
|
||||
User string `yaml:"user"`
|
||||
Password string `yaml:"password"`
|
||||
DBName string `yaml:"dbname"`
|
||||
SSLMode string `yaml:"sslmode"`
|
||||
}
|
||||
|
||||
// ForecastConfig 预报相关配置
|
||||
type ForecastConfig struct {
|
||||
CaiyunToken string `yaml:"caiyun_token"`
|
||||
}
|
||||
|
||||
// RadarConfig 雷达相关配置
|
||||
type RadarConfig struct {
|
||||
// RealtimeIntervalMinutes 彩云实况拉取周期(分钟)。允许值:10、30、60。默认 10。
|
||||
RealtimeIntervalMinutes int `yaml:"realtime_interval_minutes"`
|
||||
// RealtimeEnabled 是否启用彩云实况定时任务。默认 false(不下载)。
|
||||
RealtimeEnabled bool `yaml:"realtime_enabled"`
|
||||
// Aliases 配置化的雷达别名列表(可用于前端选择与实况拉取)。
|
||||
Aliases []RadarAlias `yaml:"aliases"`
|
||||
}
|
||||
|
||||
// RadarAlias 配置中的雷达别名条目
|
||||
type RadarAlias struct {
|
||||
Alias string `yaml:"alias"`
|
||||
Lat float64 `yaml:"lat"`
|
||||
Lon float64 `yaml:"lon"`
|
||||
Z int `yaml:"z"`
|
||||
Y int `yaml:"y"`
|
||||
X int `yaml:"x"`
|
||||
}
|
||||
|
||||
// MySQLConfig MySQL 连接配置(用于 rtk_data)
|
||||
type MySQLConfig struct {
|
||||
Host string `yaml:"host"`
|
||||
Port int `yaml:"port"`
|
||||
User string `yaml:"user"`
|
||||
Password string `yaml:"password"`
|
||||
DBName string `yaml:"dbname"`
|
||||
Params string `yaml:"params"` // 例如: parseTime=true&loc=Asia%2FShanghai
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Server ServerConfig `yaml:"server"`
|
||||
Database DatabaseConfig `yaml:"database"`
|
||||
Forecast ForecastConfig `yaml:"forecast"`
|
||||
Radar RadarConfig `yaml:"radar"`
|
||||
MySQL MySQLConfig `yaml:"mysql"`
|
||||
}
|
||||
|
||||
var (
|
||||
instance *Config
|
||||
once sync.Once
|
||||
)
|
||||
|
||||
// GetConfig 返回配置单例
|
||||
func GetConfig() *Config {
|
||||
once.Do(func() {
|
||||
instance = &Config{}
|
||||
if err := instance.loadConfig(); err != nil {
|
||||
panic(fmt.Sprintf("加载配置文件失败: %v", err))
|
||||
}
|
||||
})
|
||||
return instance
|
||||
}
|
||||
|
||||
// loadConfig 从配置文件加载配置
|
||||
func (c *Config) loadConfig() error {
|
||||
// 尝试多个位置查找配置文件(兼容从仓库根目录、bin目录、系统安装路径运行)
|
||||
exePath, _ := os.Executable()
|
||||
exeDir := ""
|
||||
if exePath != "" {
|
||||
exeDir = filepath.Dir(exePath)
|
||||
}
|
||||
// 优先顺序:可执行文件所在目录,其次其父目录;然后回退到工作目录及上级,再到系统级/用户级
|
||||
configPaths := []string{
|
||||
// 可执行文件所在目录优先
|
||||
filepath.Join(exeDir, "config.yaml"),
|
||||
filepath.Join(exeDir, "..", "config.yaml"),
|
||||
// 工作目录及其上级
|
||||
"config.yaml",
|
||||
"../config.yaml",
|
||||
"../../config.yaml",
|
||||
// 系统级与用户级
|
||||
"/etc/weatherstation/config.yaml",
|
||||
filepath.Join(os.Getenv("HOME"), ".weatherstation", "config.yaml"),
|
||||
}
|
||||
|
||||
var data []byte
|
||||
var err error
|
||||
for _, path := range configPaths {
|
||||
if data, err = os.ReadFile(path); err == nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return fmt.Errorf("未找到配置文件: %v", err)
|
||||
}
|
||||
|
||||
if err := yaml.Unmarshal(data, c); err != nil {
|
||||
return fmt.Errorf("解析配置文件失败: %v", err)
|
||||
}
|
||||
|
||||
return c.validate()
|
||||
}
|
||||
|
||||
// validate 验证配置有效性
|
||||
func (c *Config) validate() error {
|
||||
if c.Server.WebPort <= 0 {
|
||||
c.Server.WebPort = 10003 // 默认Web端口
|
||||
}
|
||||
if c.Server.UDPPort <= 0 {
|
||||
c.Server.UDPPort = 10001 // 默认UDP端口
|
||||
}
|
||||
if c.Database.SSLMode == "" {
|
||||
c.Database.SSLMode = "disable" // 默认禁用SSL
|
||||
}
|
||||
if c.MySQL.Port <= 0 {
|
||||
c.MySQL.Port = 3306
|
||||
}
|
||||
// Radar 默认拉取周期
|
||||
if c.Radar.RealtimeIntervalMinutes != 10 && c.Radar.RealtimeIntervalMinutes != 30 && c.Radar.RealtimeIntervalMinutes != 60 {
|
||||
c.Radar.RealtimeIntervalMinutes = 10
|
||||
}
|
||||
// 默认关闭实时抓取(可按需开启)
|
||||
// 若用户已有旧配置未设置该字段,默认为 false
|
||||
// CaiyunToken 允许为空:表示不启用彩云定时任务
|
||||
return nil
|
||||
}
|
||||
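Callers of the config package only ever see the singleton returned by GetConfig, which resolves config.yaml via the search order in loadConfig above and applies the defaults from validate. A minimal usage sketch (not part of the diff):

package main

import (
	"fmt"

	"weatherstation/internal/config"
)

func main() {
	cfg := config.GetConfig() // panics if no config.yaml is found in any search path
	fmt.Printf("web :%d, udp :%d, radar poll every %d min (enabled=%v)\n",
		cfg.Server.WebPort, cfg.Server.UDPPort,
		cfg.Radar.RealtimeIntervalMinutes, cfg.Radar.RealtimeEnabled)
}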
internal/database/db.go (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
package database

import (
    "database/sql"
    "fmt"
    "sync"
    "weatherstation/internal/config"

    _ "github.com/go-sql-driver/mysql"
    _ "github.com/lib/pq"
)

var (
    instance *sql.DB
    once     sync.Once
)

// GetDB returns the database connection singleton.
func GetDB() *sql.DB {
    once.Do(func() {
        cfg := config.GetConfig()
        connStr := fmt.Sprintf(
            "host=%s port=%d user=%s password=%s dbname=%s sslmode=%s",
            cfg.Database.Host,
            cfg.Database.Port,
            cfg.Database.User,
            cfg.Database.Password,
            cfg.Database.DBName,
            cfg.Database.SSLMode,
        )

        var err error
        instance, err = sql.Open("postgres", connStr)
        if err != nil {
            panic(fmt.Sprintf("无法连接到数据库: %v", err))
        }

        if err = instance.Ping(); err != nil {
            panic(fmt.Sprintf("数据库连接测试失败: %v", err))
        }
    })
    return instance
}

// Close closes the database connection.
func Close() error {
    if instance != nil {
        return instance.Close()
    }
    return nil
}

// -------------------- MySQL connection (rtk_data) --------------------

var (
    mysqlInstance *sql.DB
    mysqlOnce     sync.Once
)

// GetMySQL returns the MySQL connection singleton (rtk_data).
func GetMySQL() *sql.DB {
    mysqlOnce.Do(func() {
        cfg := config.GetConfig().MySQL
        var dsn string
        if cfg.Params != "" {
            dsn = fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?%s", cfg.User, cfg.Password, cfg.Host, cfg.Port, cfg.DBName, cfg.Params)
        } else {
            dsn = fmt.Sprintf("%s:%s@tcp(%s:%d)/%s", cfg.User, cfg.Password, cfg.Host, cfg.Port, cfg.DBName)
        }
        var err error
        mysqlInstance, err = sql.Open("mysql", dsn)
        if err != nil {
            panic(fmt.Sprintf("无法连接到 MySQL: %v", err))
        }
        if err = mysqlInstance.Ping(); err != nil {
            panic(fmt.Sprintf("MySQL 连接测试失败: %v", err))
        }
    })
    return mysqlInstance
}

// CloseMySQL closes the MySQL connection.
func CloseMySQL() error {
    if mysqlInstance != nil {
        return mysqlInstance.Close()
    }
    return nil
}
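A typical call site for the two singletons above might look like the sketch below; the main wrapper and the probe query are illustrative, while GetDB, GetMySQL, Close and CloseMySQL are the functions defined in this file:

package main

import (
    "log"

    "weatherstation/internal/database"
)

func main() {
    // The first call opens and pings the PostgreSQL connection; later calls reuse it.
    db := database.GetDB()
    defer database.Close()

    var now string
    if err := db.QueryRow("SELECT NOW()::text").Scan(&now); err != nil {
        log.Fatalf("query failed: %v", err)
    }
    log.Printf("postgres time: %s", now)

    // The MySQL (rtk_data) connection is a separate, independent singleton.
    rtk := database.GetMySQL()
    defer database.CloseMySQL()
    _ = rtk
}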
444 internal/database/models.go Normal file
@@ -0,0 +1,444 @@
package database

import (
    "database/sql"
    "fmt"
    "log"
    "time"
    "weatherstation/pkg/types"
)

// GetOnlineDevicesCount returns the number of online devices (stations reporting within the last 5 minutes).
func GetOnlineDevicesCount(db *sql.DB) int {
    query := `
    SELECT COUNT(DISTINCT station_id)
    FROM rs485_weather_data
    WHERE timestamp > NOW() - INTERVAL '5 minutes'`

    var count int
    if err := db.QueryRow(query).Scan(&count); err != nil {
        return 0
    }
    return count
}

// GetStations returns the list of all WH65LP stations.
func GetStations(db *sql.DB) ([]types.Station, error) {
    query := `
    SELECT DISTINCT s.station_id,
        COALESCE(s.station_alias, '') as station_alias,
        COALESCE(s.password, '') as station_name,
        'WH65LP' as device_type,
        COALESCE(MAX(r.timestamp), '1970-01-01'::timestamp) as last_update,
        COALESCE(s.latitude, 0) as latitude,
        COALESCE(s.longitude, 0) as longitude,
        COALESCE(s.name, '') as name,
        COALESCE(s.location, '') as location,
        COALESCE(s.z, 0) as z,
        COALESCE(s.y, 0) as y,
        COALESCE(s.x, 0) as x
    FROM stations s
    LEFT JOIN rs485_weather_data r ON s.station_id = r.station_id
    WHERE s.station_id LIKE 'RS485-%'
    GROUP BY s.station_id, s.station_alias, s.password, s.latitude, s.longitude, s.name, s.location, s.z, s.y, s.x
    ORDER BY s.station_id`

    rows, err := db.Query(query)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var stations []types.Station
    for rows.Next() {
        var station types.Station
        var lastUpdate time.Time
        err := rows.Scan(
            &station.StationID,
            &station.StationAlias,
            &station.StationName,
            &station.DeviceType,
            &lastUpdate,
            &station.Latitude,
            &station.Longitude,
            &station.Name,
            &station.Location,
            &station.Z,
            &station.Y,
            &station.X,
        )
        if err != nil {
            continue
        }
        station.LastUpdate = lastUpdate.Format("2006-01-02 15:04:05")
        stations = append(stations, station)
    }

    return stations, nil
}

// GetWeatherData returns historical weather data for the given station.
func GetWeatherData(db *sql.DB, stationID string, startTime, endTime time.Time, interval string) ([]types.WeatherPoint, error) {
    // Build the query (wind direction is vector-averaged; rainfall is the sum of positive increments of the cumulative counter).
    var query string
    var intervalStr string
    switch interval {
    case "10min":
        intervalStr = "10 minutes"
    case "30min":
        intervalStr = "30 minutes"
    default: // 1hour
        intervalStr = "1 hour"
    }
    query = buildWeatherDataQuery(intervalStr)

    rows, err := db.Query(query, intervalStr, stationID, startTime, endTime)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var points []types.WeatherPoint
    for rows.Next() {
        var point types.WeatherPoint
        err := rows.Scan(
            &point.DateTime,
            &point.Temperature,
            &point.Humidity,
            &point.Pressure,
            &point.WindSpeed,
            &point.WindDir,
            &point.Rainfall,
            &point.Light,
            &point.UV,
        )
        if err != nil {
            continue
        }
        points = append(points, point)
    }

    return points, nil
}

// GetSeriesFrom10Min returns 10m/30m/1h series from the 10-minute aggregate table
// (vector-averaged wind direction, summed rainfall, sample-count-weighted means).
func GetSeriesFrom10Min(db *sql.DB, stationID string, startTime, endTime time.Time, interval string) ([]types.WeatherPoint, error) {
    log.Printf("查询数据: stationID=%s, start=%v, end=%v, interval=%s",
        stationID, startTime.Format("2006-01-02 15:04:05"), endTime.Format("2006-01-02 15:04:05"), interval)

    var query string
    switch interval {
    case "10min":
        query = `
        SELECT
            to_char(bucket_start + interval '10 minutes', 'YYYY-MM-DD HH24:MI:SS') AS date_time,
            ROUND(temp_c_x100/100.0, 2) AS temperature,
            ROUND(humidity_pct::numeric, 2) AS humidity,
            ROUND(pressure_hpa_x100/100.0, 2) AS pressure,
            ROUND(wind_speed_ms_x1000/1000.0, 3) AS wind_speed,
            ROUND(wind_dir_deg::numeric, 2) AS wind_direction,
            ROUND(rain_10m_mm_x1000/1000.0, 3) AS rainfall,
            ROUND(solar_wm2_x100/100.0, 2) AS light,
            ROUND(uv_index::numeric, 2) AS uv,
            ROUND(rain_total_mm_x1000/1000.0, 3) AS rain_total
        FROM rs485_weather_10min
        WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start <= $3
        ORDER BY bucket_start + interval '10 minutes'`
    case "30min":
        query = buildAggFrom10MinQuery("30 minutes")
    default: // 1hour
        query = buildAggFrom10MinQuery("1 hour")
    }

    // // Debug: print the full SQL
    // debugSQL := fmt.Sprintf("-- SQL for %s\n%s\n-- Params: stationID=%s, start=%v, end=%v",
    //     interval, query, stationID, startTime, endTime)
    // log.Println(debugSQL)

    rows, err := db.Query(query, stationID, startTime, endTime)
    if err != nil {
        log.Printf("查询失败: %v", err)
        return nil, err
    }
    defer rows.Close()

    var points []types.WeatherPoint
    for rows.Next() {
        var p types.WeatherPoint
        if err := rows.Scan(&p.DateTime, &p.Temperature, &p.Humidity, &p.Pressure, &p.WindSpeed, &p.WindDir, &p.Rainfall, &p.Light, &p.UV, &p.RainTotal); err != nil {
            continue
        }
        points = append(points, p)
    }
    return points, nil
}

// buildAggFrom10MinQuery returns SQL that re-aggregates the 10-minute table (interval is '30 minutes' or '1 hour').
func buildAggFrom10MinQuery(interval string) string {
    return `
    WITH base AS (
        SELECT * FROM rs485_weather_10min
        WHERE station_id = $1 AND bucket_start >= $2 AND bucket_start <= $3
    ), g AS (
        SELECT
            CASE '` + interval + `'
                WHEN '1 hour' THEN date_trunc('hour', bucket_start)
                WHEN '30 minutes' THEN
                    date_trunc('hour', bucket_start) +
                    CASE WHEN date_part('minute', bucket_start) >= 30
                        THEN '30 minutes'::interval
                        ELSE '0 minutes'::interval
                    END
            END AS grp,
            SUM(temp_c_x100 * sample_count)::bigint AS w_temp,
            SUM(humidity_pct * sample_count)::bigint AS w_hum,
            SUM(pressure_hpa_x100 * sample_count)::bigint AS w_p,
            SUM(solar_wm2_x100 * sample_count)::bigint AS w_solar,
            SUM(uv_index * sample_count)::bigint AS w_uv,
            SUM(wind_speed_ms_x1000 * sample_count)::bigint AS w_ws,
            MAX(wind_gust_ms_x1000) AS gust_max,
            SUM(sin(radians(wind_dir_deg)) * sample_count)::double precision AS sin_sum,
            SUM(cos(radians(wind_dir_deg)) * sample_count)::double precision AS cos_sum,
            SUM(rain_10m_mm_x1000) AS rain_sum,
            SUM(sample_count) AS n_sum,
            MAX(rain_total_mm_x1000) AS rain_total_max
        FROM base
        GROUP BY 1
    )
    SELECT
        to_char(grp + '` + interval + `'::interval, 'YYYY-MM-DD HH24:MI:SS') AS date_time,
        ROUND((w_temp/NULLIF(n_sum,0))/100.0, 2) AS temperature,
        ROUND((w_hum/NULLIF(n_sum,0))::numeric, 2) AS humidity,
        ROUND((w_p/NULLIF(n_sum,0))/100.0, 2) AS pressure,
        ROUND((w_ws/NULLIF(n_sum,0))/1000.0, 3) AS wind_speed,
        ROUND((CASE WHEN degrees(atan2(sin_sum, cos_sum)) < 0
            THEN degrees(atan2(sin_sum, cos_sum)) + 360
            ELSE degrees(atan2(sin_sum, cos_sum)) END)::numeric, 2) AS wind_direction,
        ROUND((rain_sum/1000.0)::numeric, 3) AS rainfall,
        ROUND((w_solar/NULLIF(n_sum,0))/100.0, 2) AS light,
        ROUND((w_uv/NULLIF(n_sum,0))::numeric, 2) AS uv,
        ROUND((rain_total_max/1000.0)::numeric, 3) AS rain_total
    FROM g
    ORDER BY grp + '` + interval + `'::interval`
}

// buildWeatherDataQuery builds the SQL that aggregates raw weather rows by the given interval.
func buildWeatherDataQuery(interval string) string {
    return `
    WITH base AS (
        SELECT
            date_trunc('hour', timestamp) +
            (floor(date_part('minute', timestamp) / extract(epoch from $1::interval) * 60) * $1::interval) as time_group,
            timestamp as ts,
            temperature, humidity, pressure, wind_speed, wind_direction, rainfall, light, uv
        FROM rs485_weather_data
        WHERE station_id = $2 AND timestamp BETWEEN $3 AND $4
    ),
    rain_inc AS (
        SELECT time_group, GREATEST(rainfall - LAG(rainfall) OVER (PARTITION BY time_group ORDER BY ts), 0) AS inc
        FROM base
    ),
    rain_sum AS (
        SELECT time_group, SUM(inc) AS rainfall
        FROM rain_inc
        GROUP BY time_group
    ),
    grouped_data AS (
        SELECT
            time_group,
            AVG(temperature) as temperature,
            AVG(humidity) as humidity,
            AVG(pressure) as pressure,
            AVG(wind_speed) as wind_speed,
            DEGREES(ATAN2(AVG(SIN(RADIANS(wind_direction))), AVG(COS(RADIANS(wind_direction))))) AS wind_direction_raw,
            AVG(light) as light,
            AVG(uv) as uv
        FROM base
        GROUP BY time_group
    )
    SELECT
        to_char(g.time_group, 'YYYY-MM-DD HH24:MI:SS') as date_time,
        ROUND(g.temperature::numeric, 2) as temperature,
        ROUND(g.humidity::numeric, 2) as humidity,
        ROUND(g.pressure::numeric, 2) as pressure,
        ROUND(g.wind_speed::numeric, 2) as wind_speed,
        ROUND((CASE WHEN g.wind_direction_raw < 0 THEN g.wind_direction_raw + 360 ELSE g.wind_direction_raw END)::numeric, 2) AS wind_direction,
        ROUND(COALESCE(r.rainfall, 0)::numeric, 3) as rainfall,
        ROUND(g.light::numeric, 2) as light,
        ROUND(g.uv::numeric, 2) as uv
    FROM grouped_data g
    LEFT JOIN rain_sum r ON r.time_group = g.time_group
    ORDER BY g.time_group`
}

// GetForecastData returns forecast data for the given station (optionally multiple issued_at versions per forecast_time).
func GetForecastData(db *sql.DB, stationID string, startTime, endTime time.Time, provider string, versions int) ([]types.ForecastPoint, error) {
    var query string
    var args []interface{}

    if versions <= 0 {
        versions = 1
    }

    if provider != "" {
        if provider == "open-meteo" {
            // Merge realtime and historical rows; for each forecast_time keep the top N versions ordered by issued_at descending.
            query = `
            WITH ranked AS (
                SELECT
                    station_id, provider, issued_at, forecast_time,
                    temp_c_x100, humidity_pct, wind_speed_ms_x1000, wind_gust_ms_x1000,
                    wind_dir_deg, rain_mm_x1000, precip_prob_pct, uv_index, pressure_hpa_x100,
                    ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
                    CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours
                FROM forecast_hourly
                WHERE station_id = $1 AND provider IN ('open-meteo','open-meteo_historical')
                    AND forecast_time BETWEEN $2 AND $3
            )
            SELECT
                to_char(forecast_time, 'YYYY-MM-DD HH24:MI:SS') as date_time,
                provider,
                to_char(issued_at, 'YYYY-MM-DD HH24:MI:SS') as issued_at,
                ROUND(temp_c_x100::numeric / 100.0, 2) as temperature,
                humidity_pct as humidity,
                ROUND(pressure_hpa_x100::numeric / 100.0, 2) as pressure,
                ROUND(wind_speed_ms_x1000::numeric / 1000.0, 2) as wind_speed,
                wind_dir_deg as wind_direction,
                ROUND(rain_mm_x1000::numeric / 1000.0, 3) as rainfall,
                precip_prob_pct as precip_prob,
                uv_index as uv,
                lead_hours
            FROM ranked
            WHERE rn <= $4
            ORDER BY forecast_time, issued_at DESC`
            args = []interface{}{stationID, startTime.Format("2006-01-02 15:04:05-07"), endTime.Format("2006-01-02 15:04:05-07"), versions}
        } else {
            query = `
            WITH ranked AS (
                SELECT
                    station_id, provider, issued_at, forecast_time,
                    temp_c_x100, humidity_pct, wind_speed_ms_x1000, wind_gust_ms_x1000,
                    wind_dir_deg, rain_mm_x1000, precip_prob_pct, uv_index, pressure_hpa_x100,
                    ROW_NUMBER() OVER (PARTITION BY forecast_time ORDER BY issued_at DESC) AS rn,
                    CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours
                FROM forecast_hourly
                WHERE station_id = $1 AND provider = $2
                    AND forecast_time BETWEEN $3 AND $4
            )
            SELECT
                to_char(forecast_time, 'YYYY-MM-DD HH24:MI:SS') as date_time,
                provider,
                to_char(issued_at, 'YYYY-MM-DD HH24:MI:SS') as issued_at,
                ROUND(temp_c_x100::numeric / 100.0, 2) as temperature,
                humidity_pct as humidity,
                ROUND(pressure_hpa_x100::numeric / 100.0, 2) as pressure,
                ROUND(wind_speed_ms_x1000::numeric / 1000.0, 2) as wind_speed,
                wind_dir_deg as wind_direction,
                ROUND(rain_mm_x1000::numeric / 1000.0, 3) as rainfall,
                precip_prob_pct as precip_prob,
                uv_index as uv,
                lead_hours
            FROM ranked
            WHERE rn <= $5
            ORDER BY forecast_time, issued_at DESC`
            args = []interface{}{stationID, provider, startTime.Format("2006-01-02 15:04:05-07"), endTime.Format("2006-01-02 15:04:05-07"), versions}
        }
    } else {
        // No provider specified: for each (provider, forecast_time), return the top N issued_at versions.
        query = `
        WITH ranked AS (
            SELECT
                station_id, provider, issued_at, forecast_time,
                temp_c_x100, humidity_pct, wind_speed_ms_x1000, wind_gust_ms_x1000,
                wind_dir_deg, rain_mm_x1000, precip_prob_pct, uv_index, pressure_hpa_x100,
                ROW_NUMBER() OVER (PARTITION BY provider, forecast_time ORDER BY issued_at DESC) AS rn,
                CEIL(EXTRACT(EPOCH FROM (forecast_time - issued_at)) / 3600.0)::int AS lead_hours
            FROM forecast_hourly
            WHERE station_id = $1 AND forecast_time BETWEEN $2 AND $3
        )
        SELECT
            to_char(forecast_time, 'YYYY-MM-DD HH24:MI:SS') as date_time,
            provider,
            to_char(issued_at, 'YYYY-MM-DD HH24:MI:SS') as issued_at,
            ROUND(temp_c_x100::numeric / 100.0, 2) as temperature,
            humidity_pct as humidity,
            ROUND(pressure_hpa_x100::numeric / 100.0, 2) as pressure,
            ROUND(wind_speed_ms_x1000::numeric / 1000.0, 2) as wind_speed,
            wind_dir_deg as wind_direction,
            ROUND(rain_mm_x1000::numeric / 1000.0, 3) as rainfall,
            precip_prob_pct as precip_prob,
            uv_index as uv,
            lead_hours
        FROM ranked
        WHERE rn <= $4
        ORDER BY forecast_time, provider, issued_at DESC`
        args = []interface{}{stationID, startTime.Format("2006-01-02 15:04:05-07"), endTime.Format("2006-01-02 15:04:05-07"), versions}
    }

    rows, err := db.Query(query, args...)
    if err != nil {
        return nil, fmt.Errorf("查询预报数据失败: %v", err)
    }
    defer rows.Close()

    var points []types.ForecastPoint
    for rows.Next() {
        var point types.ForecastPoint
        err := rows.Scan(
            &point.DateTime,
            &point.Provider,
            &point.IssuedAt,
            &point.Temperature,
            &point.Humidity,
            &point.Pressure,
            &point.WindSpeed,
            &point.WindDir,
            &point.Rainfall,
            &point.PrecipProb,
            &point.UV,
            &point.LeadHours,
        )
        if err != nil {
            log.Printf("数据扫描错误: %v", err)
            continue
        }
        point.Source = "forecast"
        points = append(points, point)
    }

    return points, nil
}

// GetSeriesRaw returns raw (unaggregated) readings for a station within the given time range.
func GetSeriesRaw(db *sql.DB, stationID string, startTime, endTime time.Time) ([]types.WeatherPoint, error) {
    query := `
    SELECT
        to_char(timestamp, 'YYYY-MM-DD HH24:MI:SS') AS date_time,
        COALESCE(temperature, 0) AS temperature,
        COALESCE(humidity, 0) AS humidity,
        COALESCE(pressure, 0) AS pressure,
        COALESCE(wind_speed, 0) AS wind_speed,
        COALESCE(wind_direction, 0) AS wind_direction,
        COALESCE(rainfall, 0) AS rainfall,
        COALESCE(light, 0) AS light,
        COALESCE(uv, 0) AS uv,
        COALESCE(rainfall, 0) AS rain_total
    FROM rs485_weather_data
    WHERE station_id = $1 AND timestamp >= $2 AND timestamp <= $3
    ORDER BY timestamp`

    rows, err := db.Query(query, stationID, startTime, endTime)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var points []types.WeatherPoint
    for rows.Next() {
        var p types.WeatherPoint
        if err := rows.Scan(&p.DateTime, &p.Temperature, &p.Humidity, &p.Pressure, &p.WindSpeed, &p.WindDir, &p.Rainfall, &p.Light, &p.UV, &p.RainTotal); err != nil {
            continue
        }
        points = append(points, p)
    }
    return points, nil
}
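The aggregation queries above average wind direction as a vector, i.e. atan2 of the (optionally sample-count-weighted) sums of sin and cos, rather than averaging the raw degrees, so directions on either side of north do not cancel to 180°. A small Go sketch of the same computation, for illustration only:

package main

import (
    "fmt"
    "math"
)

// vectorMeanDeg returns the weighted circular mean of wind directions in degrees,
// mirroring the sin/cos-sum + atan2 logic used in the SQL above.
func vectorMeanDeg(dirs, weights []float64) float64 {
    var sinSum, cosSum float64
    for i, d := range dirs {
        sinSum += math.Sin(d*math.Pi/180) * weights[i]
        cosSum += math.Cos(d*math.Pi/180) * weights[i]
    }
    deg := math.Atan2(sinSum, cosSum) * 180 / math.Pi
    if deg < 0 {
        deg += 360
    }
    return deg
}

func main() {
    // An arithmetic mean of 350° and 30° would give 190°; the vector mean gives 10°.
    fmt.Printf("%.1f\n", vectorMeanDeg([]float64{350, 30}, []float64{1, 1}))
}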
78 internal/database/radar_tiles.go Normal file
@@ -0,0 +1,78 @@
package database

import (
    "context"
    "crypto/md5"
    "database/sql"
    "encoding/hex"
    "fmt"
    "math"
    "time"
)

// UpsertRadarTile stores a radar tile into table `radar_tiles`.
// Assumes the table exists with a schema compatible with the columns used below.
func UpsertRadarTile(ctx context.Context, db *sql.DB, product string, dt time.Time, z, y, x int, width, height int, data []byte) error {
    if width == 0 {
        width = 256
    }
    if height == 0 {
        height = 256
    }
    step := 360.0 / math.Pow(2, float64(z))
    west := -180.0 + float64(x)*step
    south := -90.0 + float64(y)*step
    east := west + step
    north := south + step
    res := step / float64(width)

    sum := md5.Sum(data)
    md5hex := hex.EncodeToString(sum[:])

    q := `
    INSERT INTO radar_tiles (
        product, dt, z, y, x, width, height,
        west, south, east, north, res_deg,
        data, checksum_md5
    ) VALUES (
        $1,$2,$3,$4,$5,$6,$7,
        $8,$9,$10,$11,$12,
        $13,$14
    )
    ON CONFLICT (product, dt, z, y, x)
    DO UPDATE SET
        width = EXCLUDED.width,
        height = EXCLUDED.height,
        west = EXCLUDED.west,
        south = EXCLUDED.south,
        east = EXCLUDED.east,
        north = EXCLUDED.north,
        res_deg = EXCLUDED.res_deg,
        data = EXCLUDED.data,
        checksum_md5 = EXCLUDED.checksum_md5`

    _, err := db.ExecContext(ctx, q,
        product, dt, z, y, x, width, height,
        west, south, east, north, res,
        data, md5hex,
    )
    if err != nil {
        return fmt.Errorf("upsert radar tile (%s %s z=%d y=%d x=%d): %w", product, dt.Format(time.RFC3339), z, y, x, err)
    }
    return nil
}

// HasRadarTile reports whether a radar tile exists for the given key.
// It checks by (product, dt, z, y, x) in table `radar_tiles`.
func HasRadarTile(ctx context.Context, db *sql.DB, product string, dt time.Time, z, y, x int) (bool, error) {
    const q = `SELECT 1 FROM radar_tiles WHERE product=$1 AND dt=$2 AND z=$3 AND y=$4 AND x=$5 LIMIT 1`
    var one int
    err := db.QueryRowContext(ctx, q, product, dt, z, y, x).Scan(&one)
    if err == sql.ErrNoRows {
        return false, nil
    }
    if err != nil {
        return false, fmt.Errorf("check radar tile exists: %w", err)
    }
    return true, nil
}
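The georeferencing above is a plain equal-angle (EPSG:4326) tiling: zoom level z splits longitude into 2^z columns of width step = 360/2^z degrees, rows are counted upward from -90° with the same step, and res_deg is step divided by the pixel width. A standalone sketch of that arithmetic (it mirrors the computation inside UpsertRadarTile and is not an exported helper of this package):

package main

import (
    "fmt"
    "math"
)

// tileBounds returns west, south, east, north in degrees for an equal-angle z/y/x tile.
func tileBounds(z, y, x int) (west, south, east, north float64) {
    step := 360.0 / math.Pow(2, float64(z))
    west = -180.0 + float64(x)*step
    south = -90.0 + float64(y)*step
    return west, south, west + step, south + step
}

func main() {
    // At z=3 the step is 45°, so tile x=6, y=2 covers 90..135°E and 0..45°N.
    w, s, e, n := tileBounds(3, 2, 6)
    fmt.Printf("west=%.1f south=%.1f east=%.1f north=%.1f res=%.4f°/px\n", w, s, e, n, (e-w)/256)
}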
70 internal/database/radar_weather.go Normal file
@@ -0,0 +1,70 @@
package database

import (
    "context"
    "database/sql"
    "fmt"
    "time"
)

// UpsertRadarWeather stores a realtime snapshot for a radar station.
// Table schema (expected):
//
//    CREATE TABLE IF NOT EXISTS radar_weather (
//        id SERIAL PRIMARY KEY,
//        alias TEXT NOT NULL,
//        lat DOUBLE PRECISION NOT NULL,
//        lon DOUBLE PRECISION NOT NULL,
//        dt TIMESTAMPTZ NOT NULL,
//        temperature DOUBLE PRECISION,
//        humidity DOUBLE PRECISION,
//        cloudrate DOUBLE PRECISION,
//        visibility DOUBLE PRECISION,
//        dswrf DOUBLE PRECISION,
//        wind_speed DOUBLE PRECISION,
//        wind_direction DOUBLE PRECISION,
//        pressure DOUBLE PRECISION,
//        created_at TIMESTAMPTZ DEFAULT now()
//    );
//    CREATE UNIQUE INDEX IF NOT EXISTS radar_weather_udx ON radar_weather(alias, dt);
func UpsertRadarWeather(
    ctx context.Context,
    db *sql.DB,
    alias string,
    lat, lon float64,
    dt time.Time,
    temperature, humidity, cloudrate, visibility, dswrf, windSpeed, windDir, pressure float64,
) error {
    const q = `
    INSERT INTO radar_weather (
        alias, lat, lon, dt,
        temperature, humidity, cloudrate, visibility, dswrf,
        wind_speed, wind_direction, pressure
    ) VALUES (
        $1,$2,$3,$4,
        $5,$6,$7,$8,$9,
        $10,$11,$12
    )
    ON CONFLICT (alias, dt)
    DO UPDATE SET
        lat = EXCLUDED.lat,
        lon = EXCLUDED.lon,
        temperature = EXCLUDED.temperature,
        humidity = EXCLUDED.humidity,
        cloudrate = EXCLUDED.cloudrate,
        visibility = EXCLUDED.visibility,
        dswrf = EXCLUDED.dswrf,
        wind_speed = EXCLUDED.wind_speed,
        wind_direction = EXCLUDED.wind_direction,
        pressure = EXCLUDED.pressure`

    _, err := db.ExecContext(ctx, q,
        alias, lat, lon, dt,
        temperature, humidity, cloudrate, visibility, dswrf,
        windSpeed, windDir, pressure,
    )
    if err != nil {
        return fmt.Errorf("upsert radar_weather (%s %s): %w", alias, dt.Format(time.RFC3339), err)
    }
    return nil
}
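A hedged example of calling the upsert above from a fetcher: the alias, coordinates and measurement values below are made up, and no particular units are asserted beyond what the column names suggest; UpsertRadarWeather and GetDB are the functions from this package:

package main

import (
    "context"
    "log"
    "time"

    "weatherstation/internal/database"
)

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    defer cancel()

    // One realtime snapshot for a made-up station; writing the same (alias, dt)
    // again simply overwrites the previous values via ON CONFLICT.
    err := database.UpsertRadarWeather(ctx, database.GetDB(),
        "radar-demo", 31.23, 121.47, time.Now().Truncate(time.Minute),
        21.5,   // temperature
        0.63,   // humidity
        0.4,    // cloudrate
        24.1,   // visibility
        310,    // dswrf
        3.2,    // wind_speed
        270,    // wind_direction
        1012.6, // pressure
    )
    if err != nil {
        log.Printf("upsert failed: %v", err)
    }
}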
77 internal/database/rain_tiles.go Normal file
@@ -0,0 +1,77 @@
package database

import (
    "context"
    "crypto/md5"
    "database/sql"
    "encoding/hex"
    "fmt"
    "math"
    "time"
)

// UpsertRainTile stores a rain tile into table `rain_tiles`.
// The tiling scheme is equal-angle EPSG:4326, like radar tiles.
func UpsertRainTile(ctx context.Context, db *sql.DB, product string, dt time.Time, z, y, x int, width, height int, data []byte) error {
    if width == 0 {
        width = 256
    }
    if height == 0 {
        height = 256
    }
    step := 360.0 / math.Pow(2, float64(z))
    west := -180.0 + float64(x)*step
    south := -90.0 + float64(y)*step
    east := west + step
    north := south + step
    res := step / float64(width)

    sum := md5.Sum(data)
    md5hex := hex.EncodeToString(sum[:])

    q := `
    INSERT INTO rain_tiles (
        product, dt, z, y, x, width, height,
        west, south, east, north, res_deg,
        data, checksum_md5
    ) VALUES (
        $1,$2,$3,$4,$5,$6,$7,
        $8,$9,$10,$11,$12,
        $13,$14
    )
    ON CONFLICT (product, dt, z, y, x)
    DO UPDATE SET
        width = EXCLUDED.width,
        height = EXCLUDED.height,
        west = EXCLUDED.west,
        south = EXCLUDED.south,
        east = EXCLUDED.east,
        north = EXCLUDED.north,
        res_deg = EXCLUDED.res_deg,
        data = EXCLUDED.data,
        checksum_md5 = EXCLUDED.checksum_md5`

    _, err := db.ExecContext(ctx, q,
        product, dt, z, y, x, width, height,
        west, south, east, north, res,
        data, md5hex,
    )
    if err != nil {
        return fmt.Errorf("upsert rain tile (%s %s z=%d y=%d x=%d): %w", product, dt.Format(time.RFC3339), z, y, x, err)
    }
    return nil
}

// HasRainTile reports whether a rain tile exists for the given key.
func HasRainTile(ctx context.Context, db *sql.DB, product string, dt time.Time, z, y, x int) (bool, error) {
    const q = `SELECT 1 FROM rain_tiles WHERE product=$1 AND dt=$2 AND z=$3 AND y=$4 AND x=$5 LIMIT 1`
    var one int
    err := db.QueryRowContext(ctx, q, product, dt, z, y, x).Scan(&one)
    if err == sql.ErrNoRows {
        return false, nil
    }
    if err != nil {
        return false, fmt.Errorf("check rain tile exists: %w", err)
    }
    return true, nil
}
38 internal/database/stations_coords.go Normal file
@@ -0,0 +1,38 @@
package database

import (
    "context"
    "database/sql"
)

// StationCoord holds a station_id with geographic coordinates.
type StationCoord struct {
    StationID string
    Lat       float64
    Lon       float64
}

// ListWH65LPStationsWithLatLon returns WH65LP stations that have non-null and non-zero lat/lon.
func ListWH65LPStationsWithLatLon(ctx context.Context, db *sql.DB) ([]StationCoord, error) {
    const q = `
    SELECT station_id, latitude, longitude
    FROM stations
    WHERE device_type = 'WH65LP'
        AND latitude IS NOT NULL AND longitude IS NOT NULL
        AND latitude <> 0 AND longitude <> 0
    ORDER BY station_id`
    rows, err := db.QueryContext(ctx, q)
    if err != nil {
        return nil, err
    }
    defer rows.Close()
    var out []StationCoord
    for rows.Next() {
        var s StationCoord
        if err := rows.Scan(&s.StationID, &s.Lat, &s.Lon); err != nil {
            continue
        }
        out = append(out, s)
    }
    return out, nil
}
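A minimal caller sketch for the helper above (the main wrapper is illustrative):

package main

import (
    "context"
    "log"

    "weatherstation/internal/database"
)

func main() {
    ctx := context.Background()
    stations, err := database.ListWH65LPStationsWithLatLon(ctx, database.GetDB())
    if err != nil {
        log.Fatalf("list stations: %v", err)
    }
    for _, s := range stations {
        log.Printf("%s at lat=%.5f lon=%.5f", s.StationID, s.Lat, s.Lon)
    }
}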
44 internal/database/weights.go Normal file
@@ -0,0 +1,44 @@
package database

import (
    "context"
    "database/sql"
    "time"
)

// Triad holds the per-provider forecast weights for one station.
type Triad struct {
    OpenMeteo float64
    Caiyun    float64
    Imdroid   float64
}

// GetWeightsCurrent returns the last saved triad and its issued_at for a station.
// If no row exists, returns ok=false.
func GetWeightsCurrent(ctx context.Context, stationID string) (triad Triad, lastIssued time.Time, ok bool, err error) {
    db := GetDB()
    row := db.QueryRowContext(ctx, `
    SELECT w_open_meteo, w_caiyun, w_imdroid, last_issued_at
    FROM forecast_weights_current
    WHERE station_id=$1`, stationID)
    var w1, w2, w3 float64
    var li time.Time
    if e := row.Scan(&w1, &w2, &w3, &li); e != nil {
        // Only a missing row is treated as ok=false; other errors are reported to the caller.
        if e == sql.ErrNoRows {
            return Triad{}, time.Time{}, false, nil
        }
        return Triad{}, time.Time{}, false, e
    }
    return Triad{OpenMeteo: w1, Caiyun: w2, Imdroid: w3}, li, true, nil
}

// UpsertWeightsCurrent saves the triad snapshot for the station.
func UpsertWeightsCurrent(ctx context.Context, stationID string, triad Triad, issued time.Time) error {
    db := GetDB()
    _, err := db.ExecContext(ctx, `
    INSERT INTO forecast_weights_current (station_id, w_open_meteo, w_caiyun, w_imdroid, last_issued_at, updated_at)
    VALUES ($1,$2,$3,$4,$5, NOW())
    ON CONFLICT (station_id)
    DO UPDATE SET w_open_meteo=EXCLUDED.w_open_meteo,
        w_caiyun=EXCLUDED.w_caiyun,
        w_imdroid=EXCLUDED.w_imdroid,
        last_issued_at=EXCLUDED.last_issued_at,
        updated_at=NOW()`,
        stationID, triad.OpenMeteo, triad.Caiyun, triad.Imdroid, issued)
    return err
}
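A sketch of how a caller might read the stored triad, fall back to equal weights when nothing has been saved yet, and persist a snapshot; the station id, the equal-weight fallback and the idea of re-saving the same triad are illustrative, not something this file prescribes:

package main

import (
    "context"
    "log"
    "time"

    "weatherstation/internal/database"
)

func main() {
    ctx := context.Background()
    stationID := "RS485-0001" // illustrative station id

    triad, lastIssued, ok, err := database.GetWeightsCurrent(ctx, stationID)
    if err != nil {
        log.Fatalf("load weights: %v", err)
    }
    if !ok {
        // No snapshot yet: start from equal weights.
        triad = database.Triad{OpenMeteo: 1.0 / 3, Caiyun: 1.0 / 3, Imdroid: 1.0 / 3}
    }
    log.Printf("weights=%+v last_issued=%s", triad, lastIssued.Format(time.RFC3339))

    if err := database.UpsertWeightsCurrent(ctx, stationID, triad, time.Now()); err != nil {
        log.Fatalf("save weights: %v", err)
    }
}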
219 internal/forecast/caiyun.go Normal file
@@ -0,0 +1,219 @@
package forecast

import (
    "context"
    "database/sql"
    "encoding/json"
    "fmt"
    "io"
    "log"
    "math"
    "net/http"
    "time"

    "weatherstation/internal/database"
)

// caiyunHourly is the Caiyun response structure (only the fields we actually use).
type caiyunHourly struct {
    Status string `json:"status"`
    Result struct {
        Hourly struct {
            Status      string `json:"status"`
            Temperature []struct {
                Datetime string  `json:"datetime"`
                Value    float64 `json:"value"`
            } `json:"temperature"`
            Humidity []struct {
                Datetime string  `json:"datetime"`
                Value    float64 `json:"value"`
            } `json:"humidity"`
            Wind []struct {
                Datetime  string  `json:"datetime"`
                Speed     float64 `json:"speed"`
                Direction float64 `json:"direction"`
            } `json:"wind"`
            Precipitation []struct {
                Datetime    string  `json:"datetime"`
                Value       float64 `json:"value"`
                Probability float64 `json:"probability"`
            } `json:"precipitation"`
            Pressure []struct {
                Datetime string  `json:"datetime"`
                Value    float64 `json:"value"`
            } `json:"pressure"`
        } `json:"hourly"`
    } `json:"result"`
}

// RunCaiyunFetch fetches the next three hours for every station and writes them to forecast_hourly (provider=caiyun).
func RunCaiyunFetch(ctx context.Context, token string) error {
    log.Printf("彩云抓取开始,token=%s", token)
    db := database.GetDB()
    stations, err := loadStationsWithLatLon(ctx, db)
    if err != nil {
        log.Printf("加载站点失败: %v", err)
        return err
    }
    log.Printf("找到 %d 个有经纬度的站点", len(stations))

    client := &http.Client{Timeout: 15 * time.Second}
    loc, _ := time.LoadLocation("Asia/Shanghai")
    if loc == nil {
        loc = time.FixedZone("CST", 8*3600)
    }

    issuedAt := time.Now().In(loc)
    startHour := issuedAt.Truncate(time.Hour)
    // The Caiyun hourly API reports the "left edge" of each interval (e.g. 13:00 stands for the 13:00-14:00 window).
    // We keep the left edges as startHour, startHour+1h, startHour+2h and shift each by +1h when writing,
    // so that forecast_time marks the right edge of the interval and lines up with the observation aggregates.
    leftEdges := []time.Time{startHour, startHour.Add(1 * time.Hour), startHour.Add(2 * time.Hour)}

    for _, s := range stations {
        if !s.lat.Valid || !s.lon.Valid {
            continue
        }
        url := fmt.Sprintf("https://api.caiyunapp.com/v2.6/%s/%f,%f/hourly?hourlysteps=4&unit=metric:v2", token, s.lon.Float64, s.lat.Float64)
        log.Printf("请求彩云 API: %s", url)
        req, _ := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
        resp, err := client.Do(req)
        if err != nil {
            log.Printf("caiyun 请求失败 station=%s err=%v", s.id, err)
            continue
        }
        log.Printf("彩云响应状态码: %d", resp.StatusCode)
        var data caiyunHourly
        body, _ := io.ReadAll(resp.Body)
        log.Printf("彩云响应内容: %s", string(body))
        resp.Body.Close()

        if err := json.Unmarshal(body, &data); err != nil {
            log.Printf("caiyun 解码失败 station=%s err=%v", s.id, err)
            continue
        }

        log.Printf("彩云响应解析: status=%s", data.Status)

        // Caiyun timestamps look like 2022-05-26T16:00+08:00 and are parsed in CST.
        // Build a time -> values map.
        table := map[time.Time]struct {
            rain float64
            temp float64
            rh   float64
            ws   float64
            wdir float64
            prob float64
            pres float64
        }{}

        // Temperature, °C
        for _, t := range data.Result.Hourly.Temperature {
            log.Printf("解析时间: %s", t.Datetime)
            if ft, err := time.ParseInLocation("2006-01-02T15:04-07:00", t.Datetime, loc); err == nil {
                log.Printf("解析结果: %v", ft)
                v := table[ft]
                v.temp = t.Value
                table[ft] = v
            } else {
                log.Printf("时间解析失败: %v", err)
            }
        }
        // Humidity is a ratio (0..1); convert to %
        for _, h := range data.Result.Hourly.Humidity {
            if ft, err := time.ParseInLocation("2006-01-02T15:04-07:00", h.Datetime, loc); err == nil {
                v := table[ft]
                v.rh = h.Value * 100.0
                table[ft] = v
            }
        }
        // Wind: metric:v2 reports speed in km/h, converted to m/s here; direction is in degrees
        for _, w := range data.Result.Hourly.Wind {
            if ft, err := time.ParseInLocation("2006-01-02T15:04-07:00", w.Datetime, loc); err == nil {
                v := table[ft]
                v.ws = w.Speed / 3.6
                v.wdir = w.Direction
                table[ft] = v
            }
        }
        // Precipitation: amount for the hour in mm; probability clamped to 0-100 %
        for _, p := range data.Result.Hourly.Precipitation {
            if ft, err := time.ParseInLocation("2006-01-02T15:04-07:00", p.Datetime, loc); err == nil {
                v := table[ft]
                v.rain = p.Value
                // Use the probability value returned by the API directly; only constrain its range.
                prob := p.Probability
                // Round and keep within 0-100.
                prob = math.Round(prob)
                if prob < 0 {
                    prob = 0
                }
                if prob > 100 {
                    prob = 100
                }
                v.prob = prob
                table[ft] = v
            }
        }
        // Pressure: reported in Pa, converted to hPa (Pa/100)
        for _, pr := range data.Result.Hourly.Pressure {
            if ft, err := time.ParseInLocation("2006-01-02T15:04-07:00", pr.Datetime, loc); err == nil {
                v := table[ft]
                v.pres = pr.Value / 100.0
                table[ft] = v
            }
        }

        log.Printf("处理时间点(彩云左端): %v", leftEdges)
        for _, left := range leftEdges {
            v, ok := table[left]
            if !ok {
                log.Printf("时间点无数据: %s", left.Format(time.RFC3339))
                continue
            }
            ft := left.Add(1 * time.Hour)
            log.Printf("写入预报点: station=%s forecast_time=%s (source=%s) rain=%.3f temp=%.2f rh=%.1f ws=%.3f wdir=%.1f prob=%.1f pres=%.2f",
                s.id, ft.Format(time.RFC3339), left.Format(time.RFC3339), v.rain, v.temp, v.rh, v.ws, v.wdir, v.prob, v.pres)
            err := upsertForecastCaiyun(ctx, db, s.id, issuedAt, ft,
                int64(v.rain*1000.0), // mm → x1000
                int64(v.temp*100.0),  // °C → x100
                int64(v.rh),          // %
                int64(v.ws*1000.0),   // m/s → x1000
                int64(0),             // gust: the Caiyun hourly API has no gust field, store 0
                int64(v.wdir),        // degrees
                int64(v.prob),        // %
                int64(v.pres*100.0),  // hPa → x100
            )
            if err != nil {
                log.Printf("写入forecast失败(caiyun) station=%s time=%s err=%v", s.id, ft.Format(time.RFC3339), err)
            } else {
                log.Printf("写入forecast成功(caiyun) station=%s time=%s", s.id, ft.Format(time.RFC3339))
            }
        }
    }
    return nil
}

// upsertForecastCaiyun inserts or updates one Caiyun forecast row in forecast_hourly.
func upsertForecastCaiyun(ctx context.Context, db *sql.DB, stationID string, issuedAt, forecastTime time.Time,
    rainMmX1000, tempCx100, humidityPct, wsMsX1000, gustMsX1000, wdirDeg, probPct, pressureHpaX100 int64,
) error {
    _, err := db.ExecContext(ctx, `
    INSERT INTO forecast_hourly (
        station_id, provider, issued_at, forecast_time,
        rain_mm_x1000, temp_c_x100, humidity_pct, wind_speed_ms_x1000,
        wind_gust_ms_x1000, wind_dir_deg, precip_prob_pct, pressure_hpa_x100
    ) VALUES ($1, 'caiyun', $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
    ON CONFLICT (station_id, provider, issued_at, forecast_time)
    DO UPDATE SET
        rain_mm_x1000 = EXCLUDED.rain_mm_x1000,
        temp_c_x100 = EXCLUDED.temp_c_x100,
        humidity_pct = EXCLUDED.humidity_pct,
        wind_speed_ms_x1000 = EXCLUDED.wind_speed_ms_x1000,
        wind_gust_ms_x1000 = EXCLUDED.wind_gust_ms_x1000,
        wind_dir_deg = EXCLUDED.wind_dir_deg,
        precip_prob_pct = EXCLUDED.precip_prob_pct,
        pressure_hpa_x100 = EXCLUDED.pressure_hpa_x100
    `, stationID, issuedAt, forecastTime,
        rainMmX1000, tempCx100, humidityPct, wsMsX1000, gustMsX1000, wdirDeg, probPct, pressureHpaX100)
    return err
}
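The subtle parts of the fetcher above are the timestamp and unit handling: Caiyun keys each hourly entry by the left edge of the hour in +08:00 local time, forecast_time stores the right edge, wind speed arrives in km/h and pressure in Pa. A standalone sketch of those conversions, with made-up values:

package main

import (
    "fmt"
    "time"
)

func main() {
    loc := time.FixedZone("CST", 8*3600)

    // Caiyun hourly timestamps look like 2022-05-26T16:00+08:00 (left edge of the hour).
    left, err := time.ParseInLocation("2006-01-02T15:04-07:00", "2022-05-26T16:00+08:00", loc)
    if err != nil {
        panic(err)
    }
    forecastTime := left.Add(1 * time.Hour) // right edge, as stored in forecast_hourly

    windKmh := 18.0        // km/h from the API (metric:v2)
    pressurePa := 100950.0 // Pa from the API

    fmt.Println("forecast_time:", forecastTime.Format(time.RFC3339))
    fmt.Printf("wind: %.3f m/s (x1000 = %d)\n", windKmh/3.6, int64(windKmh/3.6*1000))
    fmt.Printf("pressure: %.2f hPa (x100 = %d)\n", pressurePa/100, int64(pressurePa/100*100))
}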
Some files were not shown because too many files have changed in this diff.