Revert "feat: 新增雷达图"

This reverts commit 4fa9822405104095a9923e9762a2f65a1973d903.
yarnom 2025-09-23 09:33:07 +08:00
parent 2c7f9a0f47
commit a03c60469f
8 changed files with 7 additions and 836 deletions

@@ -1,58 +0,0 @@
package radarfetch
import (
"encoding/json"
"fmt"
"net/http"
"time"
)
// Caiyun token and endpoint (fixed per user instruction)
const caiyunToken = "ZAcZq49qzibr10F0"
type caiyunRealtimeResp struct {
Status string `json:"status"`
Result struct {
Realtime struct {
Temperature float64 `json:"temperature"`
Humidity float64 `json:"humidity"`
Pressure float64 `json:"pressure"`
Wind struct {
Speed float64 `json:"speed"`
Direction float64 `json:"direction"`
} `json:"wind"`
} `json:"realtime"`
} `json:"result"`
}
// FetchCaiyunRealtime fetches 10m wind plus T/RH/P for given lon,lat.
// Returns: speed(m/s), dir_from(deg), tempC, humidity(0-1), pressurePa
func FetchCaiyunRealtime(lon, lat float64) (float64, float64, float64, float64, float64, error) {
url := fmt.Sprintf("https://api.caiyunapp.com/v2.6/%s/%.6f,%.6f/realtime?unit=metric", caiyunToken, lon, lat)
req, _ := http.NewRequest("GET", url, nil)
req.Header.Set("Accept", "application/json")
cli := &http.Client{Timeout: 8 * time.Second}
resp, err := cli.Do(req)
if err != nil {
return 0, 0, 0, 0, 0, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return 0, 0, 0, 0, 0, fmt.Errorf("caiyun http %d", resp.StatusCode)
}
var rr caiyunRealtimeResp
if err := json.NewDecoder(resp.Body).Decode(&rr); err != nil {
return 0, 0, 0, 0, 0, err
}
if rr.Status != "ok" {
return 0, 0, 0, 0, 0, fmt.Errorf("caiyun status %s", rr.Status)
}
rt := rr.Result.Realtime
return rt.Wind.Speed, rt.Wind.Direction, rt.Temperature, rt.Humidity, rt.Pressure, nil
}
// Backward-compatible wrapper (wind only)
func FetchCaiyunWind(lon, lat float64) (float64, float64, error) {
s, d, _, _, _, err := FetchCaiyunRealtime(lon, lat)
return s, d, err
}
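
For reference, a minimal caller sketch for the removed FetchCaiyunRealtime helper. The module path is taken from the handlers.go import further down, so this assumes the sketch lives inside the same module; the coordinates (the station position used by the wind handler) and the output formatting are illustrative.

package main

import (
    "fmt"

    rf "weatherstation/internal/radarfetch"
)

func main() {
    // Query 10 m realtime wind plus T/RH/P near the station (lon first, then lat).
    speed, dirFrom, tempC, rh, pPa, err := rf.FetchCaiyunRealtime(108.715433, 23.097234)
    if err != nil {
        fmt.Println("caiyun query failed:", err)
        return
    }
    fmt.Printf("wind %.1f m/s from %.0f deg, T %.1f C, RH %.0f%%, P %.1f hPa\n",
        speed, dirFrom, tempC, rh*100, pPa/100)
}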

@@ -1,103 +0,0 @@
package radarfetch
import (
"fmt"
"image"
"image/color"
"image/png"
"os"
"path/filepath"
)
// AttachClusterPNGs renders a tiny PNG for each cluster by flood-filling
// from its centroid on the thresholded mask, cropping to the cluster bbox.
// It writes files to outDir/clusters/cluster-<id>.png and returns updated clusters
// with PNG field filled.
func AttachClusterPNGs(grid [][]*float64, thr float64, clusters []Cluster, outDir string) ([]Cluster, error) {
const W, H = 256, 256
if len(grid) != H || (len(grid) > 0 && len(grid[0]) != W) {
return clusters, nil
}
// precompute threshold mask
mask := make([][]bool, H)
for r := 0; r < H; r++ {
mask[r] = make([]bool, W)
for c := 0; c < W; c++ {
if grid[r][c] == nil {
continue
}
if *grid[r][c] >= thr {
mask[r][c] = true
}
}
}
outDir = filepath.Join(outDir, "clusters")
_ = os.MkdirAll(outDir, 0o755)
for i := range clusters {
cl := &clusters[i]
// BFS from (Row,Col) within mask to reconstruct membership
r0, c0 := cl.Row, cl.Col
if r0 < 0 || r0 >= H || c0 < 0 || c0 >= W || !mask[r0][c0] {
// skip if centroid not on mask
continue
}
minR, minC := cl.MinRow, cl.MinCol
maxR, maxC := cl.MaxRow, cl.MaxCol
w := maxC - minC + 1
h := maxR - minR + 1
if w <= 0 || h <= 0 || w > W || h > H {
continue
}
img := image.NewRGBA(image.Rect(0, 0, w, h))
// init transparent
for y := 0; y < h; y++ {
for x := 0; x < w; x++ {
img.SetRGBA(x, y, color.RGBA{0, 0, 0, 0})
}
}
// flood fill within bbox
vis := make([][]bool, H)
for r := 0; r < H; r++ {
vis[r] = make([]bool, W)
}
stack := [][2]int{{r0, c0}}
vis[r0][c0] = true
dirs := [][2]int{{-1, 0}, {1, 0}, {0, -1}, {0, 1}, {-1, -1}, {-1, 1}, {1, -1}, {1, 1}}
for len(stack) > 0 {
cur := stack[len(stack)-1]
stack = stack[:len(stack)-1]
rr, cc := cur[0], cur[1]
if rr < minR || rr > maxR || cc < minC || cc > maxC {
continue
}
// paint
dbz := grid[rr][cc]
if dbz != nil {
col := colorForDBZ(*dbz)
img.SetRGBA(cc-minC, rr-minR, col)
}
for _, d := range dirs {
nr, nc := rr+d[0], cc+d[1]
if nr < 0 || nr >= H || nc < 0 || nc >= W {
continue
}
if vis[nr][nc] || !mask[nr][nc] {
continue
}
vis[nr][nc] = true
stack = append(stack, [2]int{nr, nc})
}
}
// write file
name := fmt.Sprintf("cluster-%d.png", cl.ID)
p := filepath.Join(outDir, name)
f, err := os.Create(p)
if err != nil {
continue
}
_ = png.Encode(f, img)
_ = f.Close()
cl.PNG = filepath.Join(filepath.Base(outDir), name)
}
return clusters, nil
}
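
A tiny illustrative driver for the removed AttachClusterPNGs (an in-package fragment; it assumes the Cluster fields declared in types.go below, and the grid carries a single 45 dBZ pixel so exactly one 1x1 PNG is written):

// Otherwise-empty 256x256 grid with one pixel above the 40 dBZ threshold.
grid := make([][]*float64, 256)
for r := range grid {
    grid[r] = make([]*float64, 256)
}
v := 45.0
grid[100][100] = &v

cl := []Cluster{{ID: 0, Row: 100, Col: 100, MinRow: 100, MinCol: 100, MaxRow: 100, MaxCol: 100}}
updated, _ := AttachClusterPNGs(grid, 40.0, cl, "./radar_data/out")
// The file lands at ./radar_data/out/clusters/cluster-0.png and the metadata
// field records the relative path.
fmt.Println(updated[0].PNG) // "clusters/cluster-0.png"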

@@ -166,30 +166,6 @@ func runDownload(outRoot string, tzOffset int, chinaURL, huananURL, nanningURL,
return fmt.Errorf("render PNG: %w", err)
}
// Decode grid and detect clusters (>=40 dBZ)
fmt.Println("[radar] decode grid & detect clusters ...")
grid := make([][]*float64, 256)
{
const w, h = 256, 256
if len(binBytes) == w*h*2 {
for row := 0; row < h; row++ {
line := make([]*float64, w)
for col := 0; col < w; col++ {
off := (row*w + col) * 2
u := uint16(binBytes[off])<<8 | uint16(binBytes[off+1])
vv := int16(u)
if vv == 32767 || vv < 0 {
line[col] = nil
continue
}
dbz := float64(vv) / 10.0
line[col] = &dbz
}
grid[row] = line
}
}
}
// 3) Write metadata and update latest
w, s, e, n, res := Bounds4326(z, y, x)
meta := Metadata{
@@ -207,22 +183,6 @@ func runDownload(outRoot string, tzOffset int, chinaURL, huananURL, nanningURL,
Sizes: Sizes{PNG: fileSize(filepath.Join(outDir, "nmc_huanan.png")), BIN: int64(len(binBytes))},
CreatedAt: time.Now().Format(time.RFC3339),
}
// Attach clusters if grid decoded
if grid[0] != nil {
meta.Clusters = SegmentClusters(grid, Bounds{West: w, South: s, East: e, North: n}, res, 40.0)
// Render small PNGs per cluster
if len(meta.Clusters) > 0 {
if updated, err2 := AttachClusterPNGs(grid, 40.0, meta.Clusters, outDir); err2 == nil {
meta.Clusters = updated
}
}
meta.AnalysisNote = "clusters>=40dBZ; samples=center+4rays (N/S/E/W)"
// Build wind query plan with defaults
meta.QueryParams = QueryParams{MinAreaPx: 9, StrongDBZOverride: 50, MaxSamplesPerCluster: 5, MaxCandidatesTotal: 25}
cl2, cands := PlanWindQuery(meta.Clusters, meta.QueryParams)
meta.Clusters = cl2
meta.QueryCandidates = cands
}
if err := WriteMetadata(filepath.Join(outDir, "metadata.json"), &meta); err != nil {
return fmt.Errorf("write metadata: %w", err)
}
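
The BIN layout implied by the decode loop above is a 256x256 grid of big-endian int16 values in 0.1 dBZ units, with 32767 and negative values treated as missing. A self-contained sketch of that per-pixel decode (the helper name is hypothetical):

// decodePixel reads one pixel of the 256x256 big-endian int16 BIN as dBZ.
// Mirrors the loop above: value/10 is dBZ; 32767 and negatives mean "no echo".
func decodePixel(binBytes []byte, row, col int) (dbz float64, ok bool) {
    const w = 256
    off := (row*w + col) * 2
    v := int16(uint16(binBytes[off])<<8 | uint16(binBytes[off+1]))
    if v == 32767 || v < 0 {
        return 0, false
    }
    return float64(v) / 10.0, true
}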

@@ -1,65 +0,0 @@
package radarfetch
// PlanWindQuery marks clusters as eligible or not based on params and
// returns a flattened list of sample points for eligible clusters.
func PlanWindQuery(clusters []Cluster, params QueryParams) ([]Cluster, []QueryCandidate) {
if params.MinAreaPx <= 0 {
params.MinAreaPx = 9
}
if params.StrongDBZOverride <= 0 {
params.StrongDBZOverride = 50
}
if params.MaxSamplesPerCluster <= 0 {
params.MaxSamplesPerCluster = 5
}
if params.MaxCandidatesTotal <= 0 {
params.MaxCandidatesTotal = 25
}
out := make([]QueryCandidate, 0, len(clusters)*2)
for i := range clusters {
cl := &clusters[i]
eligible := cl.AreaPx >= params.MinAreaPx || cl.MaxDBZ >= params.StrongDBZOverride
if !eligible {
cl.EligibleForQuery = false
cl.SkipReason = "too_small_and_weak"
continue
}
cl.EligibleForQuery = true
cl.SkipReason = ""
// choose up to MaxSamplesPerCluster from samples (prefer center first)
if len(cl.Samples) == 0 {
continue
}
// order: center first, then others as-is
picked := 0
// ensure center first if exists
for _, s := range cl.Samples {
if s.Role == "center" {
out = append(out, QueryCandidate{ClusterID: cl.ID, Role: s.Role, Lon: s.Lon, Lat: s.Lat})
picked++
break
}
}
for _, s := range cl.Samples {
if picked >= params.MaxSamplesPerCluster {
break
}
if s.Role == "center" {
continue
}
out = append(out, QueryCandidate{ClusterID: cl.ID, Role: s.Role, Lon: s.Lon, Lat: s.Lat})
picked++
}
if picked == 0 {
// fallback: take first
s := cl.Samples[0]
out = append(out, QueryCandidate{ClusterID: cl.ID, Role: s.Role, Lon: s.Lon, Lat: s.Lat})
}
}
// cap total
if len(out) > params.MaxCandidatesTotal {
out = out[:params.MaxCandidatesTotal]
}
return clusters, out
}
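
An illustrative call showing the zero-value defaults kicking in (the cluster data is made up; an in-package fragment using the types declared in types.go below):

// Two clusters: one large enough to query, one too small and too weak.
clusters := []Cluster{
    {ID: 0, AreaPx: 25, MaxDBZ: 46, Samples: []Sample{{Role: "center", Lon: 108.7, Lat: 23.1}}},
    {ID: 1, AreaPx: 4, MaxDBZ: 42},
}
// Zero-valued params fall back to MinAreaPx=9, StrongDBZOverride=50,
// MaxSamplesPerCluster=5, MaxCandidatesTotal=25.
cls, cands := PlanWindQuery(clusters, QueryParams{})
// cls[0].EligibleForQuery == true; cls[1].SkipReason == "too_small_and_weak";
// cands holds a single center-point candidate for cluster 0.
fmt.Printf("%d candidates, cluster 1 skipped: %s\n", len(cands), cls[1].SkipReason)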

@@ -1,163 +0,0 @@
package radarfetch
import (
"math"
)
// SegmentClusters finds 8-connected regions where dBZ >= thr (e.g., 40),
// computes stats and recommended sampling points per cluster.
// Input grid: 256x256, invalid as NaN; bounds/res used to compute lon/lat.
func SegmentClusters(grid [][]*float64, bounds Bounds, resDeg float64, thr float64) []Cluster {
const W, H = 256, 256
if len(grid) != H || (len(grid) > 0 && len(grid[0]) != W) {
return nil
}
// Mask of eligible pixels
mask := make([][]bool, H)
for r := 0; r < H; r++ {
mask[r] = make([]bool, W)
for c := 0; c < W; c++ {
if grid[r][c] == nil {
continue
}
v := *grid[r][c]
if v >= thr {
mask[r][c] = true
}
}
}
// Visited flags
vis := make([][]bool, H)
for r := 0; r < H; r++ {
vis[r] = make([]bool, W)
}
// 8-neighborhood
nbr := [8][2]int{{-1, -1}, {-1, 0}, {-1, 1}, {0, -1}, {0, 1}, {1, -1}, {1, 0}, {1, 1}}
var clusters []Cluster
clusterID := 0
for r := 0; r < H; r++ {
for c := 0; c < W; c++ {
if !mask[r][c] || vis[r][c] {
continue
}
// BFS/DFS stack
stack := [][2]int{{r, c}}
vis[r][c] = true
// stats
area := 0
sumW := 0.0
sumWR := 0.0
sumWC := 0.0
maxDBZ := -math.MaxFloat64
sumDBZ := 0.0
minR, minC := r, c
maxR, maxC := r, c
pixels := make([][2]int, 0, 512)
for len(stack) > 0 {
cur := stack[len(stack)-1]
stack = stack[:len(stack)-1]
rr, cc := cur[0], cur[1]
area++
dbz := *grid[rr][cc]
w := dbz // dBZ-weighted centroid
sumW += w
sumWR += float64(rr) * w
sumWC += float64(cc) * w
if dbz > maxDBZ {
maxDBZ = dbz
}
sumDBZ += dbz
if rr < minR {
minR = rr
}
if cc < minC {
minC = cc
}
if rr > maxR {
maxR = rr
}
if cc > maxC {
maxC = cc
}
pixels = append(pixels, [2]int{rr, cc})
for _, d := range nbr {
nr, nc := rr+d[0], cc+d[1]
if nr < 0 || nr >= H || nc < 0 || nc >= W {
continue
}
if vis[nr][nc] || !mask[nr][nc] {
continue
}
vis[nr][nc] = true
stack = append(stack, [2]int{nr, nc})
}
}
if area == 0 {
continue
}
// centroid (row/col)
cr, cc := 0.0, 0.0
if sumW > 0 {
cr = sumWR / sumW
cc = sumWC / sumW
} else {
// fallback to geometric center
cr = float64(minR+maxR) / 2.0
cc = float64(minC+maxC) / 2.0
}
// Convert centroid to lon/lat (pixel center)
clon := bounds.West + (cc+0.5)*resDeg
clat := bounds.South + (cr+0.5)*resDeg
// Sample points: center + four rays (N,S,E,W) until boundary
samples := make([]Sample, 0, 5)
samples = append(samples, Sample{Row: int(math.Round(cr)), Col: int(math.Round(cc)), Lon: clon, Lat: clat, Role: "center"})
// helper to step ray and clamp to last in-mask pixel
stepRay := func(dr, dc int, role string) {
rr := int(math.Round(cr))
cc2 := int(math.Round(cc))
lastR, lastC := rr, cc2
for {
rr += dr
cc2 += dc
if rr < 0 || rr >= H || cc2 < 0 || cc2 >= W {
break
}
if !mask[rr][cc2] {
break
}
lastR, lastC = rr, cc2
}
lon := bounds.West + (float64(lastC)+0.5)*resDeg
lat := bounds.South + (float64(lastR)+0.5)*resDeg
if lastR != samples[0].Row || lastC != samples[0].Col {
samples = append(samples, Sample{Row: lastR, Col: lastC, Lon: lon, Lat: lat, Role: role})
}
}
stepRay(-1, 0, "ray_n")
stepRay(1, 0, "ray_s")
stepRay(0, 1, "ray_e")
stepRay(0, -1, "ray_w")
avgDBZ := sumDBZ / float64(area)
cluster := Cluster{
ID: clusterID,
AreaPx: area,
MaxDBZ: maxDBZ,
AvgDBZ: avgDBZ,
Row: int(math.Round(cr)),
Col: int(math.Round(cc)),
Lon: clon,
Lat: clat,
MinRow: minR, MinCol: minC, MaxRow: maxR, MaxCol: maxC,
Samples: samples,
}
clusters = append(clusters, cluster)
clusterID++
}
}
return clusters
}
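
A minimal usage sketch (the bounds and resolution are made-up values; grid is the same 256x256 *float64 layout decoded in the cmd hunk above):

// Segment everything at or above 40 dBZ and report each cluster.
bounds := Bounds{West: 104.0, South: 20.0, East: 112.0, North: 28.0}
resDeg := (bounds.East - bounds.West) / 256.0 // degrees per pixel
for _, cl := range SegmentClusters(grid, bounds, resDeg, 40.0) {
    // Centroid lon/lat is the dBZ-weighted pixel center:
    // lon = West + (col+0.5)*resDeg, lat = South + (row+0.5)*resDeg.
    fmt.Printf("cluster %d: %d px, max %.1f dBZ at (%.4f, %.4f)\n",
        cl.ID, cl.AreaPx, cl.MaxDBZ, cl.Lon, cl.Lat)
}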

@@ -47,65 +47,6 @@ type Metadata struct {
Files Files `json:"files"`
Sizes Sizes `json:"sizes"`
CreatedAt string `json:"created_at"`
// Cloud clusters detected from single-frame CREF (>=40 dBZ)
// Optional; may be empty when detection fails.
Clusters []Cluster `json:"clusters,omitempty"`
// Optional notes about sampling strategy or thresholds used
AnalysisNote string `json:"analysis_note,omitempty"`
// Wind query planning parameters and candidates
QueryParams QueryParams `json:"query_params,omitempty"`
QueryCandidates []QueryCandidate `json:"query_candidates,omitempty"`
}
// Cluster represents a connected echo region above threshold.
type Cluster struct {
ID int `json:"id"`
AreaPx int `json:"area_px"`
MaxDBZ float64 `json:"max_dbz"`
AvgDBZ float64 `json:"avg_dbz"`
// Pixel-space centroid (row, col) using dBZ-weighted center
Row int `json:"row"`
Col int `json:"col"`
// Centroid lon/lat of pixel center
Lon float64 `json:"lon"`
Lat float64 `json:"lat"`
// Bounding box in pixel coords (inclusive)
MinRow int `json:"min_row"`
MinCol int `json:"min_col"`
MaxRow int `json:"max_row"`
MaxCol int `json:"max_col"`
// Recommended sample points for downstream wind queries
Samples []Sample `json:"samples"`
// Optional path to a small PNG rendering of this cluster (copied to latest)
PNG string `json:"png,omitempty"`
// Eligibility for downstream wind query
EligibleForQuery bool `json:"eligible_for_query,omitempty"`
SkipReason string `json:"skip_reason,omitempty"`
}
type Sample struct {
Row int `json:"row"`
Col int `json:"col"`
Lon float64 `json:"lon"`
Lat float64 `json:"lat"`
// role: center | ray_n | ray_s | ray_e | ray_w
Role string `json:"role"`
}
// Parameters controlling wind query candidate selection.
type QueryParams struct {
MinAreaPx int `json:"min_area_px"`
StrongDBZOverride float64 `json:"strong_dbz_override"`
MaxSamplesPerCluster int `json:"max_samples_per_cluster"`
MaxCandidatesTotal int `json:"max_candidates_total"`
}
// A single candidate point to query external wind API.
type QueryCandidate struct {
ClusterID int `json:"cluster_id"`
Role string `json:"role"`
Lon float64 `json:"lon"`
Lat float64 `json:"lat"`
}
func WriteMetadata(path string, m *Metadata) error {
@@ -133,8 +74,6 @@ func UpdateLatest(root string, curDir string, m *Metadata) error {
src := filepath.Join(curDir, name)
data, e2 := os.ReadFile(src)
if e2 == nil {
// ensure parent dir exists for nested paths like "clusters/..."
_ = os.MkdirAll(filepath.Dir(dst), 0o755)
_ = os.WriteFile(dst, data, 0o644)
}
}
@@ -146,14 +85,5 @@ func UpdateLatest(root string, curDir string, m *Metadata) error {
if m.Files.CMAPNG != "" {
copyFile(filepath.Base(m.Files.CMAPNG))
}
// copy cluster PNGs if present
if len(m.Clusters) > 0 {
for _, cl := range m.Clusters {
if cl.PNG == "" {
continue
}
copyFile(cl.PNG)
}
}
return nil
}
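
For readers of metadata.json, a sketch of the wire format one removed Cluster entry produces via encoding/json (the values are illustrative; an in-package fragment):

// Marshal one cluster to see what gets written into metadata.json.
cl := Cluster{
    ID: 0, AreaPx: 25, MaxDBZ: 46.5, AvgDBZ: 42.1,
    Row: 100, Col: 100, Lon: 108.7, Lat: 23.1,
    MinRow: 98, MinCol: 98, MaxRow: 102, MaxCol: 102,
    Samples: []Sample{{Row: 100, Col: 100, Lon: 108.7, Lat: 23.1, Role: "center"}},
}
b, _ := json.MarshalIndent(cl, "", "  ")
fmt.Println(string(b))
// Emits snake_case keys ("id", "area_px", "max_dbz", "samples", ...) and omits
// the empty optional fields ("png", "eligible_for_query", "skip_reason").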

@@ -11,11 +11,9 @@ import (
"time"
"weatherstation/internal/config"
"weatherstation/internal/database"
rf "weatherstation/internal/radarfetch"
"weatherstation/pkg/types"
"github.com/gin-gonic/gin"
"math"
)
// StartGinServer starts the Gin web server
@@ -46,7 +44,6 @@ func StartGinServer() error {
api.GET("/forecast", getForecastHandler)
api.GET("/radar/latest", radarLatestHandler)
api.GET("/radar/latest/grid", radarLatestGridHandler)
api.GET("/radar/latest/wind", radarLatestWindHandler)
}
// Get the configured web port
@@ -335,182 +332,3 @@ func intFromMeta(m map[string]any, key string) int {
}
return 0
}
// radarLatestWindHandler queries Caiyun realtime wind for the latest query candidates
// and provides per-cluster aggregated wind and basic coming/ETA analysis toward station.
func radarLatestWindHandler(c *gin.Context) {
// Constants per user request
const (
stationLat = 23.097234
stationLon = 108.715433
)
// Read latest metadata into struct
latestRoot := "./radar_data/latest"
metaPath := latestRoot + "/metadata.json"
b, err := os.ReadFile(metaPath)
if err != nil {
c.JSON(http.StatusNotFound, gin.H{"error": "未找到最新雷达元数据"})
return
}
var meta rf.Metadata
if err := json.Unmarshal(b, &meta); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "解析元数据失败"})
return
}
// For each query candidate, call Caiyun
type Wind struct {
Speed float64 `json:"speed_ms"`
DirFrom float64 `json:"dir_from_deg"`
DirTo float64 `json:"dir_to_deg"`
U float64 `json:"u_east_ms"`
V float64 `json:"v_north_ms"`
TempC float64 `json:"temp_c"`
RH float64 `json:"rh"` // 0-1
PressureHpa float64 `json:"pressure_hpa"`
}
type CandOut struct {
rf.QueryCandidate
Wind *Wind `json:"wind,omitempty"`
Error string `json:"error,omitempty"`
}
outs := make([]CandOut, 0, len(meta.QueryCandidates))
for _, q := range meta.QueryCandidates {
speed, dirFrom, tempC, rh, pPa, err := rf.FetchCaiyunRealtime(q.Lon, q.Lat)
co := CandOut{QueryCandidate: q}
if err != nil {
co.Error = err.Error()
} else {
dirTo := mathMod(dirFrom+180.0, 360.0)
u, v := windVectorUV(speed, dirTo)
// pressure in hPa for display
pHpa := pPa / 100.0
co.Wind = &Wind{Speed: speed, DirFrom: dirFrom, DirTo: dirTo, U: u, V: v, TempC: tempC, RH: rh, PressureHpa: pHpa}
}
outs = append(outs, co)
}
// Aggregate by cluster id
agg := map[int][]Wind{}
for _, co := range outs {
if co.Wind == nil {
continue
}
agg[co.ClusterID] = append(agg[co.ClusterID], *co.Wind)
}
type ClusterAnal struct {
ClusterID int `json:"cluster_id"`
Lon float64 `json:"lon"`
Lat float64 `json:"lat"`
AreaPx int `json:"area_px"`
MaxDBZ float64 `json:"max_dbz"`
SpeedMS float64 `json:"speed_ms"`
DirToDeg float64 `json:"dir_to_deg"`
U float64 `json:"u_east_ms"`
V float64 `json:"v_north_ms"`
Coming bool `json:"coming"`
ETAMin float64 `json:"eta_min,omitempty"`
DistanceKm float64 `json:"distance_km"`
LateralKm float64 `json:"lateral_km"`
RCloudKm float64 `json:"r_cloud_km"`
}
analyses := []ClusterAnal{}
// helpers
mPerDegLat := 111320.0
mPerDegLon := func(lat float64) float64 { return 111320.0 * math.Cos(lat*math.Pi/180.0) }
cellDims := func(lat float64) (float64, float64) { // width (lon), height (lat) in meters per pixel
return meta.ResDeg * mPerDegLon(lat), meta.ResDeg * mPerDegLat
}
const hitRadiusM = 5000.0
for _, cl := range meta.Clusters {
winds := agg[cl.ID]
if len(winds) == 0 {
continue
}
// vector average in u,v (to-direction)
sumU, sumV := 0.0, 0.0
for _, wv := range winds {
sumU += wv.U
sumV += wv.V
}
u := sumU / float64(len(winds))
v := sumV / float64(len(winds))
speed := math.Hypot(u, v)
dirTo := uvToDirTo(u, v)
// project geometry
wx, wy := mPerDegLon(cl.Lat), mPerDegLat
// position of cluster and station in meters (local tangent plane)
px := (cl.Lon - stationLon) * wx
py := (cl.Lat - stationLat) * wy
// vector from cluster to station
dx := -px
dy := -py
d := math.Hypot(dx, dy)
// radial component of velocity towards station
if d == 0 {
d = 1e-6
}
vr := (dx*u + dy*v) / d
// cluster equivalent radius
cw, ch := cellDims(cl.Lat)
areaM2 := float64(cl.AreaPx) * cw * ch
rCloud := math.Sqrt(areaM2 / math.Pi)
// lateral offset (perpendicular distance from station line)
vnorm := math.Hypot(u, v)
lateral := 0.0
if vnorm > 0 {
// |d x vhat|
vx, vy := u/vnorm, v/vnorm
lateral = math.Abs(dx*vy - dy*vx)
}
coming := vr > 0 && lateral <= (rCloud+hitRadiusM)
etaMin := 0.0
if coming && vr > 0 {
distToEdge := d - (rCloud + hitRadiusM)
if distToEdge < 0 {
distToEdge = 0
}
etaMin = distToEdge / vr / 60.0
}
analyses = append(analyses, ClusterAnal{
ClusterID: cl.ID,
Lon: cl.Lon, Lat: cl.Lat,
AreaPx: cl.AreaPx, MaxDBZ: cl.MaxDBZ,
SpeedMS: speed, DirToDeg: dirTo, U: u, V: v,
Coming: coming, ETAMin: round2(etaMin),
DistanceKm: round2(d / 1000.0), LateralKm: round2(lateral / 1000.0), RCloudKm: round2(rCloud / 1000.0),
})
}
c.JSON(http.StatusOK, gin.H{
"station": gin.H{"lon": stationLon, "lat": stationLat},
"params": meta.QueryParams,
"candidates": outs,
"clusters": analyses,
})
}
func windVectorUV(speed, dirTo float64) (u, v float64) {
// dirTo: 0=north, 90=east
rad := dirTo * math.Pi / 180.0
u = speed * math.Sin(rad)
v = speed * math.Cos(rad)
return
}
func uvToDirTo(u, v float64) float64 {
// inverse of above
rad := math.Atan2(u, v) // atan2(y,x) but here y=u (east), x=v (north)
deg := rad * 180.0 / math.Pi
if deg < 0 {
deg += 360.0
}
return deg
}
func mathMod(a, m float64) float64 { // positive modulo
r := math.Mod(a, m)
if r < 0 {
r += m
}
return r
}
func round2(x float64) float64 { return math.Round(x*100.0) / 100.0 }
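
A small numeric sketch of the approach/ETA geometry used above (the station offset, wind, and radii are made up; it reuses the removed helpers windVectorUV and round2 and assumes the surrounding file's imports):

// A cluster 20 km due east of the station with a 5 m/s wind blowing toward 270 deg
// (westward) heads straight at it, so the radial speed equals the full wind speed.
u, v := windVectorUV(5.0, 270.0) // u ~ -5 (east component), v ~ 0 (north component)
dx, dy := -20000.0, 0.0          // vector from cluster to station, metres
d := math.Hypot(dx, dy)
vr := (dx*u + dy*v) / d // +5 m/s toward the station
rCloud, hitRadius := 3000.0, 5000.0
etaMin := (d - (rCloud + hitRadius)) / vr / 60.0
fmt.Printf("vr=%.1f m/s, ETA=%.0f min\n", vr, round2(etaMin)) // ETA = 40 min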

@@ -685,13 +685,13 @@
}
}
function bindRadarTabs() {
var ids = ['china','huanan','nanning','cma'];
ids.forEach(function(k){
var el = document.getElementById('radar-tab-' + k);
if (el) el.onclick = function(){ setRadarImage(k); };
});
}
function bindRadarTabs() {
var ids = ['china','huanan','nanning','cma'];
ids.forEach(function(k){
var el = document.getElementById('radar-tab-' + k);
if (el) el.onclick = function(){ setRadarImage(k); };
});
}
function setRadarImage(kind) {
var images = window.RadarLatestImages || {};
@@ -719,9 +719,6 @@
const data = await res.json();
window.RadarLatestGrid = data;
renderPlotlyHeat(data);
renderClustersPanel();
renderWindQueryList();
renderWindResults();
}
function renderPlotlyHeat(payload){
@@ -805,151 +802,6 @@
});
});
}
function renderClustersPanel(){
// fetch meta to read clusters
fetch('/api/radar/latest').then(r=>r.json()).then(function(resp){
var meta = resp.meta || {};
var clusters = meta.clusters || [];
var host = '/radar/latest/';
var containerId = 'radar-clusters';
var parent = document.getElementById(containerId);
if (!parent) {
var sec = document.createElement('div');
sec.id = containerId;
sec.className = 'mt-4';
var root = document.getElementById('view-radar').querySelector('.radar-grid');
root.appendChild(sec);
parent = sec;
}
if (!clusters.length) { parent.innerHTML = '<div class="text-sm text-gray-500">暂无 >=40 dBZ 云团</div>'; return; }
var html = '<div class="text-sm text-gray-700 mb-2">云团dBZ≥40共 ' + clusters.length + ' 个</div>';
html += '<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-3">';
clusters.forEach(function(cl){
var png = cl.png ? (host + cl.png) : '';
html += '<div class="border border-gray-200 rounded p-2">';
if (png) {
html += '<div class="mb-2 flex items-center justify-center" style="background:#fafafa">'
+ '<img src="'+png+'" style="image-rendering: pixelated; max-width: 100%; max-height: 120px;" />'
+ '</div>';
}
html += '<div class="text-xs text-gray-600">'
+ 'ID: '+cl.id+' | 像元: '+cl.area_px+'<br/>'
+ '质心: '+cl.lon.toFixed(4)+', '+cl.lat.toFixed(4)+'<br/>'
+ 'dBZ: max '+cl.max_dbz.toFixed(1)+' / avg '+cl.avg_dbz.toFixed(1)
+ '</div>';
if (cl.samples && cl.samples.length) {
html += '<div class="mt-1 text-xs text-gray-600">采样点: ' + cl.samples.map(function(s){
return s.role+':('+s.lon.toFixed(3)+','+s.lat.toFixed(3)+')';
}).join(' | ') + '</div>';
}
html += '</div>';
});
html += '</div>';
parent.innerHTML = html;
}).catch(function(){ /* ignore */ });
}
function renderWindQueryList(){
fetch('/api/radar/latest').then(r=>r.json()).then(function(resp){
var meta = resp.meta || {};
var params = meta.query_params || {};
var cands = meta.query_candidates || [];
var containerId = 'radar-wind-query';
var parent = document.getElementById(containerId);
if (!parent) {
var sec = document.createElement('div');
sec.id = containerId;
sec.className = 'mt-4';
var root = document.getElementById('view-radar').querySelector('.radar-grid');
root.appendChild(sec);
parent = sec;
}
var html = '<div class="text-sm text-gray-700 mb-2">风场查询参数</div>';
html += '<div class="text-xs text-gray-600 mb-2">'
+ 'min_area_px='+ (params.min_area_px||9)
+ ', strong_dbz_override=' + (params.strong_dbz_override||50)
+ ', max_samples_per_cluster=' + (params.max_samples_per_cluster||5)
+ ', max_candidates_total=' + (params.max_candidates_total||25)
+ '</div>';
if (!cands.length) {
html += '<div class="text-xs text-gray-500">暂无需要查询的采样点</div>';
} else {
html += '<div class="text-sm text-gray-700 mb-1">需要查询的采样点(共 '+cands.length+' 个)</div>';
html += '<ul class="list-disc pl-5 text-xs text-gray-700">';
cands.forEach(function(p){
html += '<li>cluster='+p.cluster_id+' | '+p.role+' | lon='+p.lon.toFixed(4)+', lat='+p.lat.toFixed(4)+'</li>';
});
html += '</ul>';
}
parent.innerHTML = html;
}).catch(function(){});
}
function renderWindResults(){
fetch('/api/radar/latest/wind').then(r=>r.json()).then(function(resp){
var station = resp.station || {};
var cands = resp.candidates || [];
var clusters = resp.clusters || [];
var containerId = 'radar-wind-results';
var parent = document.getElementById(containerId);
if (!parent) {
var sec = document.createElement('div');
sec.id = containerId;
sec.className = 'mt-4';
var root = document.getElementById('view-radar').querySelector('.radar-grid');
root.appendChild(sec);
parent = sec;
}
var html = '<div class="text-sm text-gray-700 mb-2">风场查询结果(彩云 10m 实况)</div>';
// cluster summary
if (clusters.length) {
html += '<div class="text-xs text-gray-700 mb-2">云团汇总:</div>';
html += '<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-3 mb-3">';
clusters.forEach(function(cl){
html += '<div class="border border-gray-200 rounded p-2 text-xs text-gray-700">'
+ 'ID '+cl.cluster_id+' | 距离 '+(cl.distance_km||0).toFixed(1)+' km<br/>'
+ '风 '+(cl.speed_ms||0).toFixed(1)+' m/s, 去向 '+(cl.dir_to_deg||0).toFixed(0)+'°<br/>'
+ (cl.coming?('<span class="text-green-700">朝向</span>, ETA '+(cl.eta_min||0).toFixed(1)+' 分钟'):'<span class="text-gray-500">非朝向</span>')
+ '</div>';
});
html += '</div>';
}
// candidate details
if (cands.length) {
html += '<div class="text-xs text-gray-700 mb-2">采样点明细:</div>';
html += '<div class="overflow-x-auto"><table class="min-w-full text-xs text-gray-700"><thead><tr>'
+ '<th class="px-2 py-1 border">cluster</th>'
+ '<th class="px-2 py-1 border">role</th>'
+ '<th class="px-2 py-1 border">lon</th>'
+ '<th class="px-2 py-1 border">lat</th>'
+ '<th class="px-2 py-1 border">spd(m/s)</th>'
+ '<th class="px-2 py-1 border">dir_from(°)</th>'
+ '<th class="px-2 py-1 border">T(°C)</th>'
+ '<th class="px-2 py-1 border">RH</th>'
+ '<th class="px-2 py-1 border">P(hPa)</th>'
+ '<th class="px-2 py-1 border">err</th>'
+ '</tr></thead><tbody>';
cands.forEach(function(p){
var w = p.wind || {};
html += '<tr>'
+ '<td class="px-2 py-1 border">'+p.cluster_id+'</td>'
+ '<td class="px-2 py-1 border">'+p.role+'</td>'
+ '<td class="px-2 py-1 border">'+p.lon.toFixed(4)+'</td>'
+ '<td class="px-2 py-1 border">'+p.lat.toFixed(4)+'</td>'
+ '<td class="px-2 py-1 border">'+(w.speed_ms!=null?w.speed_ms.toFixed(1):'')+'</td>'
+ '<td class="px-2 py-1 border">'+(w.dir_from_deg!=null?w.dir_from_deg.toFixed(0):'')+'</td>'
+ '<td class="px-2 py-1 border">'+(w.temp_c!=null?w.temp_c.toFixed(1):'')+'</td>'
+ '<td class="px-2 py-1 border">'+(w.rh!=null?(w.rh*100).toFixed(0)+'%':'')+'</td>'
+ '<td class="px-2 py-1 border">'+(w.pressure_hpa!=null?w.pressure_hpa.toFixed(1):'')+'</td>'
+ '<td class="px-2 py-1 border">'+(p.error||'')+'</td>'
+ '</tr>';
});
html += '</tbody></table></div>';
}
parent.innerHTML = html;
}).catch(function(){});
}
})();
</script>
<script defer src="/static/js/alpinejs.min.js"></script>