Player-UI (playerserver): - Lokale Kiosk-Seite unter /player mit orientierungsgerechtem Splash-Bild - Splash-PNGs (Portrait/Landscape) eingebettet via go:embed - Unteres-Drittel-Overlay mit erweiterbaren Sysinfo-Items (Hostname, Uptime) - /api/now-playing und /api/sysinfo JSON-Endpunkte - iframe-Overlay fuer spaetere Inhalts-URL Ansible-Rolle signage_display (neu): - Pakete: xserver-xorg-core, xinit, openbox, chromium, unclutter - Kiosk-Skript mit openbox als WM (noetig fuer korrektes --kiosk-Vollbild) - systemd-Unit mit Conflicts=getty@tty1 (behebt TTY-Blockierung beim Start) - Chromium Managed Policy: TranslateEnabled=false, Notifications/Geolocation blockiert - --lang=de Flag gegen Sprachauswahl-Dialog Ansible-Rolle signage_player (erweitert): - Legt signage_user an falls nicht vorhanden - PlayerListenAddr und PlayerContentURL in Konfiguration - journald volatile Storage (SD-Karten-Schonung) Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
249 lines
6.2 KiB
Go
249 lines
6.2 KiB
Go
package app
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
"log"
|
|
"os"
|
|
"sync"
|
|
"time"
|
|
|
|
"git.az-it.net/az/morz-infoboard/player/agent/internal/config"
|
|
"git.az-it.net/az/morz-infoboard/player/agent/internal/mqttheartbeat"
|
|
"git.az-it.net/az/morz-infoboard/player/agent/internal/playerserver"
|
|
"git.az-it.net/az/morz-infoboard/player/agent/internal/statusreporter"
|
|
)
|
|
|
|
// Status describes the agent's lifecycle state.
type Status string

// Connectivity describes how reachable the backend server currently looks
// from the agent's perspective, as derived from status-report outcomes.
type Connectivity string

// Lifecycle states and connectivity levels carried in heartbeats and
// status reports.
const (
	StatusStarting Status = "starting"
	StatusRunning  Status = "running"
	StatusStopped  Status = "stopped"

	ConnectivityUnknown  Connectivity = "unknown"  // no status report attempted yet
	ConnectivityOnline   Connectivity = "online"   // last status report succeeded
	ConnectivityDegraded Connectivity = "degraded" // recent status report(s) failed
	ConnectivityOffline  Connectivity = "offline"  // threshold of consecutive failures reached
)

// offlineFailureThreshold is the number of consecutive status-report
// failures after which connectivity is downgraded from degraded to offline
// (see reportStatus).
const offlineFailureThreshold = 3
|
|
|
|
// HealthSnapshot is an immutable copy of the agent's health state, taken
// under lock by App.Snapshot and therefore safe to read without further
// synchronization.
type HealthSnapshot struct {
	Status             Status
	ServerConnectivity Connectivity
	ScreenID           string
	ServerBaseURL      string
	MQTTBroker         string
	HeartbeatEvery     int // heartbeat interval in seconds (Run multiplies by time.Second)
	StartedAt          time.Time
	LastHeartbeatAt    time.Time
}
|
|
|
|
// App is the player agent. It serves the local kiosk player UI, publishes
// MQTT heartbeats, and periodically reports health status to the backend
// server.
type App struct {
	Config config.Config

	logger   *log.Logger
	now      func() time.Time // injectable clock (tests pass a fake; see newApp)
	reporter statusSender
	mqttPub  mqttSender // nil when no MQTT broker is configured

	// mu guards all mutable state below.
	mu                        sync.RWMutex
	status                    Status
	serverConnectivity        Connectivity
	consecutiveReportFailures int // consecutive reporter.Send failures; drives offline detection
	startedAt                 time.Time
	lastHeartbeatAt           time.Time
}
|
|
|
|
// statusSender posts a health snapshot to the backend server; satisfied by
// the statusreporter package. Defined here so tests can substitute a fake.
type statusSender interface {
	Send(ctx context.Context, snapshot statusreporter.Snapshot) error
}

// mqttSender publishes heartbeat messages over MQTT; satisfied by the
// mqttheartbeat package. Close releases the underlying connection.
type mqttSender interface {
	SendHeartbeat(status, connectivity string, ts time.Time) error
	Close()
}
|
|
|
|
func New() (*App, error) {
|
|
cfg, err := config.Load()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
logger := log.New(os.Stdout, "agent ", log.LstdFlags|log.LUTC)
|
|
|
|
var mqttPub mqttSender
|
|
if cfg.MQTTBroker != "" {
|
|
mqttPub = mqttheartbeat.New(cfg.MQTTBroker, cfg.ScreenID, cfg.MQTTUsername, cfg.MQTTPassword)
|
|
logger.Printf("event=mqtt_enabled broker=%s", cfg.MQTTBroker)
|
|
} else {
|
|
logger.Printf("event=mqtt_disabled reason=no_broker_configured")
|
|
}
|
|
|
|
return newApp(cfg, logger, time.Now, statusreporter.New(cfg.ServerBaseURL, nil, time.Now), mqttPub), nil
|
|
}
|
|
|
|
func newApp(cfg config.Config, logger *log.Logger, now func() time.Time, reporter statusSender, mqttPub mqttSender) *App {
|
|
if logger == nil {
|
|
logger = log.New(os.Stdout, "agent ", log.LstdFlags|log.LUTC)
|
|
}
|
|
|
|
if now == nil {
|
|
now = time.Now
|
|
}
|
|
|
|
return &App{
|
|
Config: cfg,
|
|
logger: logger,
|
|
now: now,
|
|
reporter: reporter,
|
|
mqttPub: mqttPub,
|
|
status: StatusStarting,
|
|
serverConnectivity: ConnectivityUnknown,
|
|
}
|
|
}
|
|
|
|
func (a *App) Snapshot() HealthSnapshot {
|
|
a.mu.RLock()
|
|
defer a.mu.RUnlock()
|
|
|
|
return HealthSnapshot{
|
|
Status: a.status,
|
|
ServerConnectivity: a.serverConnectivity,
|
|
ScreenID: a.Config.ScreenID,
|
|
ServerBaseURL: a.Config.ServerBaseURL,
|
|
MQTTBroker: a.Config.MQTTBroker,
|
|
HeartbeatEvery: a.Config.HeartbeatEvery,
|
|
StartedAt: a.startedAt,
|
|
LastHeartbeatAt: a.lastHeartbeatAt,
|
|
}
|
|
}
|
|
|
|
func (a *App) Run(ctx context.Context) error {
|
|
if a.Config.ScreenID == "" {
|
|
return fmt.Errorf("screen id is required")
|
|
}
|
|
|
|
select {
|
|
case <-ctx.Done():
|
|
a.mu.Lock()
|
|
a.status = StatusStopped
|
|
a.mu.Unlock()
|
|
return nil
|
|
default:
|
|
}
|
|
|
|
a.mu.Lock()
|
|
a.startedAt = a.now()
|
|
a.mu.Unlock()
|
|
|
|
a.logger.Printf(
|
|
"event=agent_configured screen_id=%s server_url=%s mqtt_broker=%s heartbeat_every_seconds=%d player_addr=%s",
|
|
a.Config.ScreenID,
|
|
a.Config.ServerBaseURL,
|
|
a.Config.MQTTBroker,
|
|
a.Config.HeartbeatEvery,
|
|
a.Config.PlayerListenAddr,
|
|
)
|
|
|
|
ps := playerserver.New(a.Config.PlayerListenAddr, func() playerserver.NowPlaying {
|
|
snap := a.Snapshot()
|
|
return playerserver.NowPlaying{
|
|
URL: a.Config.PlayerContentURL,
|
|
Status: string(snap.Status),
|
|
Connectivity: string(snap.ServerConnectivity),
|
|
}
|
|
})
|
|
go func() {
|
|
if err := ps.Run(ctx); err != nil {
|
|
a.logger.Printf("event=player_server_error error=%v", err)
|
|
}
|
|
}()
|
|
|
|
a.emitHeartbeat()
|
|
a.mu.Lock()
|
|
a.status = StatusRunning
|
|
a.mu.Unlock()
|
|
|
|
ticker := time.NewTicker(time.Duration(a.Config.HeartbeatEvery) * time.Second)
|
|
defer ticker.Stop()
|
|
|
|
reportTicker := time.NewTicker(time.Duration(a.Config.StatusReportEvery) * time.Second)
|
|
defer reportTicker.Stop()
|
|
a.reportStatus(ctx)
|
|
|
|
for {
|
|
select {
|
|
case <-ctx.Done():
|
|
a.mu.Lock()
|
|
a.status = StatusStopped
|
|
a.mu.Unlock()
|
|
if a.mqttPub != nil {
|
|
a.mqttPub.Close()
|
|
}
|
|
a.logger.Printf("event=agent_stopped screen_id=%s", a.Config.ScreenID)
|
|
return nil
|
|
case <-ticker.C:
|
|
a.emitHeartbeat()
|
|
case <-reportTicker.C:
|
|
a.reportStatus(ctx)
|
|
}
|
|
}
|
|
}
|
|
|
|
func (a *App) emitHeartbeat() {
|
|
now := a.now()
|
|
|
|
a.mu.Lock()
|
|
a.lastHeartbeatAt = now
|
|
status := a.status
|
|
connectivity := a.serverConnectivity
|
|
a.mu.Unlock()
|
|
|
|
if a.mqttPub != nil {
|
|
if err := a.mqttPub.SendHeartbeat(string(status), string(connectivity), now); err != nil {
|
|
a.logger.Printf("event=mqtt_heartbeat_failed screen_id=%s error=%v", a.Config.ScreenID, err)
|
|
}
|
|
}
|
|
}
|
|
|
|
func (a *App) reportStatus(ctx context.Context) {
|
|
if a.reporter == nil {
|
|
return
|
|
}
|
|
|
|
snapshot := a.Snapshot()
|
|
payloadConnectivity := snapshot.ServerConnectivity
|
|
if payloadConnectivity == ConnectivityUnknown || payloadConnectivity == ConnectivityOnline || payloadConnectivity == ConnectivityDegraded || payloadConnectivity == ConnectivityOffline {
|
|
payloadConnectivity = ConnectivityOnline
|
|
}
|
|
|
|
err := a.reporter.Send(ctx, statusreporter.Snapshot{
|
|
Status: string(snapshot.Status),
|
|
ServerConnectivity: string(payloadConnectivity),
|
|
ScreenID: snapshot.ScreenID,
|
|
ServerBaseURL: snapshot.ServerBaseURL,
|
|
MQTTBroker: snapshot.MQTTBroker,
|
|
HeartbeatEverySeconds: snapshot.HeartbeatEvery,
|
|
StartedAt: snapshot.StartedAt,
|
|
LastHeartbeatAt: snapshot.LastHeartbeatAt,
|
|
})
|
|
if err != nil {
|
|
a.mu.Lock()
|
|
a.consecutiveReportFailures++
|
|
a.serverConnectivity = ConnectivityDegraded
|
|
if a.consecutiveReportFailures >= offlineFailureThreshold {
|
|
a.serverConnectivity = ConnectivityOffline
|
|
}
|
|
a.mu.Unlock()
|
|
a.logger.Printf("event=status_report_failed screen_id=%s error=%v", a.Config.ScreenID, err)
|
|
return
|
|
}
|
|
|
|
a.mu.Lock()
|
|
a.consecutiveReportFailures = 0
|
|
a.serverConnectivity = ConnectivityOnline
|
|
a.mu.Unlock()
|
|
}
|