feat(screens): Screen-Übersicht mit On-Demand-Screenshots für Multi-Screen-User
- GET /manage: neue Übersichtsseite mit Bulma-Karten für screen_user mit ≥2 Screens
- handleScreenUserRedirect leitet bei ≥2 Screens auf /manage statt auf ersten Screen
- On-Demand-Screenshot-Flow via MQTT:
- Backend publiziert signage/screen/{slug}/screenshot-request beim Seitenaufruf
- Player-Agent empfängt Topic, ruft TakeAndSendOnce() auf
- Player POST /api/v1/player/screenshot → Backend speichert in ScreenshotStore (RAM)
- GET /api/v1/screens/{screenId}/screenshot liefert gespeichertes Bild (authOnly)
- ScreenshotStore: In-Memory, thread-safe, kein Persistenz-Overhead
- JS-Retry nach 4s in Templates (Screenshot braucht 1-3s für MQTT-Roundtrip)
- manageTmpl zeigt Screenshot-Thumbnail beim Einzelscreen-Aufruf
- Doku: neue Endpoints, MQTT-Topics, Screenshot-Flow in SERVER-KONZEPT.md
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
47f65da228
commit
b73da77835
19 changed files with 265 additions and 52 deletions
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
morz_server_base_url: "http://10.0.0.70:8080"
|
||||
morz_server_base_url: "http://192.168.64.1:8080"
|
||||
morz_mqtt_broker: "tcp://dockerbox.morz.de:1883"
|
||||
morz_heartbeat_every_seconds: 30
|
||||
morz_status_report_every_seconds: 60
|
||||
|
|
|
|||
|
|
@ -5,6 +5,9 @@ all:
|
|||
hosts:
|
||||
info10:
|
||||
info01-dev:
|
||||
info11-dev:
|
||||
info12-dev:
|
||||
debi:
|
||||
signage_servers:
|
||||
hosts:
|
||||
dockerbox:
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ signage_timezone: "Europe/Berlin"
|
|||
signage_base_packages:
|
||||
- curl
|
||||
- ca-certificates
|
||||
- rsync
|
||||
- htop
|
||||
- vim-tiny
|
||||
- bash-completion
|
||||
- ntp
|
||||
- rsync
|
||||
- chrony
|
||||
|
|
|
|||
|
|
@ -1,10 +1,4 @@
|
|||
---
|
||||
- name: Restart cron
|
||||
ansible.builtin.systemd:
|
||||
name: cron
|
||||
state: restarted
|
||||
become: true
|
||||
|
||||
- name: Restart journald
|
||||
ansible.builtin.systemd:
|
||||
name: systemd-journald
|
||||
|
|
|
|||
|
|
@ -1,9 +1,12 @@
|
|||
---
|
||||
- name: Update apt cache and upgrade installed packages
|
||||
- name: Update apt cache
|
||||
ansible.builtin.apt:
|
||||
update_cache: true
|
||||
become: true
|
||||
|
||||
- name: Upgrade installed packages
|
||||
ansible.builtin.apt:
|
||||
upgrade: dist
|
||||
cache_valid_time: 3600
|
||||
become: true
|
||||
|
||||
- name: Install base packages
|
||||
|
|
@ -16,11 +19,18 @@
|
|||
community.general.timezone:
|
||||
name: "{{ signage_timezone }}"
|
||||
become: true
|
||||
notify: Restart cron
|
||||
|
||||
- name: Ensure NTP service is enabled and running
|
||||
- name: Disable systemd-timesyncd if present (chrony replaces it)
|
||||
ansible.builtin.systemd:
|
||||
name: ntp
|
||||
name: systemd-timesyncd
|
||||
enabled: false
|
||||
state: stopped
|
||||
become: true
|
||||
failed_when: false
|
||||
|
||||
- name: Ensure chrony NTP service is enabled and running
|
||||
ansible.builtin.systemd:
|
||||
name: chrony
|
||||
enabled: true
|
||||
state: started
|
||||
become: true
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ signage_user: morz
|
|||
signage_config_dir: /etc/signage
|
||||
signage_binary_dest: /usr/local/bin/morz-agent
|
||||
|
||||
morz_server_base_url: "http://10.0.0.70:8080"
|
||||
morz_server_base_url: "http://192.168.64.1:8080"
|
||||
morz_mqtt_broker: ""
|
||||
morz_mqtt_username: ""
|
||||
morz_mqtt_password: ""
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
signage_admin_token: ""
|
||||
|
||||
# Server base URL reachable from the Ansible controller
|
||||
signage_server_base_url: "http://10.0.0.70:8080"
|
||||
signage_server_base_url: "http://192.168.64.1:8080"
|
||||
|
||||
# SSH public key to deploy to the signage user
|
||||
signage_ssh_public_key: ""
|
||||
|
|
|
|||
|
|
@ -35,6 +35,7 @@ services:
|
|||
MORZ_INFOBOARD_MQTT_BROKER: "tcp://mosquitto:1883"
|
||||
MORZ_INFOBOARD_ADMIN_PASSWORD: "${MORZ_INFOBOARD_ADMIN_PASSWORD}"
|
||||
MORZ_INFOBOARD_DEV_MODE: "${MORZ_INFOBOARD_DEV_MODE:-false}"
|
||||
TZ: "Europe/Berlin"
|
||||
MORZ_INFOBOARD_DEFAULT_TENANT: "${MORZ_INFOBOARD_DEFAULT_TENANT:-morz}"
|
||||
volumes:
|
||||
- uploads:/uploads
|
||||
|
|
|
|||
|
|
@ -91,6 +91,16 @@ Aufgaben:
|
|||
- Events
|
||||
- Kommandos und ACKs
|
||||
|
||||
### MQTT-Topics (implementiert)
|
||||
|
||||
| Topic | Publisher | Subscriber | Beschreibung |
|
||||
|----------------------------------------------|------------|---------------|---------------------------------------------------|
|
||||
| `signage/screen/{slug}/playlist-changed` | Backend | Player-Agent | Benachrichtigung bei Playlist-Aenderung; Backend debounced 2 s |
|
||||
| `signage/screen/{slug}/screenshot-request` | Backend | Player-Agent | Fordert sofortigen On-Demand-Screenshot an |
|
||||
|
||||
Der Backend-`Notifier` (`internal/mqttnotifier/notifier.go`) veroeffentlicht beide Topics.
|
||||
Der Player-`Subscriber` (`player/agent/internal/mqttsubscriber/subscriber.go`) abonniert beide Topics fuer den eigenen Screen-Slug. Auf ein `screenshot-request`-Signal ruft der Agent `Screenshotter.TakeAndSendOnce(ctx)` auf und laedt das Bild direkt per `POST /api/v1/player/screenshot` hoch.
|
||||
|
||||
### Dateispeicher
|
||||
|
||||
Aufgaben:
|
||||
|
|
@ -210,9 +220,16 @@ Der Server speichert:
|
|||
|
||||
- letzten bekannten Heartbeat
|
||||
- letzten Status
|
||||
- letzten Screenshot
|
||||
- letzten Screenshot (In-Memory, nicht persistiert)
|
||||
- aktuelle Inhaltsquelle pro Screen
|
||||
|
||||
### Screenshot-Flow
|
||||
|
||||
1. Der Player-Agent sendet periodisch (Intervall: `MORZ_INFOBOARD_SCREENSHOT_EVERY`) einen Screenshot per `POST /api/v1/player/screenshot` (Multipart, kein Auth).
|
||||
2. Alternativ kann das Backend per MQTT-Topic `signage/screen/{slug}/screenshot-request` einen On-Demand-Screenshot anfordern (`Notifier.RequestScreenshot(slug)`). Der Player-Agent empfaengt das Signal und ruft `Screenshotter.TakeAndSendOnce(ctx)` auf.
|
||||
3. Das Backend speichert den Screenshot im `ScreenshotStore` (In-Memory, keyed by `screen_id`). Pro Screen wird nur der jeweils neueste Screenshot gehalten.
|
||||
4. Eingeloggte Benutzer koennen den Screenshot unter `GET /api/v1/screens/{screenId}/screenshot` abrufen (`authOnly`). Der Response-Header enthaelt den vom Player gemeldeten MIME-Typ sowie `Cache-Control: no-store`.
|
||||
|
||||
Die Admin-UI soll damit erkennen:
|
||||
|
||||
- online/offline
|
||||
|
|
|
|||
|
|
@ -199,7 +199,10 @@ func (a *App) Run(ctx context.Context) error {
|
|||
// Self-register this screen in the backend (best-effort, non-blocking).
|
||||
go a.registerScreen(ctx)
|
||||
|
||||
// Subscribe to playlist-changed MQTT notifications (optional; fallback = polling).
|
||||
// Screenshot-Instanz immer anlegen (für periodische und On-Demand-Screenshots).
|
||||
ss := screenshot.New(a.Config.ScreenID, a.Config.ServerBaseURL, a.Config.ScreenshotEvery, a.logger)
|
||||
|
||||
// Subscribe to playlist-changed and screenshot-request MQTT notifications (optional; fallback = polling).
|
||||
sub := mqttsubscriber.New(
|
||||
a.Config.MQTTBroker,
|
||||
a.Config.ScreenID,
|
||||
|
|
@ -213,10 +216,15 @@ func (a *App) Run(ctx context.Context) error {
|
|||
}
|
||||
a.logger.Printf("event=mqtt_playlist_notification screen_id=%s", a.Config.ScreenID)
|
||||
},
|
||||
func() {
|
||||
a.logger.Printf("event=mqtt_screenshot_request screen_id=%s", a.Config.ScreenID)
|
||||
go ss.TakeAndSendOnce(ctx)
|
||||
},
|
||||
)
|
||||
if sub != nil {
|
||||
a.logger.Printf("event=mqtt_subscriber_enabled broker=%s screen_id=%s topic=%s",
|
||||
a.Config.MQTTBroker, a.Config.ScreenID, mqttsubscriber.Topic(a.Config.ScreenID))
|
||||
a.logger.Printf("event=mqtt_subscriber_enabled broker=%s screen_id=%s topic=%s screenshot_topic=%s",
|
||||
a.Config.MQTTBroker, a.Config.ScreenID, mqttsubscriber.Topic(a.Config.ScreenID),
|
||||
mqttsubscriber.ScreenshotRequestTopic(a.Config.ScreenID))
|
||||
defer sub.Close()
|
||||
}
|
||||
|
||||
|
|
@ -225,7 +233,6 @@ func (a *App) Run(ctx context.Context) error {
|
|||
|
||||
// Phase 6: Periodische Screenshot-Erzeugung, wenn konfiguriert.
|
||||
if a.Config.ScreenshotEvery > 0 {
|
||||
ss := screenshot.New(a.Config.ScreenID, a.Config.ServerBaseURL, a.Config.ScreenshotEvery, a.logger)
|
||||
go ss.Run(ctx)
|
||||
a.logger.Printf("event=screenshot_enabled screen_id=%s interval_seconds=%d",
|
||||
a.Config.ScreenID, a.Config.ScreenshotEvery)
|
||||
|
|
|
|||
|
|
@ -17,20 +17,28 @@ const (
|
|||
|
||||
// playlistChangedTopicTemplate is the topic the backend publishes to.
|
||||
playlistChangedTopic = "signage/screen/%s/playlist-changed"
|
||||
|
||||
// screenshotRequestTopicTemplate is the topic the backend publishes to for on-demand screenshots.
|
||||
screenshotRequestTopicTemplate = "signage/screen/%s/screenshot-request"
|
||||
)
|
||||
|
||||
// PlaylistChangedFunc is called when a debounced playlist-changed notification arrives.
|
||||
type PlaylistChangedFunc func()
|
||||
|
||||
// ScreenshotRequestFunc is called when a screenshot-request notification arrives.
|
||||
type ScreenshotRequestFunc func()
|
||||
|
||||
// Subscriber listens for playlist-changed notifications on MQTT and calls the
|
||||
// provided callback at most once per debounceDuration.
|
||||
type Subscriber struct {
|
||||
client mqtt.Client
|
||||
timer *time.Timer
|
||||
onChange PlaylistChangedFunc
|
||||
onScreenshotRequest ScreenshotRequestFunc
|
||||
|
||||
// timerC serializes timer resets through a dedicated goroutine.
|
||||
resetC chan struct{}
|
||||
screenshotReqC chan struct{}
|
||||
stopC chan struct{}
|
||||
}
|
||||
|
||||
|
|
@ -39,23 +47,32 @@ func Topic(screenSlug string) string {
|
|||
return "signage/screen/" + screenSlug + "/playlist-changed"
|
||||
}
|
||||
|
||||
// ScreenshotRequestTopic returns the MQTT topic for on-demand screenshot requests for a given screenSlug.
|
||||
func ScreenshotRequestTopic(screenSlug string) string {
|
||||
return "signage/screen/" + screenSlug + "/screenshot-request"
|
||||
}
|
||||
|
||||
// New creates a Subscriber that connects to broker and subscribes to the
|
||||
// playlist-changed topic for screenSlug. onChange is called (in its own
|
||||
// goroutine) at most once per debounceDuration.
|
||||
// onScreenshotRequest is called (in its own goroutine) when a screenshot-request message arrives.
|
||||
//
|
||||
// Returns nil when broker is empty — callers must handle nil.
|
||||
func New(broker, screenSlug, username, password string, onChange PlaylistChangedFunc) *Subscriber {
|
||||
func New(broker, screenSlug, username, password string, onChange PlaylistChangedFunc, onScreenshotRequest ScreenshotRequestFunc) *Subscriber {
|
||||
if broker == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
s := &Subscriber{
|
||||
onChange: onChange,
|
||||
onScreenshotRequest: onScreenshotRequest,
|
||||
resetC: make(chan struct{}, 16),
|
||||
screenshotReqC: make(chan struct{}, 16),
|
||||
stopC: make(chan struct{}),
|
||||
}
|
||||
|
||||
topic := Topic(screenSlug)
|
||||
screenshotTopic := ScreenshotRequestTopic(screenSlug)
|
||||
|
||||
opts := mqtt.NewClientOptions().
|
||||
AddBroker(broker).
|
||||
|
|
@ -72,6 +89,12 @@ func New(broker, screenSlug, username, password string, onChange PlaylistChanged
|
|||
default: // channel full — debounce timer will fire anyway
|
||||
}
|
||||
})
|
||||
c.Subscribe(screenshotTopic, 0, func(_ mqtt.Client, _ mqtt.Message) { //nolint:errcheck
|
||||
select {
|
||||
case s.screenshotReqC <- struct{}{}:
|
||||
default: // channel full — request already pending
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
if username != "" {
|
||||
|
|
@ -104,6 +127,10 @@ func (s *Subscriber) run() {
|
|||
timer = time.AfterFunc(debounceDuration, func() {
|
||||
go s.onChange()
|
||||
})
|
||||
case <-s.screenshotReqC:
|
||||
if s.onScreenshotRequest != nil {
|
||||
go s.onScreenshotRequest()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -78,6 +78,12 @@ func (s *Screenshotter) Run(ctx context.Context) {
|
|||
}
|
||||
}
|
||||
|
||||
// TakeAndSendOnce macht genau einen Screenshot und lädt ihn hoch.
|
||||
// Nicht-blockierend gegenüber dem periodischen Loop.
|
||||
func (s *Screenshotter) TakeAndSendOnce(ctx context.Context) {
|
||||
s.takeAndSend(ctx)
|
||||
}
|
||||
|
||||
// takeAndSend erzeugt einen Screenshot und sendet ihn an den Server.
|
||||
func (s *Screenshotter) takeAndSend(ctx context.Context) {
|
||||
path, err := s.capture()
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ COPY . .
|
|||
RUN go build -o /out/backend ./cmd/api
|
||||
|
||||
FROM alpine:3.22
|
||||
RUN apk add --no-cache tzdata
|
||||
WORKDIR /app
|
||||
COPY --from=build /out/backend /usr/local/bin/backend
|
||||
EXPOSE 8080
|
||||
|
|
|
|||
|
|
@ -76,6 +76,9 @@ func New() (*App, error) {
|
|||
// Non-fatal: server starts even if admin setup fails.
|
||||
}
|
||||
|
||||
// Screenshot store (in-memory).
|
||||
ss := httpapi.NewScreenshotStore()
|
||||
|
||||
// MQTT notifier (no-op when broker not configured).
|
||||
notifier := mqttnotifier.New(cfg.MQTTBroker, cfg.MQTTUsername, cfg.MQTTPassword)
|
||||
if cfg.MQTTBroker != "" {
|
||||
|
|
@ -92,6 +95,7 @@ func New() (*App, error) {
|
|||
PlaylistStore: playlists,
|
||||
AuthStore: authStore,
|
||||
Notifier: notifier,
|
||||
ScreenshotStore: ss,
|
||||
Config: cfg,
|
||||
UploadDir: cfg.UploadDir,
|
||||
Logger: logger,
|
||||
|
|
|
|||
|
|
@ -379,9 +379,16 @@ func parseOptionalTime(s string) (*time.Time, error) {
|
|||
if s == "" {
|
||||
return nil, nil
|
||||
}
|
||||
// Accept RFC3339 (API) and datetime-local HTML input format.
|
||||
for _, layout := range []string{time.RFC3339, "2006-01-02T15:04", "2006-01-02T15:04:05"} {
|
||||
if t, err := time.Parse(layout, s); err == nil {
|
||||
// RFC3339 already carries timezone info — use as-is.
|
||||
if t, err := time.Parse(time.RFC3339, s); err == nil {
|
||||
return &t, nil
|
||||
}
|
||||
// datetime-local HTML inputs ("2006-01-02T15:04" / "2006-01-02T15:04:05") carry
|
||||
// no timezone. Interpret them as local time so the value the user sees in their
|
||||
// browser matches what PostgreSQL stores and what NOW() (also local on the DB
|
||||
// server) is compared against.
|
||||
for _, layout := range []string{"2006-01-02T15:04:05", "2006-01-02T15:04"} {
|
||||
if t, err := time.ParseInLocation(layout, s, time.Local); err == nil {
|
||||
return &t, nil
|
||||
}
|
||||
}
|
||||
|
|
|
|||
59
server/backend/internal/httpapi/screenshot.go
Normal file
59
server/backend/internal/httpapi/screenshot.go
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
package httpapi
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
const maxScreenshotSize = 3 << 20 // 3 MB
|
||||
|
||||
func handlePlayerScreenshot(store *ScreenshotStore) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
r.Body = http.MaxBytesReader(w, r.Body, maxScreenshotSize)
|
||||
if err := r.ParseMultipartForm(maxScreenshotSize); err != nil {
|
||||
http.Error(w, "bad multipart form", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
screenID := r.FormValue("screen_id")
|
||||
if screenID == "" {
|
||||
http.Error(w, "screen_id required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
file, header, err := r.FormFile("screenshot")
|
||||
if err != nil {
|
||||
http.Error(w, "screenshot file required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
data, err := io.ReadAll(file)
|
||||
if err != nil {
|
||||
http.Error(w, "read error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
mimeType := header.Header.Get("Content-Type")
|
||||
if mimeType == "" {
|
||||
mimeType = "image/png"
|
||||
}
|
||||
|
||||
store.Save(screenID, data, mimeType)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
}
|
||||
|
||||
func handleGetScreenshot(store *ScreenshotStore) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
screenID := r.PathValue("screenId")
|
||||
data, mimeType, ok := store.Get(screenID)
|
||||
if !ok {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
w.Header().Set("Content-Type", mimeType)
|
||||
w.Header().Set("Cache-Control", "no-store")
|
||||
w.Write(data) //nolint:errcheck
|
||||
}
|
||||
}
|
||||
33
server/backend/internal/httpapi/screenshot_store.go
Normal file
33
server/backend/internal/httpapi/screenshot_store.go
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
package httpapi
|
||||
|
||||
import "sync"
|
||||
|
||||
// screenshotRecord bundles raw image bytes with the MIME type the player
// reported for them.
type screenshotRecord struct {
	Data     []byte
	MimeType string
}

// ScreenshotStore keeps the most recent screenshot per screen in memory,
// keyed by screen ID. It is safe for concurrent use; nothing is persisted.
type ScreenshotStore struct {
	mu      sync.RWMutex
	records map[string]screenshotRecord
}

// NewScreenshotStore returns an empty, ready-to-use store.
func NewScreenshotStore() *ScreenshotStore {
	return &ScreenshotStore{records: map[string]screenshotRecord{}}
}

// Save replaces any previously stored screenshot for screenID with data and
// mimeType; only the newest screenshot per screen is retained.
func (s *ScreenshotStore) Save(screenID string, data []byte, mimeType string) {
	s.mu.Lock()
	s.records[screenID] = screenshotRecord{Data: data, MimeType: mimeType}
	s.mu.Unlock()
}

// Get returns the stored screenshot bytes and MIME type for screenID. The
// third return value reports whether a screenshot exists for that screen.
func (s *ScreenshotStore) Get(screenID string) ([]byte, string, bool) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	if rec, ok := s.records[screenID]; ok {
		return rec.Data, rec.MimeType, true
	}
	return nil, "", false
}
|
||||
|
|
@ -436,12 +436,13 @@ var statusTemplateFuncs = template.FuncMap{
|
|||
}
|
||||
|
||||
var statusPageTemplate = template.Must(template.New("status-page").Funcs(statusTemplateFuncs).Parse(`<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<html lang="de" data-theme="light">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="color-scheme" content="light">
|
||||
<meta http-equiv="refresh" content="{{.RefreshSeconds}}">
|
||||
<title>Bildschirmstatus</title>
|
||||
<title>Bildschirmstatus – morz infoboard</title>
|
||||
` + statusPageCSSBlock + `
|
||||
</head>
|
||||
<body>
|
||||
|
|
@ -509,7 +510,7 @@ var statusPageTemplate = template.Must(template.New("status-page").Funcs(statusT
|
|||
</div>
|
||||
|
||||
<div class="field">
|
||||
<label for="server_connectivity">Serverkonnektivität</label>
|
||||
<label for="server_connectivity">Verbindung zum Server</label>
|
||||
<select id="server_connectivity" name="server_connectivity">
|
||||
<option value="" {{if eq .Filters.ServerConnectivity ""}}selected{{end}}>Alle</option>
|
||||
<option value="online" {{if eq .Filters.ServerConnectivity "online"}}selected{{end}}>Online</option>
|
||||
|
|
@ -520,7 +521,7 @@ var statusPageTemplate = template.Must(template.New("status-page").Funcs(statusT
|
|||
</div>
|
||||
|
||||
<div class="field">
|
||||
<label for="stale">Aktualität</label>
|
||||
<label for="stale">Meldungsalter</label>
|
||||
<select id="stale" name="stale">
|
||||
<option value="" {{if eq .Filters.Stale ""}}selected{{end}}>Alle</option>
|
||||
<option value="true" {{if eq .Filters.Stale "true"}}selected{{end}}>Nur veraltet</option>
|
||||
|
|
@ -529,7 +530,7 @@ var statusPageTemplate = template.Must(template.New("status-page").Funcs(statusT
|
|||
</div>
|
||||
|
||||
<div class="field">
|
||||
<label for="derived_state">Abgeleiteter Status</label>
|
||||
<label for="derived_state">Gesamtstatus</label>
|
||||
<select id="derived_state" name="derived_state">
|
||||
<option value="" {{if eq .Filters.DerivedState ""}}selected{{end}}>Alle</option>
|
||||
<option value="online" {{if eq .Filters.DerivedState "online"}}selected{{end}}>Online</option>
|
||||
|
|
@ -539,8 +540,8 @@ var statusPageTemplate = template.Must(template.New("status-page").Funcs(statusT
|
|||
</div>
|
||||
|
||||
<div class="field full">
|
||||
<label for="updated_since">Aktualisiert seit (RFC3339)</label>
|
||||
<input id="updated_since" name="updated_since" type="text" placeholder="2026-03-22T16:05:00Z" value="{{.Filters.UpdatedSince}}">
|
||||
<label for="updated_since">Aktualisiert seit</label>
|
||||
<input id="updated_since" name="updated_since" type="datetime-local" value="{{.Filters.UpdatedSince}}">
|
||||
</div>
|
||||
|
||||
<div class="field">
|
||||
|
|
@ -643,16 +644,43 @@ var statusPageTemplate = template.Must(template.New("status-page").Funcs(statusT
|
|||
updateRelTimes();
|
||||
setInterval(updateRelTimes, 30000);
|
||||
})();
|
||||
|
||||
// Beim Laden: RFC3339-Wert in datetime-local-Format konvertieren (YYYY-MM-DDTHH:MM)
|
||||
(function() {
|
||||
var input = document.getElementById('updated_since');
|
||||
if (input && input.value) {
|
||||
var d = new Date(input.value);
|
||||
if (!isNaN(d)) {
|
||||
var pad = function(n) { return n < 10 ? '0' + n : '' + n; };
|
||||
input.value = d.getFullYear() + '-' + pad(d.getMonth()+1) + '-' + pad(d.getDate()) +
|
||||
'T' + pad(d.getHours()) + ':' + pad(d.getMinutes());
|
||||
}
|
||||
}
|
||||
})();
|
||||
|
||||
// Beim Submit: datetime-local Wert zu RFC3339 konvertieren
|
||||
(function() {
|
||||
var form = document.querySelector('form.filter-form');
|
||||
if (form) {
|
||||
form.addEventListener('submit', function(e) {
|
||||
var input = document.getElementById('updated_since');
|
||||
if (input && input.value) {
|
||||
input.value = new Date(input.value).toISOString();
|
||||
}
|
||||
});
|
||||
}
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
`))
|
||||
|
||||
var screenDetailTemplate = template.Must(template.New("screen-detail").Funcs(statusTemplateFuncs).Parse(`<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<html lang="de" data-theme="light">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="color-scheme" content="light">
|
||||
<meta http-equiv="refresh" content="{{.RefreshSeconds}}">
|
||||
<title>{{.Record.ScreenID}} – Bildschirmstatus</title>
|
||||
` + statusPageCSSBlock + `
|
||||
|
|
@ -662,6 +690,9 @@ var screenDetailTemplate = template.Must(template.New("screen-detail").Funcs(sta
|
|||
<section class="hero">
|
||||
<div class="hero-top">
|
||||
<div>
|
||||
<!-- N5: ScreenID (Slug) als Titel. Displayname könnte hier ergänzt werden,
|
||||
wenn handleScreenDetailPage zusätzlich *store.ScreenStore erhält
|
||||
und GetBySlug(ctx, screenID) aufruft. -->
|
||||
<h1>{{.Record.ScreenID}}</h1>
|
||||
<p class="lead">Detailansicht auf Basis des zuletzt akzeptierten Status-Reports.</p>
|
||||
</div>
|
||||
|
|
@ -781,10 +812,11 @@ var screenDetailTemplate = template.Must(template.New("screen-detail").Funcs(sta
|
|||
`))
|
||||
|
||||
var statusPageErrorTemplate = template.Must(template.New("status-error").Funcs(statusTemplateFuncs).Parse(`<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<html lang="de" data-theme="light">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="color-scheme" content="light">
|
||||
<title>Ungültiger Filter – Bildschirmstatus</title>
|
||||
` + statusPageCSSBlock + `
|
||||
</head>
|
||||
|
|
|
|||
|
|
@ -83,6 +83,18 @@ func (n *Notifier) NotifyChanged(screenSlug string) {
|
|||
})
|
||||
}
|
||||
|
||||
// RequestScreenshot publishes a screenshot-request message to the screen's MQTT topic.
|
||||
// It is a no-op when the client is not connected.
|
||||
func (n *Notifier) RequestScreenshot(screenSlug string) {
|
||||
if n.client == nil {
|
||||
return
|
||||
}
|
||||
topic := fmt.Sprintf("signage/screen/%s/screenshot-request", screenSlug)
|
||||
payload := []byte(fmt.Sprintf(`{"ts":%d}`, time.Now().UnixMilli()))
|
||||
token := n.client.Publish(topic, 0, false, payload)
|
||||
token.WaitTimeout(3 * time.Second)
|
||||
}
|
||||
|
||||
func (n *Notifier) publish(screenSlug string) {
|
||||
topic := Topic(screenSlug)
|
||||
payload := []byte(fmt.Sprintf(`{"ts":%d}`, time.Now().UnixMilli()))
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue