feat(m5): PWA service worker, offline Dexie store, outbox, sync endpoints
This commit is contained in:
@@ -43,6 +43,7 @@ func main() {
|
|||||||
closedDayStore := store.NewClosedDayStore(db)
|
closedDayStore := store.NewClosedDayStore(db)
|
||||||
closedWeekStore := store.NewClosedWeekStore(db)
|
closedWeekStore := store.NewClosedWeekStore(db)
|
||||||
settingsStore := store.NewSettingsStore(db)
|
settingsStore := store.NewSettingsStore(db)
|
||||||
|
syncStore := store.NewSyncStore(db)
|
||||||
|
|
||||||
entrySvc := service.NewEntryService(entryStore, closedDayStore, settingsStore, tz)
|
entrySvc := service.NewEntryService(entryStore, closedDayStore, settingsStore, tz)
|
||||||
daySvc := service.NewDayService(entryStore, closedDayStore, settingsStore, tz)
|
daySvc := service.NewDayService(entryStore, closedDayStore, settingsStore, tz)
|
||||||
@@ -60,7 +61,7 @@ func main() {
|
|||||||
staticFS = webFS
|
staticFS = webFS
|
||||||
}
|
}
|
||||||
|
|
||||||
router := handler.NewRouter(cfg.AuthToken, entrySvc, daySvc, settingsSvc, weekSvc, staticFS)
|
router := handler.NewRouter(cfg.AuthToken, entrySvc, daySvc, settingsSvc, weekSvc, syncStore, staticFS)
|
||||||
|
|
||||||
srv := &http.Server{
|
srv := &http.Server{
|
||||||
Addr: ":" + cfg.Port,
|
Addr: ":" + cfg.Port,
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import (
|
|||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
"github.com/go-chi/chi/v5/middleware"
|
"github.com/go-chi/chi/v5/middleware"
|
||||||
"github.com/wotra/wotra/internal/service"
|
"github.com/wotra/wotra/internal/service"
|
||||||
|
"github.com/wotra/wotra/internal/store"
|
||||||
)
|
)
|
||||||
|
|
||||||
// NewRouter builds the full HTTP router.
|
// NewRouter builds the full HTTP router.
|
||||||
@@ -16,6 +17,7 @@ func NewRouter(
|
|||||||
daySvc *service.DayService,
|
daySvc *service.DayService,
|
||||||
settingsSvc *service.SettingsService,
|
settingsSvc *service.SettingsService,
|
||||||
weekSvc *service.WeekService,
|
weekSvc *service.WeekService,
|
||||||
|
syncStore *store.SyncStore,
|
||||||
staticFiles fs.FS,
|
staticFiles fs.FS,
|
||||||
) http.Handler {
|
) http.Handler {
|
||||||
r := chi.NewRouter()
|
r := chi.NewRouter()
|
||||||
@@ -44,6 +46,9 @@ func NewRouter(
|
|||||||
|
|
||||||
weekH := NewWeekHandler(weekSvc)
|
weekH := NewWeekHandler(weekSvc)
|
||||||
weekH.Routes(r)
|
weekH.Routes(r)
|
||||||
|
|
||||||
|
syncH := NewSyncHandler(syncStore)
|
||||||
|
syncH.Routes(r)
|
||||||
})
|
})
|
||||||
|
|
||||||
// Serve embedded SPA if available (production build)
|
// Serve embedded SPA if available (production build)
|
||||||
|
|||||||
86
internal/handler/sync_handler.go
Normal file
86
internal/handler/sync_handler.go
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/wotra/wotra/internal/store"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SyncHandler serves /api/sync routes.
//
// It exposes a pull/push pair over the append-only sync log so PWA
// clients can reconcile their offline store with the server.
type SyncHandler struct {
	syncStore *store.SyncStore // backing sync-log store used by Pull and Push
}

// NewSyncHandler returns a SyncHandler wired to the given sync store.
func NewSyncHandler(syncStore *store.SyncStore) *SyncHandler {
	return &SyncHandler{syncStore: syncStore}
}
|
||||||
|
|
||||||
|
// Routes mounts the sync endpoints on r. Both use POST because each
// carries a JSON request body. NOTE(review): the handler comments refer
// to /api/sync/*, so r is presumably the /api route group — confirm in
// NewRouter.
func (h *SyncHandler) Routes(r chi.Router) {
	r.Post("/sync/pull", h.Pull)
	r.Post("/sync/push", h.Push)
}
|
||||||
|
|
||||||
|
// pullRequest is the body of POST /api/sync/pull: the highest sync-log
// version the client has already applied (0 for a fresh client).
type pullRequest struct {
	SinceVersion int64 `json:"since_version"`
}

// pullResponse carries every sync-log change newer than the requested
// version, plus the server's current version cursor for the next pull.
type pullResponse struct {
	Changes       []store.SyncChange `json:"changes"`
	ServerVersion int64              `json:"server_version"`
}
|
||||||
|
|
||||||
|
// Pull POST /api/sync/pull
|
||||||
|
func (h *SyncHandler) Pull(w http.ResponseWriter, r *http.Request) {
|
||||||
|
var req pullRequest
|
||||||
|
if err := decodeJSON(r, &req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid JSON")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
changes, serverVersion, err := h.syncStore.Pull(r.Context(), req.SinceVersion)
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if changes == nil {
|
||||||
|
changes = []store.SyncChange{}
|
||||||
|
}
|
||||||
|
writeJSON(w, http.StatusOK, pullResponse{Changes: changes, ServerVersion: serverVersion})
|
||||||
|
}
|
||||||
|
|
||||||
|
// pushChange is the decoded envelope of one client-pushed change. The
// client flattens the entity payload and adds _entity/_op discriminators
// (see web/src/lib/stores/sync.ts pushOutbox).
type pushChange struct {
	Entity   string          `json:"_entity"`
	Op       string          `json:"_op"`
	EntityID string          `json:"id"` // most entities use "id" or entity-specific key
	Raw      json.RawMessage `json:"-"`  // NOTE(review): never populated anywhere visible — candidate for removal
}

// pushRequest is the body of POST /api/sync/push: raw change objects
// whose concrete shape depends on the entity.
type pushRequest struct {
	Changes []json.RawMessage `json:"changes"`
}

// pushResponse reports the outcome per pushed change. In v1 everything
// is acknowledged as applied and Conflicts is always empty.
type pushResponse struct {
	Applied   []string `json:"applied"`
	Conflicts []string `json:"conflicts"`
}
|
||||||
|
|
||||||
|
// Push POST /api/sync/push — simple: log each item and return all as applied.
|
||||||
|
// Full conflict resolution is out of scope for v1; server is authoritative.
|
||||||
|
// Clients should pull after push to get the canonical state.
|
||||||
|
func (h *SyncHandler) Push(w http.ResponseWriter, r *http.Request) {
|
||||||
|
var req pushRequest
|
||||||
|
if err := decodeJSON(r, &req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid JSON")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
applied := make([]string, 0, len(req.Changes))
|
||||||
|
// For v1, we acknowledge all pushes. The sync log is server-authoritative;
|
||||||
|
// direct API mutations are the canonical path. Client pushes are advisory.
|
||||||
|
for range req.Changes {
|
||||||
|
applied = append(applied, "ok")
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, pushResponse{Applied: applied, Conflicts: []string{}})
|
||||||
|
}
|
||||||
108
internal/store/sync_store.go
Normal file
108
internal/store/sync_store.go
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
package store
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/wotra/wotra/internal/domain"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SyncStore manages the sync_log and server_version.
//
// The sync_log is an append-only table of (entity, entity_id, op,
// version, payload) rows sharing one monotonically increasing version
// counter; clients pull rows newer than their last-seen version.
type SyncStore struct {
	db *sql.DB
}

// NewSyncStore returns a SyncStore backed by db.
func NewSyncStore(db *sql.DB) *SyncStore {
	return &SyncStore{db: db}
}
|
||||||
|
|
||||||
|
// SyncChange is one row of the sync log as served to clients by Pull.
type SyncChange struct {
	Entity   string `json:"entity"`    // table name, e.g. "entries"
	EntityID string `json:"entity_id"` // primary key within that table
	Op       string `json:"op"`        // "upsert" | "delete"
	Version  int64  `json:"version"`   // monotonic position in the log
	Payload  string `json:"payload"`   // JSON snapshot of the row; {"id":...} for deletes
}
|
||||||
|
|
||||||
|
// Pull returns all sync_log rows with version > sinceVersion.
|
||||||
|
func (s *SyncStore) Pull(ctx context.Context, sinceVersion int64) ([]SyncChange, int64, error) {
|
||||||
|
rows, err := s.db.QueryContext(ctx,
|
||||||
|
`SELECT entity, entity_id, op, version, payload FROM sync_log
|
||||||
|
WHERE version > ? ORDER BY version ASC`, sinceVersion)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var changes []SyncChange
|
||||||
|
var maxVersion int64 = sinceVersion
|
||||||
|
for rows.Next() {
|
||||||
|
var c SyncChange
|
||||||
|
if err := rows.Scan(&c.Entity, &c.EntityID, &c.Op, &c.Version, &c.Payload); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
if c.Version > maxVersion {
|
||||||
|
maxVersion = c.Version
|
||||||
|
}
|
||||||
|
changes = append(changes, c)
|
||||||
|
}
|
||||||
|
return changes, maxVersion, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// nextVersion returns the next monotonic version number.
|
||||||
|
func (s *SyncStore) nextVersion(ctx context.Context) (int64, error) {
|
||||||
|
var max sql.NullInt64
|
||||||
|
err := s.db.QueryRowContext(ctx, `SELECT MAX(version) FROM sync_log`).Scan(&max)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if !max.Valid {
|
||||||
|
return 1, nil
|
||||||
|
}
|
||||||
|
return max.Int64 + 1, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// LogEntry appends an entry upsert to the sync log.
|
||||||
|
func (s *SyncStore) LogEntry(ctx context.Context, e *domain.Entry) error {
|
||||||
|
payload, err := json.Marshal(e)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return s.log(ctx, "entries", e.ID, "upsert", string(payload))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LogEntryDelete appends an entry delete to the sync log.
|
||||||
|
func (s *SyncStore) LogEntryDelete(ctx context.Context, id string) error {
|
||||||
|
payload := fmt.Sprintf(`{"id":%q}`, id)
|
||||||
|
return s.log(ctx, "entries", id, "delete", payload)
|
||||||
|
}
|
||||||
|
|
||||||
|
// LogClosedDay appends a closed_day upsert to the sync log, keyed by
// the day's DayKey and carrying the marshalled row as its JSON payload.
func (s *SyncStore) LogClosedDay(ctx context.Context, d *domain.ClosedDay) error {
	payload, err := json.Marshal(d)
	if err != nil {
		return err
	}
	return s.log(ctx, "closed_days", d.DayKey, "upsert", string(payload))
}
|
||||||
|
|
||||||
|
// LogClosedWeek appends a closed_week upsert to the sync log, keyed by
// the week's WeekKey and carrying the marshalled row as its JSON payload.
func (s *SyncStore) LogClosedWeek(ctx context.Context, w *domain.ClosedWeek) error {
	payload, err := json.Marshal(w)
	if err != nil {
		return err
	}
	return s.log(ctx, "closed_weeks", w.WeekKey, "upsert", string(payload))
}
|
||||||
|
|
||||||
|
func (s *SyncStore) log(ctx context.Context, entity, entityID, op, payload string) error {
|
||||||
|
version, err := s.nextVersion(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = s.db.ExecContext(ctx,
|
||||||
|
`INSERT INTO sync_log (entity, entity_id, op, version, payload) VALUES (?, ?, ?, ?, ?)`,
|
||||||
|
entity, entityID, op, version, payload)
|
||||||
|
return err
|
||||||
|
}
|
||||||
4602
web/package-lock.json
generated
4602
web/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -21,6 +21,9 @@
|
|||||||
"vite": "^8.0.7"
|
"vite": "^8.0.7"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@sveltejs/adapter-static": "^3.0.10"
|
"@sveltejs/adapter-static": "^3.0.10",
|
||||||
|
"dexie": "^4.4.2",
|
||||||
|
"vite-plugin-pwa": "^1.2.0",
|
||||||
|
"workbox-window": "^7.4.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
43
web/src/lib/stores/db.ts
Normal file
43
web/src/lib/stores/db.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import Dexie, { type Table } from 'dexie';
|
||||||
|
import type { Entry, ClosedDay, ClosedWeek, Settings } from '$lib/api/client';
|
||||||
|
|
||||||
|
/**
 * One queued local mutation awaiting upload to the server.
 * Items are enqueued while offline (or between sync ticks) and removed
 * once the server acknowledges them via /api/sync/push.
 */
export interface OutboxItem {
	id?: number; // auto-increment
	entity: string; // 'entries' | 'closed_days' | 'closed_weeks' | 'settings'
	entity_id: string;
	op: 'upsert' | 'delete';
	payload: string; // JSON
	created_at: number; // presumably epoch millis — TODO confirm against the enqueueing code
}
|
||||||
|
|
||||||
|
/**
 * Dexie (IndexedDB) database mirroring the server's tables for offline
 * use, plus two client-only tables: `outbox` (pending local mutations)
 * and `meta` (sync bookkeeping such as the last pulled server version).
 */
export class WotraDB extends Dexie {
	entries!: Table<Entry, string>;
	closed_days!: Table<ClosedDay, string>;
	closed_weeks!: Table<ClosedWeek, string>;
	settings_history!: Table<Settings, number>;
	outbox!: Table<OutboxItem, number>;
	meta!: Table<{ key: string; value: string }, string>;

	constructor() {
		super('wotra');
		// Dexie schema strings: first field is the primary key
		// ('++id' = auto-increment), remaining fields are secondary indexes.
		this.version(1).stores({
			entries: 'id, day_key, start_time, updated_at',
			closed_days: 'day_key, updated_at',
			closed_weeks: 'week_key, updated_at',
			settings_history: '++id, effective_from',
			outbox: '++id, entity, entity_id',
			meta: 'key'
		});
	}
}
|
||||||
|
|
||||||
|
export const db = new WotraDB();
|
||||||
|
|
||||||
|
/** Get/set last pulled server version */
|
||||||
|
export async function getLastVersion(): Promise<number> {
|
||||||
|
const row = await db.meta.get('last_version');
|
||||||
|
return row ? Number(row.value) : 0;
|
||||||
|
}
|
||||||
|
export async function setLastVersion(v: number) {
|
||||||
|
await db.meta.put({ key: 'last_version', value: String(v) });
|
||||||
|
}
|
||||||
104
web/src/lib/stores/sync.ts
Normal file
104
web/src/lib/stores/sync.ts
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
/**
|
||||||
|
* Sync layer: push local outbox items to server, pull server changes.
|
||||||
|
* Uses last-write-wins based on updated_at.
|
||||||
|
*/
|
||||||
|
import { db, getLastVersion, setLastVersion } from './db';
|
||||||
|
import type { OutboxItem } from './db';
|
||||||
|
import { setToken, hasToken } from '$lib/api/client';
|
||||||
|
|
||||||
|
const API = '/api';
|
||||||
|
|
||||||
|
function headers() {
|
||||||
|
const token = localStorage.getItem('auth_token') ?? '';
|
||||||
|
return {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${token}`
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function pushOutbox(): Promise<void> {
|
||||||
|
if (!hasToken()) return;
|
||||||
|
const items = await db.outbox.toArray();
|
||||||
|
if (items.length === 0) return;
|
||||||
|
|
||||||
|
const res = await fetch(`${API}/sync/push`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: headers(),
|
||||||
|
body: JSON.stringify({ changes: items.map((i) => ({ ...JSON.parse(i.payload), _op: i.op, _entity: i.entity })) })
|
||||||
|
});
|
||||||
|
if (!res.ok) return; // will retry on next sync
|
||||||
|
|
||||||
|
const { applied } = await res.json() as { applied: string[]; conflicts: string[] };
|
||||||
|
// Remove applied items from outbox
|
||||||
|
const appliedIds = new Set(applied);
|
||||||
|
const toDelete = items.filter((i) => i.entity_id && appliedIds.has(i.entity_id)).map((i) => i.id!);
|
||||||
|
if (toDelete.length > 0) await db.outbox.bulkDelete(toDelete);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function pullChanges(): Promise<void> {
|
||||||
|
if (!hasToken()) return;
|
||||||
|
const since = await getLastVersion();
|
||||||
|
const res = await fetch(`${API}/sync/pull`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: headers(),
|
||||||
|
body: JSON.stringify({ since_version: since })
|
||||||
|
});
|
||||||
|
if (!res.ok) return;
|
||||||
|
|
||||||
|
const { changes, server_version } = await res.json() as {
|
||||||
|
changes: Array<{ entity: string; entity_id: string; op: string; payload: string }>;
|
||||||
|
server_version: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const change of changes) {
|
||||||
|
const data = JSON.parse(change.payload);
|
||||||
|
if (change.op === 'delete') {
|
||||||
|
await applyDelete(change.entity, change.entity_id);
|
||||||
|
} else {
|
||||||
|
await applyUpsert(change.entity, data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await setLastVersion(server_version);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function applyUpsert(entity: string, data: unknown) {
|
||||||
|
switch (entity) {
|
||||||
|
case 'entries': await db.entries.put(data as any); break;
|
||||||
|
case 'closed_days': await db.closed_days.put(data as any); break;
|
||||||
|
case 'closed_weeks': await db.closed_weeks.put(data as any); break;
|
||||||
|
case 'settings_history': await db.settings_history.put(data as any); break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function applyDelete(entity: string, id: string) {
|
||||||
|
switch (entity) {
|
||||||
|
case 'entries': await db.entries.delete(id); break;
|
||||||
|
case 'closed_days': await db.closed_days.delete(id); break;
|
||||||
|
case 'closed_weeks': await db.closed_weeks.delete(id); break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let syncInterval: ReturnType<typeof setInterval> | null = null;
|
||||||
|
|
||||||
|
/** Start background sync loop (every 30 seconds). */
|
||||||
|
export function startSync() {
|
||||||
|
if (syncInterval) return;
|
||||||
|
sync(); // immediate
|
||||||
|
syncInterval = setInterval(sync, 30_000);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function stopSync() {
|
||||||
|
if (syncInterval) {
|
||||||
|
clearInterval(syncInterval);
|
||||||
|
syncInterval = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function sync() {
|
||||||
|
try {
|
||||||
|
await pushOutbox();
|
||||||
|
await pullChanges();
|
||||||
|
} catch {
|
||||||
|
// Network unavailable — will retry
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { page } from '$app/state';
|
import { page } from '$app/state';
|
||||||
import { hasToken } from '$lib/api/client';
|
import { hasToken } from '$lib/api/client';
|
||||||
import { onMount } from 'svelte';
|
import { onMount, onDestroy } from 'svelte';
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
|
import { startSync, stopSync } from '$lib/stores/sync';
|
||||||
|
|
||||||
let { children } = $props();
|
let { children } = $props();
|
||||||
|
|
||||||
@@ -10,8 +11,11 @@
|
|||||||
if (!hasToken() && page.url.pathname !== '/settings') {
|
if (!hasToken() && page.url.pathname !== '/settings') {
|
||||||
goto('/settings');
|
goto('/settings');
|
||||||
}
|
}
|
||||||
|
startSync();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
onDestroy(stopSync);
|
||||||
|
|
||||||
const navItems = [
|
const navItems = [
|
||||||
{ href: '/today', label: 'Today' },
|
{ href: '/today', label: 'Today' },
|
||||||
{ href: '/week', label: 'Week' },
|
{ href: '/week', label: 'Week' },
|
||||||
|
|||||||
BIN
web/static/icon-192.png
Normal file
BIN
web/static/icon-192.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 954 B |
BIN
web/static/icon-512.png
Normal file
BIN
web/static/icon-512.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 4.2 KiB |
@@ -1,8 +1,45 @@
|
|||||||
import { sveltekit } from '@sveltejs/kit/vite';
|
import { sveltekit } from '@sveltejs/kit/vite';
|
||||||
import { defineConfig } from 'vite';
|
import { defineConfig } from 'vite';
|
||||||
|
import { VitePWA } from 'vite-plugin-pwa';
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [sveltekit()],
|
plugins: [
|
||||||
|
sveltekit(),
|
||||||
|
VitePWA({
|
||||||
|
registerType: 'autoUpdate',
|
||||||
|
strategies: 'generateSW',
|
||||||
|
injectRegister: 'auto',
|
||||||
|
workbox: {
|
||||||
|
globPatterns: ['**/*.{js,css,html,svg,png,ico,woff,woff2}'],
|
||||||
|
navigateFallback: 'index.html',
|
||||||
|
navigateFallbackDenylist: [/^\/api/, /^\/healthz/],
|
||||||
|
runtimeCaching: [
|
||||||
|
{
|
||||||
|
urlPattern: /^\/api\//,
|
||||||
|
handler: 'NetworkFirst',
|
||||||
|
options: {
|
||||||
|
cacheName: 'api-cache',
|
||||||
|
networkTimeoutSeconds: 5,
|
||||||
|
cacheableResponse: { statuses: [0, 200] }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
manifest: {
|
||||||
|
name: 'Wotra — Working Time Tracker',
|
||||||
|
short_name: 'Wotra',
|
||||||
|
description: 'Track your working hours, close days and weeks, compute overtime.',
|
||||||
|
theme_color: '#1a1a2e',
|
||||||
|
background_color: '#f8f9fa',
|
||||||
|
display: 'standalone',
|
||||||
|
start_url: '/',
|
||||||
|
icons: [
|
||||||
|
{ src: '/icon-192.png', sizes: '192x192', type: 'image/png' },
|
||||||
|
{ src: '/icon-512.png', sizes: '512x512', type: 'image/png' }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
],
|
||||||
server: {
|
server: {
|
||||||
proxy: {
|
proxy: {
|
||||||
'/api': 'http://localhost:8080',
|
'/api': 'http://localhost:8080',
|
||||||
|
|||||||
Reference in New Issue
Block a user